diff --git a/README.md b/README.md index 34ec870..3a40651 100644 --- a/README.md +++ b/README.md @@ -23,3 +23,13 @@ Citation Ladwig, W. (2017). wrf-python (Version x.x.x) [Software]. Boulder, Colorado: UCAR/NCAR. https://doi.org/10.5065/D6W094P1 Note: The version number x.x.x should be set to the version of wrf-python that you are using. + + +-------------------- + +*The National Center for Atmospheric Research is sponsored by the National +Science Foundation. Any opinions, findings and conclusions or recommendations +expressed in this material do not necessarily reflect the views of the +National Science Foundation.* + + diff --git a/chey_intel.py b/chey_intel.py index 78d194f..ec0cb59 100644 --- a/chey_intel.py +++ b/chey_intel.py @@ -13,7 +13,8 @@ compilers = ['IntelFCompiler', 'IntelVisualFCompiler', def intel_version_match(type): # Match against the important stuff in the version string - return simple_version_match(start=r'Intel.*?Fortran.*?(?:%s).*?Version' % (type,)) + return simple_version_match( + start=r'Intel.*?Fortran.*?(?:%s).*?Version' % (type,)) class BaseIntelFCompiler(FCompiler): @@ -36,13 +37,13 @@ class IntelFCompiler(BaseIntelFCompiler): possible_executables = ['ifort', 'ifc'] executables = { - 'version_cmd' : None, # set by update_executables - 'compiler_f77' : [None, "-72", "-w90", "-w95"], - 'compiler_f90' : [None], - 'compiler_fix' : [None, "-FI"], - 'linker_so' : ["", "-shared"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : ["ranlib"] + 'version_cmd': None, # set by update_executables + 'compiler_f77': [None, "-72", "-w90", "-w95"], + 'compiler_f90': [None], + 'compiler_fix': [None, "-FI"], + 'linker_so': ["", "-shared"], + 'archiver': ["ar", "-cr"], + 'ranlib': ["ranlib"] } pic_flags = ['-fPIC'] @@ -89,13 +90,13 @@ class IntelItaniumFCompiler(IntelFCompiler): possible_executables = ['ifort', 'efort', 'efc'] executables = { - 'version_cmd' : None, - 'compiler_f77' : [None, "-FI", "-w90", "-w95"], - 'compiler_fix' : [None, "-FI"], - 'compiler_f90' : [None], - 'linker_so' : ['', "-shared"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : ["ranlib"] + 'version_cmd': None, + 'compiler_f77': [None, "-FI", "-w90", "-w95"], + 'compiler_fix': [None, "-FI"], + 'compiler_f90': [None], + 'linker_so': ['', "-shared"], + 'archiver': ["ar", "-cr"], + 'ranlib': ["ranlib"] } @@ -104,18 +105,19 @@ class IntelEM64TFCompiler(IntelFCompiler): compiler_aliases = () description = 'Intel Fortran Compiler for 64-bit apps' - version_match = intel_version_match('EM64T-based|Intel\\(R\\) 64|64|IA-64|64-bit') + version_match = intel_version_match( + 'EM64T-based|Intel\\(R\\) 64|64|IA-64|64-bit') possible_executables = ['ifort', 'efort', 'efc'] executables = { - 'version_cmd' : None, - 'compiler_f77' : [None, "-FI"], - 'compiler_fix' : [None, "-FI"], - 'compiler_f90' : [None], - 'linker_so' : ['', "-shared"], - 'archiver' : ["ar", "-cr"], - 'ranlib' : ["ranlib"] + 'version_cmd': None, + 'compiler_f77': [None, "-FI"], + 'compiler_fix': [None, "-FI"], + 'compiler_f90': [None], + 'linker_so': ['', "-shared"], + 'archiver': ["ar", "-cr"], + 'ranlib': ["ranlib"] } def get_flags(self): @@ -147,13 +149,13 @@ class IntelVisualFCompiler(BaseIntelFCompiler): possible_executables = ['ifort', 'ifl'] executables = { - 'version_cmd' : None, - 'compiler_f77' : [None], - 'compiler_fix' : [None], - 'compiler_f90' : [None], - 'linker_so' : [None], - 'archiver' : [ar_exe, "/verbose", "/OUT:"], - 'ranlib' : None + 'version_cmd': None, + 'compiler_f77': [None], + 'compiler_fix': [None], + 'compiler_f90': [None], + 
'linker_so': [None], + 'archiver': [ar_exe, "/verbose", "/OUT:"], + 'ranlib': None } compile_switch = '/c ' @@ -163,7 +165,8 @@ class IntelVisualFCompiler(BaseIntelFCompiler): module_include_switch = '/I' def get_flags(self): - opt = ['/nologo', '/MD', '/nbs', '/names:lowercase', '/assume:underscore'] + opt = ['/nologo', '/MD', '/nbs', '/names:lowercase', + '/assume:underscore'] return opt def get_flags_free(self): @@ -192,13 +195,13 @@ class IntelItaniumVisualFCompiler(IntelVisualFCompiler): ar_exe = IntelVisualFCompiler.ar_exe executables = { - 'version_cmd' : None, - 'compiler_f77' : [None, "-FI", "-w90", "-w95"], - 'compiler_fix' : [None, "-FI", "-4L72", "-w"], - 'compiler_f90' : [None], - 'linker_so' : ['', "-shared"], - 'archiver' : [ar_exe, "/verbose", "/OUT:"], - 'ranlib' : None + 'version_cmd': None, + 'compiler_f77': [None, "-FI", "-w90", "-w95"], + 'compiler_fix': [None, "-FI", "-4L72", "-w"], + 'compiler_f90': [None], + 'linker_so': ['', "-shared"], + 'archiver': [ar_exe, "/verbose", "/OUT:"], + 'ranlib': None } diff --git a/doc/source/_templates/product_table.txt b/doc/source/_templates/product_table.txt index e3fe65b..2ecc5f1 100644 --- a/doc/source/_templates/product_table.txt +++ b/doc/source/_templates/product_table.txt @@ -245,6 +245,16 @@ | | | | | | | | mi | | +--------------------+---------------------------------------------------------------+-----------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------+ +| height_agl | Model Height for Mass Grid (AGL) | m | **units** (str) : Set to desired units. Default is *'m'*. | +| | | | | +| | | km | | +| | | | | +| | | dm | | +| | | | | +| | | ft | | +| | | | | +| | | mi | | ++--------------------+---------------------------------------------------------------+-----------------------------+---------------------------------------------------------------------------------------------------------------------------------------------------------+ | zstag | Model Height for Vertically Staggered Grid | m | **msl** (boolean): Set to False to return AGL values. True is for MSL. Default is *True*. | | | | | | | | | km | **units** (str) : Set to desired units. Default is *'m'*. | diff --git a/doc/source/internal_api/index.rst b/doc/source/internal_api/index.rst index cfd3c7c..089bbb0 100644 --- a/doc/source/internal_api/index.rst +++ b/doc/source/internal_api/index.rst @@ -23,6 +23,9 @@ The routines below are called internally by :meth:`wrf.getvar`. 
wrf.g_dewpoint.get_dp_2m wrf.g_geoht.get_geopt wrf.g_geoht.get_height + wrf.g_geoht.get_height_agl + wrf.g_geoht.get_stag_geopt + wrf.g_geoht.get_stag_height wrf.g_helicity.get_srh wrf.g_helicity.get_uh wrf.g_omega.get_omega diff --git a/fortran/build_help/sub_sizes.py b/fortran/build_help/sub_sizes.py index 4261977..d69b6e8 100644 --- a/fortran/build_help/sub_sizes.py +++ b/fortran/build_help/sub_sizes.py @@ -3,55 +3,55 @@ import os from string import Template import sys + def main(): - - print ("Running sub_sizes") + print("Running sub_sizes") + try: result = subprocess.check_output(["./sizes"]) - except: + except OSError: result = subprocess.check_output(["sizes.exe"]) - + split_result = result.split() - + if sys.version_info >= (3, ): if split_result[0].strip().decode() != "SIZES": raise ValueError("First line is not SIZES") - - subs = {"FOMP_SCHED_KIND" : split_result[1].strip().decode(), - "FOMP_LOCK_KIND" : split_result[2].strip().decode(), - "FOMP_NEST_LOCK_KIND" : split_result[3].strip().decode(), - "FOMP_SCHED_STATIC" : split_result[4].strip().decode(), - "FOMP_SCHED_DYNAMIC" : split_result[5].strip().decode(), - "FOMP_SCHED_GUIDED" : split_result[6].strip().decode(), - "FOMP_SCHED_AUTO" : split_result[7].strip().decode() + + subs = {"FOMP_SCHED_KIND": split_result[1].strip().decode(), + "FOMP_LOCK_KIND": split_result[2].strip().decode(), + "FOMP_NEST_LOCK_KIND": split_result[3].strip().decode(), + "FOMP_SCHED_STATIC": split_result[4].strip().decode(), + "FOMP_SCHED_DYNAMIC": split_result[5].strip().decode(), + "FOMP_SCHED_GUIDED": split_result[6].strip().decode(), + "FOMP_SCHED_AUTO": split_result[7].strip().decode() } else: if split_result[0].strip() != "SIZES": raise ValueError("First line is not SIZES") - - subs = {"FOMP_SCHED_KIND" : split_result[1].strip(), - "FOMP_LOCK_KIND" : split_result[2].strip(), - "FOMP_NEST_LOCK_KIND" : split_result[3].strip(), - "FOMP_SCHED_STATIC" : split_result[4].strip(), - "FOMP_SCHED_DYNAMIC" : split_result[5].strip(), - "FOMP_SCHED_GUIDED" : split_result[6].strip(), - "FOMP_SCHED_AUTO" : split_result[7].strip() + + subs = {"FOMP_SCHED_KIND": split_result[1].strip(), + "FOMP_LOCK_KIND": split_result[2].strip(), + "FOMP_NEST_LOCK_KIND": split_result[3].strip(), + "FOMP_SCHED_STATIC": split_result[4].strip(), + "FOMP_SCHED_DYNAMIC": split_result[5].strip(), + "FOMP_SCHED_GUIDED": split_result[6].strip(), + "FOMP_SCHED_AUTO": split_result[7].strip() } - - + ompgen_temp_path = os.path.join("..", "ompgen.F90.template") ompgen_out_path = os.path.join("..", "ompgen.F90") - + with open(ompgen_temp_path, "r") as ompgen_in: ompgen_template = Template(ompgen_in.read()) - + ompgen_string = ompgen_template.substitute(subs) - - + with open(ompgen_out_path, "w") as ompgen_out: ompgen_out.write(ompgen_string) - - - print ("End sub_sizes") + + print("End sub_sizes") + + if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/fortran/wrf_wind.f90 b/fortran/wrf_wind.f90 index 1d93d20..53728b6 100644 --- a/fortran/wrf_wind.f90 +++ b/fortran/wrf_wind.f90 @@ -1,23 +1,21 @@ ! NCLFORTSTART -SUBROUTINE DCOMPUTEWSPD(wspd, u, v, nx, ny) +SUBROUTINE DCOMPUTEWSPD(wspd, u, v, n) IMPLICIT NONE !f2py threadsafe !f2py intent(in,out) :: wspd - INTEGER, INTENT(IN) :: nx, ny - REAL(KIND=8), DIMENSION(nx,ny), INTENT(OUT) :: wspd - REAL(KIND=8), DIMENSION(nx,ny), INTENT(IN) :: u, v + INTEGER, INTENT(IN) :: n + REAL(KIND=8), DIMENSION(n), INTENT(OUT) :: wspd + REAL(KIND=8), DIMENSION(n), INTENT(IN) :: u, v ! 
NCLEND - INTEGER i, j + INTEGER i - !$OMP PARALLEL DO COLLAPSE(2) SCHEDULE(runtime) - DO j = 1,ny - DO i = 1,nx - wspd(i,j) = SQRT(u(i,j)*u(i,j) + v(i,j)*v(i,j)) - END DO + !$OMP PARALLEL DO SCHEDULE(runtime) + DO i = 1,n + wspd(i) = SQRT(u(i)*u(i) + v(i)*v(i)) END DO !$OMP END PARALLEL DO @@ -25,7 +23,7 @@ END SUBROUTINE DCOMPUTEWSPD ! NCLFORTSTART -SUBROUTINE DCOMPUTEWDIR(wdir, u, v, nx, ny) +SUBROUTINE DCOMPUTEWDIR(wdir, u, v, n) USE wrf_constants, ONLY : DEG_PER_RAD IMPLICIT NONE @@ -33,18 +31,16 @@ SUBROUTINE DCOMPUTEWDIR(wdir, u, v, nx, ny) !f2py threadsafe !f2py intent(in,out) :: wdir - INTEGER, INTENT(IN) :: nx, ny - REAL(KIND=8), DIMENSION(nx,ny), INTENT(OUT) :: wdir - REAL(KIND=8), DIMENSION(nx,ny), INTENT(IN) :: u, v + INTEGER, INTENT(IN) :: n + REAL(KIND=8), DIMENSION(n), INTENT(OUT) :: wdir + REAL(KIND=8), DIMENSION(n), INTENT(IN) :: u, v ! NCLEND - INTEGER i, j + INTEGER i - !$OMP PARALLEL DO COLLAPSE(2) SCHEDULE(runtime) - DO j = 1,ny - DO i = 1,nx - wdir(i,j) = MOD(270.0 - ATAN2(v(i,j), u(i,j)) * DEG_PER_RAD, 360.) - END DO + !$OMP PARALLEL DO SCHEDULE(runtime) + DO i = 1,n + wdir(i) = MOD(270.0 - ATAN2(v(i), u(i)) * DEG_PER_RAD, 360.) END DO !$OMP END PARALLEL DO diff --git a/setup.py b/setup.py index 4b07221..9304ff4 100755 --- a/setup.py +++ b/setup.py @@ -17,7 +17,7 @@ else: import chey_intel import numpy.distutils.core import numpy.distutils.fcompiler.intel - + numpy.distutils.fcompiler.intel.IntelFCompiler = chey_intel.IntelFCompiler numpy.distutils.fcompiler.intel.IntelVisualFCompiler = ( chey_intel.IntelVisualFCompiler) @@ -31,33 +31,33 @@ else: chey_intel.IntelEM64TFCompiler) ext1 = numpy.distutils.core.Extension( - name = "wrf._wrffortran", - sources = ["fortran/wrf_constants.f90", - "fortran/wrf_testfunc.f90", - "fortran/wrf_user.f90", - "fortran/rip_cape.f90", - "fortran/wrf_cloud_fracf.f90", - "fortran/wrf_fctt.f90", - "fortran/wrf_user_dbz.f90", - "fortran/wrf_relhl.f90", - "fortran/calc_uh.f90", - "fortran/wrf_user_latlon_routines.f90", - "fortran/wrf_pvo.f90", - "fortran/eqthecalc.f90", - "fortran/wrf_rip_phys_routines.f90", - "fortran/wrf_pw.f90", - "fortran/wrf_vinterp.f90", - "fortran/wrf_wind.f90", - "fortran/omp.f90"] + name="wrf._wrffortran", + sources=["fortran/wrf_constants.f90", + "fortran/wrf_testfunc.f90", + "fortran/wrf_user.f90", + "fortran/rip_cape.f90", + "fortran/wrf_cloud_fracf.f90", + "fortran/wrf_fctt.f90", + "fortran/wrf_user_dbz.f90", + "fortran/wrf_relhl.f90", + "fortran/calc_uh.f90", + "fortran/wrf_user_latlon_routines.f90", + "fortran/wrf_pvo.f90", + "fortran/eqthecalc.f90", + "fortran/wrf_rip_phys_routines.f90", + "fortran/wrf_pw.f90", + "fortran/wrf_vinterp.f90", + "fortran/wrf_wind.f90", + "fortran/omp.f90"] ) -with open("src/wrf/version.py") as f: +with open("src/wrf/version.py") as f: exec(f.read()) on_rtd = os.environ.get("READTHEDOCS", None) == "True" -#on_rtd=True +# on_rtd=True if on_rtd: - if sys.version_info < (3,3): + if sys.version_info < (3, 3): requirements = ["mock"] # for python2 and python < 3.3 else: requirements = [] # for >= python3.3 @@ -67,53 +67,53 @@ else: # Place install_requires into the text file "requirements.txt" with open("requirements.txt") as f2: requirements = f2.read().strip().splitlines() - - #if sys.version_info < (3,3): - # requirements.append("mock") + + # if sys.version_info < (3,3): + # requirements.append("mock") ext_modules = [ext1] -numpy.distutils.core.setup( - author = "Bill Ladwig", - author_email = "ladwig@ucar.edu", - description = "Diagnostic and interpolation routines for WRF-ARW 
data.", - long_description = ("A collection of diagnostic and interpolation routines " - "to be used with WRF-ARW data.\n\n" - "GitHub Repository:\n\n" - "https://github.com/NCAR/wrf-python\n\n" - "Documentation:\n\n" - "http://wrf-python.rtfd.org\n"), - url = "https://github.com/NCAR/wrf-python", - keywords = ["python", "wrf-python", "wrf", "forecast", "model", - "weather research and forecasting", "interpolation", - "plotting", "plots", "meteorology", "nwp", - "numerical weather prediction", "diagnostic", - "science", "numpy"], - install_requires = requirements, - classifiers = ["Development Status :: 5 - Production/Stable", - "Intended Audience :: Science/Research", - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Fortran", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Topic :: Scientific/Engineering :: Atmospheric Science", - "Topic :: Software Development", - "Operating System :: POSIX", - "Operating System :: Unix", - "Operating System :: MacOS", - "Operating System :: Microsoft :: Windows"], - name = "wrf-python", - platforms = ["any"], - license = "Apache License 2.0", - version = __version__, - packages = setuptools.find_packages("src"), - ext_modules = ext_modules, - package_dir = {"" : "src"}, - download_url = "http://python.org/pypi/wrf-python", - package_data={"wrf" : ["data/psadilookup.dat"]}, +numpy.distutils.core.setup( + author="Bill Ladwig", + author_email="ladwig@ucar.edu", + description="Diagnostic and interpolation routines for WRF-ARW data.", + long_description=("A collection of diagnostic and interpolation " + "routines to be used with WRF-ARW data.\n\n" + "GitHub Repository:\n\n" + "https://github.com/NCAR/wrf-python\n\n" + "Documentation:\n\n" + "http://wrf-python.rtfd.org\n"), + url="https://github.com/NCAR/wrf-python", + keywords=["python", "wrf-python", "wrf", "forecast", "model", + "weather research and forecasting", "interpolation", + "plotting", "plots", "meteorology", "nwp", + "numerical weather prediction", "diagnostic", + "science", "numpy"], + install_requires=requirements, + classifiers=["Development Status :: 5 - Production/Stable", + "Intended Audience :: Science/Research", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Fortran", + "Programming Language :: Python :: 2.7", + "Programming Language :: Python :: 3.4", + "Programming Language :: Python :: 3.5", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Topic :: Software Development", + "Operating System :: POSIX", + "Operating System :: Unix", + "Operating System :: MacOS", + "Operating System :: Microsoft :: Windows"], + name="wrf-python", + platforms=["any"], + license="Apache License 2.0", + version=__version__, + packages=setuptools.find_packages("src"), + ext_modules=ext_modules, + package_dir={"": "src"}, + download_url="http://python.org/pypi/wrf-python", + package_data={"wrf": ["data/psadilookup.dat"]}, scripts=[] -) +) diff --git a/src/wrf/extension.py b/src/wrf/extension.py index 177fc64..d28d758 100755 --- a/src/wrf/extension.py +++ b/src/wrf/extension.py @@ -949,12 +949,15 @@ def _wspd(u, v, outview=None): Located in wrf_wind.f90. 
""" + shape = u.shape if outview is None: outview = np.empty_like(u) - result = dcomputewspd(outview, - u, - v) + result = dcomputewspd(outview.ravel(order="A"), + u.ravel(order="A"), + v.ravel(order="A")) + + result = np.reshape(result, shape, order="F") return result @@ -969,12 +972,15 @@ def _wdir(u, v, outview=None): Located in wrf_wind.f90. """ + shape = u.shape if outview is None: outview = np.empty_like(u) - result = dcomputewdir(outview, - u, - v) + result = dcomputewdir(outview.ravel(order="A"), + u.ravel(order="A"), + v.ravel(order="A")) + + result = np.reshape(result, shape, order="F") return result diff --git a/src/wrf/g_geoht.py b/src/wrf/g_geoht.py index 7e78e5a..7eba862 100755 --- a/src/wrf/g_geoht.py +++ b/src/wrf/g_geoht.py @@ -391,3 +391,71 @@ def get_stag_height(wrfin, timeidx=0, method="cat", squeeze=True, return _get_geoht(wrfin, timeidx, method, squeeze, cache, meta, _key, True, msl, stag=True) + + +@set_height_metadata(geopt=False, stag=False) +@convert_units("height", "m") +def get_height_agl(wrfin, timeidx=0, method="cat", squeeze=True, + cache=None, meta=True, _key=None, units="m"): + """Return the geopotential height (AGL). + + The geopotential height is returned as Above Ground Level (AGL) by + subtracting the terrain height. + + This functions extracts the necessary variables from the NetCDF file + object in order to perform the calculation. + + Args: + + wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \ + iterable): WRF-ARW NetCDF + data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile` + or an iterable sequence of the aforementioned types. + + timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The + desired time index. This value can be a positive integer, + negative integer, or + :data:`wrf.ALL_TIMES` (an alias for None) to return + all times in the file or sequence. The default is 0. + + method (:obj:`str`, optional): The aggregation method to use for + sequences. Must be either 'cat' or 'join'. + 'cat' combines the data along the Time dimension. + 'join' creates a new dimension for the file index. + The default is 'cat'. + + squeeze (:obj:`bool`, optional): Set to False to prevent dimensions + with a size of 1 from being automatically removed from the shape + of the output. Default is True. + + cache (:obj:`dict`, optional): A dictionary of (varname, ndarray) + that can be used to supply pre-extracted NetCDF variables to the + computational routines. It is primarily used for internal + purposes, but can also be used to improve performance by + eliminating the need to repeatedly extract the same variables + used in multiple diagnostics calculations, particularly when using + large sequences of files. + Default is None. + + meta (:obj:`bool`, optional): Set to False to disable metadata and + return :class:`numpy.ndarray` instead of + :class:`xarray.DataArray`. Default is True. + + _key (:obj:`int`, optional): A caching key. This is used for internal + purposes only. Default is None. + + units (:obj:`str`): The desired units. Refer to the :meth:`getvar` + product table for a list of available units for 'height_agl'. + Default is 'm'. + + Returns: + :class:`xarray.DataArray` or :class:`numpy.ndarray`: The + geopotential height. + If xarray is enabled and the *meta* parameter is True, then the result + will be a :class:`xarray.DataArray` object. Otherwise, the result will + be a :class:`numpy.ndarray` object with no metadata. 
+ + """ + + return _get_geoht(wrfin, timeidx, method, squeeze, cache, meta, _key, + True, False) diff --git a/src/wrf/routines.py b/src/wrf/routines.py index 93727a5..3998209 100644 --- a/src/wrf/routines.py +++ b/src/wrf/routines.py @@ -8,7 +8,8 @@ from .g_cape import (get_2dcape, get_3dcape, get_cape2d_only, from .g_ctt import get_ctt from .g_dbz import get_dbz, get_max_dbz from .g_dewpoint import get_dp, get_dp_2m -from .g_geoht import get_geopt, get_height, get_stag_geopt, get_stag_height +from .g_geoht import (get_geopt, get_height, get_stag_geopt, get_stag_height, + get_height_agl) from .g_helicity import get_srh, get_uh from .g_latlon import get_lat, get_lon from .g_omega import get_omega @@ -78,6 +79,7 @@ _FUNC_MAP = {"cape2d": get_2dcape, "cloudfrac": get_cloudfrac, "geopt_stag": get_stag_geopt, "zstag": get_stag_height, + "height_agl": get_height_agl, # Diagnostics below are extracted from multi-product diagnostics "cape2d_only": get_cape2d_only, "cin2d_only": get_cin2d_only, @@ -143,6 +145,7 @@ _VALID_KARGS = {"cape2d": ["missing"], "mid_thresh", "high_thresh"], "geopt_stag": [], "zstag": ["msl", "units"], + "height_agl": ["units"], "cape2d_only": ["missing"], "cin2d_only": ["missing"], "lcl": ["missing"], diff --git a/test/cachetest.py b/test/cachetest.py index fad5e88..451dd2d 100644 --- a/test/cachetest.py +++ b/test/cachetest.py @@ -1,4 +1,4 @@ -from __future__ import (absolute_import, division, print_function, +from __future__ import (absolute_import, division, print_function, unicode_literals) from threading import Thread @@ -9,7 +9,7 @@ except ImportError: from collections import OrderedDict import unittest as ut -import numpy.testing as nt +import numpy.testing as nt from wrf.cache import cache_item, get_cached_item, _get_cache from wrf.config import get_cache_size @@ -20,49 +20,49 @@ class TestThread(Thread): self.num = num self.q = q super(TestThread, self).__init__() - + def run(self): for i in range(get_cache_size() + 10): key = "A" + str(i) cache_item(key, "test", i * self.num) - + item = get_cached_item(key, "test") - + if item != i * self.num: raise RuntimeError("cache is bogus") - + cache = OrderedDict(_get_cache()) - + self.q.put(cache) - + class CacheTest(ut.TestCase): longMessage = True - + def test_thread_local(self): q1 = Queue() q2 = Queue() thread1 = TestThread(2, q1) thread2 = TestThread(40, q2) - + thread1.start() thread2.start() - + result1 = q1.get(True, 1) result2 = q2.get(True, 1) - + thread1.join() thread2.join() - + print(result1) print(result2) - + # Result 1 and 2 shoudl be different self.assertNotEqual(result1, result2) - + # This thread should have no cache self.assertIsNone(_get_cache()) - + if __name__ == "__main__": ut.main() diff --git a/test/ci_tests/make_test_file.py b/test/ci_tests/make_test_file.py index 0b1bfd0..e31d237 100644 --- a/test/ci_tests/make_test_file.py +++ b/test/ci_tests/make_test_file.py @@ -9,19 +9,19 @@ from netCDF4 import Dataset from wrf import (getvar, interplevel, interpline, vertcross, vinterp, py2round, CoordPair, ll_to_xy, xy_to_ll, to_np) -VARS_TO_KEEP = ("Times", "XLAT", "XLONG", "XLAT_U", "XLAT_V", "XLONG_U", - "XLONG_V", "U", "V", "W", "PH", "PHB", "T", "P", "PB", "Q2", - "T2", "PSFC", "U10", "V10", "XTIME", "QVAPOR", "QCLOUD", +VARS_TO_KEEP = ("Times", "XLAT", "XLONG", "XLAT_U", "XLAT_V", "XLONG_U", + "XLONG_V", "U", "V", "W", "PH", "PHB", "T", "P", "PB", "Q2", + "T2", "PSFC", "U10", "V10", "XTIME", "QVAPOR", "QCLOUD", "QGRAUP", "QRAIN", "QSNOW", "QICE", "MAPFAC_M", "MAPFAC_U", "MAPFAC_V", "F", "HGT", 
"RAINC", "RAINSH", "RAINNC") DIMS_TO_TRIM = ("west_east", "south_north", "bottom_top", "bottom_top_stag", "west_east_stag", "south_north_stag") -WRF_DIAGS = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", - "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", - "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", - "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", +WRF_DIAGS = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", + "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", + "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", + "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", "wa", "uvmet10", "uvmet", "z", "cfrac", "zstag"] INTERP_METHS = ["interplevel", "vertcross", "interpline", "vinterp"] @@ -33,56 +33,56 @@ def copy_and_reduce(opts): infilename = opts.filename outfilename = os.path.expanduser( os.path.join(opts.outdir, "ci_test_file.nc")) - + with Dataset(infilename) as infile, Dataset(outfilename, "w") as outfile: - + # Copy the global attributes outfile.setncatts(infile.__dict__) - + # Copy Dimensions for name, dimension in infile.dimensions.iteritems(): if name in DIMS_TO_TRIM: if name.find("_stag") > 0: - dimsize = (len(dimension) / 2 - if len(dimension) % 2 == 0 + dimsize = (len(dimension) / 2 + if len(dimension) % 2 == 0 else (len(dimension) + 1) / 2) else: - dimsize = (len(dimension) / 2 - if len(dimension) % 2 == 0 + dimsize = (len(dimension) / 2 + if len(dimension) % 2 == 0 else (len(dimension) - 1) / 2) else: dimsize = len(dimension) - + outfile.createDimension(name, dimsize) - - # Copy Variables + + # Copy Variables for name, variable in infile.variables.iteritems(): - if name not in VARS_TO_KEEP: + if name not in VARS_TO_KEEP: continue - - outvar = outfile.createVariable(name, variable.datatype, - variable.dimensions, zlib=True) - + + outvar = outfile.createVariable(name, variable.datatype, + variable.dimensions, zlib=True) + in_slices = tuple(slice(0, dimsize) for dimsize in outvar.shape) - + outvar[:] = variable[in_slices] outvar.setncatts(infile.variables[name].__dict__) - + def add_to_ncfile(outfile, var, varname): dim_d = dict(zip(var.dims, var.shape)) - + for dimname, dimsize in dim_d.items(): if dimname not in outfile.dimensions: outfile.createDimension(dimname, dimsize) - + fill_value = None if "_FillValue" in var.attrs: fill_value = var.attrs["_FillValue"] - + ncvar = outfile.createVariable(varname, var.dtype, var.dims, zlib=True, fill_value=fill_value) - + var_vals = to_np(var) ncvar[:] = var_vals[:] @@ -92,78 +92,79 @@ def make_result_file(opts): os.path.join(opts.outdir, "ci_test_file.nc")) outfilename = os.path.expanduser( os.path.join(opts.outdir, "ci_result_file.nc")) - + with Dataset(infilename) as infile, Dataset(outfilename, "w") as outfile: - + for varname in WRF_DIAGS: var = getvar(infile, varname) add_to_ncfile(outfile, var, varname) - + for interptype in INTERP_METHS: if interptype == "interpline": hts = getvar(infile, "z") p = getvar(infile, "pressure") hts_850 = interplevel(hts, p, 850.) - + add_to_ncfile(outfile, hts_850, "interplevel") - + if interptype == "vertcross": - + hts = getvar(infile, "z") p = getvar(infile, "pressure") - - pivot_point = CoordPair(hts.shape[-1] // 2, hts.shape[-2] // 2) - ht_cross = vertcross(hts, p, pivot_point=pivot_point, + + pivot_point = CoordPair(hts.shape[-1] // 2, hts.shape[-2] // 2) + ht_cross = vertcross(hts, p, pivot_point=pivot_point, angle=90.) 
- + add_to_ncfile(outfile, ht_cross, "vertcross") - + if interptype == "interpline": - + t2 = getvar(infile, "T2") pivot_point = CoordPair(t2.shape[-1] // 2, t2.shape[-2] // 2) - + t2_line = interpline(t2, pivot_point=pivot_point, angle=90.0) - + add_to_ncfile(outfile, t2_line, "interpline") - + if interptype == "vinterp": - + tk = getvar(infile, "temp", units="k") - - interp_levels = [200,300,500,1000] - - field = vinterp(infile, - field=tk, - vert_coord="theta", - interp_levels=interp_levels, - extrapolate=True, + + interp_levels = [200, 300, 500, 1000] + + field = vinterp(infile, + field=tk, + vert_coord="theta", + interp_levels=interp_levels, + extrapolate=True, field_type="tk", log_p=True) - + add_to_ncfile(outfile, field, "vinterp") - + for latlonmeth in LATLON_METHS: if latlonmeth == "xy": # Hardcoded values from other unit tests lats = [22.0, 25.0, 27.0] lons = [-90.0, -87.5, -83.75] - + xy = ll_to_xy(infile, lats[0], lons[0]) add_to_ncfile(outfile, xy, "xy") else: # Hardcoded from other unit tests x_s = np.asarray([10, 50, 90], int) y_s = np.asarray([10, 50, 90], int) - + ll = xy_to_ll(infile, x_s[0], y_s[0]) add_to_ncfile(outfile, ll, "ll") + def main(opts): copy_and_reduce(opts) make_result_file(opts) - - + + if __name__ == "__main__": DEFAULT_FILE = ("/Users/ladwig/Documents/wrf_files/" "wrf_vortex_multi/moving_nest/" @@ -171,10 +172,10 @@ if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate conda test files " "for unit testing.") parser.add_argument("-f", "--filename", required=False, - default=DEFAULT_FILE, + default=DEFAULT_FILE, help="the WRF test file") parser.add_argument("-o", "--outdir", required=False, default="./", help="the location for the output files") opts = parser.parse_args() - - main(opts) \ No newline at end of file + + main(opts) diff --git a/test/ci_tests/utests.py b/test/ci_tests/utests.py index 67affd5..5ae40e1 100644 --- a/test/ci_tests/utests.py +++ b/test/ci_tests/utests.py @@ -1,14 +1,15 @@ import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma -import os, sys +import os +import sys import subprocess from wrf import (getvar, interplevel, interpline, vertcross, vinterp, disable_xarray, xarray_enabled, to_np, xy_to_ll, ll_to_xy, xy_to_ll_proj, ll_to_xy_proj, - extract_global_attrs, viewitems, CoordPair, + extract_global_attrs, viewitems, CoordPair, omp_get_num_procs, omp_set_num_threads) from wrf.util import is_multi_file @@ -20,82 +21,82 @@ REF_FILE = "ci_result_file.nc" if sys.version_info > (3,): xrange = range -# Using helpful information at: + +# Using helpful information at: # http://eli.thegreenplace.net/2014/04/02/dynamically-generating-python-test-cases def make_test(varname, wrf_in, referent, multi=False, repeat=3, pynio=False): def test(self): - + from netCDF4 import Dataset as NetCDF timeidx = 0 in_wrfnc = NetCDF(wrf_in) refnc = NetCDF(referent) - + # These have a left index that defines the product type - multiproduct = varname in ("uvmet", "uvmet10", "cape_2d", "cape_3d", + multiproduct = varname in ("uvmet", "uvmet10", "cape_2d", "cape_3d", "cfrac") - - + ref_vals = refnc.variables[varname][:] - + if (varname == "tc"): my_vals = getvar(in_wrfnc, "temp", timeidx=timeidx, units="c") tol = 1/100. 
- atol = .1 # Note: NCL uses 273.16 as conversion for some reason + atol = .1 # Note: NCL uses 273.16 as conversion for some reason nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) elif (varname == "pw"): my_vals = getvar(in_wrfnc, "pw", timeidx=timeidx) tol = .5/100.0 - atol = 0 # NCL uses different constants and doesn't use same - # handrolled virtual temp in method + atol = 0 nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) elif (varname == "cape_2d"): cape_2d = getvar(in_wrfnc, varname, timeidx=timeidx) tol = 0/100. atol = 200.0 - # Let's only compare CAPE values until the F90 changes are + # Let's only compare CAPE values until the F90 changes are # merged back in to NCL. The modifications to the R and CP # changes TK enough that non-lifting parcels could lift, thus # causing wildly different values in LCL - nt.assert_allclose(to_np(cape_2d[0,:]), ref_vals[0,:], tol, atol) + nt.assert_allclose(to_np(cape_2d[0, :]), ref_vals[0, :], tol, atol) elif (varname == "cape_3d"): cape_3d = getvar(in_wrfnc, varname, timeidx=timeidx) # Changing the R and CP constants, while keeping TK within - # 2%, can lead to some big changes in CAPE. Tolerances + # 2%, can lead to some big changes in CAPE. Tolerances # have been set wide when comparing the with the original - # NCL. Change back when the F90 code is merged back with + # NCL. Change back when the F90 code is merged back with # NCL tol = 0/100. atol = 200.0 - - #print np.amax(np.abs(to_np(cape_3d[0,:]) - ref_vals[0,:])) + + # print np.amax(np.abs(to_np(cape_3d[0,:]) - ref_vals[0,:])) nt.assert_allclose(to_np(cape_3d), ref_vals, tol, atol) else: my_vals = getvar(in_wrfnc, varname, timeidx=timeidx) tol = 2/100. atol = 0.1 - #print (np.amax(np.abs(to_np(my_vals) - ref_vals))) + # print (np.amax(np.abs(to_np(my_vals) - ref_vals))) nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) - - + return test + def _get_refvals(referent, varname, repeat, multi): from netCDF4 import Dataset as NetCDF - + refnc = NetCDF(referent) - + ref_vals = refnc.variables[varname][:] - + return ref_vals -def make_interp_test(varname, wrf_in, referent, multi=False, + +def make_interp_test(varname, wrf_in, referent, multi=False, repeat=3, pynio=False): def test(self): from netCDF4 import Dataset as NetCDF - + timeidx = 0 in_wrfnc = NetCDF(wrf_in) - + if (varname == "interplevel"): ref_ht_850 = _get_refvals(referent, "interplevel", repeat, multi) hts = getvar(in_wrfnc, "z", timeidx=timeidx) @@ -103,168 +104,165 @@ def make_interp_test(varname, wrf_in, referent, multi=False, # Check that it works with numpy arrays hts_850 = interplevel(to_np(hts), p, 850) - #print (hts_850) + # print (hts_850) hts_850 = interplevel(hts, p, 850) - + nt.assert_allclose(to_np(hts_850), ref_ht_850) - + elif (varname == "vertcross"): ref_ht_cross = _get_refvals(referent, "vertcross", repeat, multi) - + hts = getvar(in_wrfnc, "z", timeidx=timeidx) p = getvar(in_wrfnc, "pressure", timeidx=timeidx) - - pivot_point = CoordPair(hts.shape[-1] // 2, hts.shape[-2] // 2) + + pivot_point = CoordPair(hts.shape[-1] // 2, hts.shape[-2] // 2) # Check that it works with numpy arrays - ht_cross = vertcross(to_np(hts), to_np(p), + ht_cross = vertcross(to_np(hts), to_np(p), pivot_point=pivot_point, angle=90.) - #print (ht_cross) + # print (ht_cross) ht_cross = vertcross(hts, p, pivot_point=pivot_point, angle=90.) 
nt.assert_allclose(to_np(ht_cross), ref_ht_cross, rtol=.01) - - + elif (varname == "interpline"): - + ref_t2_line = _get_refvals(referent, "interpline", repeat, multi) - + t2 = getvar(in_wrfnc, "T2", timeidx=timeidx) pivot_point = CoordPair(t2.shape[-1] // 2, t2.shape[-2] // 2) - + # Check that it works with numpy arrays - t2_line1 = interpline(to_np(t2), pivot_point=pivot_point, + t2_line1 = interpline(to_np(t2), pivot_point=pivot_point, angle=90.0) - #print (t2_line1) + # print (t2_line1) t2_line1 = interpline(t2, pivot_point=pivot_point, angle=90.0) - + nt.assert_allclose(to_np(t2_line1), ref_t2_line) - + elif (varname == "vinterp"): # Tk to theta fld_tk_theta = _get_refvals(referent, "vinterp", repeat, multi) fld_tk_theta = np.squeeze(fld_tk_theta) - + tk = getvar(in_wrfnc, "temp", timeidx=timeidx, units="k") - - interp_levels = [200,300,500,1000] - + + interp_levels = [200, 300, 500, 1000] + # Check that it works with numpy arrays - field = vinterp(in_wrfnc, - field=to_np(tk), - vert_coord="theta", - interp_levels=interp_levels, - extrapolate=True, + field = vinterp(in_wrfnc, + field=to_np(tk), + vert_coord="theta", + interp_levels=interp_levels, + extrapolate=True, field_type="tk", - timeidx=timeidx, + timeidx=timeidx, log_p=True) - #print (field) - - field = vinterp(in_wrfnc, - field=tk, - vert_coord="theta", - interp_levels=interp_levels, - extrapolate=True, + # print (field) + + field = vinterp(in_wrfnc, + field=tk, + vert_coord="theta", + interp_levels=interp_levels, + extrapolate=True, field_type="tk", - timeidx=timeidx, + timeidx=timeidx, log_p=True) - + tol = 5/100. atol = 0.0001 - + field = np.squeeze(field) - + nt.assert_allclose(to_np(field), fld_tk_theta, tol, atol) - + return test -def make_latlon_test(testid, wrf_in, referent, single, multi=False, repeat=3, +def make_latlon_test(testid, wrf_in, referent, single, multi=False, repeat=3, pynio=False): def test(self): from netCDF4 import Dataset as NetCDF - + timeidx = 0 in_wrfnc = NetCDF(wrf_in) - + refnc = NetCDF(referent) - + if testid == "xy": - # Since this domain is not moving, the reference values are the + # Since this domain is not moving, the reference values are the # same whether there are multiple or single files ref_vals = refnc.variables["xy"][:] # Lats/Lons taken from NCL script, just hard-coding for now lats = [22.0, 25.0, 27.0] lons = [-90.0, -87.5, -83.75] - + xy = ll_to_xy(in_wrfnc, lats[0], lons[0]) - + nt.assert_allclose(to_np(xy), ref_vals) - - + else: - # Since this domain is not moving, the reference values are the + # Since this domain is not moving, the reference values are the # same whether there are multiple or single files ref_vals = refnc.variables["ll"][:] - - # i_s, j_s taken from NCL script, just hard-coding for now - # NCL uses 1-based indexing for this, so need to subtract 1 + + # i_s, j_s taken from NCL script, just hard-coding for now + # NCL uses 1-based indexing for this, so need to subtract 1 x_s = np.asarray([10, 50, 90], int) y_s = np.asarray([10, 50, 90], int) - + ll = xy_to_ll(in_wrfnc, x_s[0], y_s[0]) - + nt.assert_allclose(to_np(ll), ref_vals) - - + return test + class WRFVarsTest(ut.TestCase): longMessage = True - + + class WRFInterpTest(ut.TestCase): longMessage = True - + + class WRFLatLonTest(ut.TestCase): longMessage = True - + if __name__ == "__main__": ignore_vars = [] # Not testable yet - wrf_vars = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", - "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", - "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", 
"td", "tc", - "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", + wrf_vars = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", + "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", + "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", + "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", "wa", "uvmet10", "uvmet", "z", "cfrac", "zstag"] interp_methods = ["interplevel", "vertcross", "interpline", "vinterp"] latlon_tests = ["xy", "ll"] - + import netCDF4 for var in wrf_vars: if var in ignore_vars: continue - + test_func1 = make_test(var, TEST_FILE, REF_FILE) setattr(WRFVarsTest, 'test_{0}'.format(var), test_func1) - + for method in interp_methods: - test_interp_func1 = make_interp_test(method, TEST_FILE, + test_interp_func1 = make_interp_test(method, TEST_FILE, REF_FILE) - setattr(WRFInterpTest, 'test_{0}'.format(method), + setattr(WRFInterpTest, 'test_{0}'.format(method), test_interp_func1) - + for testid in latlon_tests: for single in (True,): for multi in (False,): - test_ll_func = make_latlon_test(testid, TEST_FILE, - REF_FILE, - single=single, multi=multi, + test_ll_func = make_latlon_test(testid, TEST_FILE, + REF_FILE, + single=single, multi=multi, repeat=3, pynio=False) multistr = "" if not multi else "_multi" singlestr = "_nosingle" if not single else "_single" - test_name = "test_{}{}{}".format(testid, singlestr, - multistr) + test_name = "test_{}{}{}".format(testid, singlestr, multistr) setattr(WRFLatLonTest, test_name, test_ll_func) - - + ut.main() - \ No newline at end of file diff --git a/test/comp_utest.py b/test/comp_utest.py index de46a5f..52db939 100644 --- a/test/comp_utest.py +++ b/test/comp_utest.py @@ -1,10 +1,11 @@ from math import fabs, log, tan, sin, cos import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma -import os, sys +import os +import sys import subprocess from netCDF4 import Dataset as nc @@ -21,37 +22,37 @@ NCGROUP = [NCFILE, NCFILE, NCFILE] if sys.version_info > (3,): xrange = range +ROUTINE_MAP = {"avo": avo, + "eth": eth, + "cape_2d": cape_2d, + "cape_3d": cape_3d, + "ctt": ctt, + "dbz": dbz, + "helicity": srhel, + "omg": omega, + "pvo": pvo, + "pw": pw, + "rh": rh, + "slp": slp, + "td": td, + "tk": tk, + "tv": tvirtual, + "twb": wetbulb, + "updraft_helicity": udhel, + "uvmet": uvmet, + "cloudfrac": cloudfrac} -ROUTINE_MAP = {"avo" : avo, - "eth" : eth, - "cape_2d" : cape_2d, - "cape_3d" : cape_3d, - "ctt" : ctt, - "dbz" : dbz, - "helicity" : srhel, - "omg" : omega, - "pvo" : pvo, - "pw" : pw, - "rh" : rh, - "slp" : slp, - "td" : td, - "tk" : tk, - "tv" : tvirtual, - "twb" : wetbulb, - "updraft_helicity" : udhel, - "uvmet" : uvmet, - "cloudfrac" : cloudfrac} class ProjectionError(RuntimeError): pass + def get_args(varname, wrfnc, timeidx, method, squeeze): if varname == "avo": - ncvars = extract_vars(wrfnc, timeidx, ("U", "V", "MAPFAC_U", - "MAPFAC_V", "MAPFAC_M", - "F"), - method, squeeze, cache=None, meta=True) - + varnames = ("U", "V", "MAPFAC_U", "MAPFAC_V", "MAPFAC_M", "F") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + cache=None, meta=True) + attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY")) u = ncvars["U"] v = ncvars["V"] @@ -59,20 +60,20 @@ def get_args(varname, wrfnc, timeidx, method, squeeze): msfv = ncvars["MAPFAC_V"] msfm = ncvars["MAPFAC_M"] cor = ncvars["F"] - + dx = attrs["DX"] dy = attrs["DY"] - + return (u, v, msfu, msfv, msfm, cor, dx, dy) - + if varname == "pvo": - ncvars = extract_vars(wrfnc, timeidx, 
("U", "V", "T", "P", - "PB", "MAPFAC_U", - "MAPFAC_V", "MAPFAC_M", - "F"), - method, squeeze, cache=None, meta=True) + varnames = ("U", "V", "T", "P", "PB", "MAPFAC_U", "MAPFAC_V", + "MAPFAC_M", "F") + ncvars = extract_vars(wrfnc, timeidx, + varnames, + method, squeeze, cache=None, meta=True) attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY")) - + u = ncvars["U"] v = ncvars["V"] t = ncvars["T"] @@ -82,35 +83,35 @@ def get_args(varname, wrfnc, timeidx, method, squeeze): msfv = ncvars["MAPFAC_V"] msfm = ncvars["MAPFAC_M"] cor = ncvars["F"] - + dx = attrs["DX"] dy = attrs["DY"] - + full_t = t + 300 full_p = p + pb - + return (u, v, full_t, full_p, msfu, msfv, msfm, cor, dx, dy) - + if varname == "eth": - varnames=("T", "P", "PB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] qv = ncvars["QVAPOR"] - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + return (qv, tkel, full_p) - + if varname == "cape_2d": - varnames = ("T", "P", "PB", "QVAPOR", "PH","PHB", "HGT", "PSFC") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB", "HGT", "PSFC") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) - + t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] @@ -119,27 +120,27 @@ def get_args(varname, wrfnc, timeidx, method, squeeze): phb = ncvars["PHB"] ter = ncvars["HGT"] psfc = ncvars["PSFC"] - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + geopt = ph + phb geopt_unstag = destagger(geopt, -3) z = geopt_unstag/Constants.G - + # Convert pressure to hPa p_hpa = ConversionFactors.PA_TO_HPA * full_p - psfc_hpa = ConversionFactors.PA_TO_HPA * psfc - + psfc_hpa = ConversionFactors.PA_TO_HPA * psfc + i3dflag = 0 ter_follow = 1 - + return (p_hpa, tkel, qv, z, ter, psfc_hpa, ter_follow) - + if varname == "cape_3d": varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB", "HGT", "PSFC") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] @@ -149,27 +150,27 @@ def get_args(varname, wrfnc, timeidx, method, squeeze): phb = ncvars["PHB"] ter = ncvars["HGT"] psfc = ncvars["PSFC"] - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + geopt = ph + phb geopt_unstag = destagger(geopt, -3) z = geopt_unstag/Constants.G - + # Convert pressure to hPa p_hpa = ConversionFactors.PA_TO_HPA * full_p - psfc_hpa = ConversionFactors.PA_TO_HPA * psfc - + psfc_hpa = ConversionFactors.PA_TO_HPA * psfc + i3dflag = 1 ter_follow = 1 - + return (p_hpa, tkel, qv, z, ter, psfc_hpa, ter_follow) - + if varname == "ctt": varnames = ("T", "P", "PB", "PH", "PHB", "HGT", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] @@ -177,382 +178,389 @@ def get_args(varname, wrfnc, timeidx, method, squeeze): ph = ncvars["PH"] phb = ncvars["PHB"] ter = ncvars["HGT"] - qv = ncvars["QVAPOR"] * 1000.0 # g/kg - + qv = ncvars["QVAPOR"] * 1000.0 # g/kg + haveqci = 1 try: - icevars = extract_vars(wrfnc, timeidx, "QICE", + icevars = extract_vars(wrfnc, timeidx, 
"QICE", method, squeeze, cache=None, meta=False) except KeyError: qice = np.zeros(qv.shape, qv.dtype) haveqci = 0 else: - qice = icevars["QICE"] * 1000.0 #g/kg - + qice = icevars["QICE"] * 1000.0 # g/kg + try: - cldvars = extract_vars(wrfnc, timeidx, "QCLOUD", + cldvars = extract_vars(wrfnc, timeidx, "QCLOUD", method, squeeze, cache=None, meta=False) except KeyError: raise RuntimeError("'QCLOUD' not found in NetCDF file") else: - qcld = cldvars["QCLOUD"] * 1000.0 #g/kg - + qcld = cldvars["QCLOUD"] * 1000.0 # g/kg + full_p = p + pb p_hpa = full_p * ConversionFactors.PA_TO_HPA full_t = t + Constants.T_BASE tkel = tk(full_p, full_t, meta=False) - + geopt = ph + phb geopt_unstag = destagger(geopt, -3) ght = geopt_unstag / Constants.G - + return (p_hpa, tkel, qv, qcld, ght, ter, qice) - + if varname == "dbz": varnames = ("T", "P", "PB", "QVAPOR", "QRAIN") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] qv = ncvars["QVAPOR"] qr = ncvars["QRAIN"] - + try: - snowvars = extract_vars(wrfnc, timeidx, "QSNOW", + snowvars = extract_vars(wrfnc, timeidx, "QSNOW", method, squeeze, cache=None, meta=False) except KeyError: qs = np.zeros(qv.shape, qv.dtype) else: qs = snowvars["QSNOW"] - + try: - graupvars = extract_vars(wrfnc, timeidx, "QGRAUP", + graupvars = extract_vars(wrfnc, timeidx, "QGRAUP", method, squeeze, cache=None, meta=False) except KeyError: qg = np.zeros(qv.shape, qv.dtype) else: qg = graupvars["QGRAUP"] - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + return (full_p, tkel, qv, qr, qs, qg) - + if varname == "helicity": # Top can either be 3000 or 1000 (for 0-1 srh or 0-3 srh) - + ncvars = extract_vars(wrfnc, timeidx, ("HGT", "PH", "PHB"), method, squeeze, cache=None, meta=True) - + ter = ncvars["HGT"] ph = ncvars["PH"] phb = ncvars["PHB"] - + # As coded in NCL, but not sure this is possible varname = "U" - u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, + u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=False) - u = destagger(u_vars[varname], -1) - + u = destagger(u_vars[varname], -1) + varname = "V" - v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, + v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=False) v = destagger(v_vars[varname], -2) - + geopt = ph + phb geopt_unstag = destagger(geopt, -3) - + z = geopt_unstag / Constants.G - + return (u, v, z, ter) - + if varname == "updraft_helicity": - ncvars = extract_vars(wrfnc, timeidx, ("W", "PH", "PHB", "MAPFAC_M"), - method, squeeze, cache=None, meta=True) - + varnames = ("W", "PH", "PHB", "MAPFAC_M") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + cache=None, meta=True) + wstag = ncvars["W"] ph = ncvars["PH"] phb = ncvars["PHB"] mapfct = ncvars["MAPFAC_M"] - - attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY")) + + attrs = extract_global_attrs(wrfnc, attrs=("DX", "DY")) dx = attrs["DX"] dy = attrs["DY"] - + # As coded in NCL, but not sure this is possible varname = "U" - u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, + u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=True) - u = destagger(u_vars[varname], -1, meta=True) - + u = destagger(u_vars[varname], -1, meta=True) + varname = "V" - v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, + v_vars = 
extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=True) - v = destagger(v_vars[varname], -2, meta=True) - + v = destagger(v_vars[varname], -2, meta=True) + zstag = ph + phb - + return (zstag, mapfct, u, v, wstag, dx, dy) - + if varname == "omg": - varnames=("T", "P", "W", "PB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "W", "PB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] w = ncvars["W"] pb = ncvars["PB"] qv = ncvars["QVAPOR"] - + wa = destagger(w, -3) full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + return (qv, tkel, wa, full_p) - + if varname == "pw": - varnames=("T", "P", "PB", "PH", "PHB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "PH", "PHB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) - + t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] ph = ncvars["PH"] phb = ncvars["PHB"] qv = ncvars["QVAPOR"] - + # Change this to use real virtual temperature! full_p = p + pb ht = (ph + phb)/Constants.G - full_t = t + Constants.T_BASE - + full_t = t + Constants.T_BASE + tkel = tk(full_p, full_t, meta=False) - + return (full_p, tkel, qv, ht) - + if varname == "rh": - varnames=("T", "P", "PB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] qvapor = to_np(ncvars["QVAPOR"]) - + full_t = t + Constants.T_BASE full_p = p + pb qvapor[qvapor < 0] = 0 tkel = tk(full_p, full_t, meta=False) - + return (qvapor, full_p, tkel) - + if varname == "slp": - varnames=("T", "P", "PB", "QVAPOR", "PH", "PHB") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) - + t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] qvapor = to_np(ncvars["QVAPOR"]) ph = ncvars["PH"] phb = ncvars["PHB"] - + full_t = t + Constants.T_BASE full_p = p + pb qvapor[qvapor < 0] = 0. 
- + full_ph = (ph + phb) / Constants.G - + destag_ph = destagger(full_ph, -3) - + tkel = tk(full_p, full_t, meta=False) - + return (destag_ph, tkel, full_p, qvapor) - + if varname == "td": - varnames=("P", "PB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("P", "PB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) - + p = ncvars["P"] pb = ncvars["PB"] qvapor = to_np(ncvars["QVAPOR"]) - + # Algorithm requires hPa full_p = .01*(p + pb) qvapor[qvapor < 0] = 0 - + return (full_p, qvapor) - + if varname == "tk": - varnames=("T", "P", "PB") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] - + full_t = t + Constants.T_BASE full_p = p + pb - + return (full_p, full_t) - + if varname == "tv": - varnames=("T", "P", "PB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) - + t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] qv = ncvars["QVAPOR"] - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t) - + return (tkel, qv) - + if varname == "twb": - varnames=("T", "P", "PB", "QVAPOR") - ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + varnames = ("T", "P", "PB", "QVAPOR") + ncvars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, cache=None, meta=True) t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] qv = ncvars["QVAPOR"] - + full_t = t + Constants.T_BASE full_p = p + pb - + tkel = tk(full_p, full_t) - + return (full_p, tkel, qv) - + if varname == "uvmet": varname = "U" - u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, + u_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=True) - + u = destagger(u_vars[varname], -1, meta=True) - + varname = "V" - v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, + v_vars = extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=True) v = destagger(v_vars[varname], -2, meta=True) - + map_proj_attrs = extract_global_attrs(wrfnc, attrs="MAP_PROJ") map_proj = map_proj_attrs["MAP_PROJ"] - - if map_proj in (0,3,6): + + if map_proj in (0, 3, 6): raise ProjectionError("Map projection does not need rotation") - elif map_proj in (1,2): + elif map_proj in (1, 2): lat_attrs = extract_global_attrs(wrfnc, attrs=("TRUELAT1", "TRUELAT2")) radians_per_degree = Constants.PI/180.0 # Rotation needed for Lambert and Polar Stereographic true_lat1 = lat_attrs["TRUELAT1"] true_lat2 = lat_attrs["TRUELAT2"] - + try: lon_attrs = extract_global_attrs(wrfnc, attrs="STAND_LON") except AttributeError: try: - cen_lon_attrs = extract_global_attrs(wrfnc, attrs="CEN_LON") + cen_lon_attrs = extract_global_attrs(wrfnc, + attrs="CEN_LON") except AttributeError: - raise RuntimeError("longitude attributes not found in NetCDF") + raise RuntimeError("longitude attributes not found in " + "NetCDF") else: cen_lon = cen_lon_attrs["CEN_LON"] else: cen_lon = lon_attrs["STAND_LON"] - - + varname = "XLAT" - xlat_var = extract_vars(wrfnc, timeidx, varname, + xlat_var = extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=True) lat = xlat_var[varname] - + varname = "XLONG" - xlon_var = extract_vars(wrfnc, timeidx, varname, + xlon_var = 
extract_vars(wrfnc, timeidx, varname, method, squeeze, cache=None, meta=True) lon = xlon_var[varname] - + if map_proj == 1: if((fabs(true_lat1 - true_lat2) > 0.1) and - (fabs(true_lat2 - 90.) > 0.1)): - cone = (log(cos(true_lat1*radians_per_degree)) - - log(cos(true_lat2*radians_per_degree))) - cone = (cone / - (log(tan((45.-fabs(true_lat1/2.))*radians_per_degree)) - - log(tan((45.-fabs(true_lat2/2.))*radians_per_degree)))) + (fabs(true_lat2 - 90.) > 0.1)): + cone = (log(cos(true_lat1 * radians_per_degree)) + - log(cos(true_lat2 * radians_per_degree))) + cone = (cone / + (log(tan((45.-fabs(true_lat1/2.)) * + radians_per_degree)) + - log(tan((45.-fabs(true_lat2/2.)) * + radians_per_degree)))) else: - cone = sin(fabs(true_lat1)*radians_per_degree) + cone = sin(fabs(true_lat1) * radians_per_degree) else: cone = 1 - + return (u, v, lat, lon, cen_lon, cone) - + if varname == "cloudfrac": from wrf.g_geoht import get_height - vars = extract_vars(wrfnc, timeidx, ("P", "PB", "QVAPOR", "T"), - method, squeeze, cache=None, meta=True) - + + varnames = ("P", "PB", "QVAPOR", "T") + vars = extract_vars(wrfnc, timeidx, varnames, method, squeeze, + cache=None, meta=True) + p = vars["P"] pb = vars["PB"] qv = vars["QVAPOR"] t = vars["T"] - - geoht_agl = get_height(wrfnc, timeidx, method, squeeze, + + geoht_agl = get_height(wrfnc, timeidx, method, squeeze, cache=None, meta=True, msl=False) - + full_p = p + pb full_t = t + Constants.T_BASE - + tkel = tk(full_p, full_t) relh = rh(qv, full_p, tkel) - + return (geoht_agl, relh, 1, 300., 2000., 6000.) - - + + class WRFVarsTest(ut.TestCase): longMessage = True - + + def make_func(varname, wrfnc, timeidx, method, squeeze, meta): def func(self): - + try: args = get_args(varname, wrfnc, timeidx, method, squeeze) - except ProjectionError: # Don't fail for this + except ProjectionError: # Don't fail for this return - + routine = ROUTINE_MAP[varname] - - kwargs = {"meta" : meta} + + kwargs = {"meta": meta} result = routine(*args, **kwargs) - - ref = getvar(wrfnc, varname, timeidx, method, squeeze, cache=None, + + ref = getvar(wrfnc, varname, timeidx, method, squeeze, cache=None, meta=meta) - + nt.assert_allclose(to_np(result), to_np(ref)) - + if meta: self.assertEqual(result.dims, ref.dims) - + return func def test_cape3d_1d(wrfnc): - + def func(self): varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB", "HGT", "PSFC") - ncvars = extract_vars(wrfnc, 0, varnames, method="cat", squeeze=True, + ncvars = extract_vars(wrfnc, 0, varnames, method="cat", squeeze=True, cache=None, meta=True) - + t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] @@ -561,9 +569,9 @@ def test_cape3d_1d(wrfnc): phb = ncvars["PHB"] ter = ncvars["HGT"] psfc = ncvars["PSFC"] - + col_idxs = (slice(None), t.shape[-2]//2, t.shape[-1]//2) - + t = t[col_idxs] p = p[col_idxs] pb = pb[col_idxs] @@ -572,40 +580,40 @@ def test_cape3d_1d(wrfnc): phb = phb[col_idxs] ter = float(ter[col_idxs[1:]]) psfc = float(psfc[col_idxs[1:]]) - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + geopt = ph + phb geopt_unstag = destagger(geopt, -1) z = geopt_unstag/Constants.G - + # Convert pressure to hPa p_hpa = ConversionFactors.PA_TO_HPA * full_p - psfc_hpa = ConversionFactors.PA_TO_HPA * psfc - + psfc_hpa = ConversionFactors.PA_TO_HPA * psfc + i3dflag = 1 ter_follow = 1 - + result = cape_3d(p_hpa, tkel, qv, z, ter, psfc_hpa, ter_follow) - + ref = getvar(wrfnc, "cape_3d") ref = ref[(slice(None),) + col_idxs] - + nt.assert_allclose(to_np(result), to_np(ref)) - + return func def 
test_cape2d_1d(wrfnc): - + def func(self): varnames = ("T", "P", "PB", "QVAPOR", "PH", "PHB", "HGT", "PSFC") - ncvars = extract_vars(wrfnc, 0, varnames, method="cat", squeeze=True, + ncvars = extract_vars(wrfnc, 0, varnames, method="cat", squeeze=True, cache=None, meta=True) - + t = ncvars["T"] p = ncvars["P"] pb = ncvars["PB"] @@ -614,9 +622,9 @@ def test_cape2d_1d(wrfnc): phb = ncvars["PHB"] ter = ncvars["HGT"] psfc = ncvars["PSFC"] - + col_idxs = (slice(None), t.shape[-2]//2, t.shape[-1]//2) - + t = t[col_idxs] p = p[col_idxs] pb = pb[col_idxs] @@ -625,82 +633,76 @@ def test_cape2d_1d(wrfnc): phb = phb[col_idxs] ter = float(ter[col_idxs[1:]]) psfc = float(psfc[col_idxs[1:]]) - + full_t = t + Constants.T_BASE full_p = p + pb tkel = tk(full_p, full_t, meta=False) - + geopt = ph + phb geopt_unstag = destagger(geopt, -1) z = geopt_unstag/Constants.G - + # Convert pressure to hPa p_hpa = ConversionFactors.PA_TO_HPA * full_p - psfc_hpa = ConversionFactors.PA_TO_HPA * psfc - + psfc_hpa = ConversionFactors.PA_TO_HPA * psfc + i3dflag = 0 ter_follow = 1 - + result = cape_2d(p_hpa, tkel, qv, z, ter, psfc_hpa, ter_follow) - + ref = getvar(wrfnc, "cape_2d") ref = ref[(slice(None),) + col_idxs[1:]] - + nt.assert_allclose(to_np(result), to_np(ref)) - + return func - + + if __name__ == "__main__": - from wrf import (omp_set_num_threads, omp_set_schedule, omp_get_schedule, + from wrf import (omp_set_num_threads, omp_set_schedule, omp_get_schedule, omp_set_dynamic, omp_get_num_procs, OMP_SCHED_STATIC) omp_set_num_threads(omp_get_num_procs()//2) omp_set_schedule(OMP_SCHED_STATIC, 0) omp_set_dynamic(False) - - varnames = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", - "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", - "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", - "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", - "wa", "uvmet10", "uvmet", "z", "cloudfrac"] - - #varnames = ["helicity"] - varnames=["avo", "pvo", "eth", "dbz", "helicity", "updraft_helicity", - "omg", "pw", "rh", "slp", "td", "tk", "tv", "twb", "uvmet", - "cloudfrac", "ctt"] - + + varnames = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", + "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", + "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", + "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", + "wa", "uvmet10", "uvmet", "z", "cloudfrac"] + omp_set_num_threads(omp_get_num_procs()-1) omp_set_schedule(OMP_SCHED_STATIC, 0) omp_set_dynamic(False) - + # Turn this one off when not needed, since it's slow varnames += ["cape_2d", "cape_3d"] - + for varname in varnames: - for i,wrfnc in enumerate((NCFILE, NCGROUP)): - for j,timeidx in enumerate((0, ALL_TIMES)): + for i, wrfnc in enumerate((NCFILE, NCGROUP)): + for j, timeidx in enumerate((0, ALL_TIMES)): for method in ("cat", "join"): for squeeze in (True, False): for meta in (True, False): - func = make_func(varname, wrfnc, timeidx, method, - squeeze, meta) + func = make_func(varname, wrfnc, timeidx, method, + squeeze, meta) ncname = "single" if i == 0 else "multi" timename = "t0" if j == 0 else "all" - squeeze_name = "squeeze" if squeeze else "nosqueeze" + squeeze_name = ("squeeze" if squeeze + else "nosqueeze") meta_name = "meta" if meta else "nometa" - test_name = "test_{}_{}_{}_{}_{}_{}".format(varname, - ncname, timename, method, - squeeze_name, meta_name) - + test_name = "test_{}_{}_{}_{}_{}_{}".format( + varname, ncname, timename, method, + squeeze_name, meta_name) + setattr(WRFVarsTest, test_name, func) - + 
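For reference, each generated case above boils down to a comparison between a raw computational routine and getvar. A minimal standalone sketch of that check for the "tk" product is below; the output file path is hypothetical, and the top-level imports (getvar, extract_vars, tk, to_np) are the same ones this test module relies on.

# Minimal sketch of one generated test case, assuming a local WRF output file.
from netCDF4 import Dataset
import numpy.testing as nt
from wrf import getvar, extract_vars, tk, to_np

wrfnc = Dataset("wrfout_d02_2005-08-28_00:00:00")  # hypothetical path

ncvars = extract_vars(wrfnc, 0, ("T", "P", "PB"), method="cat",
                      squeeze=True, cache=None, meta=True)
full_t = ncvars["T"] + 300.0            # T is perturbation theta; base is 300 K
full_p = ncvars["P"] + ncvars["PB"]     # perturbation + base pressure (Pa)

result = tk(full_p, full_t)             # raw computational routine
ref = getvar(wrfnc, "tk", 0)            # high-level diagnostic

nt.assert_allclose(to_np(result), to_np(ref))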
func = test_cape3d_1d(wrfnc) setattr(WRFVarsTest, "test_cape3d_1d", func) - + func = test_cape2d_1d(wrfnc) setattr(WRFVarsTest, "test_cape2d_1d", func) - - + ut.main() - - \ No newline at end of file diff --git a/test/ctt_test.py b/test/ctt_test.py index 6e97909..e5c625d 100644 --- a/test/ctt_test.py +++ b/test/ctt_test.py @@ -4,10 +4,12 @@ from matplotlib.cm import get_cmap import cartopy.crs as crs from cartopy.feature import NaturalEarthFeature -from wrf import to_np, getvar, smooth2d, get_cartopy, cartopy_xlim, cartopy_ylim, latlon_coords +from wrf import (to_np, getvar, smooth2d, get_cartopy, cartopy_xlim, + cartopy_ylim, latlon_coords) # Open the NetCDF file -ncfile = Dataset("/Users/ladwig/Documents/wrf_files/problem_files/cfrac_bug/wrfout_d02_1987-10-01_00:00:00") +ncfile = Dataset("/Users/ladwig/Documents/wrf_files/" + "problem_files/cfrac_bug/wrfout_d02_1987-10-01_00:00:00") # Get the sea level pressure ctt = getvar(ncfile, "ctt") @@ -19,20 +21,23 @@ lats, lons = latlon_coords(ctt) cart_proj = get_cartopy(ctt) # Create a figure -fig = plt.figure(figsize=(12,9)) +fig = plt.figure(figsize=(12, 9)) # Set the GeoAxes to the projection used by WRF ax = plt.axes(projection=cart_proj) # Download and add the states and coastlines -states = NaturalEarthFeature(category='cultural', scale='50m', facecolor='none', +states = NaturalEarthFeature(category='cultural', scale='50m', + facecolor='none', name='admin_1_states_provinces_shp') ax.add_feature(states, linewidth=.5) ax.coastlines('50m', linewidth=0.8) -# Make the contour outlines and filled contours for the smoothed sea level pressure. +# Make the contour outlines and filled contours for the smoothed sea level +# pressure. plt.contour(to_np(lons), to_np(lats), to_np(ctt), 10, colors="black", transform=crs.PlateCarree()) -plt.contourf(to_np(lons), to_np(lats), to_np(ctt), 10, transform=crs.PlateCarree(), +plt.contourf(to_np(lons), to_np(lats), to_np(ctt), 10, + transform=crs.PlateCarree(), cmap=get_cmap("jet")) # Add a color bar diff --git a/test/generator_test.py b/test/generator_test.py index 6a45d89..8cc2816 100644 --- a/test/generator_test.py +++ b/test/generator_test.py @@ -1,17 +1,19 @@ -from __future__ import (absolute_import, division, print_function, unicode_literals) +from __future__ import (absolute_import, division, print_function, + unicode_literals) from wrf import getvar from netCDF4 import Dataset as nc -#ncfile = nc("/Users/ladwig/Documents/wrf_files/wrfout_d01_2016-02-25_18_00_00") + ncfile = nc("/Users/ladwig/Documents/wrf_files/wrfout_d01_2016-10-07_00_00_00") + def gen_seq(): wrfseq = [ncfile, ncfile, ncfile] for wrf in wrfseq: yield wrf - + + p_gen = getvar(gen_seq(), "P", method="join") print(p_gen) del p_gen - diff --git a/test/misc/extract_one_time.py b/test/misc/extract_one_time.py new file mode 100644 index 0000000..9645f98 --- /dev/null +++ b/test/misc/extract_one_time.py @@ -0,0 +1,40 @@ + +from netCDF4 import Dataset + +VARS_TO_KEEP = ("Times", "XLAT", "XLONG", "XLAT_U", "XLAT_V", "XLONG_U", + "XLONG_V", "U", "V", "W", "PH", "PHB", "T", "P", "PB", "Q2", + "T2", "PSFC", "U10", "V10", "XTIME", "QVAPOR", "QCLOUD", + "QGRAUP", "QRAIN", "QSNOW", "QICE", "MAPFAC_M", "MAPFAC_U", + "MAPFAC_V", "F", "HGT", "RAINC", "RAINSH", "RAINNC", + "I_RAINC", "I_RAINNC") + +INPUT_FILE = "wrfout_d02_2005-08-28_00:00:00" +OUTPUT_FILE = "wrfout_problem_file" +DESIRED_TIME_INDEX = 0 + +with Dataset(INPUT_FILE) as infile, Dataset(OUTPUT_FILE, mode="w") as outfile: + # Copy the global attributes + outfile.setncatts(infile.__dict__) + + 
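Once the dimension and variable copy below has finished, a quick sanity check is to reopen the reduced file and confirm that the Time dimension collapsed to a single entry and that a kept variable made it across. A small sketch of that follow-up check, reusing the OUTPUT_FILE name defined above:

# Hypothetical follow-up check, run after the copy script completes.
from netCDF4 import Dataset

with Dataset("wrfout_problem_file") as reduced:
    # Time should now have length 1, and kept variables should be present.
    assert len(reduced.dimensions["Time"]) == 1
    assert "T2" in reduced.variables
    print(reduced.variables["T2"].shape)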
# Copy Dimensions + for name, dimension in infile.dimensions.items(): + if name != "Time": + dimsize = len(dimension) + outfile.createDimension(name, dimsize) + else: + outfile.createDimension(name, 1) + + # Copy Variables + for name, variable in infile.variables.iteritems(): + if name not in VARS_TO_KEEP: + continue + + outvar = outfile.createVariable(name, variable.datatype, + variable.dimensions, zlib=True) + + if len(variable.dimensions) > 1: + outvar[:] = variable[DESIRED_TIME_INDEX, :] + else: + outvar[:] = variable[DESIRED_TIME_INDEX] + + outvar.setncatts(variable.__dict__) diff --git a/test/misc/loop_and_fill_meta.py b/test/misc/loop_and_fill_meta.py new file mode 100644 index 0000000..7f46217 --- /dev/null +++ b/test/misc/loop_and_fill_meta.py @@ -0,0 +1,70 @@ +from __future__ import print_function, division + +import os +import numpy as np +from netCDF4 import Dataset +from wrf import getvar, ALL_TIMES, to_np +import xarray + +filename_list = ["/Users/ladwig/Documents/wrf_files/" + "wrf_vortex_single/wrfout_d02_2005-08-28_00:00:00", + "/Users/ladwig/Documents/wrf_files/wrf_vortex_single/" + "wrfout_d02_2005-08-28_03:00:00", + "/Users/ladwig/Documents/wrf_files/wrf_vortex_single/" + "wrfout_d02_2005-08-28_06:00:00", + "/Users/ladwig/Documents/wrf_files/wrf_vortex_single/" + "wrfout_d02_2005-08-28_09:00:00"] + +result_shape = (4, 1, 96, 96) + +# Let's get the first time so we can copy the metadata later +f = Dataset(filename_list[0]) +# By setting squeeze to False, you'll get all the dimension names. +z1 = getvar(f, "T2", 0, squeeze=False) +xlat = getvar(f, "XLAT", 0) +xlong = getvar(f, "XLONG", 0) + + +z_final = np.empty(result_shape, np.float32) + +# Modify this number if using more than 1 time per file +times_per_file = 1 + +data_times = [] +xtimes = [] +for timeidx in range(result_shape[0]): + # Compute the file index and the time index inside the file + fileidx = timeidx // times_per_file + file_timeidx = timeidx % times_per_file + + f = Dataset(filename_list[fileidx]) + z = getvar(f, "T2", file_timeidx) + t = getvar(f, "Times", file_timeidx) + xt = getvar(f, "xtimes", file_timeidx) + data_times.append(to_np(t)) + xtimes.append(to_np(xt)) + z_final[timeidx, :] = z[:] + f.close() + +# Let's make the metadata. Dimension names should copy easily if you set +# sqeeze to False, otherwise you can just set them yourself is a tuple of +# dimension names. Since you wanted +# to keep the bottom_top dimension for this 2D variable (which is normally +# removed), I'm doing this manually. 
+z_dims = ["Time", "bottom_top", "south_north", "west_east"] + +# Xarray doesn't copy coordinates easily (it always complains about shape +# mismatches), so do this manually +z_coords = {} +z_coords["Time"] = data_times +z_coords["XTIME"] = ("Time",), xtimes +z_coords["XLAT"] = ("south_north", "west_east"), xlat +z_coords["XLONG"] = ("south_north", "west_east"), xlong +z_name = "T2" + +# Attributes copy nicely +z_attrs = {} +z_attrs.update(z1.attrs) + +z_with_meta = xarray.DataArray(z_final, coords=z_coords, dims=z_dims, + attrs=z_attrs, name=z_name) diff --git a/test/mocktest.py b/test/misc/mocktest.py similarity index 51% rename from test/mocktest.py rename to test/misc/mocktest.py index 211936f..7dd552a 100644 --- a/test/mocktest.py +++ b/test/misc/mocktest.py @@ -6,39 +6,48 @@ try: except ImportError: from mock import Mock as MagicMock + class Mock(MagicMock): @classmethod def __getattr__(cls, name): return Mock() - -MOCK_MODULES = ["numpy", "numpy.ma", "xarray", "cartopy", + + +MOCK_MODULES = ["numpy", "numpy.ma", "xarray", "cartopy", "pandas", "matplotlib", "netCDF4", "mpl_toolkits.basemap", "wrf._wrffortran"] sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) -consts = {"DEFAULT_FILL" : 9.9692099683868690E36, - "DEFAULT_FILL_INT8" : -127, - "DEFAULT_FILL_INT16" : -32767, - "DEFAULT_FILL_INT32" : -2147483647, - "DEFAULT_FILL_INT64" : -9223372036854775806, - "DEFAULT_FILL_FLOAT" : 9.9692099683868690E36, - "DEFAULT_FILL_DOUBLE" : 9.9692099683868690E36, - "fomp_sched_static" : 1, - "fomp_sched_dynamic" : 2, - "fomp_sched_guided" : 3, - "fomp_sched_auto" : 4} +consts = {"DEFAULT_FILL": 9.9692099683868690E36, + "DEFAULT_FILL_INT8": -127, + "DEFAULT_FILL_INT16": -32767, + "DEFAULT_FILL_INT32": -2147483647, + "DEFAULT_FILL_INT64": -9223372036854775806, + "DEFAULT_FILL_FLOAT": 9.9692099683868690E36, + "DEFAULT_FILL_DOUBLE": 9.9692099683868690E36, + "fomp_sched_static": 1, + "fomp_sched_dynamic": 2, + "fomp_sched_guided": 3, + "fomp_sched_auto": 4} + class MockWrfConstants(object): def __init__(self): self.__dict__ = consts - + + def mock_asscalar(val): - return float(val) + return float(val) + sys.modules["wrf._wrffortran"].wrf_constants = MockWrfConstants() sys.modules["wrf._wrffortran"].omp_constants = MockWrfConstants() sys.modules["numpy"].asscalar = mock_asscalar -import wrf -print (wrf.get_coord_pairs.__doc__) +try: + import wrf +except ImportError: + pass + +print(wrf.get_coord_pairs.__doc__) diff --git a/test/projtest.py b/test/misc/projtest.py similarity index 61% rename from test/projtest.py rename to test/misc/projtest.py index 508b799..e1ac4d1 100644 --- a/test/projtest.py +++ b/test/misc/projtest.py @@ -7,13 +7,15 @@ import matplotlib.cm as cm from netCDF4 import Dataset as NetCDF +from wrf import get_proj_params +from wrf.projection import getproj, RotatedLatLon, PolarStereographic PYNGL = True try: import Ngl except ImportError: PYNGL = False - + BASEMAP = True try: import mpl_toolkits.basemap @@ -25,19 +27,15 @@ try: from cartopy import crs, feature except ImportError: CARTOPY = False - -from wrf import get_proj_params -from wrf.projection import getproj, RotatedLatLon, PolarStereographic FILE_DIR = "/Users/ladwig/Documents/wrf_files/" -WRF_FILES = [ - join(FILE_DIR, "norway", "geo_em.d01.nc"), - join(FILE_DIR, "rotated_pole", "EAS_geo_em.d01.nc"), - join(FILE_DIR, "rotated_pole", "EUR_geo_em.d01.nc"), - join(FILE_DIR,"wrfout_d01_2016-02-25_18_00_00"), - join(FILE_DIR, "wrfout_d01_2008-09-29_23-30-00"), - join(FILE_DIR, "wrfout_d01_2010-06-13_21:00:00")] 
+WRF_FILES = [join(FILE_DIR, "norway", "geo_em.d01.nc"), + join(FILE_DIR, "rotated_pole", "EAS_geo_em.d01.nc"), + join(FILE_DIR, "rotated_pole", "EUR_geo_em.d01.nc"), + join(FILE_DIR, "wrfout_d01_2016-02-25_18_00_00"), + join(FILE_DIR, "wrfout_d01_2008-09-29_23-30-00"), + join(FILE_DIR, "wrfout_d01_2010-06-13_21:00:00")] def nz_proj(): @@ -45,65 +43,68 @@ def nz_proj(): [-32.669853, -32.669853]]) lons = np.array([[163.839595, -179.693502], [163.839595, -179.693502]]) - - params = {"MAP_PROJ" : 6, - "CEN_LAT" : -41.814869, - "CEN_LON" : 179.693502, - "TRUELAT1" : 0, + + params = {"MAP_PROJ": 6, + "CEN_LAT": -41.814869, + "CEN_LON": 179.693502, + "TRUELAT1": 0, "TRUELAT2": 0, - "MOAD_CEN_LAT" : -41.814869, - "STAND_LON" : 180.0 - 179.693502, - "POLE_LAT" : 48.185131, - "POLE_LON" : 0.0} - + "MOAD_CEN_LAT": -41.814869, + "STAND_LON": 180.0 - 179.693502, + "POLE_LAT": 48.185131, + "POLE_LON": 0.0} + return lats, lons, RotatedLatLon(lats=lats, lons=lons, **params) + def argentina_proj(): lats = np.array([[-57.144064, -57.144064], [-21.154470, -21.154470]]) lons = np.array([[-86.893797, -37.089724], [-86.893797, -37.089724]]) - - params = {"MAP_PROJ" : 6, - "CEN_LAT" : -39.222954, - "CEN_LON" : -65.980109, - "TRUELAT1" : 0, + + params = {"MAP_PROJ": 6, + "CEN_LAT": -39.222954, + "CEN_LON": -65.980109, + "TRUELAT1": 0, "TRUELAT2": 0, - "MOAD_CEN_LAT" : -39.222954, - "STAND_LON" : 180.0 - -65.980109, - "POLE_LAT" : 90 + -39.222954, - "POLE_LON" : 0.0} - + "MOAD_CEN_LAT": -39.222954, + "STAND_LON": 180.0 - -65.980109, + "POLE_LAT": 90 + -39.222954, + "POLE_LON": 0.0} + return lats, lons, RotatedLatLon(lats=lats, lons=lons, **params) + def south_polar_proj(): - lats = np.array([[-30.0,-30.0], - [-30.0,-30.0]]) + lats = np.array([[-30.0, -30.0], + [-30.0, -30.0]]) lons = np.array([[-120, 60], [-120, 60]]) - - params = {"MAP_PROJ" : 2, - "CEN_LAT" : -90.0, - "CEN_LON" : 0, - "TRUELAT1" : -10.0, - "MOAD_CEN_LAT" : -90.0, - "STAND_LON" : 0} - + + params = {"MAP_PROJ": 2, + "CEN_LAT": -90.0, + "CEN_LON": 0, + "TRUELAT1": -10.0, + "MOAD_CEN_LAT": -90.0, + "STAND_LON": 0} + return lats, lons, PolarStereographic(lats=lats, lons=lons, **params) + def north_polar_proj(): - lats = np.array([[30.0,30.0], - [30.0,30.0]]) + lats = np.array([[30.0, 30.0], + [30.0, 30.0]]) lons = np.array([[-45, 140], [-45, 140]]) - - params = {"MAP_PROJ" : 2, - "CEN_LAT" : 90.0, - "CEN_LON" : 10, - "TRUELAT1" : 10.0, - "MOAD_CEN_LAT" : 90.0, - "STAND_LON" : 10} - + + params = {"MAP_PROJ": 2, + "CEN_LAT": 90.0, + "CEN_LON": 10, + "TRUELAT1": 10.0, + "MOAD_CEN_LAT": 90.0, + "STAND_LON": 10} + return lats, lons, PolarStereographic(lats=lats, lons=lons, **params) @@ -112,21 +113,23 @@ def dateline_rot_proj(): [71.717521, 71.717521]]) lons = np.array([[170.332771, -153.456292], [170.332771, -153.456292]]) - - params = {"MAP_PROJ" : 6, - "CEN_LAT" : 66.335764, - "CEN_LON" : -173.143792, - "TRUELAT1" : 0, + + params = {"MAP_PROJ": 6, + "CEN_LAT": 66.335764, + "CEN_LON": -173.143792, + "TRUELAT1": 0, "TRUELAT2": 0, - "MOAD_CEN_LAT" : 66.335764, - "STAND_LON" : 173.143792, - "POLE_LAT" : 90.0 - 66.335764, - "POLE_LON" : 180.0} + "MOAD_CEN_LAT": 66.335764, + "STAND_LON": 173.143792, + "POLE_LAT": 90.0 - 66.335764, + "POLE_LON": 180.0} return lats, lons, RotatedLatLon(lats=lats, lons=lons, **params) + class WRFProjTest(ut.TestCase): longMessage = True + def make_test(wrf_file=None, fixed_case=None): if wrf_file is not None: ncfile = NetCDF(wrf_file) @@ -144,75 +147,76 @@ def make_test(wrf_file=None, fixed_case=None): elif fixed_case == 
"north_polar": lats, lons, proj = north_polar_proj() elif fixed_case == "dateline_rot": - lats,lons,proj = dateline_rot_proj() - - print ("wrf proj4: {}".format(proj.proj4())) + lats, lons, proj = dateline_rot_proj() + + print("wrf proj4: {}".format(proj.proj4())) if PYNGL: # PyNGL plotting wks_type = bytes("png") - wks = Ngl.open_wks(wks_type,bytes("pyngl_{}".format(name_suffix))) + wks = Ngl.open_wks(wks_type, bytes("pyngl_{}".format(name_suffix))) mpres = proj.pyngl() - map = Ngl.map(wks,mpres) - + map = Ngl.map(wks, mpres) + Ngl.delete_wks(wks) - - if BASEMAP: + + if BASEMAP: # Basemap plotting - fig = plt.figure(figsize=(10,10)) - ax = fig.add_axes([0.1,0.1,0.8,0.8]) - + fig = plt.figure(figsize=(10, 10)) + ax = fig.add_axes([0.1, 0.1, 0.8, 0.8]) + # Define and plot the meridians and parallels min_lat = np.amin(lats) max_lat = np.amax(lats) min_lon = np.amin(lons) max_lon = np.amax(lons) - - parallels = np.arange(np.floor(min_lat), np.ceil(max_lat), + + parallels = np.arange(np.floor(min_lat), np.ceil(max_lat), (max_lat - min_lat)/5.0) - meridians = np.arange(np.floor(min_lon), np.ceil(max_lon), + meridians = np.arange(np.floor(min_lon), np.ceil(max_lon), (max_lon - min_lon)/5.0) - + bm = proj.basemap() bm.drawcoastlines(linewidth=.5) - #bm.drawparallels(parallels,labels=[1,1,1,1],fontsize=10) - #bm.drawmeridians(meridians,labels=[1,1,1,1],fontsize=10) - print ("basemap proj4: {}".format(bm.proj4string)) + # bm.drawparallels(parallels,labels=[1,1,1,1],fontsize=10) + # bm.drawmeridians(meridians,labels=[1,1,1,1],fontsize=10) + print("basemap proj4: {}".format(bm.proj4string)) plt.savefig("basemap_{}.png".format(name_suffix)) plt.close(fig) - + if CARTOPY: # Cartopy plotting - fig = plt.figure(figsize=(10,10)) - ax = plt.axes([0.1,0.1,0.8,0.8], projection=proj.cartopy()) - print ("cartopy proj4: {}".format(proj.cartopy().proj4_params)) - + fig = plt.figure(figsize=(10, 10)) + ax = plt.axes([0.1, 0.1, 0.8, 0.8], projection=proj.cartopy()) + print("cartopy proj4: {}".format(proj.cartopy().proj4_params)) + ax.coastlines('50m', linewidth=0.8) - #print proj.x_extents() - #print proj.y_extents() + # print proj.x_extents() + # print proj.y_extents() ax.set_xlim(proj.cartopy_xlim()) ax.set_ylim(proj.cartopy_ylim()) ax.gridlines() plt.savefig("cartopy_{}.png".format(name_suffix)) plt.close(fig) + if __name__ == "__main__": for wrf_file in WRF_FILES: test_func = make_test(wrf_file=wrf_file) setattr(WRFProjTest, "test_proj", test_func) - + test_func2 = make_test(fixed_case="south_rot") setattr(WRFProjTest, "test_south_rot", test_func2) - + test_func3 = make_test(fixed_case="arg_rot") setattr(WRFProjTest, "test_arg_rot", test_func3) - + test_func4 = make_test(fixed_case="south_polar") setattr(WRFProjTest, "test_south_polar", test_func4) - + test_func5 = make_test(fixed_case="north_polar") setattr(WRFProjTest, "test_north_polar", test_func5) - + test_func6 = make_test(fixed_case="dateline_rot") setattr(WRFProjTest, "test_dateline_rot", test_func6) - - ut.main() \ No newline at end of file + + ut.main() diff --git a/test/misc/quiver_test.py b/test/misc/quiver_test.py new file mode 100644 index 0000000..557167e --- /dev/null +++ b/test/misc/quiver_test.py @@ -0,0 +1,58 @@ +from netCDF4 import Dataset +import numpy as np +import matplotlib.pyplot as plt +from matplotlib.cm import get_cmap +import cartopy.crs as crs +from cartopy.feature import NaturalEarthFeature + +from wrf import (getvar, interplevel, to_np, latlon_coords, get_cartopy, + cartopy_xlim, cartopy_ylim) + +# Open the NetCDF file +ncfile = 
Dataset("wrfout_d01_2016-10-07_00_00_00") + +# Extract the pressure and wind variables + +p = getvar(ncfile, "pressure") +# Note: This is m/s. +ua = getvar(ncfile, "ua") +va = getvar(ncfile, "va") + +# Interpolate u, and v winds to 950 hPa +u_950 = interplevel(ua, p, 950) +v_950 = interplevel(va, p, 950) + +# Get the lat/lon coordinates +lats, lons = latlon_coords(u_950) + +# Get the map projection information +cart_proj = get_cartopy(u_950) + +# Create the figure +fig = plt.figure(figsize=(12, 9)) +ax = plt.axes(projection=cart_proj) + +# Download and add the states and coastlines +states = NaturalEarthFeature(category='cultural', scale='50m', + facecolor='none', + name='admin_1_states_provinces_shp') +ax.add_feature(states, linewidth=0.5) +ax.coastlines('50m', linewidth=0.8) + +# Add the 950 hPa wind barbs, only plotting every 'thin'ed barb. Adjust thin +# as needed. Also, no scaling is done for the arrows, so you might need to +# mess with the scale argument. +thin = 50 +plt.quiver(to_np(lons[::thin, ::thin]), to_np(lats[::thin, ::thin]), + to_np(u_950[::thin, ::thin]), to_np(v_950[::thin, ::thin]), + transform=crs.PlateCarree()) + +# Set the map bounds +ax.set_xlim(cartopy_xlim(u_950)) +ax.set_ylim(cartopy_ylim(v_950)) + +ax.gridlines() + +plt.title("Arrows") + +plt.show() diff --git a/test/reduce_file.py b/test/misc/reduce_file.py similarity index 93% rename from test/reduce_file.py rename to test/misc/reduce_file.py index 35e61d9..d7e0b08 100644 --- a/test/reduce_file.py +++ b/test/misc/reduce_file.py @@ -13,22 +13,22 @@ for att_name in in_nc.ncattrs(): # Copy Dimensions, but modify the vertical dimensions for dimname, dim in in_nc.dimensions.iteritems(): out_nc.createDimension(dimname, len(dim)) - + # Copy Variables and their Attributes, using the reduced vertical dimension for varname, var in in_nc.variables.iteritems(): if varname in ("T2", "XLAT", "XLONG", "XTIME"): datatype = var.datatype dimensions = var.dimensions shape = var.shape - + new_shape = shape - + new_var = out_nc.createVariable(varname, datatype, dimensions) - + new_var[:] = var[:] - + for att_name in var.ncattrs(): setattr(new_var, att_name, getattr(var, att_name)) - + in_nc.close() -out_nc.close() \ No newline at end of file +out_nc.close() diff --git a/test/misc/snippet.py b/test/misc/snippet.py new file mode 100644 index 0000000..d3df43a --- /dev/null +++ b/test/misc/snippet.py @@ -0,0 +1,29 @@ +import matplotlib.pyplot as plt +from mpl_toolkits.basemap import Basemap + + +def main(): + bm = Basemap(projection="rotpole", + o_lat_p=36.0, + o_lon_p=180.0, + llcrnrlat=-10.590603, + urcrnrlat=46.591976, + llcrnrlon=-139.08585, + urcrnrlon=22.661009, + lon_0=-106.0, + rsphere=6370000, + resolution='l') + + fig = plt.figure(figsize=(8, 8)) + ax = fig.add_axes([0.1, 0.1, 0.8, 0.8]) + + bm.drawcoastlines(linewidth=.5) + + print(bm.proj4string) + + plt.savefig("basemap_map.png") + plt.close(fig) + + +if __name__ == "__main__": + main() diff --git a/test/varcache.py b/test/misc/varcache.py similarity index 68% rename from test/varcache.py rename to test/misc/varcache.py index f8ba15d..54ca5ed 100644 --- a/test/varcache.py +++ b/test/misc/varcache.py @@ -4,33 +4,37 @@ import time from netCDF4 import Dataset from wrf import getvar, ALL_TIMES, extract_vars -wrf_filenames = ["/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-28_00:00:00", - "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-28_12:00:00", - 
"/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-29_00:00:00"] +wrf_filenames = ["/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/" + "moving_nest/wrfout_d02_2005-08-28_00:00:00", + "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/" + "moving_nest/wrfout_d02_2005-08-28_12:00:00", + "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/" + "moving_nest/wrfout_d02_2005-08-29_00:00:00"] wrfin = [Dataset(x) for x in wrf_filenames] -my_cache = extract_vars(wrfin, ALL_TIMES, ("P", "PB", "PH", "PHB", "T", "QVAPOR", "HGT", "U", "V", "W", "PSFC")) +my_cache = extract_vars(wrfin, ALL_TIMES, + ("P", "PB", "PH", "PHB", "T", "QVAPOR", "HGT", "U", + "V", "W", "PSFC")) start = time.time() -for var in ("avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", - "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", - "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", - "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", +for var in ("avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", + "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", + "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", + "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", "wa", "uvmet10", "uvmet", "z", "cfrac", "zstag", "geopt_stag"): v = getvar(wrfin, var, ALL_TIMES) end = time.time() -print ("Time taken without variable cache: ", (end-start)) +print("Time taken without variable cache: ", (end-start)) start = time.time() -for var in ("avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", - "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", - "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", - "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", +for var in ("avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", + "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", + "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", + "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", "wa", "uvmet10", "uvmet", "z", "cfrac", "zstag", "geopt_stag"): v = getvar(wrfin, var, ALL_TIMES, cache=my_cache) end = time.time() -print ("Time taken with variable cache: ", (end-start)) - +print("Time taken with variable cache: ", (end-start)) diff --git a/test/viewtest.py b/test/misc/viewtest.py similarity index 51% rename from test/viewtest.py rename to test/misc/viewtest.py index 5dab455..f9a7064 100644 --- a/test/viewtest.py +++ b/test/misc/viewtest.py @@ -1,26 +1,25 @@ +# Test snippet for f2py import numpy as np import wrf._wrffortran errlen = int(wrf._wrffortran.constants.errlen) - -a = np.ones((3,3,3)) -b = np.zeros((3,3,3,3)) +a = np.ones((3, 3, 3)) +b = np.zeros((3, 3, 3, 3)) c = np.zeros(errlen, "c") errstat = np.array(0) errmsg = np.zeros(errlen, "c") c[:] = "Test String" - for i in xrange(2): - outview = b[i,:] + outview = b[i, :] outview = outview.T - q = wrf._wrffortran.testfunc(a,outview,c,errstat=errstat,errmsg=errmsg) - print errstat - + q = wrf._wrffortran.testfunc(a, outview, c, errstat=errstat, + errmsg=errmsg) + print(errstat) str_bytes = (bytes(char).decode("utf-8") for char in errmsg[:]) -print repr(errmsg) -print "".join(str_bytes).strip() \ No newline at end of file +print(repr(errmsg)) +print("".join(str_bytes).strip()) diff --git a/test/misc/wps.py b/test/misc/wps.py new file mode 100644 index 0000000..b2a80ea --- /dev/null +++ b/test/misc/wps.py @@ -0,0 +1,230 @@ +# Hastily made script to read WPS intermediate files +from __future__ import print_function +import struct + +import numpy as np + +# Number 
of bytes used at the start and end of a fortran record to +# indicate the record size +SIZE_BYTES = 4 + + +class WPSData(object): + def __init__(self, ifv=None, hdate=None, xfcst=None, map_source=None, + field=None, units=None, desc=None, xlvl=None, nx=None, + ny=None, iproj=None, startloc=None, startlat=None, + startlon=None, deltalat=None, deltalon=None, nlats=None, + dx=None, dy=None, xlonc=None, truelat1=None, truelat2=None, + earth_radius=None, is_wind_earth_rel=None, slab=None): + + self.ifv = ifv + self.hdate = hdate + self.xfcst = xfcst + self.map_source = map_source + self.field = field + self.units = units + self.desc = desc + self.xlvl = xlvl + self.nx = nx + self.ny = ny + self.iproj = iproj + self.startloc = startloc + self.startlat = startlat + self.startlon = startlon + self.deltalat = deltalat + self.deltalon = deltalon + self.nlats = nlats + self.dx = dx + self.dy = dy + self.xlonc = xlonc + self.truelat1 = truelat1 + self.truelat2 = truelat2 + self.earth_radius = earth_radius + self.is_wind_earth_rel = is_wind_earth_rel + self.slab = slab + + +def _parse_record1(data): + result = {} + result["ifv"] = struct.unpack(">i", data) + + return result + + +def _parse_record2(data): + result = {} + parsed = struct.unpack(">24sf32s9s25s46sfiii", data) + result["hdate"] = parsed[0] + result["xfcst"] = parsed[1] + result["map_source"] = parsed[2] + result["field"] = parsed[3] + result["units"] = parsed[4] + result["desc"] = parsed[5] + result["xlvl"] = parsed[6] + result["nx"] = parsed[7] + result["ny"] = parsed[8] + result["iproj"] = parsed[9] + + return result + + +def _parse_record3(data, iproj): + result = {} + if iproj == 0: + fmt = ">8sfffff" + parsed = struct.unpack(fmt, data) + result["startloc"] = parsed[0] + result["startlat"] = parsed[1] + result["startlon"] = parsed[2] + result["deltalat"] = parsed[3] + result["deltalon"] = parsed[4] + result["earth_radius"] = parsed[5] + elif iproj == 1: + fmt = ">8sffffff" + parsed = struct.unpack(fmt, data) + result["startloc"] = parsed[0] + result["startlat"] = parsed[1] + result["startlon"] = parsed[2] + result["dx"] = parsed[3] + result["dy"] = parsed[4] + result["truelat1"] = parsed[5] + result["earth_radius"] = parsed[6] + elif iproj == 3: + fmt = ">8sffffffff" + parsed = struct.unpack(fmt, data) + result["startloc"] = parsed[0] + result["startlat"] = parsed[1] + result["startlon"] = parsed[2] + result["dx"] = parsed[3] + result["dy"] = parsed[4] + result["xlonc"] = parsed[5] + result["truelat1"] = parsed[6] + result["truelat2"] = parsed[7] + result["earth_radius"] = parsed[8] + elif iproj == 4: + fmt = ">8sfffff" + parsed = struct.unpack(fmt, data) + result["startloc"] = parsed[0] + result["startlat"] = parsed[1] + result["startlon"] = parsed[2] + result["nlats"] = parsed[3] + result["deltalon"] = parsed[4] + result["earth_radius"] = parsed[5] + elif iproj == 5: + fmt = ">8sfffffff" + parsed = struct.unpack(fmt, data) + result["startloc"] = parsed[0] + result["startlat"] = parsed[1] + result["startlon"] = parsed[2] + result["dx"] = parsed[3] + result["dy"] = parsed[4] + result["xlonc"] = parsed[5] + result["truelat1"] = parsed[6] + result["earth_radius"] = parsed[7] + + return result + + +def _parse_record4(data): + result = {} + result["is_wind_earth_rel"] = struct.unpack(">i", data) + + return result + + +def _parse_record5(data, nx, ny): + result = {} + + size = nx * ny + fmt = ">{}f".format(size) + parsed = struct.unpack(fmt, data) + + arr = np.array(parsed, dtype=np.float32) + result["slab"] = arr.reshape((nx, ny), 
order="F") + + return result + + +_PARSE_MAP = {0: _parse_record1, + 1: _parse_record2, + 2: _parse_record3, + 3: _parse_record4, + 4: _parse_record5} + + +def parse_record(record_idx, data, iproj=None, nx=None, ny=None): + + if record_idx == 0 or record_idx == 1 or record_idx == 3: + return _PARSE_MAP[record_idx](data) + elif record_idx == 2: + return _PARSE_MAP[record_idx](data, iproj) + elif record_idx == 4: + return _PARSE_MAP[record_idx](data, nx, ny) + + +def read_wps(wps_file, field=None): + with open(wps_file, "rb") as f: + wps_params = {} + record_list = [] + raw_data = f.read() + + record_size_idx = 0 + end_of_file_idx = len(raw_data) - 1 + + while True: + iproj = None + nx = None + ny = None + keep_record = True + for record_idx in range(5): + # Each record has the size (in SIZE_BYTES bytes) at the + # start and end of each record. This might be compiler + # dependent though, so this might need to be modified. + # Also, the WPS files are stored big endian. + + record_size = struct.unpack( + ">i", + raw_data[record_size_idx: record_size_idx + SIZE_BYTES]) + record_start = record_size_idx + SIZE_BYTES + record_end = record_start + record_size[0] + record_data = raw_data[record_start:record_end] + + parsed_record = parse_record(record_idx, record_data, iproj, + nx, ny) + + try: + field_name = parsed_record["field"].strip() + except KeyError: + pass + else: + if field is not None: + if field_name != field: + keep_record = False + + try: + iproj = parsed_record["iproj"] + except KeyError: + pass + + try: + nx = parsed_record["nx"] + except KeyError: + pass + + try: + ny = parsed_record["ny"] + except KeyError: + pass + + wps_params.update(parsed_record) + + record_size_idx = record_end + SIZE_BYTES + + if keep_record: + record_list.append(WPSData(**wps_params)) + + # Repeat for all record slabs + if record_end + SIZE_BYTES > end_of_file_idx: + break + + return record_list diff --git a/test/listBug.ncl b/test/ncl/listBug.ncl similarity index 100% rename from test/listBug.ncl rename to test/ncl/listBug.ncl diff --git a/test/ncl_get_var.ncl b/test/ncl/ncl_get_var.ncl similarity index 93% rename from test/ncl_get_var.ncl rename to test/ncl/ncl_get_var.ncl index a7b8aa5..408b0fa 100644 --- a/test/ncl_get_var.ncl +++ b/test/ncl/ncl_get_var.ncl @@ -32,7 +32,9 @@ "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", \ "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", \ "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", \ - "wa", "uvmet10", "uvmet", "z", "cfrac", "height_agl" /] + "wa", "uvmet10", "uvmet", "z", "cfrac", "height_agl", \ + "wspd_wdir", "wspd_wdir10", "uvmet_wspd_wdir", \ + "uvmet10_wspd_wdir" /] unique_dimname_list = NewList("fifo") unique_dimsize_list = NewList("fifo") @@ -87,7 +89,7 @@ xopt@timeidx = time xopt@linecoords = True - ht_vertcross1 = wrf_user_vertcross(z, p, pivot, xopt) + ht_vertcross1 = wrf_user_vert_cross(z, p, pivot, xopt) fout->ht_vertcross1 = ht_vertcross1 @@ -100,7 +102,7 @@ xopt@timeidx = time xopt@linecoords = True - ht_vertcross2 = wrf_user_vertcross(z, p, pivot, xopt) + ht_vertcross2 = wrf_user_vert_cross(z, p, pivot, xopt) ht_vertcross2!1 = "vertical2" ht_vertcross2!2 = "cross_line_idx2" @@ -131,7 +133,7 @@ xopt@linecoords = True xopt@autolevels = 1000 - ht_vertcross3 = wrf_user_vertcross(z, p, start_end, xopt) + ht_vertcross3 = wrf_user_vert_cross(z, p, start_end, xopt) ht_vertcross3!0 = "Time" ht_vertcross3!1 = "vertical3" @@ -150,7 +152,7 @@ p_var := p(i,:,:,:) z_var := z(i,:,:,:) - ht_vertcross := wrf_user_vertcross(z_var, 
p_var, start_end, xopt) + ht_vertcross := wrf_user_vert_cross(z_var, p_var, start_end, xopt) dim0name = sprinti("vertical_t%i",i) dim1name = sprinti("cross_line_idx_t%i",i) @@ -190,8 +192,8 @@ plev := 500. ; 500 MB hlev := 5000 ; 5000 m - z2_500 = wrf_user_interplevel(z,p,plev,False) - p2_5000 = wrf_user_interplevel(p,z,hlev,False) + z2_500 = wrf_user_interp_level(z,p,plev,False) + p2_5000 = wrf_user_interp_level(p,z,hlev,False) fout->z2_500 = z2_500 fout->p2_5000 = p2_5000 @@ -199,8 +201,8 @@ plev := (/1000., 850., 500., 250./) hlev := (/500., 2500., 5000., 10000. /) - z2_multi = wrf_user_interplevel(z,p,plev,False) - p2_multi = wrf_user_interplevel(p,z,hlev,False) + z2_multi = wrf_user_interp_level(z,p,plev,False) + p2_multi = wrf_user_interp_level(p,z,hlev,False) fout->z2_multi = z2_multi fout->p2_multi = p2_multi @@ -208,7 +210,7 @@ pblh = wrf_user_getvar(input_file, "PBLH", time) opts := False opts@inc2dlevs = True - p_lev2d = wrf_user_interplevel(p, z, pblh, opts) + p_lev2d = wrf_user_interp_level(p, z, pblh, opts) fout->p_lev2d = p_lev2d @@ -234,7 +236,7 @@ xopt@timeidx = 0 xopt@linecoords = True - t2_line2 = wrf_user_interpline(t2, pivot, xopt) + t2_line2 = wrf_user_interp_line(t2, pivot, xopt) fout->t2_line2 = t2_line2 @@ -257,7 +259,7 @@ xopt@timeidx = 0 xopt@linecoords = True - t2_line3 = wrf_user_interpline(t2, start_end, xopt) + t2_line3 = wrf_user_interp_line(t2, start_end, xopt) t2_line3!1 = "line_idx_t2_line3" fout->t2_line3 = t2_line3 @@ -270,7 +272,7 @@ name = sprinti("t2_line_t%i", i) dim0name = sprinti("lineidx_t%i",i) var := t2(i,:,:) - t2_line := wrf_user_interpline(var, start_end, xopt) + t2_line := wrf_user_interp_line(var, start_end, xopt) t2_line!0 = dim0name fout->$name$ = t2_line end do @@ -286,7 +288,7 @@ xopt@timeidx = 0 xopt@linecoords = True - t2_line4 = wrf_user_interpline(t2, start_end, xopt) + t2_line4 = wrf_user_interp_line(t2, start_end, xopt) t2_line4!0 = "t2_line4_idx" fout->t2_line4 = t2_line4 diff --git a/test/ncl/ncl_vertcross.ncl b/test/ncl/ncl_vertcross.ncl new file mode 100644 index 0000000..5b9991a --- /dev/null +++ b/test/ncl/ncl_vertcross.ncl @@ -0,0 +1,92 @@ +input_file = addfile("/Users/ladwig/Documents/wrf_files/wrfout_d02_2010-06-13_21:00:00.nc", "r") + +z = wrf_user_getvar(input_file, "z", 0) ; grid point height +p = wrf_user_getvar(input_file, "pressure", 0) ; total pressure + +dimsz = dimsizes(z) +pivot = (/ (dimsz(2)-1)/2, (dimsz(1)-1)/2 /) ; pivot point is center of domain + +; For the new cross section routine +xopt = True +xopt@use_pivot = True +xopt@angle = 45.0 +;xopt@levels = +;xopt@latlon = +xopt@file_handle = input_file +;xopt@timeidx = +xopt@linecoords = True + +ht_vertcross = wrf_user_vertcross(z, p, pivot, xopt) + +printVarSummary(ht_vertcross) +print(min(ht_vertcross@lats)) +print(min(ht_vertcross@lons)) +print(max(ht_vertcross@lats)) +print(max(ht_vertcross@lons)) + + +xopt@use_pivot = False +xopt@angle = 0.0 +;xopt@levels = +xopt@latlon = True +xopt@file_handle = input_file +xopt@timeidx = 0 +xopt@linecoords = True + +loc_param = (/-104.3632, 32.8562, -95.15308, 40.06575 /) ; pivot point is center of domain +ht_vertcross2 = wrf_user_vertcross(z, p, loc_param, xopt) + +printVarSummary(ht_vertcross2) +print(min(ht_vertcross2@lats)) +print(min(ht_vertcross2@lons)) +print(max(ht_vertcross2@lats)) +print(max(ht_vertcross2@lons)) + +print(ht_vertcross2@lats(190)) +print(ht_vertcross2@lons(190)) + +xopt@use_pivot = True +xopt@angle = 45.0 +;xopt@levels = +xopt@latlon = True +xopt@file_handle = input_file +xopt@timeidx = 0 
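The cross sections exercised by this NCL script have a Python-side counterpart in wrf.vertcross. The sketch below mirrors the pivot/angle and start/end cases using the same lat/lon values as the script; the file path is shortened and hypothetical, and only the arguments needed for these two cases are shown.

# Hedged sketch of the Python equivalent of the NCL cross sections above.
from netCDF4 import Dataset
from wrf import getvar, vertcross, CoordPair

ncfile = Dataset("wrfout_d02_2010-06-13_21:00:00")  # hypothetical path

z = getvar(ncfile, "z")             # grid point height
p = getvar(ncfile, "pressure")      # total pressure (hPa)

# 45-degree cross section through a lat/lon pivot point
pivot = CoordPair(lat=36.54949, lon=-99.98572)
ht_cross = vertcross(z, p, wrfin=ncfile, pivot_point=pivot, angle=45.0,
                     latlon=True)

# Start/end cross section interpolated to fixed pressure levels
start = CoordPair(lat=32.8562, lon=-104.3632)
end = CoordPair(lat=40.06575, lon=-95.15308)
ht_cross2 = vertcross(z, p, wrfin=ncfile, start_point=start, end_point=end,
                      levels=[1000., 850., 700., 500., 250.], latlon=True)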
+xopt@linecoords = True + +loc_param := (/-99.98572, 36.54949 /) ; pivot point is center of domain +ht_vertcross3 = wrf_user_vertcross(z, p, loc_param, xopt) + +printVarSummary(ht_vertcross3) +print(min(ht_vertcross3@lats)) +print(min(ht_vertcross3@lons)) +print(max(ht_vertcross3@lats)) +print(max(ht_vertcross3@lons)) + + +xopt@use_pivot = True +xopt@angle = 45.0 +xopt@levels = (/1000., 850., 700., 500., 250. /) +xopt@latlon = True +xopt@file_handle = input_file +xopt@timeidx = 0 +xopt@linecoords = True + +loc_param := (/-99.98572, 36.54949 /) ; pivot point is center of domain +ht_vertcross4 = wrf_user_vertcross(z, p, loc_param, xopt) + +printVarSummary(ht_vertcross4) +print(min(ht_vertcross4@lats)) +print(min(ht_vertcross4@lons)) +print(max(ht_vertcross4@lats)) +print(max(ht_vertcross4@lons)) + +o = True +o@returnInt = False +o@useTime = 0 +l = wrf_user_ll_to_xy(input_file, -99.98572, 36.54949, o) +print(l) + + +l1 = wrf_user_xy_to_ll(input_file, l(1), l(0), o) +print(l1) + diff --git a/test/ncl/refl10_cross.ncl b/test/ncl/refl10_cross.ncl new file mode 100644 index 0000000..c90861e --- /dev/null +++ b/test/ncl/refl10_cross.ncl @@ -0,0 +1,81 @@ + a = addfile("wrfout_d03_2017-04-03_06:00:00_ctrl","r") + + time = 0 + refl_10cm = wrf_user_getvar(a,"REFL_10CM",time) + z = wrf_user_getvar(a, "z", time) + lat = wrf_user_getvar(a, "lat", time) + lon = wrf_user_getvar(a, "lon", time) + + ; convert the lat/lon to x,y + start_lat = 20.9 + start_lon = 92.5 + end_lat = 29.2 + end_lon = 92.5 + + opt = True + + start_ij = wrf_user_ll_to_ij(a, start_lon, start_lat, opt) + start_ij = start_ij - 1 + + end_ij = wrf_user_ll_to_ij(a, end_lon, end_lat, opt) + end_ij = end_ij - 1 + + start_end = (/start_ij(0), start_ij(1), end_ij(0), end_ij(1)/) + + lat_line = wrf_user_intrp2d(lat,start_end,0.0,True) + nlat = dimsizes(lat_line) + + lon_line = wrf_user_intrp2d(lon,start_end,0.0,True) + + refl_cross = wrf_user_intrp3d(refl_10cm,z,"v",start_end,0.,True) + + ; Need to make a vertical coordinate by using the same code as the + ; cross section + + ; Currently, the vertical coordinate is not set, so let's do it + ; manually. This will be fixed in the next version of NCL. + ; If you want to set these levels yourself, you'll need to copy the + ; code I sent before and manually set the levels in the cross section + ; routine, then do it again here. + + z_max = max(z) + z_min = 0. 
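The ll/xy round trip exercised at the end of ncl_vertcross.ncl also has a direct wrf-python counterpart. A minimal sketch follows, using the same hypothetical file; note that the Python routines take latitude before longitude and return 0-based grid indices.

# Hedged sketch of the coordinate round trip in Python.
from netCDF4 import Dataset
from wrf import ll_to_xy, xy_to_ll, to_np

ncfile = Dataset("wrfout_d02_2010-06-13_21:00:00")  # hypothetical path

# latitude first, then longitude; result holds 0-based (x, y) grid indices
x_y = ll_to_xy(ncfile, 36.54949, -99.98572)
x, y = to_np(x_y)

# and back again: returns (latitude, longitude)
lat_lon = xy_to_ll(ncfile, x, y)
print(to_np(lat_lon))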
+ dz = 0.01 * z_max + nlevels = tointeger( z_max/dz ) + z_var2d = new( (/nlevels/), typeof(z)) + z_var2d(0) = z_min + + do i=1, nlevels-1 + z_var2d(i) = z_var2d(0)+i*dz + end do + + refl_cross&Vertical = z_var2d + + wks = gsn_open_wks("png","cross") + cmap := read_colormap_file("BlAqGrYeOrReVi200") + cmap(0,:) = (/0,0,0,0/) ; make first color fully transparent + + resx = True + resx@gsnMaximize = True + resx@lbLabelAutoStride = True ; default v6.1.0 + + resx@cnFillOn = True ; turn on color fill + resx@cnLinesOn = False ; turn lines on/off ; True is default + resx@cnLineLabelsOn = False ; turn line labels on/off ; True is default + resx@cnFillPalette = cmap + nLabels = 8 ; arbitrary + resx@tmXBLabels = new(nLabels,"string") + resx@tmXBMode = "Explicit" + + resx@tmXBValues := toint(fspan(0,nlat-1,nLabels)) + do i=0,nLabels-1 + x = lon_line(i) + y = lat_line(i) + resx@tmXBLabels(i) = sprintf("%5.1f", y)+"~C~"+sprintf("%5.1f", x) + end do + + resx@tiMainString = "Full South-North Grid Line X-Section" + + + plot1 = gsn_csm_contour(wks, refl_cross, resx ) + \ No newline at end of file diff --git a/test/ncl/rotated_test.ncl b/test/ncl/rotated_test.ncl new file mode 100644 index 0000000..9eb6f94 --- /dev/null +++ b/test/ncl/rotated_test.ncl @@ -0,0 +1,26 @@ + +;---Open file and calculate slp. +a = addfile("/Users/ladwig/Documents/wrf_files/rotated_pole_test.nc","r") + +t2 = wrf_user_getvar(a,"T2",0) + +;---Start the graphics +wks = gsn_open_wks("x11","wrf") + +;---Set some graphical resources +res = True +res@gsnMaximize = True +res@cnFillOn = True +res@tfDoNDCOverlay = True ; This is necessary if you don't + ; set sfXArray/sfYArray + +;---Add additional map resources +res = wrf_map_resources(a,res) + +;---Change some of the resources that were set (these were set to "gray") +res@mpGeophysicalLineColor = "black" +res@mpNationalLineColor = "black" +res@mpUSStateLineColor = "black" + +plot = gsn_csm_contour_map(wks,t2,res) + diff --git a/test/ncl/test_this.ncl b/test/ncl/test_this.ncl new file mode 100644 index 0000000..8421690 --- /dev/null +++ b/test/ncl/test_this.ncl @@ -0,0 +1,21 @@ +ifils = systemfunc ("ls /Users/ladwig/Documents/wrf_files/wrf_vortex_single/wrfout_d02_*") +print(ifils) +a = addfiles(ifils, "r") +;a = addfile("/Users/ladwig/Documents/wrf_files/wrfout_d01_2010-06-13_21:00:00.nc", "r") + +lats := (/22.0, 25.0, 27.0 /) +lons := (/-90.0, -87.5, -83.75 /) + +opt = True +opt@useTime = -1 +opt@returnI = False +xy = wrf_user_ll_to_xy(a, lons, lats, opt) + +print(xy) + +x_s = (/10, 50, 90 /) +y_s = (/10, 50, 90 /) + +ll = wrf_user_xy_to_ll(a, x_s, y_s, opt) +print(ll) + diff --git a/test/test_vinterp.ncl b/test/ncl/test_vinterp.ncl similarity index 100% rename from test/test_vinterp.ncl rename to test/ncl/test_vinterp.ncl diff --git a/test/ncl/wrf_user_vertcross.ncl b/test/ncl/wrf_user_vertcross.ncl new file mode 100644 index 0000000..54df159 --- /dev/null +++ b/test/ncl/wrf_user_vertcross.ncl @@ -0,0 +1,416 @@ +;-------------------------------------------------------------------------------- + +undef("wrf_user_ll_to_xy") +function wrf_user_ll_to_xy( file_handle, longitude:numeric, latitude:numeric, \ + opts_args:logical ) + +; This is the same as wrf_user_ll_to_ij, but returns 0-based indexes + +begin +; +; As of NCL V6.0.0, wrf_user_ll_to_ij can now handle a file +; or a list of files. 
+; + if(typeof(file_handle).eq."file") then + ISFILE = True + nc_file = file_handle + elseif(typeof(file_handle).eq."list") then + ISFILE = False + nc_file = file_handle[0] + else + print("wrf_user_ll_to_xy: Error: the first argument must be a file or a list of files opened with addfile or addfiles") + return + end if + + opts = opts_args + useT = get_res_value(opts,"useTime",0) + returnI= get_res_value(opts,"returnInt",True) + + res = True + res@MAP_PROJ = nc_file@MAP_PROJ + res@TRUELAT1 = nc_file@TRUELAT1 + res@TRUELAT2 = nc_file@TRUELAT2 + res@STAND_LON = nc_file@STAND_LON + res@DX = nc_file@DX + res@DY = nc_file@DY + + if (res@MAP_PROJ .eq. 6) then + res@POLE_LAT = nc_file@POLE_LAT + res@POLE_LON = nc_file@POLE_LON + res@LATINC = (res@DY*360.)/2.0/3.141592653589793/6370000. + res@LONINC = (res@DX*360.)/2.0/3.141592653589793/6370000. + else + res@POLE_LAT = 90.0 + res@POLE_LON = 0.0 + res@LATINC = 0.0 + res@LONINC = 0.0 + end if + + if(isfilevar(nc_file,"XLAT")) + if(ISFILE) then + XLAT = nc_file->XLAT(useT,:,:) + XLONG = nc_file->XLONG(useT,:,:) + else + XLAT = file_handle[:]->XLAT + XLONG = file_handle[:]->XLONG + end if + else + if(ISFILE) then + XLAT = nc_file->XLAT_M(useT,:,:) + XLONG = nc_file->XLONG_M(useT,:,:) + else + XLAT = file_handle[:]->XLAT_M + XLONG = file_handle[:]->XLONG_M + end if + end if + + + if(dimsizes(dimsizes(XLAT)).eq.2) then +; Rank 2 + res@REF_LAT = XLAT(0,0) + res@REF_LON = XLONG(0,0) + else +; Rank 3 + res@REF_LAT = XLAT(useT,0,0) + res@REF_LON = XLONG(useT,0,0) + end if + res@KNOWNI = 1.0 + res@KNOWNJ = 1.0 + + loc = wrf_ll_to_ij (longitude, latitude, res) + loc = loc - 1 + + if (dimsizes(dimsizes(loc)) .eq. 1) then + loc!0 = "x_y" + elseif (dimsizes(dimsizes(loc)) .eq. 2) then + loc!0 = "x_y" + loc!1 = "idx" + else ; Not currently supported + loc!0 = "x_y" + loc!1 = "domain_idx" + loc!2 = "idx" + end if + + if ( returnI ) then + loci = new(dimsizes(loc),integer) + ;loci@_FillValue = default_fillvalue("integer") ; was -999 + loci = tointeger(loc + .5) + loci!0 = loc!0 + return(loci) + else + return(loc) + end if + + +end + +;-------------------------------------------------------------------------------- + +undef("wrf_user_xy_to_ll") +function wrf_user_xy_to_ll( file_handle, x:numeric, y:numeric, \ + opts_args:logical ) + +begin +; +; As of NCL V6.0.0, wrf_user_ll_to_ij can now handle a file +; or a list of files. +; + if(typeof(file_handle).eq."file") then + ISFILE = True + nc_file = file_handle + elseif(typeof(file_handle).eq."list") then + ISFILE = False + nc_file = file_handle[0] + else + print("wrf_user_xy_to_ll: Error: the first argument must be a file or a list of files opened with addfile or addfiles") + return + end if + + opts = opts_args + useT = get_res_value(opts,"useTime",0) + + res = True + res@MAP_PROJ = nc_file@MAP_PROJ + res@TRUELAT1 = nc_file@TRUELAT1 + res@TRUELAT2 = nc_file@TRUELAT2 + res@STAND_LON = nc_file@STAND_LON + res@DX = nc_file@DX + res@DY = nc_file@DY + + if (res@MAP_PROJ .eq. 6) then + res@POLE_LAT = nc_file@POLE_LAT + res@POLE_LON = nc_file@POLE_LON + res@LATINC = (res@DY*360.)/2.0/3.141592653589793/6370000. + res@LONINC = (res@DX*360.)/2.0/3.141592653589793/6370000. 
+ else + res@POLE_LAT = 90.0 + res@POLE_LON = 0.0 + res@LATINC = 0.0 + res@LONINC = 0.0 + end if + + + if(isfilevar(nc_file,"XLAT")) then + if(ISFILE) then + XLAT = nc_file->XLAT(useT,:,:) + XLONG = nc_file->XLONG(useT,:,:) + else + XLAT = file_handle[:]->XLAT + XLONG = file_handle[:]->XLONG + end if + else + if(ISFILE) then + XLAT = nc_file->XLAT_M(useT,:,:) + XLONG = nc_file->XLONG_M(useT,:,:) + else + XLAT = file_handle[:]->XLAT_M + XLONG = file_handle[:]->XLONG_M + end if + end if + + if(dimsizes(dimsizes(XLAT)).eq.2) then +; Rank 2 + res@REF_LAT = XLAT(0,0) + res@REF_LON = XLONG(0,0) + else +; Rank 3 + res@REF_LAT = XLAT(useT,0,0) + res@REF_LON = XLONG(useT,0,0) + end if + res@KNOWNI = 1.0 + res@KNOWNJ = 1.0 + + ; Convert to 1-based indexes for Fortran + new_x = x + 1 + new_y = y + 1 + + loc = wrf_ij_to_ll (new_x,new_y,res) + + if (dimsizes(dimsizes(loc)) .eq. 1) then + loc!0 = "lon_lat" + elseif (dimsizes(dimsizes(loc)) .eq. 2) then + loc!0 = "lon_lat" + loc!1 = "idx" + else ; Not currently supported + loc!0 = "lon_lat" + loc!1 = "domain_idx" + loc!2 = "idx" + end if + + return(loc) + + +end + +;-------------------------------------------------------------------------------- + +undef("wrf_user_vertcross") +function wrf_user_vertcross(var3d:numeric, z_in:numeric, \ + loc_param:numeric, opts:logical ) + +; var3d - 3d field to interpolate (all input fields must be unstaggered) +; z_in - interpolate to this field (either p/z) +; loc_param - an array of 4 values representing the start point and end point +; for the cross section (start_x, start_y, end_x, end_y) OR a single +; point when opt@use_pivot is True representing the pivot point. +; The values can be in grid coordinates or lat/lon coordinates +; (start_x = start_lon, start_y = start_lat, ...). If using +; lat/lon coordinates, then opt@latlon must be True. +; opts - optional arguments +; use_pivot - set to True to indicate that loc_param and angle are used, +; otherwise loc_param is set to 4 values to indicate a start and +; end point +; angle - an angle for vertical plots - 90 represent a WE cross section, +; ignored if use_pivot is False. +; levels - the vertical levels to use in the same units as z_in. Set to +; False to automatically generate the number of levels specified +; by autolevels. +; latlon - set to True if the values in loc_param are latitude and longitude +; values rather than grid values +; file_handle - must be set to a file handle when latlon is True or +; linecoords is True, otherwise this is ignored. +; timeidx - the time index to use for moving nests when latlon is True. Set +; to 0 if the nest is not moving. +; linecoords - set to True to include the latitude and longitude coordinates +; for the cross section line in the output attributes. +; autolevels - set to the desired number of levels when levels are +; selected automatically (default 100). + +begin + + use_pivot = get_res_value(opts, "use_pivot", False) + angle = get_res_value(opts, "angle", 0.0) + levels = get_res_value(opts, "levels", new(1,integer)) + latlon = get_res_value(opts, "latlon", False) + file_handle = get_res_value(opts, "file_handle", 0) + timeidx = get_res_value(opts, "timeidx", 0) + linecoords = get_res_value(opts, "linecoords", False) + nlevels = get_res_value(opts, "autolevels", 100) + + dims = dimsizes(var3d) + nd = dimsizes(dims) + + dimX = dims(nd-1) + dimY = dims(nd-2) + dimZ = dims(nd-3) + + if ( nd .eq. 
4 ) then + z = z_in(0,:,:,:) + else + z = z_in + end if + +; Convert latlon to xy coordinates + + if (use_pivot) then + if (latlon) then + opt = True + opt@returnInt = True + opt@useTime = timeidx + ij := wrf_user_ll_to_xy(file_handle, loc_param(0), loc_param(1), opt) + start_x = ij(0) + start_y = ij(1) + else + start_x = loc_param(0) + start_y = loc_param(1) + end if + else + if (latlon) then + opt = True + opt@returnInt = True + opt@useTime = timeidx + ij := wrf_user_ll_to_xy(file_handle, (/ loc_param(0), loc_param(2) /), (/ loc_param(1), loc_param(3) /), opt) + start_x = ij(0,0) + start_y = ij(1,0) + end_x = ij(0,1) + end_y = ij(1,1) + else + start_x = loc_param(0) + start_y = loc_param(1) + end_x = loc_param(2) + end_y = loc_param(3) + end if + end if + +; get the lat/lons along the cross section line if requested + +; set the cross section line coordinates if requested + if (linecoords) then + + latname = "XLAT" + lonname = "XLONG" + if(.not. isfilevar(file_handle,"XLAT")) then + if(isfilevar(file_handle,"XLAT_M")) then + latname = "XLAT_M" + lonname = "XLONG_M" + end if + end if + + latvar = _get_wrf_var(file_handle, latname, timeidx) + lonvar = _get_wrf_var(file_handle, lonname, timeidx) + + if (use_pivot) then + loc := (/start_x, start_y/) + linelats = wrf_user_intrp2d(latvar, loc, angle, False) + linelons = wrf_user_intrp2d(lonvar, loc, angle, False) + else + loc := (/start_x, start_y, end_x, end_y /) + linelats = wrf_user_intrp2d(latvar, loc, angle, True) + linelons = wrf_user_intrp2d(lonvar, loc, angle, True) + end if + + end if + +; set vertical cross section +; Note for wrf_user_set_xy, opt is False when pivot and angle used. + if (use_pivot) then + xy = wrf_user_set_xy( z, start_x, start_y, \ ; assumes 0-based indexing in v6.5.0 + 0.0, 0.0, angle, False ) + + else + xy = wrf_user_set_xy( z, start_x, start_y, \ ; assumes 0-based indexing in v6.5.0 + end_x, end_y, \ + angle, True) + + end if + xp = dimsizes(xy) + + +; first we interp z + var2dz = wrf_interp_2d_xy( z, xy) + +; interp to constant z grid + if (all(ismissing(levels))) then + if(var2dz(0,0) .gt. var2dz(1,0) ) then ; monotonically decreasing coordinate + z_max = floor(max(z)/10)*10 ; bottom value + z_min = ceil(min(z)/10)*10 ; top value + dz = (1.0/nlevels) * (z_max - z_min) + ;nlevels = tointeger( (z_max-z_min)/dz) + z_var2d = new( (/nlevels/), typeof(z)) + z_var2d(0) = z_max + dz = -dz + else + z_max = max(z) + z_min = 0. + dz = (1.0/nlevels) * z_max + ;nlevels = tointeger( z_max/dz ) + z_var2d = new( (/nlevels/), typeof(z)) + z_var2d(0) = z_min + end if + + do i=1, nlevels-1 + z_var2d(i) = z_var2d(0)+i*dz + end do + else + z_var2d = levels + nlevels = dimsizes(z_var2d) + end if + +; interp the variable + if ( dimsizes(dims) .eq. 4 ) then + var2d = new( (/dims(0), nlevels, xp(0)/), typeof(var2dz)) + do it = 0,dims(0)-1 + var2dtmp = wrf_interp_2d_xy( var3d(it,:,:,:), xy) + do i=0,xp(0)-1 + var2d(it,:,i) = wrf_interp_1d( var2dtmp(:,i), var2dz(:,i), z_var2d) + end do + end do + var2d!0 = var3d!0 + var2d!1 = "vertical" + var2d!2 = "cross_line_idx" + else + var2d = new( (/nlevels, xp(0)/), typeof(var2dz)) + var2dtmp = wrf_interp_2d_xy( var3d, xy) + do i=0,xp(0)-1 + var2d(:,i) = wrf_interp_1d( var2dtmp(:,i), var2dz(:,i), z_var2d) + end do + var2d!0 = "vertical" + var2d!1 = "cross_line_idx" + end if + + st_x = tointeger(xy(0,0)) ; + 1 (removed 1-based indexing in 6.5.0) + st_y = tointeger(xy(0,1)) ; + 1 + ed_x = tointeger(xy(xp(0)-1,0)) ; + 1 + ed_y = tointeger(xy(xp(0)-1,1)) ; + 1 + if (.not. 
use_pivot) then + var2d@Orientation = "Cross-Section: (" + \ + st_x + "," + st_y + ") to (" + \ + ed_x + "," + ed_y + ")" + else + var2d@Orientation = "Cross-Section: (" + \ + st_x + "," + st_y + ") to (" + \ + ed_x + "," + ed_y + ") ; center=(" + \ + start_x + "," + start_y + \ + ") ; angle=" + angle + end if + + if (linecoords) then + var2d@lats = linelats + var2d@lons = linelons + end if + + var2d&vertical = z_var2d + + return(var2d) + +end diff --git a/test/snippet.py b/test/snippet.py deleted file mode 100644 index 014810f..0000000 --- a/test/snippet.py +++ /dev/null @@ -1,30 +0,0 @@ - -import matplotlib.pyplot as plt -from mpl_toolkits.basemap import Basemap - -def main(): - bm = Basemap(projection = "rotpole", - o_lat_p = 36.0, - o_lon_p = 180.0, - llcrnrlat = -10.590603, - urcrnrlat = 46.591976, - llcrnrlon = -139.08585, - urcrnrlon = 22.661009, - lon_0 = -106.0, - rsphere = 6370000, - resolution = 'l') - - fig = plt.figure(figsize=(8,8)) - ax = fig.add_axes([0.1,0.1,0.8,0.8]) - - bm.drawcoastlines(linewidth=.5) - - print bm.proj4string - - plt.savefig("basemap_map.png") - plt.close(fig) - - - -if __name__ == "__main__": - main() diff --git a/test/test_filevars.py b/test/test_filevars.py index 1cf3c82..13a04f2 100644 --- a/test/test_filevars.py +++ b/test/test_filevars.py @@ -1,77 +1,79 @@ import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma -import os, sys +import os +import sys import subprocess from wrf import getvar, ALL_TIMES TEST_DIR = "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest" TEST_FILENAMES = ["wrfout_d02_2005-08-28_00:00:00", - "wrfout_d02_2005-08-28_12:00:00", - "wrfout_d02_2005-08-29_00:00:00"] + "wrfout_d02_2005-08-28_12:00:00", + "wrfout_d02_2005-08-29_00:00:00"] TEST_FILES = [os.path.join(TEST_DIR, x) for x in TEST_FILENAMES] # Python 3 if sys.version_info > (3,): xrange = range - + class WRFFileVarsTest(ut.TestCase): longMessage = True + def make_test(ncfiles, varname): def test(self): - #import time - #very_start = time.time() - #start = time.time() + # import time + # very_start = time.time() + # start = time.time() t1 = getvar(ncfiles, varname, 0) - - #end = time.time() - #print ("t1: ", start-end) - #start = time.time() + + # end = time.time() + # print("t1: ", start-end) + # start = time.time() t2 = getvar(ncfiles, varname, 0, meta=False) - #end = time.time() - #print ("t2: ", start-end) - #start = time.time() + # end = time.time() + # print("t2: ", start-end) + # start = time.time() t3 = getvar(ncfiles, varname, ALL_TIMES) - #end = time.time() - #print ("t3: ", start-end) - #start = time.time() + # end = time.time() + # print("t3: ", start-end) + # start = time.time() t4 = getvar(ncfiles, varname, ALL_TIMES, meta=False) - #end = time.time() - #print ("t4: ", start-end) - #start = time.time() + # end = time.time() + # print("t4: ", start-end) + # start = time.time() t5 = getvar(ncfiles, varname, ALL_TIMES, method="join") - #end = time.time() - #print ("t5: ", start-end) - #start = time.time() + # end = time.time() + # print("t5: ", start-end) + # start = time.time() t6 = getvar(ncfiles, varname, ALL_TIMES, method="join", meta=False) - #end = time.time() - #print ("t6: ", start-end) - #start = time.time() - - #print ("Total Time: ", (end-start)) + # end = time.time() + # print("t6: ", start-end) + # start = time.time() + + # print("Total Time: ", (end-start)) return test - + if __name__ == "__main__": from netCDF4 import Dataset ncfiles = [Dataset(x) for x in TEST_FILES] - - 
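The file-variable tests generated above all reduce to a handful of getvar access patterns over a sequence of output files. A minimal standalone sketch of those patterns for a single variable is below; the file names follow TEST_FILENAMES above and the paths are assumed to be local.

# Hedged sketch of the access patterns exercised by the generated tests.
from netCDF4 import Dataset
from wrf import getvar, ALL_TIMES

ncfiles = [Dataset("wrfout_d02_2005-08-28_00:00:00"),
           Dataset("wrfout_d02_2005-08-28_12:00:00"),
           Dataset("wrfout_d02_2005-08-29_00:00:00")]

t2_first = getvar(ncfiles, "T2", 0)                        # first time only
t2_cat = getvar(ncfiles, "T2", ALL_TIMES, method="cat")    # concatenate times
t2_join = getvar(ncfiles, "T2", ALL_TIMES, method="join")  # new file dimension
t2_raw = getvar(ncfiles, "T2", ALL_TIMES, meta=False)      # plain ndarray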
#import scipy.io - #ncfiles = [scipy.io.netcdf.netcdf_file(x) for x in TEST_FILES] - + + # import scipy.io + # ncfiles = [scipy.io.netcdf.netcdf_file(x) for x in TEST_FILES] + file_vars = ncfiles[0].variables.keys() - + ignore_vars = [] - + for var in file_vars: if var in ignore_vars: continue - + test_func1 = make_test(ncfiles, var) setattr(WRFFileVarsTest, 'test_{0}'.format(var), test_func1) - - ut.main() \ No newline at end of file + + ut.main() diff --git a/test/test_inputs.py b/test/test_inputs.py index 7271681..6f994e4 100644 --- a/test/test_inputs.py +++ b/test/test_inputs.py @@ -1,5 +1,5 @@ import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma import os @@ -17,20 +17,21 @@ TEST_FILES = [os.path.join(IN_DIR, "wrfout_d02_2005-08-28_00:00:00"), os.path.join(IN_DIR, "wrfout_d02_2005-08-28_12:00:00"), os.path.join(IN_DIR, "wrfout_d02_2005-08-29_00:00:00")] + def wrfin_gen(wrf_in): for x in wrf_in: yield x - - + + class wrf_in_iter_class(object): def __init__(self, wrf_in): self._wrf_in = wrf_in self._total = len(wrf_in) self._i = 0 - + def __iter__(self): return self - + def next(self): if self._i >= self._total: raise StopIteration @@ -38,7 +39,7 @@ class wrf_in_iter_class(object): result = self._wrf_in[self._i] self._i += 1 return result - + # Python 3 def __next__(self): return self.next() @@ -48,34 +49,34 @@ def make_test(varname, wrf_in): def test(self): x = getvar(wrf_in, varname, 0) xa = getvar(wrf_in, varname, None) - + return test + def make_interp_test(varname, wrf_in): def test(self): - + # Only testing vinterp since other interpolation just use variables if (varname == "vinterp"): for timeidx in (0, None): eth = getvar(wrf_in, "eth", timeidx=timeidx) - interp_levels = [850,500,5] - field = vinterp(wrf_in, - field=eth, - vert_coord="pressure", - interp_levels=interp_levels, - extrapolate=True, - field_type="theta-e", - timeidx=timeidx, + interp_levels = [850, 500, 5] + field = vinterp(wrf_in, + field=eth, + vert_coord="pressure", + interp_levels=interp_levels, + extrapolate=True, + field_type="theta-e", + timeidx=timeidx, log_p=True) else: pass - - + return test def make_latlon_test(testid, wrf_in): - def test(self): + def test(self): if testid == "xy_out": # Lats/Lons taken from NCL script, just hard-coding for now lats = [-55, -60, -65] @@ -87,42 +88,44 @@ def make_latlon_test(testid, wrf_in): j_s = np.asarray([10, 100, 150], int) ll = xy_to_ll(wrf_in, i_s, j_s, timeidx=0) ll = xy_to_ll(wrf_in, i_s, j_s, timeidx=None) - + return test class WRFVarsTest(ut.TestCase): longMessage = True - + + class WRFInterpTest(ut.TestCase): longMessage = True - + + class WRFLatLonTest(ut.TestCase): longMessage = True + if __name__ == "__main__": - from wrf import (omp_set_num_threads, omp_set_schedule, omp_get_schedule, + from wrf import (omp_set_num_threads, omp_set_schedule, omp_get_schedule, omp_set_dynamic, omp_get_num_procs, OMP_SCHED_STATIC) omp_set_num_threads(omp_get_num_procs()-1) omp_set_schedule(OMP_SCHED_STATIC, 0) omp_set_dynamic(False) - + ignore_vars = [] # Not testable yet - wrf_vars = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", - "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", - "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc", - "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", + wrf_vars = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz", + "geopt", "helicity", "lat", "lon", "omg", "p", "pressure", + "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", 
"tc", + "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va", "wa", "uvmet10", "uvmet", "z", "cfrac", "zstag", "geopt_stag"] interp_methods = ["interplevel", "vertcross", "interpline", "vinterp"] latlon_tests = ["xy_out", "ll_out"] - for nc_lib in ("netcdf4", "pynio", "scipy"): if nc_lib == "netcdf4": try: from netCDF4 import Dataset except ImportError: - print ("netcdf4-python not installed") + print("netcdf4-python not installed") continue else: test_in = [Dataset(x) for x in TEST_FILES] @@ -130,52 +133,46 @@ if __name__ == "__main__": try: from Nio import open_file except ImportError: - print ("PyNIO not installed") + print("PyNIO not installed") continue else: - test_in = [open_file(x +".nc", "r") for x in TEST_FILES] + test_in = [open_file(x + ".nc", "r") for x in TEST_FILES] elif nc_lib == "scipy": try: from scipy.io.netcdf import netcdf_file except ImportError: - print ("scipy.io.netcdf not installed") + print("scipy.io.netcdf not installed") else: test_in = [netcdf_file(x, mmap=False) for x in TEST_FILES] - + input0 = test_in[0] input1 = test_in input2 = tuple(input1) input3 = wrfin_gen(test_in) input4 = wrf_in_iter_class(test_in) - input5 = {"A" : input1, - "B" : input2} - input6 = {"A" : {"AA" : input1}, - "B" : {"BB" : input2}} - - for i,input in enumerate((input0, input1, input2, + input5 = {"A": input1, + "B": input2} + input6 = {"A": {"AA": input1}, + "B": {"BB": input2}} + + for i, input in enumerate((input0, input1, input2, input3, input4, input5, input6)): for var in wrf_vars: if var in ignore_vars: continue test_func1 = make_test(var, input) - - setattr(WRFVarsTest, "test_{0}_input{1}_{2}".format(nc_lib, - i, var), - test_func1) - + + setattr(WRFVarsTest, "test_{0}_input{1}_{2}".format( + nc_lib, i, var), test_func1) for method in interp_methods: test_interp_func1 = make_interp_test(method, input) - setattr(WRFInterpTest, - "test_{0}_input{1}_{2}".format(nc_lib, - i, method), - test_interp_func1) - + setattr(WRFInterpTest, "test_{0}_input{1}_{2}".format( + nc_lib, i, method), test_interp_func1) + for testid in latlon_tests: test_ll_func = make_latlon_test(testid, input) test_name = "test_{0}_input{1}_{2}".format(nc_lib, i, testid) setattr(WRFLatLonTest, test_name, test_ll_func) - + ut.main() - - \ No newline at end of file diff --git a/test/test_multi_cache.py b/test/test_multi_cache.py index 963fca0..480a70a 100644 --- a/test/test_multi_cache.py +++ b/test/test_multi_cache.py @@ -3,56 +3,60 @@ from wrf.cache import _get_cache from wrf import getvar from netCDF4 import Dataset as nc -#a = nc("/Users/ladwig/Documents/wrf_files/wrf_vortex_single/wrfout_d02_2005-08-28_00:00:00") -#b = nc("/Users/ladwig/Documents/wrf_files/wrf_vortex_single/wrfout_d02_2005-08-28_03:00:00") -a = nc("/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-28_00:00:00") -b = nc("/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/wrfout_d02_2005-08-28_12:00:00") -q = {"outoutoutout" : {"outoutout" : {"outout" : {"out1" : {"blah" : [a,b], "blah2" : [a,b]}, "out2" : {"blah" : [a,b], "blah2" : [a,b]} } } } } +a = nc("/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/" + "wrfout_d02_2005-08-28_00:00:00") +b = nc("/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest/" + "wrfout_d02_2005-08-28_12:00:00") +q = {"outoutoutout": + {"outoutout": + {"outout": + {"out1": {"blah": [a, b], "blah2": [a, b]}, + "out2": {"blah": [a, b], "blah2": [a, b]}}}}} t1 = time.time() c = getvar(q, "rh", method="cat", timeidx=None, squeeze=True) t2 = 
time.time() -print (c) -print ("time taken: {}".format((t2-t1)*1000.)) +print(c) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="cat", timeidx=None, squeeze=False) t2 = time.time() -print (c) -print ("time taken: {}".format((t2-t1)*1000.)) +print(c) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="cat", timeidx=1, squeeze=True) t2 = time.time() -print (c) -print ("time taken: {}".format((t2-t1)*1000.)) +print(c) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="cat", timeidx=1, squeeze=False) t2 = time.time() print(c) -print ("time taken: {}".format((t2-t1)*1000.)) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="join", timeidx=None, squeeze=True) t2 = time.time() -print (c) -print ("time taken: {}".format((t2-t1)*1000.)) +print(c) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="join", timeidx=None, squeeze=False) t2 = time.time() print(c) -print ("time taken: {}".format((t2-t1)*1000.)) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="join", timeidx=1, squeeze=True) t2 = time.time() -print (c) -print ("time taken: {}".format((t2-t1)*1000.)) +print(c) +print("time taken: {}".format((t2-t1)*1000.)) t1 = time.time() c = getvar(q, "rh", method="join", timeidx=1, squeeze=False) t2 = time.time() -print (c) -print ("time taken: {}".format((t2-t1)*1000.)) +print(c) +print("time taken: {}".format((t2-t1)*1000.)) diff --git a/test/test_omp.py b/test/test_omp.py index 5c621a9..718ab2f 100644 --- a/test/test_omp.py +++ b/test/test_omp.py @@ -1,16 +1,16 @@ -from __future__ import (absolute_import, division, print_function, +from __future__ import (absolute_import, division, print_function, unicode_literals) import unittest as ut -import numpy.testing as nt +import numpy.testing as nt -from wrf import (omp_set_num_threads, omp_get_num_threads, +from wrf import (omp_set_num_threads, omp_get_num_threads, omp_get_max_threads, omp_get_thread_num, - omp_get_num_procs, omp_in_parallel, + omp_get_num_procs, omp_in_parallel, omp_set_dynamic, omp_get_dynamic, omp_set_nested, - omp_get_nested, omp_set_schedule, - omp_get_schedule, omp_get_thread_limit, - omp_set_max_active_levels, + omp_get_nested, omp_set_schedule, + omp_get_schedule, omp_get_thread_limit, + omp_set_max_active_levels, omp_get_max_active_levels, omp_get_level, omp_get_ancestor_thread_num, omp_get_team_size, omp_get_active_level, omp_in_final, @@ -20,103 +20,102 @@ from wrf import (omp_set_num_threads, omp_get_num_threads, omp_unset_lock, omp_unset_nest_lock, omp_test_lock, omp_test_nest_lock, omp_get_wtime, omp_get_wtick, - OMP_SCHED_STATIC, OMP_SCHED_DYNAMIC, - OMP_SCHED_GUIDED, OMP_SCHED_AUTO) - + OMP_SCHED_STATIC, OMP_SCHED_DYNAMIC, + OMP_SCHED_GUIDED, OMP_SCHED_AUTO) + class OmpTest(ut.TestCase): longMessage = True - + def test_locks(self): - l = omp_init_lock() - omp_set_lock(l) - omp_unset_lock(l) - omp_test_lock(l) - omp_destroy_lock(l) - + lk = omp_init_lock() + omp_set_lock(lk) + omp_unset_lock(lk) + omp_test_lock(lk) + omp_destroy_lock(lk) + nl = omp_init_nest_lock() omp_set_nest_lock(nl) omp_unset_nest_lock(nl) omp_test_nest_lock(nl) omp_destroy_nest_lock(nl) - - + def test_thread_set(self): omp_set_num_threads(4) max_threads = omp_get_max_threads() self.assertEqual(max_threads, 4) - + num_threads = omp_get_num_threads() - self.assertEqual(num_threads, 1) # Always 1 outside of 
parallel region - + # Always 1 outside of parallel region + self.assertEqual(num_threads, 1) + thread_num = omp_get_thread_num() - self.assertEqual(thread_num, 0) # Always 0 outside of parallel region + # Always 0 outside of parallel region + self.assertEqual(thread_num, 0) num_procs = omp_get_num_procs() in_parallel = omp_in_parallel() - self.assertFalse(in_parallel) # Always False outside of parallel region - + # Always False outside of parallel region + self.assertFalse(in_parallel) + limit = omp_get_thread_limit() - - + def test_dynamic(self): omp_set_dynamic(True) dynamic = omp_get_dynamic() self.assertTrue(dynamic) - + omp_set_dynamic(False) dynamic = omp_get_dynamic() self.assertFalse(dynamic) - + def test_nested(self): omp_set_nested(True) nested = omp_get_nested() self.assertTrue(nested) - + omp_set_nested(False) nested = omp_get_nested() self.assertFalse(nested) - - + def test_schedule(self): omp_set_schedule(OMP_SCHED_STATIC, 100000) kind, modifier = omp_get_schedule() self.assertEqual(kind, OMP_SCHED_STATIC) self.assertEqual(modifier, 100000) - + omp_set_schedule(OMP_SCHED_DYNAMIC, 10000) kind, modifier = omp_get_schedule() self.assertEqual(kind, OMP_SCHED_DYNAMIC) self.assertEqual(modifier, 10000) - - omp_set_schedule(OMP_SCHED_GUIDED, 100) + + omp_set_schedule(OMP_SCHED_GUIDED, 100) kind, modifier = omp_get_schedule() self.assertEqual(kind, OMP_SCHED_GUIDED) self.assertEqual(modifier, 100) - - omp_set_schedule(OMP_SCHED_AUTO, 10) + + omp_set_schedule(OMP_SCHED_AUTO, 10) kind, modifier = omp_get_schedule() self.assertEqual(kind, OMP_SCHED_AUTO) - self.assertNotEqual(modifier, 10) # The modifier argument is ignored, - # so it will be set to the previous - # value of 100. - - + # The modifier argument is ignored, + # so it will be set to the previous + # value of 100. + self.assertNotEqual(modifier, 10) + def test_team_level(self): omp_set_max_active_levels(10) active_levels = omp_get_max_active_levels() self.assertEqual(active_levels, 10) - + level = omp_get_level() ancestor_thread = omp_get_ancestor_thread_num(level) team_size = omp_get_team_size(level) active_level = omp_get_active_level() in_final = omp_in_final() - - + def test_time(self): wtime = omp_get_wtime() wtick = omp_get_wtick() + if __name__ == "__main__": ut.main() - \ No newline at end of file diff --git a/test/test_proj_params.py b/test/test_proj_params.py index 77a0ec7..460fae5 100644 --- a/test/test_proj_params.py +++ b/test/test_proj_params.py @@ -1,314 +1,302 @@ import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma -import os, sys +import os +import sys import subprocess from wrf import xy_to_ll_proj, ll_to_xy_proj, to_np - + class WRFLatLonProjTest(ut.TestCase): longMessage = True + def make_test(xy_or_ll_out): def test(self): - + # Python 3 - if sys.version_info > (3,): + if sys.version_info > (3, ): assert_raises_regex = self.assertRaisesRegex xrange = range else: assert_raises_regex = self.assertRaisesRegexp - + if xy_or_ll_out == "xy": # Test the required failures - with assert_raises_regex(ValueError, ".*map_proj.*" ): - ll_to_xy_proj(30,-110) - - with assert_raises_regex(ValueError, ".*ref_lat.*" ): - ll_to_xy_proj(30,-110, map_proj=1) - - with assert_raises_regex(ValueError, ".*ref_lon.*" ): - ll_to_xy_proj(30,-110, map_proj=1, ref_lat=45) - - with assert_raises_regex(ValueError, ".*known_x.*" ): - ll_to_xy_proj(30,-110, map_proj=1, ref_lat=45.0, - ref_lon=-120.) 
- - with assert_raises_regex(ValueError, ".*known_y.*" ): - ll_to_xy_proj(30,-110, map_proj=1, ref_lat=45.0, + with assert_raises_regex(ValueError, ".*map_proj.*"): + ll_to_xy_proj(30, -110) + + with assert_raises_regex(ValueError, ".*ref_lat.*"): + ll_to_xy_proj(30, -110, map_proj=1) + + with assert_raises_regex(ValueError, ".*ref_lon.*"): + ll_to_xy_proj(30, -110, map_proj=1, ref_lat=45) + + with assert_raises_regex(ValueError, ".*known_x.*"): + ll_to_xy_proj(30, -110, map_proj=1, ref_lat=45.0, + ref_lon=-120.) + + with assert_raises_regex(ValueError, ".*known_y.*"): + ll_to_xy_proj(30, -110, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=1) - - with assert_raises_regex(ValueError, ".*dx.*" ): - ll_to_xy_proj(30,-110, map_proj=1, ref_lat=45.0, - ref_lon=-120., known_x=0, known_y=0) - - ####### Now test the projections - + + with assert_raises_regex(ValueError, ".*dx.*"): + ll_to_xy_proj(30, -110, map_proj=1, ref_lat=45.0, + ref_lon=-120., known_x=0, known_y=0) + + # Now test the projections + # Lambert Conformal - truelat1, stand_lon required - with assert_raises_regex(ValueError, ".*truelat1.*" ): - ll_to_xy_proj(30,-110, map_proj=1, ref_lat=45.0, + with assert_raises_regex(ValueError, ".*truelat1.*"): + ll_to_xy_proj(30, -110, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=3000.) - - with assert_raises_regex(ValueError, ".*stand_lon.*" ): - ll_to_xy_proj(30,-110, map_proj=1, ref_lat=45.0, + dx=3000.) + + with assert_raises_regex(ValueError, ".*stand_lon.*"): + ll_to_xy_proj(30, -110, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, dx=3000., - truelat1=60.) - + truelat1=60.) + # Make sure it runs with all params set vs not - p_all = ll_to_xy_proj(28., -89., map_proj=1, ref_lat=17.803, - ref_lon=-100.7747, known_x=0, known_y=0, - dx=30000., truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=90., pole_lon=0.) - - - p_def = ll_to_xy_proj(28., -89., map_proj=1, ref_lat=17.803, - ref_lon=-100.7747, known_x=0, known_y=0, - dx=30000., truelat1=30., - stand_lon=-89.) - + p_all = ll_to_xy_proj(28., -89., map_proj=1, ref_lat=17.803, + ref_lon=-100.7747, known_x=0, known_y=0, + dx=30000., truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=90., pole_lon=0.) + + p_def = ll_to_xy_proj(28., -89., map_proj=1, ref_lat=17.803, + ref_lon=-100.7747, known_x=0, known_y=0, + dx=30000., truelat1=30., stand_lon=-89.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - + # Polar Stereographic - truelat1, stand_lon - - with assert_raises_regex(ValueError, ".*truelat1.*" ): - ll_to_xy_proj(30,-110, map_proj=2, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*truelat1.*"): + ll_to_xy_proj(30, -110, map_proj=2, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=3000.) - - with assert_raises_regex(ValueError, ".*stand_lon.*" ): - ll_to_xy_proj(30,-110, map_proj=2, ref_lat=45.0, + dx=3000.) + + with assert_raises_regex(ValueError, ".*stand_lon.*"): + ll_to_xy_proj(30, -110, map_proj=2, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, dx=3000., - truelat1=60.) - - p_all = ll_to_xy_proj(28., -89., map_proj=2, ref_lat=17.933, - ref_lon=-100.0735, known_x=0, known_y=0, - dx=30000., truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=90., pole_lon=0.) - - - p_def = ll_to_xy_proj(28., -89., map_proj=2, ref_lat=17.933, - ref_lon=-100.0735, known_x=0, known_y=0, - dx=30000., truelat1=30., - stand_lon=-89.) - + truelat1=60.) 
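The required-argument checks above pin down the minimum projection metadata each map_proj value needs before ll_to_xy_proj will run. A minimal sketch of a call that satisfies every requirement for the Lambert Conformal case; the numbers are copied from the p_def call used in this test and carry no extra meaning:

from wrf import ll_to_xy_proj

# map_proj codes exercised here: 1 = Lambert Conformal, 2 = Polar Stereographic,
# 3 = Mercator, 6 = lat-lon.  Lambert Conformal additionally requires truelat1
# and stand_lon on top of ref_lat/ref_lon, known_x/known_y and dx.
xy = ll_to_xy_proj(28., -89., map_proj=1,
                   ref_lat=17.803, ref_lon=-100.7747,
                   known_x=0, known_y=0, dx=30000.,
                   truelat1=30., stand_lon=-89.)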
+ + p_all = ll_to_xy_proj(28., -89., map_proj=2, ref_lat=17.933, + ref_lon=-100.0735, known_x=0, known_y=0, + dx=30000., truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=90., pole_lon=0.) + + p_def = ll_to_xy_proj(28., -89., map_proj=2, ref_lat=17.933, + ref_lon=-100.0735, known_x=0, known_y=0, + dx=30000., truelat1=30., stand_lon=-89.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - - + # Mercator - truelat1 - - with assert_raises_regex(ValueError, ".*truelat1.*" ): - ll_to_xy_proj(30,-110, map_proj=2, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*truelat1.*"): + ll_to_xy_proj(30, -110, map_proj=2, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=3000.) - - p_all = ll_to_xy_proj(28., -89., map_proj=3, ref_lat=19.1075, - ref_lon=-101.008, known_x=0, known_y=0, - dx=30000., truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=90., pole_lon=0.) - - - p_def = ll_to_xy_proj(28., -89., map_proj=3, ref_lat=19.1075, - ref_lon=-101.008, known_x=0, known_y=0, - dx=30000., truelat1=30.) - + dx=3000.) + + p_all = ll_to_xy_proj(28., -89., map_proj=3, ref_lat=19.1075, + ref_lon=-101.008, known_x=0, known_y=0, + dx=30000., truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=90., pole_lon=0.) + + p_def = ll_to_xy_proj(28., -89., map_proj=3, ref_lat=19.1075, + ref_lon=-101.008, known_x=0, known_y=0, + dx=30000., truelat1=30.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - + # Lat/lon - stand_lon, dy, pole_lat, pole_lon - - with assert_raises_regex(ValueError, ".*stand_lon.*" ): - ll_to_xy_proj(30,-110, map_proj=6, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*stand_lon.*"): + ll_to_xy_proj(30, -110, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=.2698388) - - with assert_raises_regex(ValueError, ".*dy.*" ): - ll_to_xy_proj(30,-110, map_proj=6, ref_lat=45.0, + dx=.2698388) + + with assert_raises_regex(ValueError, ".*dy.*"): + ll_to_xy_proj(30, -110, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, stand_lon=89.0, - dx=.2698388) - - with assert_raises_regex(ValueError, ".*pole_lat.*" ): - ll_to_xy_proj(30,-110, map_proj=6, ref_lat=45.0, + dx=.2698388) + + with assert_raises_regex(ValueError, ".*pole_lat.*"): + ll_to_xy_proj(30, -110, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, stand_lon=89.0, - dx=.2698388, dy=.2698388) - - with assert_raises_regex(ValueError, ".*pole_lon.*" ): - ll_to_xy_proj(30,-110, map_proj=6, ref_lat=45.0, + dx=.2698388, dy=.2698388) + + with assert_raises_regex(ValueError, ".*pole_lon.*"): + ll_to_xy_proj(30, -110, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, stand_lon=89.0, dx=.2698388, dy=.2698388, - pole_lat=62.0) - - p_all = ll_to_xy_proj(28.,-89., map_proj=6, ref_lat=17.6759, - ref_lon=-101.4286, known_x=0, known_y=0, - dx=30000, dy=30000, - truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=62.0, - pole_lon=180.) - - - p_def = ll_to_xy_proj(28., -89., map_proj=6, ref_lat=17.6759, - ref_lon=-101.4286, known_x=0, known_y=0, - stand_lon=-89., - dx=30000, dy=30000, pole_lat=62.0, - pole_lon=180.) - + pole_lat=62.0) + + p_all = ll_to_xy_proj(28., -89., map_proj=6, ref_lat=17.6759, + ref_lon=-101.4286, known_x=0, known_y=0, + dx=30000, dy=30000, + truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=62.0, + pole_lon=180.) + + p_def = ll_to_xy_proj(28., -89., map_proj=6, ref_lat=17.6759, + ref_lon=-101.4286, known_x=0, known_y=0, + stand_lon=-89., + dx=30000, dy=30000, pole_lat=62.0, + pole_lon=180.) 
+ nt.assert_allclose(to_np(p_all), to_np(p_def)) - - + if xy_or_ll_out == "ll": - + # Test the required failures - with assert_raises_regex(ValueError, ".*map_proj.*" ): + with assert_raises_regex(ValueError, ".*map_proj.*"): xy_to_ll_proj(45, 50) - - with assert_raises_regex(ValueError, ".*ref_lat.*" ): - xy_to_ll_proj(45, 50, map_proj=1) - - with assert_raises_regex(ValueError, ".*ref_lon.*" ): + + with assert_raises_regex(ValueError, ".*ref_lat.*"): + xy_to_ll_proj(45, 50, map_proj=1) + + with assert_raises_regex(ValueError, ".*ref_lon.*"): xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45) - - with assert_raises_regex(ValueError, ".*known_x.*" ): - xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, - ref_lon=-120.) - - with assert_raises_regex(ValueError, ".*known_y.*" ): - xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*known_x.*"): + xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, + ref_lon=-120.) + + with assert_raises_regex(ValueError, ".*known_y.*"): + xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=1) - - with assert_raises_regex(ValueError, ".*dx.*" ): - xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*dx.*"): + xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0) - - ####### Now test the projections - + + # Now test the projections + # Lambert Conformal - truelat1, stand_lon required - with assert_raises_regex(ValueError, ".*truelat1.*" ): - xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, + with assert_raises_regex(ValueError, ".*truelat1.*"): + xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=3000.) - - with assert_raises_regex(ValueError, ".*stand_lon.*" ): - xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, + dx=3000.) + + with assert_raises_regex(ValueError, ".*stand_lon.*"): + xy_to_ll_proj(45, 50, map_proj=1, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, dx=3000., - truelat1=60.) - + truelat1=60.) + # Make sure it runs with all params set vs not - p_all = xy_to_ll_proj(45, 50, map_proj=1, ref_lat=17.803, - ref_lon=-100.7747, known_x=0, known_y=0, - dx=30000., truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=90., pole_lon=0.) - - - p_def = xy_to_ll_proj(45, 50, map_proj=1, ref_lat=17.803, - ref_lon=-100.7747, known_x=0, known_y=0, - dx=30000., truelat1=30., - stand_lon=-89.) - + p_all = xy_to_ll_proj(45, 50, map_proj=1, ref_lat=17.803, + ref_lon=-100.7747, known_x=0, known_y=0, + dx=30000., truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=90., pole_lon=0.) + + p_def = xy_to_ll_proj(45, 50, map_proj=1, ref_lat=17.803, + ref_lon=-100.7747, known_x=0, known_y=0, + dx=30000., truelat1=30., + stand_lon=-89.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - + # Polar Stereographic - truelat1, stand_lon - - with assert_raises_regex(ValueError, ".*truelat1.*" ): - xy_to_ll_proj(45, 50, map_proj=2, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*truelat1.*"): + xy_to_ll_proj(45, 50, map_proj=2, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=3000.) - - with assert_raises_regex(ValueError, ".*stand_lon.*" ): - xy_to_ll_proj(45, 50, map_proj=2, ref_lat=45.0, + dx=3000.) + + with assert_raises_regex(ValueError, ".*stand_lon.*"): + xy_to_ll_proj(45, 50, map_proj=2, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, dx=3000., - truelat1=60.) 
- - p_all = xy_to_ll_proj(45, 50, map_proj=2, ref_lat=17.933, - ref_lon=-100.0735, known_x=0, known_y=0, - dx=30000., truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=90., pole_lon=0.) - - - p_def = xy_to_ll_proj(45, 50, map_proj=2, ref_lat=17.933, - ref_lon=-100.0735, known_x=0, known_y=0, - dx=30000., truelat1=30., - stand_lon=-89.) - + truelat1=60.) + + p_all = xy_to_ll_proj(45, 50, map_proj=2, ref_lat=17.933, + ref_lon=-100.0735, known_x=0, known_y=0, + dx=30000., truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=90., pole_lon=0.) + + p_def = xy_to_ll_proj(45, 50, map_proj=2, ref_lat=17.933, + ref_lon=-100.0735, known_x=0, known_y=0, + dx=30000., truelat1=30., + stand_lon=-89.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - - + # Mercator - truelat1 - - with assert_raises_regex(ValueError, ".*truelat1.*" ): - xy_to_ll_proj(45, 50, map_proj=2, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*truelat1.*"): + xy_to_ll_proj(45, 50, map_proj=2, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=3000.) - - p_all = xy_to_ll_proj(45, 50, map_proj=3, ref_lat=19.1075, - ref_lon=-101.008, known_x=0, known_y=0, - dx=30000., truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=90., pole_lon=0.) - - - p_def = xy_to_ll_proj(45, 50, map_proj=3, ref_lat=19.1075, - ref_lon=-101.008, known_x=0, known_y=0, - dx=30000., truelat1=30.) - + dx=3000.) + + p_all = xy_to_ll_proj(45, 50, map_proj=3, ref_lat=19.1075, + ref_lon=-101.008, known_x=0, known_y=0, + dx=30000., truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=90., pole_lon=0.) + + p_def = xy_to_ll_proj(45, 50, map_proj=3, ref_lat=19.1075, + ref_lon=-101.008, known_x=0, known_y=0, + dx=30000., truelat1=30.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - + # Lat/lon - stand_lon, dy, pole_lat, pole_lon - - with assert_raises_regex(ValueError, ".*stand_lon.*" ): - xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, + + with assert_raises_regex(ValueError, ".*stand_lon.*"): + xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, - dx=.2698388) - - with assert_raises_regex(ValueError, ".*dy.*" ): - xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, + dx=.2698388) + + with assert_raises_regex(ValueError, ".*dy.*"): + xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, stand_lon=89.0, - dx=.2698388) - - with assert_raises_regex(ValueError, ".*pole_lat.*" ): - xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, + dx=.2698388) + + with assert_raises_regex(ValueError, ".*pole_lat.*"): + xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, stand_lon=89.0, - dx=.2698388, dy=.2698388) - - with assert_raises_regex(ValueError, ".*pole_lon.*" ): - xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, + dx=.2698388, dy=.2698388) + + with assert_raises_regex(ValueError, ".*pole_lon.*"): + xy_to_ll_proj(45, 50, map_proj=6, ref_lat=45.0, ref_lon=-120., known_x=0, known_y=0, stand_lon=89.0, dx=.2698388, dy=.2698388, - pole_lat=62.0) - - p_all = xy_to_ll_proj(64, 40, map_proj=6, ref_lat=17.6759, - ref_lon=-101.4286, known_x=0, known_y=0, - dx=30000, dy=30000, - truelat1=30., truelat2=30., - stand_lon=-89., pole_lat=62.0, - pole_lon=180.) - - - p_def = xy_to_ll_proj(64, 40, map_proj=6, ref_lat=17.6759, - ref_lon=-101.4286, known_x=0, known_y=0, - stand_lon=-89., - dx=30000, dy=30000, pole_lat=62.0, - pole_lon=180.) 
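Since the lat-lon (map_proj=6) block above drives xy_to_ll_proj with the same projection keywords that ll_to_xy_proj accepts, one quick consistency sketch (not part of the test) is to round-trip a grid point through both routines. The keyword values are copied from the calls above, and the (lat, lon) ordering of the returned pair is assumed:

from wrf import xy_to_ll_proj, ll_to_xy_proj, to_np

proj_kwargs = dict(map_proj=6, ref_lat=17.6759, ref_lon=-101.4286,
                   known_x=0, known_y=0, dx=30000, dy=30000,
                   stand_lon=-89., pole_lat=62.0, pole_lon=180.)

latlon = to_np(xy_to_ll_proj(64, 40, **proj_kwargs))            # x/y -> (lat, lon)
xy = to_np(ll_to_xy_proj(latlon[0], latlon[1], **proj_kwargs))  # (lat, lon) -> x/y
# xy should land on (or very near) the original (64, 40) grid point.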
- + pole_lat=62.0) + + p_all = xy_to_ll_proj(64, 40, map_proj=6, ref_lat=17.6759, + ref_lon=-101.4286, known_x=0, known_y=0, + dx=30000, dy=30000, + truelat1=30., truelat2=30., + stand_lon=-89., pole_lat=62.0, + pole_lon=180.) + + p_def = xy_to_ll_proj(64, 40, map_proj=6, ref_lat=17.6759, + ref_lon=-101.4286, known_x=0, known_y=0, + stand_lon=-89., + dx=30000, dy=30000, pole_lat=62.0, + pole_lon=180.) + nt.assert_allclose(to_np(p_all), to_np(p_def)) - + return test - + if __name__ == "__main__": - + for v in ("xy", "ll"): test_func = make_test(v) setattr(WRFLatLonProjTest, 'test_{0}'.format(v), test_func) - + ut.main() - \ No newline at end of file diff --git a/test/test_units.py b/test/test_units.py index 40c07eb..f1cab6f 100644 --- a/test/test_units.py +++ b/test/test_units.py @@ -1,7 +1,7 @@ import sys import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma from netCDF4 import Dataset as nc @@ -13,44 +13,43 @@ TEST_FILE = "/Users/ladwig/Documents/wrf_files/wrfout_d01_2010-06-13_21:00:00" # Python 3 if sys.version_info > (3,): xrange = range - + class TestUnits(ut.TestCase): longMessage = True - + def test_temp_units(self): wrfnc = nc(TEST_FILE) - + for units in ("K", "degC", "degF"): var = getvar(wrfnc, "temp", units=units) - + self.assertEqual(var.attrs["units"], units) - + def test_wind_units(self): wrfnc = nc(TEST_FILE) - + for units in ("m s-1", "kt", "mi h-1", "km h-1", "ft s-1"): var = getvar(wrfnc, "uvmet", units=units) - + self.assertEqual(var.attrs["units"], units) - + def test_pres_units(self): wrfnc = nc(TEST_FILE) - + for units in ("Pa", "hPa", "mb", "torr", "mmHg", "atm"): var = getvar(wrfnc, "slp", units=units) - + self.assertEqual(var.attrs["units"], units) - + def test_height_units(self): wrfnc = nc(TEST_FILE) - + for units in ("m", "km", "dam", "ft", "mi"): var = getvar(wrfnc, "z", units=units) - + self.assertEqual(var.attrs["units"], units) - - + if __name__ == "__main__": - ut.main() \ No newline at end of file + ut.main() diff --git a/test/utests.py b/test/utests.py index bf5359a..eb95167 100644 --- a/test/utests.py +++ b/test/utests.py @@ -1,8 +1,9 @@ import unittest as ut -import numpy.testing as nt +import numpy.testing as nt import numpy as np import numpy.ma as ma -import os, sys +import os +import sys import subprocess import glob @@ -14,7 +15,6 @@ from wrf.util import is_multi_file NCL_EXE = "/Users/ladwig/miniconda2/envs/ncl_build/bin/ncl" NCARG_ROOT = "/Users/ladwig/miniconda2/envs/ncl_build" -#TEST_FILE = "/Users/ladwig/Documents/wrf_files/wrfout_d01_2010-06-13_21:00:00" DIRS = ["/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/moving_nest", "/Users/ladwig/Documents/wrf_files/wrf_vortex_multi/static_nest"] PATTERN = "wrfout_d02_*" @@ -25,62 +25,62 @@ NEST = ["moving", "static"] if sys.version_info > (3,): xrange = range + def setUpModule(): - #ncarg_root = os.environ.get("NCARG_ROOT", None) - #if ncarg_root is None: + # ncarg_root = os.environ.get("NCARG_ROOT", None) + # if ncarg_root is None: # raise RuntimeError("$NCARG_ROOT environment variable not set") - os.environ["NCARG_ROOT"] = NCARG_ROOT os.environ["NCARG_NCARG"] = os.path.join(NCARG_ROOT, "lib", "ncarg") os.environ["OMP_NUM_THREADS"] = "4" - - + this_path = os.path.realpath(__file__) - ncl_script = os.path.join(os.path.dirname(this_path), + ncl_script = os.path.join(os.path.dirname(this_path), "ncl", "ncl_get_var.ncl") - - for dir,outfile in zip(DIRS, REF_NC_FILES): - cmd = "%s %s 'dir=\"%s\"' 'pattern=\"%s\"' 'out_file=\"%s\"'" % ( 
+ + for dir, outfile in zip(DIRS, REF_NC_FILES): + cmd = "{} {} 'dir=\"{}\"' 'pattern=\"{}\"' 'out_file=\"{}\"'" .format( NCL_EXE, ncl_script, dir, PATTERN, outfile) - + print(cmd) - + if not os.path.exists(outfile): status = subprocess.call(cmd, shell=True) if (status != 0): raise RuntimeError("NCL script failed. Could not set up test.") -# Using helpful information at: -# http://eli.thegreenplace.net/2014/04/02/dynamically-generating-python-test-cases + +# Using helpful information at: +# http://eli.thegreenplace.net/2014/04/02/ +# dynamically-generating-python-test-cases def make_test(varname, dir, pattern, referent, multi=False, pynio=False): def test(self): - try: from netCDF4 import Dataset as NetCDF - except: + except ImportError: pass - + try: import Nio - except: + except ImportError: pass - + timeidx = 0 if not multi else None pat = os.path.join(dir, pattern) wrf_files = glob.glob(pat) wrf_files.sort() - + wrfin = [] for fname in wrf_files: if not pynio: f = NetCDF(fname) try: f.set_always_mask(False) - except: + except AttributeError: pass wrfin.append(f) else: @@ -94,160 +94,126 @@ def make_test(varname, dir, pattern, referent, multi=False, pynio=False): refnc = NetCDF(referent) try: refnc.set_auto_mask(False) - except: + except AttributeError: pass - + # These have a left index that defines the product type - multiproduct = varname in ("uvmet", "uvmet10", "cape_2d", "cape_3d", - "cfrac") - multi2d = ("uvmet10", "cape_2d", "cfrac") - multi3d = ("uvmet", "cape_3d") - + multiproduct = varname in ("uvmet", "uvmet10", "wspd_wdir", + "wspd_wdir10", "uvmet_wspd_wdir", + "uvmet10_wspd_wdir", + "cape_2d", "cape_3d", "cfrac") + multi2d = ("uvmet10", "wspd_wdir10", "uvmet10_wspd_wdir", + "cape_2d", "cfrac") + multi3d = ("uvmet", "wspd_wdir", "uvmet_wspd_wdir", "cape_3d") + # These varnames don't have NCL functions to test against ignore_referent = ("zstag", "geopt_stag") - + if varname not in ignore_referent: if not multi: if varname in multi2d: - ref_vals = refnc.variables[varname][...,0,:,:] + ref_vals = refnc.variables[varname][..., 0, :, :] elif varname in multi3d: - ref_vals = refnc.variables[varname][...,0,:,:,:] + ref_vals = refnc.variables[varname][..., 0, :, :, :] else: - ref_vals = refnc.variables[varname][0,:] + ref_vals = refnc.variables[varname][0, :] else: ref_vals = refnc.variables[varname][:] - - if (varname == "tc"): - my_vals = getvar(wrfin, "temp", timeidx=timeidx, units="c") - tol = 1/100. - atol = .1 # Note: NCL uses 273.16 as conversion for some reason - nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) - elif (varname == "height_agl"): - # Change the vert_type to height_agl when NCL gets updated. - my_vals = getvar(wrfin, "z", timeidx=timeidx, msl=False) - tol = 1/100. - atol = .1 # Note: NCL uses 273.16 as conversion for some reason - nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) - elif (varname == "cfrac"): - # Change the vert_type to height_agl when NCL gets updated. - my_vals = getvar(wrfin, "cfrac", timeidx=timeidx) - tol = 1/100. 
- atol = .1 # Note: NCL uses 273.16 as conversion for some reason - nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) - elif (varname == "pw"): - my_vals = getvar(wrfin, "pw", timeidx=timeidx) - tol = .5/100.0 - atol = 0 # NCL uses different constants and doesn't use same - # handrolled virtual temp in method - try: - nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) - except AssertionError: - print (np.amax(np.abs(to_np(my_vals) - ref_vals))) - raise - elif (varname == "cape_2d"): - cape_2d = getvar(wrfin, varname, timeidx=timeidx) - tol = 0/100. - atol = 200.0 - # Let's only compare CAPE values until the F90 changes are - # merged back in to NCL. The modifications to the R and CP - # changes TK enough that non-lifting parcels could lift, thus - # causing wildly different values in LCL - nt.assert_allclose(to_np(cape_2d[0,:]), ref_vals[0,:], tol, atol) - elif (varname == "cape_3d"): - cape_3d = getvar(wrfin, varname, timeidx=timeidx) - # Changing the R and CP constants, while keeping TK within - # 2%, can lead to some big changes in CAPE. Tolerances - # have been set wide when comparing the with the original - # NCL. Change back when the F90 code is merged back with - # NCL - tol = 0/100. - atol = 200.0 - - #print np.amax(np.abs(to_np(cape_3d[0,:]) - ref_vals[0,:])) - nt.assert_allclose(to_np(cape_3d), ref_vals, tol, atol) - elif (varname == "zstag" or varname == "geopt_stag"): + + if (varname == "zstag" or varname == "geopt_stag"): v = getvar(wrfin, varname, timeidx=timeidx) # For now, only make sure it runs without crashing since no NCL - # to compare with yet. + # to compare with yet.0 else: - my_vals = getvar(wrfin, varname, timeidx=timeidx) - tol = 2/100. - atol = 0.1 - #print (np.amax(np.abs(to_np(my_vals) - ref_vals))) + + if varname == "cape_2d" or varname == "cape_3d": + # Cape still has a small floating point issue that + # hasn't been completely resolved, so for now check + # that cape is within 50. + my_vals = getvar(wrfin, varname, timeidx=timeidx) + rtol = 0 + atol = 50. 
+ else: + # All other tests should be within .001 of each other + my_vals = getvar(wrfin, varname, timeidx=timeidx) + rtol = 0 + atol = 1.0E-3 + try: - nt.assert_allclose(to_np(my_vals), ref_vals, tol, atol) - except: + nt.assert_allclose(to_np(my_vals), ref_vals, rtol, atol) + except AssertionError: absdiff = np.abs(to_np(my_vals) - ref_vals) maxdiff = np.amax(absdiff) print(maxdiff) print(np.argwhere(absdiff == maxdiff)) - + raise - - + return test + def _get_refvals(referent, varname, multi): try: from netCDF4 import Dataset as NetCDF - except: + except ImportError: pass - + refnc = NetCDF(referent) try: pass - #refnc.set_auto_mask(False) - except: + except ImportError: pass - - multi2d = ("uvmet10", "cape_2d", "cfrac") - multi3d = ("uvmet", "cape_3d") - interpline = ("t2_line","t2_line2", "t2_line3") - + + multi2d = ("uvmet10", "wspd_wdir10", "uvmet10_wspd_wdir", "cape_2d", + "cfrac") + multi3d = ("uvmet", "wspd_wdir", "uvmet_wspd_wdir", "cape_3d") + interpline = ("t2_line", "t2_line2", "t2_line3") + if not multi: if varname in multi2d: - ref_vals = refnc.variables[varname][...,0,:,:] + ref_vals = refnc.variables[varname][..., 0, :, :] elif varname in multi3d: - ref_vals = refnc.variables[varname][...,0,:,:,:] + ref_vals = refnc.variables[varname][..., 0, :, :, :] else: v = refnc.variables[varname][:] if v.ndim == 2: if varname in interpline: - ref_vals = v[0,:] + ref_vals = v[0, :] else: ref_vals = v else: - ref_vals = v[0,:] + ref_vals = v[0, :] else: ref_vals = refnc.variables[varname][:] - + return ref_vals -def make_interp_test(varname, dir, pattern, referent, multi=False, + +def make_interp_test(varname, dir, pattern, referent, multi=False, pynio=False): def test(self): try: from netCDF4 import Dataset as NetCDF - except: + except ImportError: pass - + try: import Nio - except: + except ImportError: pass - + timeidx = 0 if not multi else None pat = os.path.join(dir, pattern) wrf_files = glob.glob(pat) wrf_files.sort() - + wrfin = [] for fname in wrf_files: if not pynio: f = NetCDF(fname) try: f.set_always_mask(False) - except: + except AttributeError: pass wrfin.append(f) else: @@ -257,568 +223,706 @@ def make_interp_test(varname, dir, pattern, referent, multi=False, _fname = fname f = Nio.open_file(_fname) wrfin.append(f) - + if (varname == "interplevel"): ref_ht_500 = _get_refvals(referent, "z_500", multi) ref_p_5000 = _get_refvals(referent, "p_5000", multi) ref_ht_multi = _get_refvals(referent, "z_multi", multi) ref_p_multi = _get_refvals(referent, "p_multi", multi) - + ref_ht2_500 = _get_refvals(referent, "z2_500", multi) ref_p2_5000 = _get_refvals(referent, "p2_5000", multi) ref_ht2_multi = _get_refvals(referent, "z2_multi", multi) ref_p2_multi = _get_refvals(referent, "p2_multi", multi) - + ref_p_lev2d = _get_refvals(referent, "p_lev2d", multi) - + hts = getvar(wrfin, "z", timeidx=timeidx) p = getvar(wrfin, "pressure", timeidx=timeidx) wspd_wdir = getvar(wrfin, "wspd_wdir", timeidx=timeidx) - + # Make sure the numpy versions work first hts_500 = interplevel(to_np(hts), to_np(p), 500) hts_500 = interplevel(hts, p, 500) - - # Note: the '*2*' versions in the reference are testing + + # Note: the '*2*' versions in the reference are testing # against the new version of interplevel in NCL nt.assert_allclose(to_np(hts_500), ref_ht_500) nt.assert_allclose(to_np(hts_500), ref_ht2_500) - + # Make sure the numpy versions work first p_5000 = interplevel(to_np(p), to_np(hts), 5000) p_5000 = interplevel(p, hts, 5000) - - + nt.assert_allclose(to_np(p_5000), ref_p_5000) 
nt.assert_allclose(to_np(p_5000), ref_p2_5000) - - hts_multi= interplevel(to_np(hts), to_np(p), - [1000., 850., 500., 250.]) + + hts_multi = interplevel(to_np(hts), to_np(p), + [1000., 850., 500., 250.]) hts_multi = interplevel(hts, p, [1000., 850., 500., 250.]) - + nt.assert_allclose(to_np(hts_multi), ref_ht_multi) nt.assert_allclose(to_np(hts_multi), ref_ht2_multi) - - p_multi= interplevel(to_np(p), to_np(hts), - [500., 2500., 5000., 10000. ]) - p_multi = interplevel(p, hts, [500., 2500., 5000., 10000. ]) - + + p_multi = interplevel(to_np(p), to_np(hts), + [500., 2500., 5000., 10000.]) + p_multi = interplevel(p, hts, [500., 2500., 5000., 10000.]) + nt.assert_allclose(to_np(p_multi), ref_p_multi) nt.assert_allclose(to_np(p_multi), ref_p2_multi) - + pblh = getvar(wrfin, "PBLH", timeidx=timeidx) p_lev2d = interplevel(to_np(p), to_np(hts), to_np(pblh)) p_lev2d = interplevel(p, hts, pblh) nt.assert_allclose(to_np(p_lev2d), ref_p_lev2d) - + # Just make sure these run below wspd_wdir_500 = interplevel(to_np(wspd_wdir), to_np(p), 500) wspd_wdir_500 = interplevel(wspd_wdir, p, 500) - #print(wspd_wdir_500) - - wspd_wdir_multi= interplevel(to_np(wspd_wdir), - to_np(p), [1000,500,250]) - wdpd_wdir_multi = interplevel(wspd_wdir, p, [1000,500,250]) - - + + wspd_wdir_multi = interplevel(to_np(wspd_wdir), to_np(p), + [1000, 500, 250]) + wdpd_wdir_multi = interplevel(wspd_wdir, p, [1000, 500, 250]) + wspd_wdir_pblh = interplevel(to_np(wspd_wdir), to_np(hts), pblh) wspd_wdir_pblh = interplevel(wspd_wdir, hts, pblh) - + if multi: - wspd_wdir_pblh_2 = interplevel(to_np(wspd_wdir), - to_np(hts), pblh[0,:]) - wspd_wdir_pblh_2 = interplevel(wspd_wdir, hts, pblh[0,:]) - + wspd_wdir_pblh_2 = interplevel(to_np(wspd_wdir), + to_np(hts), pblh[0, :]) + wspd_wdir_pblh_2 = interplevel(wspd_wdir, hts, pblh[0, :]) + # Since PBLH doesn't change in this case, it should match - # the 0 time from previous computation. Note that this + # the 0 time from previous computation. Note that this # only works when the data has 1 time step that is repeated. - # If you use a different case with multiple times, + # If you use a different case with multiple times, # this will probably fail. - nt.assert_allclose(to_np(wspd_wdir_pblh_2[:,0,:]), - to_np(wspd_wdir_pblh[:,0,:])) - - nt.assert_allclose(to_np(wspd_wdir_pblh_2[:,-1,:]), - to_np(wspd_wdir_pblh[:,0,:])) - - + nt.assert_allclose(to_np(wspd_wdir_pblh_2[:, 0, :]), + to_np(wspd_wdir_pblh[:, 0, :])) + + nt.assert_allclose(to_np(wspd_wdir_pblh_2[:, -1, :]), + to_np(wspd_wdir_pblh[:, 0, :])) + elif (varname == "vertcross"): ref_ht_cross = _get_refvals(referent, "ht_cross", multi) ref_p_cross = _get_refvals(referent, "p_cross", multi) - ref_ht_vertcross1 = _get_refvals(referent, "ht_vertcross1", - multi) - ref_ht_vertcross2 = _get_refvals(referent, "ht_vertcross2", - multi) - ref_ht_vertcross3 = _get_refvals(referent, "ht_vertcross3", - multi) - + ref_ht_vertcross1 = _get_refvals(referent, "ht_vertcross1", + multi) + ref_ht_vertcross2 = _get_refvals(referent, "ht_vertcross2", + multi) + ref_ht_vertcross3 = _get_refvals(referent, "ht_vertcross3", + multi) + hts = getvar(wrfin, "z", timeidx=timeidx) p = getvar(wrfin, "pressure", timeidx=timeidx) - - pivot_point = CoordPair(hts.shape[-1] / 2, hts.shape[-2] / 2) - + + pivot_point = CoordPair(hts.shape[-1] / 2, hts.shape[-2] / 2) + # Beginning in wrf-python 1.3, users can select number of levels. # Originally, for pressure, dz was 10, so let's back calculate # the number of levels. 
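        # Worked example with hypothetical numbers: if the pressure field spans
        # roughly 52 to 998 hPa, then p_max = floor(998/10)*10 = 990,
        # p_min = ceil(52/10)*10 = 60, and p_autolevels = (990 - 60)/10 = 93,
        # i.e. one level per 10 hPa between the rounded bottom and top values.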
p_max = np.floor(np.amax(p)/10) * 10 # bottom value p_min = np.ceil(np.amin(p)/10) * 10 # top value - - p_autolevels = int((p_max - p_min) /10) - + + p_autolevels = int((p_max - p_min) / 10) + # Make sure the numpy versions work first - - ht_cross = vertcross(to_np(hts), to_np(p), + + ht_cross = vertcross(to_np(hts), to_np(p), pivot_point=pivot_point, angle=90., autolevels=p_autolevels) - + ht_cross = vertcross(hts, p, pivot_point=pivot_point, angle=90., autolevels=p_autolevels) - - nt.assert_allclose(to_np(ht_cross), ref_ht_cross, rtol=.01) - + + try: + nt.assert_allclose(to_np(ht_cross), ref_ht_cross, atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(ht_cross) - ref_ht_cross) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + lats = hts.coords["XLAT"] lons = hts.coords["XLONG"] - + # Test the manual projection method with lat/lon - # Only do this for the non-multi case, since the domain + # Only do this for the non-multi case, since the domain # might be moving if not multi: - if lats.ndim > 2: # moving nest - lats = lats[0,:] - lons = lons[0,:] - + if lats.ndim > 2: # moving nest + lats = lats[0, :] + lons = lons[0, :] + ll_point = ll_points(lats, lons) - - pivot = CoordPair(lat=lats[int(lats.shape[-2]/2), - int(lats.shape[-1]/2)], - lon=lons[int(lons.shape[-2]/2), - int(lons.shape[-1]/2)]) - - v1 = vertcross(hts,p,wrfin=wrfin,pivot_point=pivot_point, - angle=90.0) - v2 = vertcross(hts,p,projection=hts.attrs["projection"], - ll_point=ll_point, - pivot_point=pivot_point, angle=90.) - - nt.assert_allclose(to_np(v1), to_np(v2), rtol=.01) - + + pivot = CoordPair(lat=lats[int(lats.shape[-2]/2), + int(lats.shape[-1]/2)], + lon=lons[int(lons.shape[-2]/2), + int(lons.shape[-1]/2)]) + + v1 = vertcross(hts, p, wrfin=wrfin, pivot_point=pivot_point, + angle=90.0) + v2 = vertcross(hts, p, projection=hts.attrs["projection"], + ll_point=ll_point, pivot_point=pivot_point, + angle=90.) 
+ + try: + nt.assert_allclose(to_np(v1), to_np(v2), atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(v1) - to_np(v2)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Test opposite - - p_cross1 = vertcross(p,hts,pivot_point=pivot_point, angle=90.0) - - nt.assert_allclose(to_np(p_cross1), - ref_p_cross, - rtol=.01) + + p_cross1 = vertcross(p, hts, pivot_point=pivot_point, angle=90.0) + + try: + nt.assert_allclose(to_np(p_cross1), ref_p_cross, atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(p_cross1) - ref_p_cross) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Test point to point start_point = CoordPair(0, hts.shape[-2]/2) end_point = CoordPair(-1, hts.shape[-2]/2) - - - p_cross2 = vertcross(p,hts,start_point=start_point, - end_point=end_point) - - nt.assert_allclose(to_np(p_cross1), - to_np(p_cross2)) - + + p_cross2 = vertcross(p, hts, start_point=start_point, + end_point=end_point) + + try: + nt.assert_allclose(to_np(p_cross1), to_np(p_cross2), + atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(p_cross1) - to_np(p_cross2)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Check the new vertcross routine - pivot_point = CoordPair(hts.shape[-1] / 2, hts.shape[-2] / 2) - ht_cross = vertcross(hts, p, - pivot_point=pivot_point, angle=90., + pivot_point = CoordPair(hts.shape[-1] / 2, hts.shape[-2] / 2) + ht_cross = vertcross(hts, p, pivot_point=pivot_point, angle=90., latlon=True) - - nt.assert_allclose(to_np(ht_cross), - to_np(ref_ht_vertcross1), atol=.01) - - - + + try: + nt.assert_allclose(to_np(ht_cross), + to_np(ref_ht_vertcross1), atol=.005) + except AssertionError: + absdiff = np.abs(to_np(ht_cross) - to_np(ref_ht_vertcross1)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + levels = [1000., 850., 700., 500., 250.] 
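        # The explicit `levels` keyword pins the cross section to these fixed
        # vertical values (pressure in hPa here), whereas `autolevels` above only
        # asks vertcross for an evenly spaced set of that many levels between the
        # minimum and maximum of the vertical coordinate.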
- ht_cross = vertcross(hts, p, - pivot_point=pivot_point, angle=90., + ht_cross = vertcross(hts, p, pivot_point=pivot_point, angle=90., levels=levels, latlon=True) - - nt.assert_allclose(to_np(ht_cross), - to_np(ref_ht_vertcross2), atol=.01) - + + try: + nt.assert_allclose(to_np(ht_cross), to_np(ref_ht_vertcross2), + atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(ht_cross) - to_np(ref_ht_vertcross2)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + idxs = (0, slice(None)) if lats.ndim > 2 else (slice(None),) - - start_lat = np.amin(lats[idxs]) + .25*(np.amax(lats[idxs]) - np.amin(lats[idxs])) - end_lat = np.amin(lats[idxs]) + .65*(np.amax(lats[idxs]) - np.amin(lats[idxs])) - - start_lon = np.amin(lons[idxs]) + .25*(np.amax(lons[idxs]) - np.amin(lons[idxs])) - end_lon = np.amin(lons[idxs]) + .65*(np.amax(lons[idxs]) - np.amin(lons[idxs])) - + + start_lat = np.amin(lats[idxs]) + .25*(np.amax(lats[idxs]) + - np.amin(lats[idxs])) + end_lat = np.amin(lats[idxs]) + .65*(np.amax(lats[idxs]) + - np.amin(lats[idxs])) + + start_lon = np.amin(lons[idxs]) + .25*(np.amax(lons[idxs]) + - np.amin(lons[idxs])) + end_lon = np.amin(lons[idxs]) + .65*(np.amax(lons[idxs]) + - np.amin(lons[idxs])) + start_point = CoordPair(lat=start_lat, lon=start_lon) end_point = CoordPair(lat=end_lat, lon=end_lon) - + ll_point = ll_points(lats[idxs], lons[idxs]) - - ht_cross = vertcross(hts, p, - start_point=start_point, - end_point=end_point, - projection=hts.attrs["projection"], - ll_point=ll_point, + + ht_cross = vertcross(hts, p, + start_point=start_point, + end_point=end_point, + projection=hts.attrs["projection"], + ll_point=ll_point, latlon=True, autolevels=1000) - - - nt.assert_allclose(to_np(ht_cross), - to_np(ref_ht_vertcross3), - rtol=.01) - + + try: + nt.assert_allclose(to_np(ht_cross), to_np(ref_ht_vertcross3), + atol=.01) + except AssertionError: + absdiff = np.abs(to_np(ht_cross) - to_np(ref_ht_vertcross3)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + if multi: ntimes = hts.shape[0] - + for t in range(ntimes): hts = getvar(wrfin, "z", timeidx=t) p = getvar(wrfin, "pressure", timeidx=t) - - ht_cross = vertcross(hts, p, - start_point=start_point, - end_point=end_point, - wrfin=wrfin, - timeidx=t, - latlon=True, - autolevels=1000) - + + ht_cross = vertcross( + hts, p, start_point=start_point, end_point=end_point, + wrfin=wrfin, timeidx=t, latlon=True, autolevels=1000) + refname = "ht_vertcross_t{}".format(t) ref_ht_vertcross = _get_refvals(referent, refname, False) - - nt.assert_allclose(to_np(ht_cross), - to_np(ref_ht_vertcross),rtol=.02) - - + + try: + nt.assert_allclose(to_np(ht_cross), + to_np(ref_ht_vertcross), atol=.01) + except AssertionError: + absdiff = np.abs(to_np(ht_cross) - + to_np(ref_ht_vertcross)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + elif (varname == "interpline"): - + ref_t2_line = _get_refvals(referent, "t2_line", multi) ref_t2_line2 = _get_refvals(referent, "t2_line2", multi) ref_t2_line3 = _get_refvals(referent, "t2_line3", multi) - + t2 = getvar(wrfin, "T2", timeidx=timeidx) pivot_point = CoordPair(t2.shape[-1] / 2, t2.shape[-2] / 2) - + # Make sure the numpy version works - t2_line1 = interpline(to_np(t2), pivot_point=pivot_point, + t2_line1 = interpline(to_np(t2), pivot_point=pivot_point, angle=90.0) t2_line1 = interpline(t2, pivot_point=pivot_point, angle=90.0) - + nt.assert_allclose(to_np(t2_line1), ref_t2_line) - + 
# Test the new NCL wrf_user_interplevel result - nt.assert_allclose(to_np(t2_line1), ref_t2_line2) - + try: + nt.assert_allclose(to_np(t2_line1), ref_t2_line2) + except AssertionError: + absdiff = np.abs(to_np(t2_line1) - ref_t2_line2) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Test the manual projection method with lat/lon lats = t2.coords["XLAT"] lons = t2.coords["XLONG"] if multi: - if lats.ndim > 2: # moving nest - lats = lats[0,:] - lons = lons[0,:] - + if lats.ndim > 2: # moving nest + lats = lats[0, :] + lons = lons[0, :] + ll_point = ll_points(lats, lons) - - pivot = CoordPair(lat=lats[int(lats.shape[-2]/2), + + pivot = CoordPair(lat=lats[int(lats.shape[-2]/2), int(lats.shape[-1]/2)], - lon=lons[int(lons.shape[-2]/2), + lon=lons[int(lons.shape[-2]/2), int(lons.shape[-1]/2)]) - l1 = interpline(t2,wrfin=wrfin,pivot_point=pivot_point, - angle=90.0) - - l2 = interpline(t2,projection=t2.attrs["projection"], - ll_point=ll_point, - pivot_point=pivot_point, angle=90.) - nt.assert_allclose(to_np(l1), to_np(l2), rtol=.01) - + l1 = interpline(t2, wrfin=wrfin, pivot_point=pivot_point, + angle=90.0) + + l2 = interpline(t2, projection=t2.attrs["projection"], + ll_point=ll_point, pivot_point=pivot_point, + angle=90.) + try: + nt.assert_allclose(to_np(l1), to_np(l2)) + except AssertionError: + absdiff = np.abs(to_np(l1) - to_np(l2)) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Test point to point start_point = CoordPair(0, t2.shape[-2]/2) end_point = CoordPair(-1, t2.shape[-2]/2) - - t2_line2 = interpline(t2, start_point=start_point, + + t2_line2 = interpline(t2, start_point=start_point, end_point=end_point) - - nt.assert_allclose(to_np(t2_line1), to_np(t2_line2)) - + + try: + nt.assert_allclose(to_np(t2_line1), to_np(t2_line2)) + except AssertionError: + absdiff = np.abs(to_np(t2_line1) - t2_line2) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Now test the start/end with lat/lon points - - start_lat = float(np.amin(lats) + .25*(np.amax(lats) + + start_lat = float(np.amin(lats) + .25*(np.amax(lats) - np.amin(lats))) - end_lat = float(np.amin(lats) + .65*(np.amax(lats) + end_lat = float(np.amin(lats) + .65*(np.amax(lats) - np.amin(lats))) - - start_lon = float(np.amin(lons) + .25*(np.amax(lons) + + start_lon = float(np.amin(lons) + .25*(np.amax(lons) - np.amin(lons))) - end_lon = float(np.amin(lons) + .65*(np.amax(lons) + end_lon = float(np.amin(lons) + .65*(np.amax(lons) - np.amin(lons))) - + start_point = CoordPair(lat=start_lat, lon=start_lon) end_point = CoordPair(lat=end_lat, lon=end_lon) - - t2_line3 = interpline(t2,wrfin=wrfin,timeidx=0, + + t2_line3 = interpline(t2, wrfin=wrfin, timeidx=0, start_point=start_point, - end_point=end_point,latlon=True) - - - nt.assert_allclose(to_np(t2_line3), ref_t2_line3, rtol=.01) - + end_point=end_point, latlon=True) + + try: + nt.assert_allclose(to_np(t2_line3), ref_t2_line3, atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(t2_line3) - ref_t2_line3) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Test all time steps if multi: refnc = NetCDF(referent) ntimes = t2.shape[0] - + for t in range(ntimes): t2 = getvar(wrfin, "T2", timeidx=t) - - line = interpline(t2,wrfin=wrfin,timeidx=t, - start_point=start_point, - end_point=end_point,latlon=True) - + + line = interpline( + t2, wrfin=wrfin, timeidx=t, start_point=start_point, + 
end_point=end_point, latlon=True) + refname = "t2_line_t{}".format(t) refline = refnc.variables[refname][:] - - nt.assert_allclose(to_np(line), - to_np(refline),rtol=.005) - + + try: + nt.assert_allclose(to_np(line), to_np(refline), + atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(line) - refline) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + refnc.close() - + # Test NCLs single time case if not multi: refnc = NetCDF(referent) ref_t2_line4 = refnc.variables["t2_line4"][:] - + t2 = getvar(wrfin, "T2", timeidx=0) - line = interpline(t2,wrfin=wrfin,timeidx=0, + line = interpline(t2, wrfin=wrfin, timeidx=0, start_point=start_point, - end_point=end_point,latlon=True) - - nt.assert_allclose(to_np(line), - to_np(ref_t2_line4),rtol=.005) - refnc.close() + end_point=end_point, latlon=True) + + try: + nt.assert_allclose(to_np(line), to_np(ref_t2_line4), + atol=.0001) + except AssertionError: + absdiff = np.abs(to_np(line) - ref_t2_line4) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + finally: + refnc.close() elif (varname == "vinterp"): # Tk to theta fld_tk_theta = _get_refvals(referent, "fld_tk_theta", multi) fld_tk_theta = np.squeeze(fld_tk_theta) - + tk = getvar(wrfin, "temp", timeidx=timeidx, units="k") - - interp_levels = [200,300,500,1000] - + + interp_levels = [200, 300, 500, 1000] + # Make sure the numpy version works - field = vinterp(wrfin, - field=to_np(tk), - vert_coord="theta", - interp_levels=interp_levels, - extrapolate=True, + field = vinterp(wrfin, + field=to_np(tk), + vert_coord="theta", + interp_levels=interp_levels, + extrapolate=True, field_type="tk", - timeidx=timeidx, + timeidx=timeidx, log_p=True) - - field = vinterp(wrfin, - field=tk, - vert_coord="theta", - interp_levels=interp_levels, - extrapolate=True, + + field = vinterp(wrfin, + field=tk, + vert_coord="theta", + interp_levels=interp_levels, + extrapolate=True, field_type="tk", - timeidx=timeidx, + timeidx=timeidx, log_p=True) - - tol = 5/100. + + tol = 0/100. atol = 0.0001 - + field = np.squeeze(field) - #print (np.amax(np.abs(to_np(field) - fld_tk_theta))) - nt.assert_allclose(to_np(field), fld_tk_theta, tol, atol) - + + try: + nt.assert_allclose(to_np(field), fld_tk_theta) + except AssertionError: + absdiff = np.abs(to_np(field) - fld_tk_theta) + maxdiff = np.amax(absdiff) + print(maxdiff) + print(np.argwhere(absdiff == maxdiff)) + raise + # Tk to theta-e fld_tk_theta_e = _get_refvals(referent, "fld_tk_theta_e", multi) fld_tk_theta_e = np.squeeze(fld_tk_theta_e) - - interp_levels = [200,300,500,1000] - - field = vinterp(wrfin, - field=tk, - vert_coord="theta-e", - interp_levels=interp_levels, - extrapolate=True, - field_type="tk", - timeidx=timeidx, + + interp_levels = [200, 300, 500, 1000] + + field = vinterp(wrfin, + field=tk, + vert_coord="theta-e", + interp_levels=interp_levels, + extrapolate=True, + field_type="tk", + timeidx=timeidx, log_p=True) - - tol = 3/100. 
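Aside: the vinterp checks in this block all share one call pattern, namely interpolate a getvar field to a new vertical coordinate and compare it with the NCL reference. A minimal sketch of that pattern, using a hypothetical file handle wrfnc; the argument names and values mirror the pressure-coordinate calls above:

from netCDF4 import Dataset
from wrf import getvar, vinterp

wrfnc = Dataset("wrfout_d02_2005-08-28_00:00:00")   # hypothetical local path
tk = getvar(wrfnc, "temp", units="k")
t_pres = vinterp(wrfnc, field=tk, vert_coord="pressure",
                 interp_levels=[850, 500], extrapolate=True,
                 field_type="tk", log_p=True)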
-            atol = 50.0001
-
+
             field = np.squeeze(field)
-            #print (np.amax(np.abs(to_np(field) - fld_tk_theta_e)/fld_tk_theta_e)*100)
-            nt.assert_allclose(to_np(field), fld_tk_theta_e, tol, atol)
-
+            try:
+                nt.assert_allclose(to_np(field), fld_tk_theta_e, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_tk_theta_e)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
             # Tk to pressure
             fld_tk_pres = _get_refvals(referent, "fld_tk_pres", multi)
             fld_tk_pres = np.squeeze(fld_tk_pres)
-
-            interp_levels = [850,500]
-
-            field = vinterp(wrfin,
-                            field=tk,
-                            vert_coord="pressure",
-                            interp_levels=interp_levels,
-                            extrapolate=True,
+
+            interp_levels = [850, 500]
+
+            field = vinterp(wrfin,
+                            field=tk,
+                            vert_coord="pressure",
+                            interp_levels=interp_levels,
+                            extrapolate=True,
                             field_type="tk",
-                            timeidx=timeidx,
+                            timeidx=timeidx,
                             log_p=True)
-
+
             field = np.squeeze(field)
-
-            #print (np.amax(np.abs(to_np(field) - fld_tk_pres)))
-            nt.assert_allclose(to_np(field), fld_tk_pres, tol, atol)
-
+
+            try:
+                nt.assert_allclose(to_np(field), fld_tk_pres, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_tk_pres)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
             # Tk to geoht_msl
             fld_tk_ght_msl = _get_refvals(referent, "fld_tk_ght_msl", multi)
             fld_tk_ght_msl = np.squeeze(fld_tk_ght_msl)
-            interp_levels = [1,2]
-
-            field = vinterp(wrfin,
-                            field=tk,
-                            vert_coord="ght_msl",
-                            interp_levels=interp_levels,
-                            extrapolate=True,
+            interp_levels = [1, 2]
+
+            field = vinterp(wrfin,
+                            field=tk,
+                            vert_coord="ght_msl",
+                            interp_levels=interp_levels,
+                            extrapolate=True,
                             field_type="tk",
-                            timeidx=timeidx,
+                            timeidx=timeidx,
                             log_p=True)
-
+
             field = np.squeeze(field)
-            #print (np.amax(np.abs(to_np(field) - fld_tk_ght_msl)))
-            nt.assert_allclose(to_np(field), fld_tk_ght_msl, tol, atol)
-
+            try:
+                nt.assert_allclose(to_np(field), fld_tk_ght_msl, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_tk_ght_msl)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
             # Tk to geoht_agl
             fld_tk_ght_agl = _get_refvals(referent, "fld_tk_ght_agl", multi)
             fld_tk_ght_agl = np.squeeze(fld_tk_ght_agl)
-            interp_levels = [1,2]
-
-            field = vinterp(wrfin,
-                            field=tk,
-                            vert_coord="ght_agl",
-                            interp_levels=interp_levels,
-                            extrapolate=True,
-                            field_type="tk",
-                            timeidx=timeidx,
+            interp_levels = [1, 2]
+
+            field = vinterp(wrfin,
+                            field=tk,
+                            vert_coord="ght_agl",
+                            interp_levels=interp_levels,
+                            extrapolate=True,
+                            field_type="tk",
+                            timeidx=timeidx,
                             log_p=True)
-
+
             field = np.squeeze(field)
-            #print (np.amax(np.abs(to_np(field) - fld_tk_ght_agl)))
-            nt.assert_allclose(to_np(field), fld_tk_ght_agl, tol, atol)
-
+
+            try:
+                nt.assert_allclose(to_np(field), fld_tk_ght_agl, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_tk_ght_agl)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
             # Hgt to pressure
             fld_ht_pres = _get_refvals(referent, "fld_ht_pres", multi)
             fld_ht_pres = np.squeeze(fld_ht_pres)
-
+
             z = getvar(wrfin, "height", timeidx=timeidx, units="m")
-            interp_levels = [500,50]
-            field = vinterp(wrfin,
-                            field=z,
-                            vert_coord="pressure",
-                            interp_levels=interp_levels,
-                            extrapolate=True,
-                            field_type="ght",
-                            timeidx=timeidx,
+            interp_levels = [500, 50]
+            field = vinterp(wrfin,
+                            field=z,
+                            vert_coord="pressure",
+                            interp_levels=interp_levels,
+                            extrapolate=True,
+                            field_type="ght",
+                            timeidx=timeidx,
                             log_p=True)
-
+
             field = np.squeeze(field)
-            #print (np.amax(np.abs(to_np(field) - fld_ht_pres)))
-            nt.assert_allclose(to_np(field), fld_ht_pres, tol, atol)
-
+
+            try:
+                nt.assert_allclose(to_np(field), fld_ht_pres, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_ht_pres)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
             # Pressure to theta
             fld_pres_theta = _get_refvals(referent, "fld_pres_theta", multi)
             fld_pres_theta = np.squeeze(fld_pres_theta)
-
+
             p = getvar(wrfin, "pressure", timeidx=timeidx)
-            interp_levels = [200,300,500,1000]
-            field = vinterp(wrfin,
-                            field=p,
-                            vert_coord="theta",
-                            interp_levels=interp_levels,
-                            extrapolate=True,
-                            field_type="pressure",
-                            timeidx=timeidx,
+            interp_levels = [200, 300, 500, 1000]
+            field = vinterp(wrfin,
+                            field=p,
+                            vert_coord="theta",
+                            interp_levels=interp_levels,
+                            extrapolate=True,
+                            field_type="pressure",
+                            timeidx=timeidx,
                             log_p=True)
-
+
             field = np.squeeze(field)
-            #print (np.amax(np.abs(to_np(field) - fld_pres_theta)))
-            nt.assert_allclose(to_np(field), fld_pres_theta, tol, atol)
-
+
+            try:
+                nt.assert_allclose(to_np(field), fld_pres_theta, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_pres_theta)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
             # Theta-e to pres
             fld_thetae_pres = _get_refvals(referent, "fld_thetae_pres", multi)
             fld_thetae_pres = np.squeeze(fld_thetae_pres)
-
+
             eth = getvar(wrfin, "eth", timeidx=timeidx)
-            interp_levels = [850,500,5]
-            field = vinterp(wrfin,
-                            field=eth,
-                            vert_coord="pressure",
-                            interp_levels=interp_levels,
-                            extrapolate=True,
-                            field_type="theta-e",
-                            timeidx=timeidx,
+            interp_levels = [850, 500, 5]
+            field = vinterp(wrfin,
+                            field=eth,
+                            vert_coord="pressure",
+                            interp_levels=interp_levels,
+                            extrapolate=True,
+                            field_type="theta-e",
+                            timeidx=timeidx,
                             log_p=True)
-
+
             field = np.squeeze(field)
-            #print (np.amax(np.abs(to_np(field) - fld_thetae_pres)))
-            nt.assert_allclose(to_np(field), fld_thetae_pres, tol, atol)
-
+
+            try:
+                nt.assert_allclose(to_np(field), fld_thetae_pres, atol=.0001)
+            except AssertionError:
+                absdiff = np.abs(to_np(field) - fld_thetae_pres)
+                maxdiff = np.amax(absdiff)
+                print(maxdiff)
+                print(np.argwhere(absdiff == maxdiff))
+                raise
+
     return test

+
 def extract_proj_params(wrfnc, timeidx=0):
     attrs = extract_global_attrs(wrfnc, ("MAP_PROJ", "TRUELAT1", "TRUELAT2",
-                                         "STAND_LON", "POLE_LAT", "POLE_LON",
+                                         "STAND_LON", "POLE_LAT", "POLE_LON",
                                          "DX", "DY"))
-
-    result = {key.lower(): val for key,val in viewitems(attrs)}
-
+
+    result = {key.lower(): val for key, val in viewitems(attrs)}
+
     _timeidx = timeidx
     if is_multi_file(wrfnc):
         wrfnc0 = wrfnc[0]
         num_times_per_file = len(wrfnc0.dimensions["Time"])
         file_idx = timeidx // num_times_per_file
         _timeidx = timeidx % num_times_per_file
-
+
         wrfnc = wrfnc[file_idx]
-
+
     result["known_x"] = 0
     result["known_y"] = 0
-    result["ref_lat"] = wrfnc.variables["XLAT"][_timeidx,0,0]
-    result["ref_lon"] = wrfnc.variables["XLONG"][_timeidx,0,0]
-
+    result["ref_lat"] = wrfnc.variables["XLAT"][_timeidx, 0, 0]
+    result["ref_lon"] = wrfnc.variables["XLONG"][_timeidx, 0, 0]
+
     return result

-def make_latlon_test(testid, dir, pattern, referent, single,
+
+def make_latlon_test(testid, dir, pattern, referent, single,
                      multi=False, pynio=False):
     def test(self):
         try:
             from netCDF4 import Dataset as NetCDF
-        except:
+        except ImportError:
             pass
-
+
         try:
             import Nio
-        except:
+        except ImportError:
             pass
-
+
         timeidx = 0 if not multi else None
         pat = os.path.join(dir, pattern)
         wrf_files = glob.glob(pat)
         wrf_files.sort()
-
+
         refnc = NetCDF(referent)
         try:
             refnc.set_always_mask(False)
-        except:
+        except AttributeError:
             pass
-
+
         wrfin = []
         for fname in wrf_files:
             if not pynio:
                 f = NetCDF(fname)
                 try:
                     f.set_auto_mask(False)
-                except:
+                except AttributeError:
                     pass
                 wrfin.append(f)
             else:
@@ -828,41 +932,39 @@ def make_latlon_test(testid, dir, pattern, referent, single,
                     _fname = fname
                 f = Nio.open_file(_fname)
                 wrfin.append(f)
-
+
         if testid == "xy":
-
+
             # Lats/Lons taken from NCL script, just hard-coding for now
             lats = [22.0, 25.0, 27.0]
             lons = [-90.0, -87.5, -83.75]
-
+
             # Just call with a single lat/lon
             if single:
                 timeidx = 8
                 ref_vals = refnc.variables["xy2"][:]
-
+
                 xy = ll_to_xy(wrfin, lats[0], lons[0], timeidx=timeidx,
                               as_int=True)
-                ref = ref_vals[:,0]
-
+                ref = ref_vals[:, 0]
+
                 nt.assert_allclose(to_np(xy), ref)
-
+
                 # Next make sure the 'proj' version works
                 projparams = extract_proj_params(wrfin, timeidx=timeidx)
-                xy_proj = ll_to_xy_proj(lats[0], lons[0],
-                                        as_int=True,
+                xy_proj = ll_to_xy_proj(lats[0], lons[0], as_int=True,
                                         **projparams)
-
+
                 nt.assert_allclose(to_np(xy_proj), to_np(xy))
-
-
+
             else:
                 ref_vals = refnc.variables["xy1"][:]
                 xy = ll_to_xy(wrfin, lats, lons, timeidx=None, as_int=False)
-
+
                 ref = ref_vals[:]
-
+
                 nt.assert_allclose(to_np(xy), ref)
-
+
                 if xy.ndim > 2:
                     # Moving nest
                     is_moving = True
@@ -870,49 +972,48 @@ def make_latlon_test(testid, dir, pattern, referent, single,
                 else:
                     is_moving = False
                     numtimes = 1
-
+
                 for tidx in range(9):
-
+
                     # Next make sure the 'proj' version works
                     projparams = extract_proj_params(wrfin, timeidx=tidx)
                     xy_proj = ll_to_xy_proj(lats, lons, as_int=False,
                                             **projparams)
-
+
                     if is_moving:
                         idxs = (slice(None), tidx, slice(None))
                     else:
                         idxs = (slice(None),)
-
+
                     nt.assert_allclose(to_np(xy_proj), to_np(xy[idxs]))
-
+
         else:
-            # i_s, j_s taken from NCL script, just hard-coding for now
-            # NCL uses 1-based indexing for this, so need to subtract 1
+            # i_s, j_s taken from NCL script, just hard-coding for now
+            # NCL uses 1-based indexing for this, so need to subtract 1
             x_s = np.asarray([10, 50, 90], int)
             y_s = np.asarray([10, 50, 90], int)
-
+
            if single:
                 timeidx = 8
                 ref_vals = refnc.variables["ll2"][:]
                 ll = xy_to_ll(wrfin, x_s[0], y_s[0], timeidx=timeidx)
-                ref = ref_vals[::-1,0]
-
+                ref = ref_vals[::-1, 0]
+
                 nt.assert_allclose(to_np(ll), ref)
-
+
                 # Next make sure the 'proj' version works
                 projparams = extract_proj_params(wrfin, timeidx=8)
                 ll_proj = xy_to_ll_proj(x_s[0], y_s[0], **projparams)
-
+
                 nt.assert_allclose(to_np(ll_proj), to_np(ll))
-
-
+
            else:
                 ref_vals = refnc.variables["ll1"][:]
                 ll = xy_to_ll(wrfin, x_s, y_s, timeidx=None)
-                ref = ref_vals[::-1,:]
-
+                ref = ref_vals[::-1, :]
+
                 nt.assert_allclose(to_np(ll), ref)
-
+
                 if ll.ndim > 2:
                     # Moving nest
                     is_moving = True
@@ -920,48 +1021,52 @@ def make_latlon_test(testid, dir, pattern, referent, single,
                 else:
                     is_moving = False
                     numtimes = 1
-
+
                 for tidx in range(numtimes):
                     # Next make sure the 'proj' version works
                     projparams = extract_proj_params(wrfin, timeidx=tidx)
                     ll_proj = xy_to_ll_proj(x_s, y_s, **projparams)
-
+
                     if is_moving:
                         idxs = (slice(None), tidx, slice(None))
                     else:
                         idxs = (slice(None),)
-
+
                     nt.assert_allclose(to_np(ll_proj), to_np(ll[idxs]))
-
+
     return test

+
 class WRFVarsTest(ut.TestCase):
     longMessage = True
-
+
+
 class WRFInterpTest(ut.TestCase):
     longMessage = True
-
+
+
 class WRFLatLonTest(ut.TestCase):
     longMessage = True
-
+
 if __name__ == "__main__":
-    from wrf import (omp_set_num_threads, omp_set_schedule, omp_get_schedule,
+    from wrf import (omp_set_num_threads, omp_set_schedule, omp_get_schedule,
                      omp_set_dynamic, omp_get_num_procs, OMP_SCHED_STATIC)
     omp_set_num_threads(omp_get_num_procs()//2)
     omp_set_schedule(OMP_SCHED_STATIC, 0)
     omp_set_dynamic(False)
-
+
     ignore_vars = []  # Not testable yet
-    wrf_vars = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz",
-                "geopt", "helicity", "lat", "lon", "omg", "p", "pressure",
-                "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc",
-                "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va",
+    wrf_vars = ["avo", "eth", "cape_2d", "cape_3d", "ctt", "dbz", "mdbz",
+                "geopt", "helicity", "lat", "lon", "omg", "p", "pressure",
+                "pvo", "pw", "rh2", "rh", "slp", "ter", "td2", "td", "tc",
+                "theta", "tk", "tv", "twb", "updraft_helicity", "ua", "va",
                 "wa", "uvmet10", "uvmet", "z", "cfrac", "zstag", "geopt_stag",
-                "height_agl"]
+                "height_agl", "wspd_wdir", "wspd_wdir10", "uvmet_wspd_wdir",
+                "uvmet10_wspd_wdir"]
     interp_methods = ["interplevel", "vertcross", "interpline", "vinterp"]
     latlon_tests = ["xy", "ll"]
-
+
     for dir, ref_nc_file, nest in zip(DIRS, REF_NC_FILES, NEST):
         try:
             import netCDF4
@@ -971,36 +1076,41 @@ if __name__ == "__main__":
             for var in wrf_vars:
                 if var in ignore_vars:
                     continue
-
+
                 test_func1 = make_test(var, dir, PATTERN, ref_nc_file)
-                test_func2 = make_test(var, dir, PATTERN, ref_nc_file, multi=True)
-                setattr(WRFVarsTest, 'test_{0}_{1}'.format(nest,var), test_func1)
-                setattr(WRFVarsTest, 'test_{0}_multi_{1}'.format(nest,var), test_func2)
-
+                test_func2 = make_test(var, dir, PATTERN, ref_nc_file,
+                                       multi=True)
+                setattr(WRFVarsTest, 'test_{0}_{1}'.format(nest, var),
+                        test_func1)
+                setattr(WRFVarsTest, 'test_{0}_multi_{1}'.format(nest, var),
+                        test_func2)
+
             for method in interp_methods:
-                test_interp_func1 = make_interp_test(method, dir, PATTERN,
+                test_interp_func1 = make_interp_test(method, dir, PATTERN,
                                                      ref_nc_file)
-                test_interp_func2 = make_interp_test(method, dir, PATTERN,
+                test_interp_func2 = make_interp_test(method, dir, PATTERN,
                                                      ref_nc_file, multi=True)
-                setattr(WRFInterpTest, 'test_{0}_{1}'.format(nest,method),
+                setattr(WRFInterpTest, 'test_{0}_{1}'.format(nest, method),
                         test_interp_func1)
-                setattr(WRFInterpTest, 'test_{0}_multi_{1}'.format(nest,method),
+                setattr(WRFInterpTest,
+                        'test_{0}_multi_{1}'.format(nest, method),
                         test_interp_func2)
-
+
             for testid in latlon_tests:
                 for single in (True, False):
                     for multi in (True, False):
-                        test_ll_func = make_latlon_test(testid, dir, PATTERN,
-                                                        ref_nc_file,
-                                                        single=single,
-                                                        multi=multi,
+                        test_ll_func = make_latlon_test(testid, dir, PATTERN,
+                                                        ref_nc_file,
+                                                        single=single,
+                                                        multi=multi,
                                                         pynio=False)
                         multistr = "" if not multi else "_multi"
                         singlestr = "_nosingle" if not single else "_single"
-                        test_name = "test_{}_{}{}{}".format(nest, testid, singlestr,
-                                                            multistr)
+                        test_name = "test_{}_{}{}{}".format(nest, testid,
                                                             singlestr,
                                                             multistr)
                         setattr(WRFLatLonTest, test_name, test_ll_func)
-
+
         try:
             import PyNIO
         except ImportError:
@@ -1009,38 +1119,40 @@ if __name__ == "__main__":
            for var in wrf_vars:
                 if var in ignore_vars:
                     continue
-
-                test_func1 = make_test(var, dir, PATTERN, ref_nc_file, pynio=True)
-                test_func2 = make_test(var, dir, PATTERN, ref_nc_file, multi=True,
+
+                test_func1 = make_test(var, dir, PATTERN, ref_nc_file,
                                        pynio=True)
-                setattr(WRFVarsTest, 'test_pynio_{0}_{1}'.format(nest,var), test_func1)
-                setattr(WRFVarsTest, 'test_pynio_{0}_multi_{1}'.format(nest,var),
-                        test_func2)
-
+                test_func2 = make_test(var, dir, PATTERN, ref_nc_file,
+                                       multi=True,
+                                       pynio=True)
+                setattr(WRFVarsTest, 'test_pynio_{0}_{1}'.format(
+                    nest, var), test_func1)
+                setattr(WRFVarsTest, 'test_pynio_{0}_multi_{1}'.format(
+                    nest, var), test_func2)
+
             for method in interp_methods:
-                test_interp_func1 = make_interp_test(method, dir, PATTERN,
+                test_interp_func1 = make_interp_test(method, dir, PATTERN,
                                                      ref_nc_file)
-                test_interp_func2 = make_interp_test(method, dir, PATTERN,
+                test_interp_func2 = make_interp_test(method, dir, PATTERN,
                                                      ref_nc_file, multi=True)
-                setattr(WRFInterpTest, 'test_pynio_{0}_{1}'.format(nest,method),
+                setattr(WRFInterpTest, 'test_pynio_{0}_{1}'.format(nest,
+                                                                   method),
                        test_interp_func1)
-                setattr(WRFInterpTest, 'test_pynio_{0}_multi_{1}'.format(nest,method),
-                        test_interp_func2)
-
+                setattr(WRFInterpTest, 'test_pynio_{0}_multi_{1}'.format(
+                    nest, method), test_interp_func2)
+
             for testid in latlon_tests:
                 for single in (True, False):
                     for multi in (True, False):
-                        test_ll_func = make_latlon_test(testid, dir, PATTERN,
-                                                        ref_nc_file,
-                                                        single=single,
-                                                        multi=multi,
+                        test_ll_func = make_latlon_test(testid, dir, PATTERN,
+                                                        ref_nc_file,
+                                                        single=single,
+                                                        multi=multi,
                                                         pynio=False)
                         multistr = "" if not multi else "_multi"
                         singlestr = "_nosingle" if not single else "_single"
-                        test_name = "test_pynio_{}_{}{}{}".format(nest, testid,
-                                                                  singlestr,
-                                                                  multistr)
+                        test_name = "test_pynio_{}_{}{}{}".format(
+                            nest, testid, singlestr, multistr)
                         setattr(WRFLatLonTest, test_name, test_ll_func)
-
+
     ut.main()
-
\ No newline at end of file