
Added the IPython notebook demo. Modified the unit tests to remove the wrf.var package import

Branch: main
Bill Ladwig committed 9 years ago · commit 465582a6e7

2 changed files:
  1. wrf_open/test/ipynb/WRF_python_demo.ipynb — 907 additions
  2. wrf_open/test/utests.py — 1 addition, 1 deletion

wrf_open/test/ipynb/WRF_python_demo.ipynb (907 additions)

@@ -0,0 +1,907 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# 1.0 Basic Variable Extraction"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from __future__ import (absolute_import, division, print_function, unicode_literals)\n",
"\n",
"from wrf import getvar\n",
"from netCDF4 import Dataset as nc\n",
"ncfile = nc(\"/Users/ladwig/Documents/wrf_files/wrfout_d01_2016-02-25_18_00_00\")\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"p = getvar(ncfile, \"P\")\n",
"print(p)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 1.0.1 DataArray attributes: 'dims', 'coords', 'attrs'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"print(\"dims: \", p.dims)\n",
"print(\"coords: \", p.coords) \n",
"print(\"attrs: \", p.attrs)\n",
"del p"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 1.0.2 Removing implicit 'squeeze' behavior to preserve single sized dimensions"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"p_nosqueeze = getvar(ncfile, \"P\", timeidx=0, squeeze=False)\n",
"print (p_nosqueeze)\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 1.0.3 Single element metadata"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"print (p_nosqueeze[0,0,100,200])\n",
"del p_nosqueeze"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 1.0.4 Disabling/Enabling xarray"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from wrf import disable_xarray, enable_xarray\n",
"\n",
"# Disable xarray completely\n",
"disable_xarray()\n",
"p_no_meta = getvar(ncfile, \"P\")\n",
"print(type(p_no_meta))\n",
"print (p_no_meta)\n",
"del p_no_meta\n",
"enable_xarray()\n",
"\n",
"# Disable on extraction\n",
"p_no_meta = getvar(ncfile, \"P\", meta=False)\n",
"print(\"\\n\")\n",
"print(type(p_no_meta))\n",
"print(p_no_meta)\n",
"del p_no_meta\n"
]
},
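{
"cell_type": "markdown",
"metadata": {},
"source": [
"You can also check whether xarray-backed metadata is currently active. The cell below is a minimal sketch of that check; it assumes only that 'xarray_enabled' is exported from the top-level wrf package, as the updated utests.py import suggests."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from wrf import xarray_enabled\n",
"\n",
"# True when getvar returns xarray.DataArray objects with metadata, False after disable_xarray()\n",
"print(xarray_enabled())"
]
},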
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# 2.0 Sequences of Input Files "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2.0.1 Combining via the 'cat' method"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"wrflist = [ncfile, ncfile, ncfile]\n",
"p_cat = getvar(wrflist, \"P\", method=\"cat\")\n",
"print(p_cat)\n",
"del p_cat"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2.0.2 Combining via the 'join' method"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"p_join = getvar(wrflist, \"P\", method=\"join\")\n",
"print(p_join)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Note how the Time dimension was replaced with the file dimension, due to the 'squeezing' of the Time dimension.\n",
"\n",
"\n",
"To maintain the Time dimension, set squeeze to False."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"p_join = getvar(wrflist, \"P\", timeidx=0, method=\"join\", squeeze=False)\n",
"print(p_join)\n",
"del p_join"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2.0.3 Dictionary Sequences"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"wrf_dict = {\"label1\" : [ncfile, ncfile],\n",
" \"label2\" : [ncfile, ncfile]}\n",
"p_dict = getvar(wrf_dict, \"P\")\n",
"print(p_dict)\n",
"del p_dict"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2.0.4 Generator Sequences"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"def gen_seq():\n",
" wrfseq = [ncfile, ncfile, ncfile]\n",
" for wrf in wrfseq:\n",
" yield wrf\n",
" \n",
"p_gen = getvar(gen_seq(), \"P\", method=\"join\")\n",
"print(p_gen)\n",
"del p_gen"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 2.0.5 Custom Iterable Classes"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"class FileGen(object):\n",
" def __init__(self, ncfile, count=3):\n",
" self._total = count\n",
" self._i = 0\n",
" self.ncfile = [ncfile]*count\n",
" \n",
" def __iter__(self):\n",
" return self\n",
" \n",
" def next(self):\n",
" if self._i >= self._total:\n",
" raise StopIteration\n",
" else:\n",
" val = self.ncfile[self._i]\n",
" self._i += 1\n",
" return val\n",
" \n",
" # Python 3\n",
" def __next__(self):\n",
" return self.next()\n",
"\n",
"obj_gen = FileGen(ncfile, 3)\n",
"\n",
"p_obj_gen = getvar(gen_seq(), \"P\", method=\"join\", squeeze=False)\n",
"print(p_obj_gen)\n",
"\n",
"del p_obj_gen\n",
" "
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# 3.0 WRF Variable Computational Routines"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"wrf_vars = [\"avo\", \"eth\", \"cape_2d\", \"cape_3d\", \"ctt\", \"dbz\", \"mdbz\", \n",
" \"geopt\", \"helicity\", \"lat\", \"lon\", \"omg\", \"p\", \"pressure\", \n",
" \"pvo\", \"pw\", \"rh2\", \"rh\", \"slp\", \"ter\", \"td2\", \"td\", \"tc\",\n",
" \"theta\", \"tk\", \"tv\", \"twb\", \"updraft_helicity\", \"ua\", \"va\", \n",
" \"wa\", \"uvmet10\", \"uvmet\", \"z\", \"ctt\"]\n",
"\n",
"vard = {varname: getvar(ncfile, varname, method=\"join\", squeeze=False) for varname in wrf_vars}\n",
"for varname in wrf_vars:\n",
" print(vard[varname])\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"(Note all of the NaNs in the above routines which produce missing values (e.g. cape_2d). xarray always converts all masked_array missing values to NaN in order to work with pandas. To get back the original missing values in a numpy masked_array, you need to use the 'npvalues' method from wrf.)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from wrf import npvalues\n",
"masked_ndarray = npvalues(vard[\"cape_2d\"])\n",
"print(masked_ndarray)\n",
"del masked_ndarray"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"\n",
"for key in vard.keys():\n",
" del vard[key]"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 3.1 Interpolation Routines"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 3.1.1 Horizontal Level Interpolation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# 500 MB Heights\n",
"from wrf import getvar, interplevel\n",
"\n",
"z = getvar(ncfile, \"z\")\n",
"p = getvar(ncfile, \"pressure\")\n",
"ht_500mb = interplevel(z, p, 500)\n",
"\n",
"print(ht_500mb)\n",
"del ht_500mb, z, p"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 3.1.2 Vertical Cross Section Interpolation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Pressure using pivot and angle\n",
"from wrf import getvar, vertcross\n",
"\n",
"z = getvar(ncfile, \"z\")\n",
"p = getvar(ncfile, \"pressure\")\n",
"pivot_point = (z.shape[-2] / 2, z.shape[-1] / 2) \n",
"angle = 90.0\n",
"\n",
"p_vert = vertcross(p, z, pivot_point=pivot_point, angle=angle)\n",
"print(p_vert)\n",
"del p_vert\n",
"\n",
"# Pressure using start_point and end_point\n",
"start_point = (z.shape[-2]/2, 0)\n",
"end_point = (z.shape[-2]/2, -1)\n",
"\n",
"p_vert = vertcross(p, z, start_point=start_point, end_point=end_point)\n",
"print(p_vert)\n",
"del p_vert, p, z"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 3.1.3 Interpolate 2D Variable to a Line"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# T2 using pivot and angle\n",
"from wrf import interpline, getvar\n",
"\n",
"t2 = getvar(ncfile, \"T2\")\n",
"pivot_point = (t2.shape[-2] / 2, t2.shape[-1] / 2) \n",
"angle = 90.0\n",
"\n",
"t2_line = interpline(t2, pivot_point=pivot_point, angle=angle)\n",
"print(t2_line)\n",
"\n",
"del t2_line\n",
"\n",
"# T2 using start_point and end_point\n",
"start_point = (t2.shape[-2]/2, 0)\n",
"end_point = (t2.shape[-2]/2, -1)\n",
"\n",
"t2_line = interpline(t2, start_point=start_point, end_point=end_point)\n",
"print(t2_line)\n",
"\n",
"del t2_line, t2"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### 3.1.4 Vertical Coordinate Interpolation"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from wrf import vinterp, getvar\n",
"\n",
"# Interpolate tk to theta levels \n",
"tk = getvar(ncfile, \"tk\") \n",
"interp_levels = [200, 300, 500, 1000]\n",
"\n",
"interp_field = vinterp(ncfile, \n",
" field=tk, \n",
" vert_coord=\"theta\", \n",
" interp_levels=interp_levels, \n",
" extrapolate=True, \n",
" field_type=\"tk\", \n",
" log_p=True)\n",
"\n",
"print(interp_field)\n",
"del interp_field\n",
"\n",
"# Interpolate tk to theta-e levels \n",
" \n",
"interp_levels = [200, 300, 500, 1000]\n",
"\n",
"interp_field = vinterp(ncfile, \n",
" field=tk, \n",
" vert_coord=\"eth\", \n",
" interp_levels=interp_levels, \n",
" extrapolate=True, \n",
" field_type=\"tk\", \n",
" log_p=True)\n",
"\n",
"print(interp_field)\n",
"del interp_field\n",
"\n",
"# Interpolate tk to geopotential height (MSL) levels \n",
" \n",
"interp_levels = [30, 60, 90]\n",
"\n",
"interp_field = vinterp(ncfile, \n",
" field=tk, \n",
" vert_coord=\"ght_msl\", \n",
" interp_levels=interp_levels, \n",
" extrapolate=True, \n",
" field_type=\"tk\", \n",
" log_p=True)\n",
"\n",
"print(interp_field)\n",
"del interp_field\n",
"\n",
"# Interpolate tk to geopotential height (MSL) levels \n",
" \n",
"interp_levels = [30, 60, 90]\n",
"\n",
"interp_field = vinterp(ncfile, \n",
" field=tk, \n",
" vert_coord=\"ght_agl\", \n",
" interp_levels=interp_levels, \n",
" extrapolate=True, \n",
" field_type=\"tk\", \n",
" log_p=True)\n",
"\n",
"print(interp_field)\n",
"del interp_field\n",
"\n",
"# Interpolate tk to pressure levels\n",
"interp_levels = [850, 500]\n",
" \n",
"interp_field = vinterp(ncfile, \n",
" field=tk, \n",
" vert_coord=\"pressure\", \n",
" interp_levels=interp_levels, \n",
" extrapolate=True, \n",
" field_type=\"tk\", \n",
" log_p=True)\n",
"\n",
"print(interp_field)\n",
"del interp_field, tk\n"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## 3.2 Lat/Lon to X/Y Routines"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from wrf.latlon import get_ll, get_ij # These names are going to change\n",
"\n",
"a = get_ll(ncfile, [400,105], [200,205])\n",
"b = get_ij(ncfile, 45.5, -110.8)\n",
"\n",
"# Note: Lists/Dictionaries of files will add a new dimension ('domain') only if the domain is moving\n",
"c = get_ll([ncfile, ncfile, ncfile], [400,105], [200,205])\n",
"d = get_ll({\"label1\" : [ncfile, ncfile],\n",
" \"label2\" : [ncfile, ncfile]},\n",
" [400,105], [200,205])\n",
"\n",
"print(a)\n",
"print(\"\\n\")\n",
"print(b)\n",
"print(\"\\n\")\n",
"print(c)\n",
"print(\"\\n\")\n",
"print(d)"
]
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": true
},
"source": [
"# 4.0 Plotting with Cartopy"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"%matplotlib inline"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# SLP\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.cm import get_cmap\n",
"import cartopy.crs as crs\n",
"from cartopy.feature import NaturalEarthFeature\n",
"\n",
"from wrf import npvalues, getvar\n",
"\n",
"slp = getvar(ncfile, \"slp\")\n",
"lons = slp.coords[\"XLONG\"]\n",
"lats = slp.coords[\"XLAT\"]\n",
"\n",
"wrf_proj = slp.attrs[\"projection\"]\n",
"cart_proj = wrf_proj.cartopy()\n",
"\n",
"fig = plt.figure(figsize=(20,20))\n",
"ax = plt.axes([0.1,0.1,0.8,0.8], projection=cart_proj)\n",
"\n",
"states = NaturalEarthFeature(category='cultural', scale='50m', facecolor='none',\n",
" name='admin_1_states_provinces_shp')\n",
"ax.add_feature(states, linewidth=.5)\n",
"ax.coastlines('50m', linewidth=0.8)\n",
"\n",
"# Can only get this to work if I manually transform the lat/lon points to projected space.\n",
"xform_coords = cart_proj.transform_points(crs.PlateCarree(), npvalues(lons), npvalues(lats))\n",
"x = xform_coords[:,:,0]\n",
"y = xform_coords[:,:,1]\n",
"\n",
"plt.contour(x, y, npvalues(slp), 20, cmap=get_cmap(\"gist_ncar\"))\n",
"plt.colorbar(ax=ax, shrink=.7)\n",
"\n",
"ax.set_xlim(wrf_proj.cartopy_xlim())\n",
"ax.set_ylim(wrf_proj.cartopy_ylim())\n",
"ax.gridlines()\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# 500 MB Heights and Winds\n",
"\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.cm import get_cmap\n",
"import cartopy.crs as crs\n",
"from cartopy.feature import NaturalEarthFeature\n",
"\n",
"from wrf import getvar, interplevel, npvalues\n",
"\n",
"\n",
"p = getvar(ncfile, \"pressure\")\n",
"z = getvar(ncfile, \"z\", units=\"dm\")\n",
"ua = getvar(ncfile, \"ua\", units=\"kts\")\n",
"va = getvar(ncfile, \"va\", units=\"kts\")\n",
"\n",
"ht_500 = interplevel(z, p, 500)\n",
"u_500 = interplevel(ua, p, 500)\n",
"v_500 = interplevel(va, p, 500)\n",
"\n",
"lons = ht_500.coords[\"XLONG\"]\n",
"lats = ht_500.coords[\"XLAT\"]\n",
"\n",
"wrf_proj = slp.attrs[\"projection\"]\n",
"cart_proj = wrf_proj.cartopy()\n",
"\n",
"fig = plt.figure(figsize=(20,20))\n",
"ax = plt.axes([0.1,0.1,0.8,0.8], projection=cart_proj)\n",
"\n",
"states = NaturalEarthFeature(category='cultural', scale='50m', facecolor='none',\n",
" name='admin_1_states_provinces_shp')\n",
"ax.add_feature(states, linewidth=0.5)\n",
"ax.coastlines('50m', linewidth=0.8)\n",
"\n",
"# Can only get this to work if I manually transform the lat/lon points to projected space.\n",
"xform_coords = cart_proj.transform_points(crs.PlateCarree(), npvalues(lons), npvalues(lats))\n",
"x = xform_coords[:,:,0]\n",
"y = xform_coords[:,:,1]\n",
"\n",
"plt.contour(x, y, npvalues(ht_500), 20, cmap=get_cmap(\"plasma\"))\n",
"plt.barbs(x[::50,::50], y[::50,::50], npvalues(u_500[::50, ::50]), npvalues(v_500[::50, ::50]))\n",
"plt.colorbar(ax=ax, shrink=.7)\n",
"\n",
"ax.set_xlim(wrf_proj.cartopy_xlim())\n",
"ax.set_ylim(wrf_proj.cartopy_ylim())\n",
"ax.gridlines()\n",
"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"# Cross-section of pressure using xarray's builtin plotting\n",
"import numpy as np\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib.cm import get_cmap\n",
"\n",
"from wrf import getvar, vertcross, npvalues\n",
"\n",
"p = getvar(ncfile, \"pressure\")\n",
"z = getvar(ncfile, \"z\", units=\"dm\")\n",
"\n",
"pivot_point = (z.shape[-2] / 2, z.shape[-1] / 2) \n",
"angle = 90.0\n",
"\n",
"p_vert = vertcross(p, z, pivot_point=pivot_point, angle=angle)\n",
"\n",
"fig = plt.figure(figsize=(20,8))\n",
"ax = plt.axes([0.1,0.1,0.8,0.8])\n",
"\n",
"p_vert.plot.contour(ax=ax, levels=[0 + 50*n for n in xrange(20)], cmap=get_cmap(\"viridis\"))\n",
"\n",
"\n"
]
},
{
"cell_type": "markdown",
"metadata": {
"collapsed": false
},
"source": [
"# Multi-time Moving Domain Files"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"import os\n",
"from wrf import getvar\n",
"from netCDF4 import Dataset as nc\n",
"\n",
"dir = \"/Users/ladwig/Documents/wrf_files/wrf_vortex_multi\"\n",
"ncfilenames = [os.path.join(dir, x) for x in os.listdir(dir) if x.find(\"_d02_\") > 0]\n",
"ncfiles = [nc(x) for x in ncfilenames]\n",
"#print (ncfiles[0].variables[\"XLONG\"][0,0,-1], ncfiles[0].variables[\"XLONG\"][-1,0,-1])\n",
"#print (ncfiles[1].variables[\"XLONG\"][0,0,-1], ncfiles[1].variables[\"XLONG\"][-1,0,-1])\n",
"#print (ncfiles[-1].variables[\"XLONG\"][0,0,-1], ncfiles[-1].variables[\"XLONG\"][-1,0,-1])\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"p = getvar(ncfiles, \"P\", timeidx=3, method=\"join\", meta=True, squeeze=True)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"print (p)\n",
"#print (p.attrs[\"projection\"].shape)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"print (p.attrs[\"projection\"])\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"ncfiles[2].variables[\"XTIME\"][:]\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"p = getvar(ncfiles, \"P\", timeidx=None, method=\"cat\", meta=True, squeeze=True)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"print (p)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"print (type(p.coords[\"Time\"]))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"import datetime\n",
"import pandas\n",
"print (type(p.coords[\"Time\"].values.astype(datetime.datetime)))\n",
"print (repr(datetime.datetime.utcfromtimestamp(p.coords[\"Time\"][0].values.astype(int) * 1E-9)))\n",
"print (pandas.to_datetime(p.coords[\"Time\"].values))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"wrf_vars = [\"avo\", \"eth\", \"cape_2d\", \"cape_3d\", \"ctt\", \"dbz\", \"mdbz\", \n",
" \"geopt\", \"helicity\", \"lat\", \"lon\", \"omg\", \"p\", \"pressure\", \n",
" \"pvo\", \"pw\", \"rh2\", \"rh\", \"slp\", \"ter\", \"td2\", \"td\", \"tc\",\n",
" \"theta\", \"tk\", \"tv\", \"twb\", \"updraft_helicity\", \"ua\", \"va\", \n",
" \"wa\", \"uvmet10\", \"uvmet\", \"z\", \"ctt\"]\n",
"\n",
"vard = {varname: getvar(ncfiles, varname, method=\"join\", squeeze=False) for varname in wrf_vars}\n",
"for varname in wrf_vars:\n",
" print(vard[varname])"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.11"
}
},
"nbformat": 4,
"nbformat_minor": 0
}

wrf_open/test/utests.py (1 addition, 1 deletion)

@@ -5,7 +5,7 @@ import numpy.ma as ma
import os, sys
import subprocess
-from wrf.var import (getvar, interplevel, interpline, vertcross, vinterp,
+from wrf import (getvar, interplevel, interpline, vertcross, vinterp,
disable_xarray, xarray_enabled, npvalues)
NCL_EXE = "/Users/ladwig/nclbuild/6.3.0/bin/ncl"
