
Add comments

FGeo · 1 year ago · commit 07c37b3066
  1. 2_Vostok_measurements_images.ipynb (2 changed lines)
  2. 3_WRF_T2_images.ipynb (3 changed lines)
  3. 4_IP_simulations_temporal_images.ipynb (108 changed lines)
  4. 5_IP_simulations_spatial_images.ipynb (98 changed lines)
  5. readme.md (2 changed lines)

2_Vostok_measurements_images.ipynb

@@ -347,7 +347,7 @@
"outputs": [],
"source": [
"# calculate seasonal variation parameters for specific years: \n",
"# average FW PG, count of FW days, and the sum of squares of FW PG hourly values.\n",
"# average FW PG, count of FW days, the sum of squares of FW PG hourly values\n",
"\n",
"data, data_counter, data_sqr = np.array(\n",
" [\n",

3_WRF_T2_images.ipynb

@@ -5,7 +5,8 @@
"id": "0f09a974-37fe-440e-8daf-4aa0f2caae69",
"metadata": {},
"source": [
"# Analysis of temperature seasonal variation across a latitude bands"
"# Analysis of temperature seasonal variation across a latitude bands\n",
"Figure 1.4 and 2.3"
]
},
{

4_IP_simulations_temporal_images.ipynb

@@ -86,6 +86,7 @@
"metadata": {},
"outputs": [],
"source": [
"# numbers of simulated days for analysis\n",
"wrf_N_days = 4992\n",
"inm_N_days = 3650"
]
@@ -97,6 +98,9 @@
"metadata": {},
"outputs": [],
"source": [
"# dates corresponding to the indices (0 axis) of the data arrays\n",
"# note: for WRF dates correspond to real dates\n",
"\n",
"wrf_dt_indicies = np.array(\n",
" [dt.date(1980, 1, 1) + dt.timedelta(i * 3) for i in range(wrf_N_days)]\n",
")\n",
@@ -113,12 +117,23 @@
"metadata": {},
"outputs": [],
"source": [
"# dictionaries where processed data is saved. The dictionary keys represent the\n",
"# threshold value of CAPE - integer numbers from the list 500, 800, 1000, 1200,\n",
"# where the key 500 relates only to temperature-based modeling and is present \n",
"# only in dictionaries wrf_<...>, while keys 800, 1000, 1200 correspond to \n",
"# classical modeling using the WRF or INMCM model.\n",
"\n",
"# dict contains arrays with dimensions (4992, 24)\n",
"# where axis 0 contains the day index (see `wrf_dt_indicies`)\n",
"wrf_hourly_total_ip = {\n",
" key: np.load(f\"./data/WRF/WRF_HOURLY_TOTAL_IP_{parameters}.npy\")[:wrf_N_days]\n",
" for key, parameters in zip([500, 800, 1000, 1200],\n",
" [\"500_T2_25\", \"800\", \"1000\", \"1200\"])\n",
"}\n",
"\n",
"# dict contains arrays with dimensions (3650, 24)\n",
"# where axis 0 contains the day index (see `inm_dt_indicies`)\n",
"# note: years in `inm_dt_indicies` is not real\n",
"inm_hourly_total_ip = {\n",
" key: np.load(f\"./data/INMCM/INMCM_HOURLY_TOTAL_IP_{parameters}.npy\")[:inm_N_days]\n",
" for key, parameters in zip([800, 1000, 1200],\n",
@@ -141,37 +156,64 @@
"metadata": {},
"outputs": [],
"source": [
"# calculate seasonal variation parameters for different parametrizations\n",
"\n",
"# 7 sets for different axes of the figure\n",
"# each set with 12 values for each month\n",
"\n",
"# monthly mean values of IP \n",
"data = np.zeros((7, 12))\n",
"\n",
"# count of days per month\n",
"data_counter = np.zeros((7, 12), dtype=int)\n",
"\n",
"# the sum of squares of IP daily values\n",
"data_sqr = np.zeros((7, 12))\n",
"\n",
"for j, cape_thres in enumerate([800, 1000, 1200]):\n",
" for m in range(12):\n",
"\n",
" # fill data for WRF panels\n",
" # the indices `ax_idx` from 0 to 6 correspond to the axes:\n",
" # 0 - WRF, 1980-2020, CAPE=800,\n",
" # 1 - INMCM, 10 years, CAPE=800,\n",
" # 2 - WRF, 1980-2020, CAPE=1000,\n",
" # 3 - INMCM, 10 years, CAPE=1000,\n",
" # 4 - WRF, 1980-2020, CAPE=1200,\n",
" # 5 - INMCM, 10 years, CAPE=1200,\n",
" # 6 - Vostok station, 2006-2020\n",
"\n",
" # calculate axes index for WRF axes (0, 2, 4)\n",
" ax_idx = j * 2\n",
"\n",
" # filtering day indices belonging to a specific month\n",
" wrf_inds = [i for i, date in enumerate(wrf_dt_indicies) \n",
" if date.month == m + 1]\n",
"\n",
" # slice IP for specific CAPE and day indices\n",
" ip = wrf_hourly_total_ip[cape_thres][wrf_inds]\n",
"\n",
" # calculate seasonal variation parameters\n",
" data[ax_idx, m] = ip.mean()\n",
" data_counter[ax_idx, m] = len(ip)\n",
" data_sqr[ax_idx, m] = np.sum(\n",
" ip.mean(axis=-1) ** 2\n",
" ) / len(ip)\n",
" data_sqr[ax_idx, m] = np.sum(ip.mean(axis=-1) ** 2) / len(ip)\n",
"\n",
" # fill data for INMCM panels\n",
" # calculate axes index for INMCM axes (1, 3, 5)\n",
" ax_idx = j * 2 + 1\n",
"\n",
" # filtering day indices belonging to a specific month\n",
" inmcm_inds =[i for i, date in enumerate(inm_dt_indicies) \n",
" if date.month == m + 1]\n",
" \n",
" # slice IP for specific CAPE and day indices\n",
" ip = inm_hourly_total_ip[cape_thres][inmcm_inds]\n",
"\n",
" # calculate seasonal variation parameters\n",
" data[ax_idx, m] = ip.mean()\n",
" data_counter[ax_idx, m] = len(ip)\n",
" data_sqr[ax_idx, m] = np.sum(\n",
" ip.mean(axis=-1) ** 2\n",
" ) / len(ip)\n",
" data_sqr[ax_idx, m] = np.sum(ip.mean(axis=-1) ** 2) / len(ip)\n",
"\n",
"# latest point is Vostok results from previous script\n",
"# keys: mean, counter, sqr\n",
"# the last set is loaded from the processed data of Vostok station in script 2\n",
"# the data is loaded as a dictionary with keys `mean`, `counter`, `sqr`\n",
"# index 0: 2006-2020, index 1: 2006-2012, index 2: 2013-2020\n",
"vostok_results = np.load(\"./data/Vostok/vostok_2006_2020_results.npz\")\n",
"\n",
@@ -304,10 +346,26 @@
"metadata": {},
"outputs": [],
"source": [
"# calculate seasonal variation parameters for different temporal parts\n",
"\n",
"# 8 sets for different axes of the figure\n",
"# each set with 12 values for each month\n",
"\n",
"# monthly mean values of IP \n",
"data = np.zeros((7, 12))\n",
"\n",
"# count of days per month\n",
"data_counter = np.zeros((7, 12), dtype=int)\n",
"\n",
"# the sum of squares of IP daily values\n",
"data_sqr = np.zeros((7, 12))\n",
"\n",
"data = np.zeros((8, 12))\n",
"data_counter = np.zeros((8, 12), dtype=int)\n",
"data_sqr = np.zeros((8, 12))\n",
"\n",
"# to construct this figure we divide the datasets into equal ranges of years\n",
"# the dictionary keys below denote the axis number on the figure\n",
"wrf_ranges = {\n",
" 0: range(1981, 1990 + 1),\n",
" 1: range(1991, 2000 + 1),\n",
@@ -323,11 +381,17 @@
"for m in range(12):\n",
" for ax_idx in range(6):\n",
" if ax_idx in [0, 1, 2, 3]:\n",
"\n",
" # filtering day indices belonging to a specific month\n",
" wrf_inds = [i for i, date in enumerate(wrf_dt_indicies)\n",
" if date.month == m + 1\n",
" and date.year in wrf_ranges[ax_idx]\n",
" ]\n",
" ip = wrf_hourly_total_ip[1000][wrf_inds]\n",
"\n",
" # slice IP for CAPE = 1000 and aforementioned day indices\n",
" ip = wrf_hourly_total_ip[1000][wrf_inds]\n",
"\n",
" # calculate seasonal variation parameters\n",
" data[ax_idx, m] = ip.mean()\n",
" data_counter[ax_idx, m] = len(ip)\n",
" data_sqr[ax_idx, m] = np.sum(ip.mean(axis=-1) ** 2) / len(ip)\n",
@@ -337,23 +401,29 @@
" if date.month == m + 1\n",
" and i//365 in inm_ranges[ax_idx]\n",
" ]\n",
"\n",
" # slice IP for CAPE = 1000 and aforementioned day indices\n",
" ip = inm_hourly_total_ip[1000][inmcm_inds]\n",
"\n",
" # calculate seasonal variation parameters\n",
" data[ax_idx, m] = ip.mean()\n",
" data_counter[ax_idx, m] = len(ip)\n",
" data_sqr[ax_idx, m] = np.sum(ip.mean(axis=-1) ** 2) / len(ip)\n",
"\n",
"# latest point is Vostok results from previous script\n",
"# keys: mean, counter, sqr\n",
"# the last sets is loaded from the processed data of Vostok station in script 2\n",
"# the data is loaded as a dictionary with keys `mean`, `counter`, `sqr`\n",
"# index 0: 2006-2020, index 1: 2006-2012, index 2: 2013-2020\n",
"vostok_results = np.load(\"./data/Vostok/vostok_2006_2020_results.npz\")\n",
"\n",
"data[-2] = vostok_results[\"mean\"][1]\n",
"data_counter[-2] = vostok_results[\"counter\"][1]\n",
"data_sqr[-2] = vostok_results[\"sqr\"][1]\n",
"# part 2006-2012\n",
"data[6] = vostok_results[\"mean\"][1]\n",
"data_counter[6] = vostok_results[\"counter\"][1]\n",
"data_sqr[6] = vostok_results[\"sqr\"][1]\n",
"\n",
"data[-1] = vostok_results[\"mean\"][2]\n",
"data_counter[-1] = vostok_results[\"counter\"][2]\n",
"data_sqr[-1] = vostok_results[\"sqr\"][2]"
"# part 2013-2020\n",
"data[7] = vostok_results[\"mean\"][2]\n",
"data_counter[7] = vostok_results[\"counter\"][2]\n",
"data_sqr[7] = vostok_results[\"sqr\"][2]"
]
},
{

5_IP_simulations_spatial_images.ipynb

@@ -67,6 +67,7 @@
"metadata": {},
"outputs": [],
"source": [
"# area factors for different latitudes\n",
"area_factor = (\n",
" np.cos(np.arange(180) * np.pi / 180) - \n",
" np.cos(np.arange(1, 181) * np.pi / 180)\n",
@@ -196,6 +197,7 @@
"metadata": {},
"outputs": [],
"source": [
"# numbers of simulated days for analysis\n",
"wrf_N_days = 4992\n",
"inm_N_days = 3650"
]
@@ -207,6 +209,9 @@
"metadata": {},
"outputs": [],
"source": [
"# dates corresponding to the indices (0 axis) of the data arrays\n",
"# note: for WRF dates correspond to real dates\n",
"\n",
"wrf_dt_indicies = np.array(\n",
" [dt.date(1980, 1, 1) + dt.timedelta(i * 3) for i in range(wrf_N_days)]\n",
")\n",
@@ -229,20 +234,31 @@
},
"outputs": [],
"source": [
"# dictionaries where processed data is saved. The dictionary keys represent the\n",
"# threshold value of CAPE - integer numbers from the list 500, 800, 1000, 1200,\n",
"# where the key 500 relates only to temperature-based modeling and is present \n",
"# only in dictionaries wrf_<...>, while keys 800, 1000, 1200 correspond to \n",
"# classical modeling using the WRF or INMCM model.\n",
"\n",
"# averaged air temperature at the height of 2 meter with the shape (180, 12)\n",
"wrf_LATxMON_t2 = np.load(\"./data/WRF/WRF_T2_LATxMON.npy\")\n",
"\n",
"# summed WRF IP along longitudes and averaged over months with shape (180, 12)\n",
"wrf_LATxMON_ip = {\n",
" key: np.load(f\"./data/WRF/WRF_IP_{parameters}_LATxMON.npy\")\n",
" for key, parameters in zip([500, 800, 1000, 1200],\n",
" [\"500_T2_25\", \"800\", \"1000\", \"1200\"])\n",
"}\n",
"\n",
"# the same for INMCM\n",
"inm_LATxMON_ip = {\n",
" key: np.load(f\"./data/INMCM/INMCM_IP_{parameters}_LATxMON.npy\")\n",
" for key, parameters in zip([800, 1000, 1200],\n",
" [\"800\", \"1000\", \"1200\"])\n",
"}\n",
"\n",
"# dict contains arrays with dimensions (4992, 24)\n",
"# where axis 0 contains the day index (see `wrf_dt_indicies`)\n",
"wrf_hourly_total_ip = {\n",
" key: np.load(f\"./data/WRF/WRF_HOURLY_TOTAL_IP_{parameters}.npy\")[:wrf_N_days]\n",
" for key, parameters in zip([500, 800, 1000, 1200],\n",
@@ -313,23 +329,39 @@
"source": [
"# calculate the average contribution of the regions to \n",
"# the ionospheric potential (as % of total IP)\n",
"# for specific regions, models and parameters CAPE / T2\n",
"# for specific latitudinal bands and parameterizations\n",
"\n",
"# select the latitudinal bands for analysis\n",
"band_names = [\"9S-9N\", \"18S-18N\", \"30S-30N\"]\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
"inmcm_bands = [LatitudeBand(t, model=\"INMCM\") for t in band_names]\n",
" \n",
"\n",
"print(\"Model | CAPE | T2,°C | Band | % \")\n",
"\n",
"# iterate over the latitude bands selected above\n",
"for i, (wrf_band, inm_band) in enumerate(zip(wrf_bands, inmcm_bands)):\n",
" print(\"-\"*41)\n",
"\n",
" # we will analyze all parameterizations only in the 9N-9S band; in the \n",
" # other bands, we will consider only the parameterization with CAPE = 1000\n",
" capes = [1000]\n",
" if i == 0:\n",
" capes = [500, 800, 1000, 1200]\n",
"\n",
" # iterate over different parameterizations\n",
" for cape in capes:\n",
" # calculation of the seasonal variation of contribution to IP \n",
" # from i-nth band -> array (12,)\n",
" wrf_ip_MON = wrf_LATxMON_ip[cape][wrf_band.slice].sum(axis=0)\n",
"\n",
" # calculation of the contribution of the n-th band \n",
" # to the total IP in percentage -> single value\n",
" wrf_share = (wrf_ip_MON*wrf_numdays).sum() / wrf_numdays.sum() / 240e3 * 100\n",
" # note: The averaging by months takes into account the number of \n",
" # days taken from the original series for each month, as direct \n",
" # averaging may lead to a slight error\n",
"\n",
" # the same for INMCM\n",
" if cape != 500:\n",
" inm_ip_MIN = inm_LATxMON_ip[cape][inm_band.slice].sum(axis=0)\n",
" inm_share = (inm_ip_MIN*inm_numdays).sum() / inm_numdays.sum() / 240e3 * 100\n",
@@ -384,26 +416,46 @@
}
],
"source": [
"# calculation of: \n",
"# (i) the positions of the extrema of the seasonal \n",
"# variation of the contribution to IP from \n",
"# individual latitudinal bands,\n",
"# (ii) the amplitudes of the peak-to-peak amplitudes \n",
"# of the seasonal variation <-//->,\n",
"# for different parameterizations\n",
"\n",
"# select the latitudinal bands for analysis\n",
"band_names = [\"90S-9S\", \"9S-9N\", \"9N-90N\"]\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
"inmcm_bands = [LatitudeBand(t, model=\"INMCM\") for t in band_names]\n",
"\n",
"print(\"Model | CAPE | T2,°C | Band | Min | Max | Peak-to-peak, %\")\n",
"\n",
"# iterate over the latitude bands selected above\n",
"for i, (wrf_band, inm_band) in enumerate(zip(wrf_bands, inmcm_bands)):\n",
" print(\"-\"*63)\n",
" for cape in [500, 800, 1000, 1200]:\n",
" \n",
" # calculation of the seasonal variation of contribution to IP \n",
" # from i-nth band -> array (12,)\n",
" seas_var = wrf_LATxMON_ip[cape][wrf_band.slice].sum(axis=0)\n",
"\n",
" # calculation of the positions of the extrema\n",
" month_min = month_name_3[np.argmin(seas_var)]\n",
" month_max = month_name_3[np.argmax(seas_var)]\n",
"\n",
" # calculation of the peak-to-peak amplitude of the seasonal variation\n",
" pk_pk_ampl = (seas_var.max() - seas_var.min())/seas_var.mean() * 100\n",
"\n",
" # the same for INMCM\n",
" if cape != 500:\n",
" seas_var = inm_LATxMON_ip[cape][inm_band.slice].sum(axis=0) \n",
" month_min = month_name_3[np.argmin(seas_var)]\n",
" month_max = month_name_3[np.argmax(seas_var)]\n",
" pk_pk_ampl = (seas_var.max() - seas_var.min())/seas_var.mean() * 100\n",
"\n",
" print(f\"INMCM | {cape:4} | - |{inm_band.label:>11} | {month_min} | {month_max} | {pk_pk_ampl:>6.2f}%\")\n",
" \n",
" seas_var = wrf_LATxMON_ip[cape][wrf_band.slice].sum(axis=0)\n",
" month_min = month_name_3[np.argmin(seas_var)]\n",
" month_max = month_name_3[np.argmax(seas_var)]\n",
" pk_pk_ampl = (seas_var.max() - seas_var.min())/seas_var.mean() * 100\n",
" \n",
"\n",
" p = \"25 \" if cape == 500 else \"- \"\n",
" print(f\"WRF | {cape:4} | {p:<5} |{inm_band.label:>11} | {month_min} | {month_max} | {pk_pk_ampl:>6.2f}%\")"
]
@@ -449,19 +501,27 @@
}
],
"source": [
"# calculation of the maximum values of the contribution\n",
"# to IP from latitudinal bands, calculation is needed for \n",
"# manual refinement of the figure boundaries.\n",
"\n",
"# select the latitudinal bands for analysis\n",
"band_names = [\"18N-90N\", \"9N-18N\", \"0-9N\", \"0-9S\", \"9S-18S\", \"18S-90S\"]\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
"inmcm_bands = [LatitudeBand(t, model=\"INMCM\") for t in band_names]\n",
"\n",
"print(\"CAPE = 1000 J/kg\\n\")\n",
"print(\"Model | Band | Max IP contrib.\")\n",
"\n",
"# iterate over the latitude bands selected above\n",
"for i, (wrf_band, inm_band) in enumerate(zip(wrf_bands, inmcm_bands)):\n",
" print(\"-\"*31)\n",
"\n",
" # calculation of the maximum (over months axis) values, V\n",
" wrf_max = wrf_LATxMON_ip[1000][wrf_band.slice].sum(axis=0).max()\n",
" inm_max = inm_LATxMON_ip[1000][inm_band.slice].sum(axis=0).max()\n",
" print(f\"WRF | {wrf_band.label:>9} | {wrf_max:>9.2f} V\")\n",
" print(f\"INMCM | {inm_band.label:>9} | {inm_max:>9.2f} V\")\n",
" # inm_LATxMON_ip[1000]"
" print(f\"INMCM | {inm_band.label:>9} | {inm_max:>9.2f} V\")"
]
},
{
@@ -492,12 +552,22 @@
}
],
"source": [
"# calculation of the maximum and minimum values of the temperature at the\n",
"# 2-meter level in each latitude band. The temperature is averaged over \n",
"# spatial cells within each band. Calculation is needed for manual \n",
"# refinement of the figure boundaries.\n",
"\n",
"# select the latitudinal bands for analysis\n",
"band_names = [\"18N-30N\", \"9N-18N\", \"0-9N\", \"0-9S\", \"9S-18S\", \"18S-30S\"]\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
"\n",
"print(\"Model | Band | Min T2 | Max T2\")\n",
"print(\"-\" * 37)\n",
"\n",
"# iterate over the latitude bands selected above\n",
"for wrf_band in wrf_bands:\n",
"\n",
" # averaging is performed with consideration of the area factor\n",
" tmp_t2 = (\n",
" wrf_LATxMON_t2[wrf_band.slice] * area_factor[wrf_band.slice, np.newaxis]\n",
" ).sum(axis=0) / area_factor[wrf_band.slice, np.newaxis].sum()\n",
@@ -526,6 +596,7 @@
"metadata": {},
"outputs": [],
"source": [
"# select the latitudinal bands for analysis\n",
"band_names = [\"0-9N\", \"0-9S\", \"9S-18S\", \"18S-90S\", \"9N-18N\", \"18N-90N\"]\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
"inmcm_bands = [LatitudeBand(t, model=\"INMCM\") for t in band_names]\n",
@@ -660,6 +731,7 @@
"metadata": {},
"outputs": [],
"source": [
"# select the latitudinal bands for analysis\n",
"band_names = [\"18N-90N\", \"9N-18N\", \"0-9N\", \"0-9S\", \"9S-18S\", \"18S-90S\"]\n",
"\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
@@ -964,11 +1036,6 @@
"axM.set_title('Partition of the Earth’s surface into regions',\n",
" fontsize='large', pad=10)\n",
"\n",
"\n",
"#####################\n",
"### Plotting data ###\n",
"#####################\n",
"\n",
"for j in range(6):\n",
" ax[2*j].bar(range(12), wrf_ip_sum_over_bands[j],\n",
" 0.8, color=wrf_bands[j].color)\n",
@@ -1087,6 +1154,7 @@
"metadata": {},
"outputs": [],
"source": [
"# select the latitudinal bands for analysis\n",
"band_names = [\"0-9N\", \"0-9S\", \"9S-18S\", \"18S-90S\", \"9N-18N\", \"18N-90N\"]\n",
"wrf_bands = [LatitudeBand(t, model=\"WRF\") for t in band_names]\n",
"inmcm_bands = [LatitudeBand(t, model=\"INMCM\") for t in band_names]\n",

readme.md

@@ -77,7 +77,7 @@ We then find the regression coefficients `temp_coeffs`, `wind_coeffs`, and `pres
Using these regression coefficients, we remove the linear dependence on the meteorological parameter anomalies. The corrected PG is saved in `meteo_df["CorrectedField"]`.
Finally, we construct Figure 4 using the prepared data in the same manner as was done for Figures 1.2 and 1.3.
Finally, we construct Figure 1.5 using the prepared data in the same manner as was done for Figures 1.2 and 1.3.
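A minimal sketch of this correction step is given below. The column names and sample values are illustrative rather than taken from the repository, and an ordinary least-squares fit via `np.linalg.lstsq` stands in for however the coefficients `temp_coeffs` and `wind_coeffs` are actually obtained; only `meteo_df["CorrectedField"]` corresponds to the scripts.

```python
import numpy as np
import pandas as pd

# illustrative stand-in for the measurement table used in the scripts
meteo_df = pd.DataFrame({
    "Field":       [145.0, 152.0, 160.0, 138.0],  # hourly PG values, V/m
    "TempAnomaly": [-1.2, 0.5, 2.1, -0.8],        # meteorological anomalies
    "WindAnomaly": [0.3, -0.6, 1.1, 0.2],
})

# least-squares fit of PG against the anomalies (plus an intercept),
# analogous to finding temp_coeffs and wind_coeffs
X = meteo_df[["TempAnomaly", "WindAnomaly"]].to_numpy()
y = meteo_df["Field"].to_numpy()
coeffs, *_ = np.linalg.lstsq(np.column_stack([X, np.ones(len(y))]), y, rcond=None)

# subtract the fitted linear dependence on the anomalies, keeping the mean level
meteo_df["CorrectedField"] = y - X @ coeffs[:2]
```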
## Script `3_WRF_T2_images.ipynb`
