# smc_1D-2D_1979-2012_NAmerica_Normal1.py
# This Python 2.7 code was used on CEDA JASMIN to produce monthly
# animation frames for an animated GIF (later converted to an MP4),
# showing soil moisture and its seasonal anomaly as two time series
# over Normal, Illinois, for 1979-2012,
# using monthly-averaged JULES land-only 1D data as input.
# It was adapted from the code given in Emma Robinson's
# data-visualization tutorial for plotting JULES data at http://jules.jchmr.org/content/training .
# The adaptation was done in November-December 2017 by Patrick McGuire and Pier Luigi Vidale
# at the University of Reading (email: p.mcguire@reading.ac.uk )
from netCDF4 import Dataset, num2date, date2num
# Import libraries useful for times
import datetime as dt
import calendar as cal
# widget library
#from ipywidgets.widgets import *
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
# Import locators for fancying up plots
from matplotlib.dates import YearLocator, MonthLocator
from matplotlib.ticker import MultipleLocator
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.colors import ListedColormap
def ReadData(fname, vname, rescale, compute_timebnds):
    # Open the file
    f = Dataset(fname,'r')
    # Print file headers
    #print f
    # Print variable information
    #print f.variables['fqw_gb']
    #print f.variables['ftl_gb']
    # Read the requested variable
    data = f.variables[vname][:]
    # Get the shape
    nt, ny, nx = data.shape
    # Since we know that the y dimension is degenerate, we select y=0 and collapse
    # the data to 2d (nt*nx)
    data = data[:,0,:]
    # Get the fill value for later use
    fill_value = f.variables[vname]._FillValue
    # Get latitude and longitude variables
    # latitude and longitude are ny*nx, so we select y=0 and collapse to a vector
    lat = f.variables['latitude'][0,:]
    lon = f.variables['longitude'][0,:]
    # Find information about times
    startyr, startmn, startdy = [int(t) \
        for t in f.variables['time'].units.split()[2].split('-')]
    calendar = f.variables['time'].calendar
    if(compute_timebnds):
        # Read the time and convert to datetime data structures
        # Since we're looking at monthly averages, we use the time at the start of the
        # month. So we read the time_bounds array and use the lower value
        # time_bounds = f.variables['time_bounds'][:]
        time_bounds = f.variables['time_bnds'][:]
        time = [dt.datetime(startyr,startmn,startdy)+dt.timedelta(seconds=int(t)) \
                for t in time_bounds[:,0] ]
        # We want to rescale the precip from kg/m2/s to mm/month, so we need to
        # know how long the month is
        days_in_month = np.array([ [cal.monthrange(t.year,t.month)[1],] for t in time ])
        secs_in_day = 86400
        if(rescale):
            # Rescale precip
            data *= (days_in_month * secs_in_day)
    else:
        time = 0
    # Close the file
    f.close()
    return data, lon, lat, nt, nx, time, calendar, fill_value
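# A minimal sketch (hedged) of how ReadData is meant to be used: for a JULES
# land-only monthly file laid out as (time, y=1, x), it returns a (nt, nx) data
# array, 1D lon/lat vectors, and the start-of-month times (if compute_timebnds=1).
# With rescale=1, a flux in kg m-2 s-1 is converted to mm/month, e.g.
# 1e-5 kg m-2 s-1 * 86400 s/day * 30 days/month ~ 26 mm/month.
# The filename below is purely illustrative, not one of the inputs used in this script:
# data, lon, lat, nt, nx, time, calendar, fill_value = \
#     ReadData('some_jules_monmean_1979.nc', 'precip', rescale=1, compute_timebnds=1)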
def VectorToGrid(data,lon,lat,nt,nx,fill_value):
    # Define the grid we want to end up on
    lon_min = -180.0
    lon_max = 180.0
    dlon = 0.5
    lat_min = -90.0
    lat_max = 90.0
    dlat = 0.5
    # Create the grid
    grid_lon, grid_lat = np.meshgrid( np.arange( lon_min+dlon/2., lon_max, dlon ), \
                                      np.arange( lat_min+dlat/2., lat_max, dlat ) )
    # Grid shape
    ny_grid, nx_grid = grid_lon.shape
    # Map the vector to the grid
    # If it's not a regular lat/lon grid, then use the np.where function to find
    # the right point in the grid
    # indx = [np.where( np.logical_and( grid_lon == lon[i], grid_lat == lat[i] )) \
    #         for i in range(nx) ]
    # But, since this is regular lat/lon, we can save time and calculate where
    # each point will be relative to the minimum values
    # This is quicker than the np.where function call
    indx = zip(* [ [ int((lat[i] - lat_min)/dlat), int((lon[i] - lon_min)/dlon) ] \
                   for i in range(nx)] )
    # Create a new masked array with no data in it
    data_grid = np.ma.masked_equal( np.ones([nt,ny_grid,nx_grid])*fill_value, \
                                    fill_value )
    # Put the vector data into the grid
    data_grid[:,indx[0],indx[1]] = data[:]
    return data_grid, grid_lon, grid_lat, lon_min, lon_max, lat_min, lat_max
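# Worked example (for illustration only) of the regular-grid index mapping used
# above: on the global 0.5-degree grid with cell centers at *.25/*.75, a point at
# lon=-89.25, lat=40.25 maps to column int((-89.25-(-180.0))/0.5) = 181 and
# row int((40.25-(-90.0))/0.5) = 260, i.e. grid_lon[0,181] = -89.25 and
# grid_lat[260,0] = 40.25.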
def custom_div_cmap(numcolors=13, name='custom_div_cmap',
                    mincol='darkred', midcol='white', maxcol='darkblue'):
    """ Create a custom diverging colormap with three colors

    Default is dark red to white to dark blue with 13 colors. Colors can be
    specified in any way understandable by matplotlib.colors.ColorConverter.to_rgb()
    """
    cmap2 = LinearSegmentedColormap.from_list(name=name,
                                              colors=[mincol, midcol, maxcol],
                                              N=numcolors)
    return cmap2
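# Example (illustrative only, not used below): the same helper could build a
# different diverging colormap, e.g. a hypothetical brown-to-teal map for anomalies:
# anom_cmap = custom_div_cmap(numcolors=11, mincol='saddlebrown', midcol='white', maxcol='teal')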
bwr_custom = custom_div_cmap()
#Make sure to pick subset LAT/LONs that are half a pixel off of whole-degree values
#(i.e. 0.25- or 0.75-degree offsets), so they line up with the 0.5-degree cell centers
#Africa
#lon_min_Subset = -30.25
#lon_max_Subset = 60.25
#lat_min_Subset = -40.25
#lat_max_Subset = 40.25
#NAmerica
lon_min_Subset = -170.25
lon_max_Subset = -49.75
lat_min_Subset = 4.75
lat_max_Subset = 80.25
#Normal, Illinois
#to nearest quarter degree
lon_Point = -89.00
lat_Point = 40.50
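#Note: the 0.5-degree cell centers sit at *.25/*.75, so a point given on a *.00/*.50
#value (like -89.00, 40.50 here) is equidistant from two centers; np.argmin then
#returns the first (lower-index) match, i.e. the cell centered at -89.25, 40.25.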
#fvarname = ['EVAP', 'GPP', 'WAIR']
#ovarname= ['EnsembleLEcor_May12','gpp','WAI']
#mvarname= ['esoil_mean','gpp_mean','smc_avail_tot_mean']
##Set JULES experiments start/end dates
#dStart = datetime.datetime(1979,1,1)
#dEnd = datetime.datetime(1982,12,31)
#dateIndex = pandas.date_range(start=dStart, end=dEnd, freq='M')
##Now set the plot limits
##t0=datetime.datetime(1997,01,01,00,00) # x axis startpoint
##t1=datetime.datetime(1999,01,01,00,00) # x axis endpoint
#t0=datetime.datetime(1979,01,01,00,00) # x axis startpoint
#t1=datetime.datetime(1982,12,31,00,00) # x axis endpoint
#obs_dir='/group_workspaces/jasmin2/ncas_generic/users/pmcguire/analysis/obs/BGI_validation/data/BGI/'
#vname = 'precip'
#vnamelong = 'Precipitation'
#vnamedir = 'precip'
#cmap0 = 'gist_rainbow'
##units = 'mm/s'
##vmin0 = 0
##vmax0 = 1e-4
##tickformat = '%.0e'
#units = 'mm/month'
#vmin0 = 0
#vmax0 = 300
#tickformat = '%.0f'
#rescale = 1
#PlotArray = 0
#PlotSeries = 1
vname = 'smc_avail_tot'
vnamelong = 'Soil_moisture_avail_tot'
vnamedir = 'smc_NAmerica_Normal'
#cmap0 = custom_div_cmap(numcolors=5)
cmap0=ListedColormap(['DarkRed','Salmon','Orange','Yellow','LightGreen','Green','Cyan','LightBlue','Blue','DarkBlue','Indigo','DarkViolet'])
units = 'mm'
vmin0 = 0
vmax0 = 600
tickformat = '%.0f'
tickspacing = np.arange(0,650,50.0)
rescale = 0
PlotArray = 0
PlotSeries = 1
Location = 'Normal, Illinois'
YearMin = 1979
YearMax = 2012
YEARS = YearMax-YearMin+1
# vname = 'npp_gb'
# vnamelong = 'Net_primary_production(NPP)'
# vnamedir = 'npp'
# cmap0 = 'PRGn'
# units = 'kg m-2 s-1'
# vmin0 = -5e-8
# vmax0 = 5e-8
# tickformat = '%.0e'
# rescale = 0
# PlotArray = 0
# PlotSeries = 1
t0=dt.datetime(YearMin,1,1,0,0) # x axis startpoint
t1=dt.datetime(YearMax,12,31,0,0) # x axis endpoint
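# The two files below appear to hold the precomputed 1979-2012 monthly climatology:
# the multi-year mean for each calendar month (ymonavg) and its standard deviation
# (ymonstd1), presumably produced offline (e.g. with CDO's ymonavg/ymonstd1 operators).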
fname_avg = 'Euro44_bvv_nancil_CTL-BCJ-GL_jules-vn4.9p_u-as052globeE_monmean_1979_2012_ymonavg.nc'
fname_std1 = 'Euro44_bvv_nancil_CTL-BCJ-GL_jules-vn4.9p_u-as052globeE_monmean_1979_2012_ymonstd1.nc'
data_avg, lon_avg, lat_avg, nt_avg, nx_avg, time_avg, calendar_avg, fill_value_avg = ReadData(fname_avg,vname,rescale,compute_timebnds=0)
data_grid_avg,grid_lon_avg,grid_lat_avg,lon_min_avg,lon_max_avg,lat_min_avg,lat_max_avg = VectorToGrid(data_avg,lon_avg,lat_avg,nt_avg,nx_avg,fill_value_avg)
data_std1, lon_std1, lat_std1, nt_std1, nx_std1, time_std1, calendar_std1, fill_value_std1 = ReadData(fname_std1,vname,rescale,compute_timebnds=0)
data_grid_std1,grid_lon_std1,grid_lat_std1,lon_min_std1,lon_max_std1,lat_min_std1,lat_max_std1 = VectorToGrid(data_std1,lon_std1,lat_std1,nt_std1,nx_std1,fill_value_std1)
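# Build the full monthly date axis (placeholder values; each entry is overwritten
# below with the time read from the corresponding yearly file) and pre-allocate the
# point time series: the raw monthly values, the climatological monthly mean, the
# +/-1 standard-deviation envelope, and the standardized anomaly, plus constant
# ones/zeros arrays used for the reference band and zero line in the anomaly panel.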
dates = [dt.date(year,month,1) for year in range(YearMin,YearMax+1) for month in range(1,13)]
data_series = np.zeros(12*YEARS)
data_series_avg = np.zeros(12*YEARS)
data_series_std1 = np.zeros(12*YEARS)
data_series_anom = np.zeros(12*YEARS)
data_series_ones = np.ones(12*YEARS)
data_series_zeros = np.zeros(12*YEARS)
for year in range(YearMin,YearMax+1):
    #fname = '/work/scratch/pmcguire/config/outputs/wfdei_WRR2_4.9positiverain_16proc45min_U-aq934.monthly.2011.nc'
    # print 'Year='+str(year)
    fname = 'Euro44_bvv_nancil_CTL-BCJ-GL_jules-vn4.9p_u-as052globeE_monmean_'+str(year)+'.nc'
    fname_anom = 'Euro44_bvv_nancil_CTL-BCJ-GL_jules-vn4.9p_u-as052globeE_monmean_'+str(year)+'__1979_2012_ymonstd1anom.nc'
    data, lon, lat, nt, nx, time, calendar0, fill_value = ReadData(fname,vname,rescale, compute_timebnds=1)
    data_grid,grid_lon,grid_lat,lon_min,lon_max,lat_min,lat_max = VectorToGrid(data,lon,lat,nt,nx,fill_value)
    data_anom, lon_anom, lat_anom, nt_anom, nx_anom, time_anom, calendar_anom, fill_value_anom = ReadData(fname_anom,vname,rescale, compute_timebnds=1)
    data_grid_anom,grid_lon_anom,grid_lat_anom,lon_min_anom,lon_max_anom,lat_min_anom,lat_max_anom = VectorToGrid(data_anom,lon_anom,lat_anom,nt_anom,nx_anom,fill_value_anom)
    #print data_grid.shape
    for month in range(0,12):
        dates[month+(year-1979)*12] = time[month]
        if(month<9):
            month2='0'+str(month+1)
        else:
            month2=str(month+1)
        # print 'Year='+str(year)+' Month='+month2
        # latitude lower and upper index
        latli = np.argmin( np.abs( grid_lat[:,0] - lat_min_Subset ) )
        latui = np.argmin( np.abs( grid_lat[:,0] - lat_max_Subset ) )
        # print 'Lat index min/max='+str(latli)+' '+str(latui)
        # print 'Lat min/max='+str(grid_lat[latli,0])+' '+str(grid_lat[latui,0])
        lati = np.argmin( np.abs( grid_lat[:,0] - lat_Point) )
        # print 'Lat index ='+str(lati)
        # print 'Lat ='+str(grid_lat[lati,0])
        # longitude lower and upper index
        lonli = np.argmin( np.abs( grid_lon[0,:] - lon_min_Subset) )
        lonui = np.argmin( np.abs( grid_lon[0,:] - lon_max_Subset) )
        # print 'Lon index min/max='+str(lonli)+' '+str(lonui)
        # print 'Lon min/max='+str(grid_lon[0,lonli])+' '+str(grid_lon[0,lonui])
        loni = np.argmin( np.abs( grid_lon[0,:] - lon_Point) )
        # print 'Lon index ='+str(loni)
        # print 'Lon ='+str(grid_lon[0,loni])
        data_series[month+(year-1979)*12] = data_grid[month,lati,loni]
        data_series_avg[month+(year-1979)*12] = data_grid_avg[month,lati,loni]
        data_series_std1[month+(year-1979)*12] = data_grid_std1[month,lati,loni]
        data_series_anom[month+(year-1979)*12] = data_grid_anom[month,lati,loni]
        if(PlotArray):
            # Create a new 12x8-inch figure with a single set of axes
            fig,ax = plt.subplots(1,1,figsize=(12.,8.))
            # Make a world map
            m = Basemap(projection='cyl', resolution='c', ax = ax , \
                        llcrnrlat = lat_min_Subset, \
                        llcrnrlon = lon_min_Subset, \
                        urcrnrlat = lat_max_Subset, \
                        urcrnrlon = lon_max_Subset )
            m.drawcoastlines(zorder=2)
            m.fillcontinents([0.8,0.8,0.8],zorder=0)
            # Plot array as a colormap
            #im = ax.imshow(data_grid[7,:,:], cmap = 'gist_ncar', \
            im = ax.imshow(data_grid[month,latli:latui,lonli:lonui], cmap = cmap0, \
                           interpolation = 'nearest', origin = 'lower', \
                           vmin = vmin0, vmax = vmax0, zorder=1, \
                           extent = [lon_min_Subset, lon_max_Subset, lat_min_Subset, lat_max_Subset])
            # Set grid
            ax.xaxis.set_major_locator(MultipleLocator(30))
            ax.yaxis.set_major_locator(MultipleLocator(30))
            ax.grid(True)
            # Create a colorbar
            cb=plt.colorbar(im, ax = ax, orientation='horizontal', \
                            label='Monthly '+vnamelong+' ('+units+'): '+str(month2)+'-'+str(year), format = tickformat, ticks = tickspacing, extend='both' )
            # Save the figure
            #fig.savefig('python_sensible_map.png',dpi=600)
            fig.savefig(vnamedir+'_pngs/'+str(vname)+'_'+str(year)+'-'+str(month2)+'.png',dpi=150)
            # Show the figures on screen
            #plt.show()
            plt.close(fig)
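# By this point all 12*YEARS monthly values have been extracted for the chosen grid
# cell. data_series_anom is assumed to already be expressed in units of the 1979-2012
# monthly standard deviation (per the *_ymonstd1anom.nc file naming), so it is plotted
# directly against a +/-1 band below without further scaling.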
if(PlotSeries):
    # dates2 = dates
    # dates2 = num2date(dates[:],units='days since 1582-11-15 00:00:00',calendar=calendar0)
    # Create a new 12x8-inch figure with two stacked panels
    fig,ax = plt.subplots(2,1,figsize=(12.,8.))
    fig.suptitle('JULES Soil Moisture in '+Location, fontsize=14)
    # years = dates.YearLocator() # every year
    # ax.xaxis.set_major_locator(years)
    plt.subplot(2,1,1)
    plt.xlim(t0,t1)
    print data_series
    p0, = plt.plot(dates, data_series, 'k.-',color='red', label='JULES model')
    p1, = plt.plot(dates, data_series_avg, 'k',color='black', label='1979-2012 JULES model avg.')
    plt.fill_between(dates, data_series_avg+data_series_std1, data_series_avg-data_series_std1, facecolor='lightgrey', edgecolor='white')
    plt.ylabel('Soil Moisture (mm)')
    plt.xlabel('Time (yrs)')
    plt.legend(bbox_to_anchor=(0., 1.02, 1., .102),loc=3,ncol=2, mode="expand",borderaxespad=0.,fontsize=10)
    plt.subplot(2,1,2)
    ## years = mdates.YearLocator() # every year
    ## ax.xaxis.set_major_locator(years)
    plt.xlim(t0,t1)
    plt.ylabel('Anomaly in Soil Moisture (std. dev.)')
    plt.xlabel('Time (yrs)')
    p0, = plt.plot(dates, data_series_anom, 'k.-',color='red', label='anomaly of JULES model')
    p1, = plt.plot(dates, data_series_zeros, 'k',color='black', label='1979-2012 JULES model avg.')
    plt.fill_between(dates, data_series_ones, -data_series_ones, facecolor='lightgrey', edgecolor='white')
    # Save the figure
    #fig.savefig('python_sensible_map.png',dpi=600)
    fig.savefig(vnamedir+'_pngs/'+str(vname)+'.png',dpi=150)
    plt.close(fig)
#air = f.variables['esoil_gb']
#print air
#lat = f.variables['latitude']
#lon = f.variables['longitude']
#print lat
#print lon
#m = Basemap(projection='npstere',boundinglat=60,lon_0=0,resolution='l')
#nlon, nlat = np.meshgrid(lon[0,:], lat[0,:])
#print nlon,nlat
#x, y = m(lon, lat)
#print x,y
#m.fillcontinents(color='gray',lake_color='gray')
#m.drawparallels(np.arange(-80.,81.,20.))
#m.drawmeridians(np.arange(-180.,181.,20.))
#m.drawmapboundary(fill_color='white')
#cs = m.contourf(x,y,air[0,0,:])
##imshow(vv[time,:,:])
#m.colorbar()
#fig.savefig(figname2)
#plt.close
#plt.show()