This Version 2.0 is authored by Briana Ramirez, edited by Samuel Shen. Liu Yang and Sandra Villamar contributed codes to this version.
Video tutorial for the python code can be found at the following URL:
This version is based upon the previous version described in the following box.
######################################################################################################################
#This Python Code for Climate Science is written for the book entitled "Climate Mathematics: Theory and Applications"#
#A Cambridge University Press book authored by SSP Shen and RCJ Somerville in July 2019 #
#The Python codes were based on the R codes written by Samuel Shen Distinguished Professor, #
#San Diego State University, USA and were translated from R by Louis Selstad, Stephen Shen, #
#Gregori Clarke, and Dakota Newmann and edited by Samuel Shen. #
######################################################################################################################
#FIRST TIME Python users*****
#These packages need to be installed (on the terminal or anaconda interface) before importing them below.
#Follow this tutorial for package installation before
# https://towardsdatascience.com/importerror-no-module-named-xyz-45e4a5339e1b
#Change your file path to the folder where your downloaded data is stored
#MAC HELP: https://support.apple.com/guide/mac-help/go-directly-to-a-specific-folder-on-mac-mchlp1236/mac
#PC HELP: https://www.sony.com/electronics/support/articles/00015251
import os
# NOTE(review): hard-coded absolute path to the original author's machine —
# change this to the folder where your own downloaded data is stored.
os.chdir("/Users/brianaramirez/Downloads/data/")
%%javascript
// Jupyter cell magic: disable auto-scrolling of output areas so that long
// outputs (e.g. large arrays) are displayed in full.
IPython.OutputArea.prototype._should_scroll = function(lines) {
return false;
}
#Style dictionary to standardize the plotting scheme between different python scripts
import matplotlib.pyplot as plt

styledict = {'xtick.labelsize': 20,
             'xtick.major.size': 9,
             'xtick.major.width': 1,
             'ytick.labelsize': 20,
             'ytick.major.size': 9,
             'ytick.major.width': 1,
             'legend.framealpha': 0.0,
             'legend.fontsize': 15,
             'axes.labelsize': 20,
             'axes.titlesize': 25,
             'axes.linewidth': 2,
             'figure.figsize': (12, 8),
             'savefig.format': 'jpg'}
# FIX: rcParams keys contain dots ('xtick.labelsize'), which are not valid
# keyword-argument names; pass the mapping directly instead of unpacking it.
plt.rcParams.update(styledict)
#Function that creates personalized discrete Colormap
import numpy as np
from matplotlib import cm as cm1
from matplotlib.colors import ListedColormap, to_rgba

def newColMap(colors):
    """
    Create a discrete colormap from a list of colors.

    The first and last colors each occupy 2 entries; every interior color
    occupies a band of 16 entries. Recommended length of the list of
    colors is at least 6.

    Parameters
    ----------
    colors : sequence of matplotlib color specs (at least 2 entries)

    Returns
    -------
    matplotlib.colors.ListedColormap
    """
    # FIX: guard against lists too short to have a first and last color
    if len(colors) < 2:
        raise ValueError("newColMap requires at least 2 colors")
    first = np.repeat([to_rgba(colors[0])], 2, axis=0)
    last = np.repeat([to_rgba(colors[-1])], 2, axis=0)
    n_mid = 16 * (len(colors) - 2)
    # Start from viridis samples, then overwrite each 16-wide band with
    # the corresponding interior color.
    v = cm1.get_cmap('viridis', n_mid)
    newcolors = v(np.linspace(0, 1, n_mid))
    for i, col in enumerate(colors[1:-1]):
        newcolors[16 * i: 16 * (i + 1), :] = to_rgba(col)
    # np.vstack replaces the original's harder-to-read nested np.append
    # calls; the stacked result is identical.
    return ListedColormap(np.vstack((first, newcolors, last)))
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import rgb2hex
from matplotlib.patches import Polygon
import matplotlib.mlab as mlab
import pandas as pd
from mpl_toolkits.basemap import Basemap
# Having an issue with the import “mpl_toolkits.basemap” package?
# Unlike other package installations, this package requires additional steps to install.
# 1.Download basemap package from this website
# https://www.lfd.uci.edu/~gohlke/pythonlibs/#basemap
# *Note the package version needs to be the same as Python version
# 2.Run the following line in your terminal
# pip install basemap-1.2.2-cp37-cp37m-win_amd64.whl
from mpl_toolkits import mplot3d
from netCDF4 import Dataset as ds
from urllib import request
import scipy as sp
from scipy import stats as stt
from sklearn import datasets, linear_model
from sklearn.neighbors import KernelDensity as kd
from sklearn.linear_model import LinearRegression
from random import randint
from scipy.stats import norm
import statsmodels
import sklearn
import math
import statistics
import sympy as sy
from sympy import symbols, diff
import statsmodels.api as sm
from datetime import date
Python As a Smart Calculator
# NOTE: the bare expressions below (e.g. `t`, `u+v`) are notebook cell
# outputs; in a plain script wrap them in print() to see the values.
# Basic arithmetic
1+4
# Basic arithmetic
2 + np.pi/4 - 0.8
# Variable Assignment
x = -1
y = 2
z = 4
# x**y means x^y: x to the power of y
t = 2*x**y-z
t
# Variable Assignment
u=2
v=3
u+v
# np.sin is sine function in Python
np.sin(u*v)
Define a Sequence in Python
# Enter the temperature data
tmax=[77, 72,75,73,66,64,59]
# Show the temperature data (notebook cell output)
tmax
# A Python sequence starts from 0 while R from 1
np.arange(9)
# A sequence is called an array in Python
# np.arange(x,y) creates an array of numbers incremented by 1 between integer x and integer y-1
np.arange(1, 9)
# np.arange(x,y) can take negative and real numbers with increment 1
np.arange(-5.5, 2)
Define a Function in Python
# Define the x^2 function and call it samfctn(x)
def samfctn(x):
    """Return the square of the input value x."""
    return x * x

samfctn(4)
#Fig 2.3
# 7 evenly spaced x-coordinates from 1 to 8 for the 7 daily tmax values
x = np.linspace(1,8,7)
tmax=[77, 72,75,73,66,64,59]
plt.plot(x, tmax, "o")
# Define a multivariate function "x+y-z/2"
def fctn2(x, y, z):
    """Return x + y - z/2 for the three numeric inputs."""
    half_z = z / 2
    return x + y - half_z

fctn2(1, 2, 3)
Plot with Python
Plot the curve of y=sin(x) from -pi to 2 pi
# math.pi is the same as np.pi and scipy.pi, just using different packages
# Sample t from -pi to 2*pi in steps of 0.1
t = np.arange(-math.pi, 2 *math.pi, 0.1)
plt.plot(t, np.sin(t),"o")
plt.show()
Plot x-y data based on a formula
# Uniform division using 20 points
t = np.linspace(-3, 2, 20)
# Plot the parabola y = t^2
plt.plot(t, t ** 2)
plt.show()
Plot 3D Surface
x = np.linspace(-1, 1, 100)
y = np.linspace(-1, 1, 100)
# Defines a formula for Z
def f(x, y):
    """Paraboloid z = 1 - x^2 - y^2."""
    return 1 - x**2 - y**2
X, Y = np.meshgrid(x, y)
# Renders z function on the x, y grid
Z = f(X, Y)
# Creates the 3D plot
fig = plt.figure()
ax = plt.axes(projection = '3d')
ax.contour3D(X, Y, Z, 30, cmap = 'binary')
plt.show()
Contour plot
#Fig 2.4
x = np.linspace(-1, 1, 30)
y = np.linspace(-1, 1, 30)
X, Y = np.meshgrid(x, y)
# Uses f(x, y) = 1 - x^2 - y^2 defined in the previous cell
Z = f(X, Y)
# Creates and colors the contour plot
plt.contourf(X, Y, Z, 20, cmap = 'cool') #cmap='color-scheme'
# Adds colorbar for color map of contours
plt.colorbar()
plt.show()
Plot 3D Contour Plot
x = np.linspace(-1, 1, 100)
y = np.linspace(-1, 1, 100)
# Defines a formula for Z
def f(x, y):
    """Paraboloid z = 1 - x^2 - y^2."""
    return 1 - x**2 - y**2
X, Y = np.meshgrid(x, y)
# Renders z function on the x, y grid
Z = f(X, Y)
# Creates the 3D plot
fig = plt.figure()
ax = plt.axes(projection = '3d')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1,
                cmap='viridis', edgecolor='none') #cmap = 'color'
# Labels the plot
ax.set_title('Contour surface')
ax.set_xlabel('x', labelpad=15)
plt.xticks(fontsize=14)
ax.set_ylabel('y', labelpad=15)
plt.yticks(fontsize=14)
ax.set_zlabel('z', labelpad=15)
# BUG FIX: 'Fontsize' is not a valid Text property (matplotlib keyword
# arguments are case-sensitive); use 'fontsize' as on the other axes.
ax.set_zticklabels([-1,-.75,-.5,-.25,0,.25,.5,.75,1], fontsize=14)
ax.view_init(30)
plt.show()
Symbolic Calculations with Python
# Symbolic calculus with sympy.  NOTE: this rebinds x and y (numbers in
# earlier cells) to sympy Symbol objects.
# Makes 'x' and 'y' symbols used for derivatives
x = symbols('x')
y = symbols('y')
# 'diff' command takes the derivative of x^2 w.r.t. x
diff(x ** 2, x)
# Creates a function of x^2
fx = x ** 2
# Takes the derivative of function w.r.t x
rslt = diff(fx, x)
print('The derivative of the function fx w.r.t x is: ', rslt)
# Creates a function of x^2*sin(x)
fx = (x ** 2) * sy.sin(x)
# Take the derivative of function w.r.t x
rslt = diff(fx, x)
print('The derivative of the function w.r.t x is: ', rslt)
# Create a multivariate function: x^2+y^2
fxy = x ** 2 + y ** 2
# Take partial derivative of function w.r.t x
rslt_x = diff(fxy, x)
# Take partial derivative of function w.r.t y
rslt_y = diff(fxy, y)
print('The partial derivative of the function w.r.t x is: ', rslt_x)
print('The partial derivative of the function w.r.t y is: ', rslt_y)
# Integrates x^2 from 0 to 1
sy.integrate(x ** 2, (x, 0, 1))
# Integrates cos(x) from 0 to pi/2
sy.integrate(sy.cos(x), (x, 0, math.pi / 2))
Vectors and Matrices
# Creates a 5X1 matrix
# NOTE(review): np.mat/np.matrix is discouraged by NumPy in favor of
# regular ndarrays; kept as-is to match the original text.
np.mat([[1], [6], [3], [np.pi], [-3]])
# Creates a sequence from 2 to 6
np.arange(2, 7)
# Creates a sequence incremented by 2 from 1 to 10
np.arange(1, 10, 2)
# Creates a 4X1 matrix
x = np.mat([[1], [-1], [1], [-1]])
# Adds number 1 to each element of x
print(x + 1)
# Multiplies number 2 to each element of x
print(x * 2)
# Divides each element of x by 2
print(x / 2)
# Creates a 1X4 vector with a sequence incremented by 1 from 1 to 4
y = np.mat(np.arange(1, 5))
# This multiplication multiples each pair of elements and results in 4X1 vector
np.multiply(x, y.T)
# Dot product and results in 1X1 matrix
np.dot(x.T, y.T)
# Transforms x into a row 1X4 vector
x.T.shape
#The dot product of two vectors
np.dot(y, x)
#This column times row yields a 4X4 matrix
# Note: For detailed difference between np.multiply and np.dot visit the following URL:
# https://www.geeksforgeeks.org/difference-between-numpy-dot-and-operation-in-python/
np.matmul(x, y)
# Convert a vector into a matrix of the same number of elements
# the matrix elements go by column, first column, second, etc
my = y.reshape(2, 2).transpose()
print(my)
# Finds dimensions of a matrix
my.shape
# Converts a matrix to a vector, again via columns
my.flatten()
# Reshapes x to a 2X2 matrix
mx = x.reshape(2, -1).transpose()
# Multiplication between each pair of elements
np.multiply(mx, my)
# Division between each pair of elements
np.divide(mx, my)
# Subtract 2 times a matrix from another
np.subtract(mx, 2 * my)
# This is matrix multiplication in linear algebra
np.matmul(mx, my)
# Determinant of a matrix (note: rebinds x from a matrix to a scalar)
x = np.linalg.det(my)
print(x)
# Inverse of a matrix
z = np.linalg.inv(my)
print(z)
#Verifies that the inverse of the matrix is correct by multiplying
# the original matrix and the inverse matrix
# Note: np.multiply is element-wise multiplication and np.matmul is matrix multiplication
a = np.matmul(z, my)
print(a)
# Returns a left to right diagonal vector of a matrix
np.diagonal(my)
# Returns the eigenvalues and eigenvectors of the "my" matrix
myeigvect = np.linalg.eig(my)
print(myeigvect)
# Returns the singular value decomposition(svd) of the "my" matrix
mysvd = np.linalg.svd(my)
print(mysvd)
# Returns a solved linear matrix equation of the "my" matrix and array [1,3]
ysolve = np.linalg.solve(my, [1,3])
print(ysolve)
# Verifies the solution is correct by multiplying the "my" matrix with "ysolve"
n = np.matmul(my, ysolve)
print(n)
Simple Statistics with Python
# Generates 10 normally distributed numbers
random = np.random.normal(0, 1, 10)
print(random)
# Returns the mean of that random sample
mean = np.mean(random)
print(mean)
# Returns the variance of that random sample
var = np.var(random)
print(var)
# Returns the standard deviation of that random sample
std = np.std(random)
print(std)
# returns the median of that random sample
median = np.median(random)
print(median)
#Returns quantile values in [0,.25,.5,.75,1]
quantile = pd.DataFrame(random).quantile([0, 0.25, 0.5, 0.75, 1])
print(quantile)
# Returns range of values (max - min) in random sample
rang = np.ptp(random)
print(rang)
# Returns the min value in random
minimum = np.min(random)
print(minimum)
# Returns the max value in random
maximum = np.max(random)
print(maximum)
# Yields the box plot of random values
fig = plt.figure(1, figsize=(8, 10))
ax = fig.add_subplot(111)
bp = ax.boxplot(random)
plt.show()
# Yields statistical summary of the data sequence
random = np.random.normal(0, 1, 12)
summ = pd.DataFrame(random).describe()
print(summ)
# Yields the histogram of 1000 random numbers with a normal distribution
w = np.random.normal(0, 1, 1000)
hist = plt.hist(w, facecolor='black')
plt.show()
#Fig 2.5
# Linear Regression and linear trend line
# 2007-2016 data of global temperature anomalies
# Source: NOAAGlobalTemp data
from sklearn.linear_model import LinearRegression
t = np.linspace(2007, 2016, 10)
T = [.36,.3,.39,.46,.33,.38,.42,.5,.66,.7]
lm = LinearRegression()
# sklearn expects a 2-D design matrix, hence the reshape to a column
lm.fit(t.reshape(-1,1), T)
predictions = lm.predict(t.reshape(-1, 1))
plt.plot(t, predictions, '-r')
plt.plot(t,T,"-o", color="black")
plt.title("2007-2016 Global Temperature Anomalies")
plt.xlabel("Year")
plt.ylabel("Temperature")
plt.show()
Input Data by reading a file
# CSV file
file = "NOAAGlobalT.csv"
data = pd.read_csv(file)
# TXT file
file = 'http://shen.sdsu.edu/data/' + 'aravg.mon.land_ocean.90S.90N.v4.0.1.201703.asc.txt'
data = pd.read_table(file, header = None, delim_whitespace = True)
# ASC file: read whitespace-separated float numbers into a flat list.
# BUG FIX: the original left the file handle open; the with-block
# guarantees the file is closed even if a parse error occurs.
data = []
with open('NOAAGlobalTemp.gridded.v4.0.1.201701.asc', "r") as file:
    for line in file:
        for f in line.strip().split():
            data.append(float(f))
# NetCDF file
file = "air.mon.mean.nc"
data = ds(file,"r+")
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import rgb2hex
from matplotlib.patches import Polygon
import matplotlib.mlab as mlab
import pandas as pd
from mpl_toolkits.basemap import Basemap
from mpl_toolkits import mplot3d
from netCDF4 import Dataset as ds
from urllib import request
import scipy as sp
from scipy import stats as stt
from sklearn import datasets, linear_model
from sklearn.neighbors import KernelDensity as kd
from sklearn.linear_model import LinearRegression
from random import randint
from scipy.stats import norm
import statsmodels
import sklearn
import math
import statistics
import sympy as sy
from sympy import symbols, diff
import statsmodels.api as sm
from datetime import date
from scipy.stats import norm, kurtosis
from sklearn.linear_model import LinearRegression
import seaborn as sns
import statsmodels.api as sm
import pylab as py
from scipy.stats import norm, t
from scipy.integrate import quad
import warnings
# Suppress library warnings (e.g. deprecation notices) to keep notebook output clean
warnings.filterwarnings("ignore")
# download the NCEI spatial average time series of monthly data
# https://www1.ncdc.noaa.gov/pub/data/noaaglobaltemp/operational/
# timeseries/aravg.mon.land_ocean.90S.90N.v4.0.1.201702.asc
aveNCEI = pd.read_table('http://shen.sdsu.edu/data/' +
                        'aravg.mon.land_ocean.90S.90N.v4.0.1.201703.asc.txt',
                        header = None, delim_whitespace = True)
aveNCEI = aveNCEI.values
aveNCEI
# Yields the dimensions of data
aveNCEI.shape
# Take column index 2 of this data matrix (the temperature anomaly)
tmean15 = aveNCEI[:,2]
# Yields the first 6 values
tmean15[:6]
# Yields the sample Mean
np.mean(tmean15)
# Yields the sample Standard deviation
np.std(tmean15)
# Yields the sample Variance
np.var(tmean15)
# Yields the sample Skewness
pd.DataFrame(tmean15).skew()
# Yields the sample Kurtosis
kurtosis(tmean15)
# Yields the sample Median
np.median(tmean15)
# Yields desired quantiles
pd.DataFrame(tmean15).quantile([0, 0.25, 0.5, 0.75, 1])
# Creating a year sequence (1647 monthly time points spanning 1880-2015)
timemo = np.linspace(1880, 2015, 1647)
# Creating Linear Regression
lm = LinearRegression()
# Training the model
lm.fit(timemo.reshape(-1, 1), tmean15)
# Using the model to make predictions
predictions = lm.predict(timemo.reshape(-1, 1))
print("Coeff of Linear Model:", lm.coef_)
print("Intercept of Linear Model: ", lm.intercept_)
#Fig 3.1
# Linear Model Visualization
plt.close()
plt.plot(timemo, tmean15, '-', color="black")
plt.title('Global Annual Mean Land and Ocean Surface \n Temperature Anomalies: 1880-2015')
plt.xlabel('Year')
plt.ylabel('Temperature anomaly [$\degree$C]')
plt.plot(timemo, predictions, '-', color="red")
font = {'family': 'serif',
        'color': 'red',
        'weight': 'normal',
        'size': 18,
        }
plt.text(1885, .5, 'Linear trend: 0.69 [$\degree$C] per century', fontdict = font)
plt.show()
# Yields the covariance (np.cov returns the 2x2 covariance matrix;
# [0][1] picks the cross-covariance term)
np.cov(timemo,tmean15)[0][1]
# Yields the correlation
np.corrcoef(timemo,tmean15)[0][1]
# Verify the correlation result
# NOTE(review): np.cov defaults to ddof=1 while np.std defaults to ddof=0,
# so this hand check is only approximate for finite samples — verify.
x = timemo
y = tmean15
sdx = np.std(x)
sdy = np.std(y)
cxy = np.cov(x,y)
rxy = cxy / (sdx*sdy)
rxy[0][1]
# Verify the trend result
rxy*sdy/sdx
# Verify the trend result
cxy/(sdx**2)
Histogram
#Fig 3.2
# Plot a histogram of the tmean15 data
# np.histogram returns bin counts h and bin edges mids
h, mids = np.histogram(tmean15)
xfit = np.linspace(np.min(tmean15),np.max(tmean15),30)
# Histogram bin width times sample size = total area under the histogram
areat = (np.diff(mids[0:2])*np.size(tmean15))
# Create the fitted y-values for the normal curve, scaled to that area
yfit = areat*(norm.pdf(xfit,np.mean(tmean15),np.std(tmean15)))
plt.figure(figsize=(10,8))
plt.hist(tmean15,facecolor='w',edgecolor="k")
# Plot the normal fit on the histogram
plt.plot(xfit,yfit,color = "blue")
plt.title("Histogram of 1880-2015 Temperature Anomalies")
plt.xlabel("Temperature anomalies")
plt.ylabel("Frequency")
plt.show()
Box Plot
#Fig 3.3
# Boxplot of the global annual mean temperature anomalies from 1880-2015
plt.figure(figsize=(7, 9))
plt.boxplot(tmean15)
# FIX: corrected the spelling of "Anomalies" in the user-facing axis label
plt.ylabel('Temperature Anomalies in deg C')
plt.show()
Scatter Plot
#Fig 3.4
# Retrieving all necessary data
ust = pd.read_csv('USJantemp1951-2016-nohead.csv'
,header=None)
soi = pd.read_csv('soi-data-nohead.csv'
,header=None)
soid = np.array(soi.iloc[:,1])
# Make the SOI into a 66x12 matrix with each month as a column
soim = np.reshape(soid,(66,12))
# Take the first column for January SOI
soij = soim[:,0]
# Take the third column for January US temperature data
ustj = np.array(ust.iloc[:,2])
# Set up figure
plt.figure(figsize=(12,7))
# Create scatter plot
plt.scatter(soij,ustj,color='k')
# Create a linear trendline for the data (degree-1 polynomial fit)
lin = np.polyfit(soij, ustj, 1)
linreg = np.poly1d(lin)
plt.plot(soij,linreg(soij),'r')
plt.ylabel('US Temp $\degree$F')
plt.xlabel('SOI[dimensionless]')
plt.title("Scatter plot between Jan SOI and US temp")
plt.show()
Q-Q Plot
#Fig 3.5
# Q-Q plot for the standardized temperature anomalies
tstand = (tmean15-np.mean(tmean15))/np.std(tmean15)
# Simulate 136 points that follow a norm(0,1) distribution
qn = np.random.normal(0,1,136)
# Sort the points
qns = np.sort(qn)
# Create the Q-Q plot with a 45-degree reference line
sm.qqplot(tstand, line ='45', color="black")
# Scatter the sorted simulated quantiles against themselves — these points
# fall exactly on the diagonal and serve as a visual reference
py.scatter(qns,qns, color = "purple")
py.title("Q-Q plot for the Standarized Global Temp Anomalies vs N(0,1)")
py.ylabel("Quantile of Temperature Anomalies")
py.xlabel("Quantile of N(0,1)")
py.show()
#Fig 3.6
# Display the different cloudiness climates of three cities
labels = ['Las Vegas', 'San Diego', 'Seattle']
clear = [0.58,0.40,0.16]
cloudy = [0.42,0.6,0.84]
# Create the label locations
x = np.arange(len(labels))
# Set the width of the bars
width = 0.35
# Prepare figure
fig, ax = plt.subplots(figsize=(12,10))
# Adding data to figure
rects1 = ax.bar(x - width/2, clear, width, label='Clear')
rects2 = ax.bar(x + width/2, cloudy, width, label='Cloudy')
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Probability')
ax.set_title('Probability Distribution of Cloudiness')
ax.set_xticks(x)
ax.set_xticklabels(labels)
ax.legend()
# Creating a function to label each rectangle with its respective frequency
def autolabel(rects):
    """Attach a text label above each bar in *rects*, displaying its height."""
    # Uses the enclosing cell's `ax` axes object
    for rect in rects:
        height = rect.get_height()
        ax.annotate('{}'.format(height),
                    xy=(rect.get_x() + rect.get_width() / 2, height),
                    xytext=(0, 3),  # 3 points vertical offset
                    textcoords="offset points",
                    ha='center', va='bottom', fontsize=15)
# Labeling each rectangle
autolabel(rects1)
autolabel(rects2)
plt.show()
PDF of the Standard Normal Distribution
#Fig 3.7
# Set mu and sigma
mu=0
sigma=1
# Create normal probability density function
def intg(x):
    return 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (x - mu)**2 / (2 * sigma**2))
x = np.linspace(-3, 3)
y = intg(x)
# Set up figure
fig, ax = plt.subplots(figsize=(12,10))
ax.plot(x, y, 'black', linewidth=1)
ax.set_ylim(bottom=0)
# Set desired quantities
a_ , b_ = -3,-1.5
a, b = -1.2, 3
# Make the shaded region
ix_ = np.linspace(a_,b_)
ix = np.linspace(a, b)
# Using Integrating function
iy_ = intg(ix_)
iy = intg(ix)
# Creating desired vertical separations
verts_ = [(a_, 0), *zip(ix_, iy_), (b_, 0)]
verts = [(a, 0), *zip(ix, iy), (b, 0)]
# Creating desired polygons
poly_ = Polygon(verts_, facecolor='lightblue', edgecolor='black', linewidth=0.5)
poly = Polygon(verts, facecolor='lightblue', edgecolor='black', linewidth=0.5)
# Adding polygon
ax.add_patch(poly_)
ax.add_patch(poly)
ax.text(-0.4,0.25,"Area = 1", fontsize=20)
ax.text(-1.78, .05, "$f(x)$", fontsize=15)
ax.text(-1.62, .01, "$x$", fontsize=15)
ax.text(-1.45, .08, "$dx$", fontsize=15)
ax.text(-1.2, .01, "$x+dx$", fontsize=15)
ax.text(-0.4, .09, '$\int^{\infty}_{-\infty}f(x)dx=1$', fontsize=15)
# Creating desired arrow
plt.arrow(-2,.2,0.65,-0.1)
ax.text(-2.3,0.2,"$dA=f(x)dx$", fontsize=15, fontweight="bold")
# Label figure
plt.title("PDF of Standard Normal Distribution")
plt.xlabel("Random Variable x")
plt.ylabel("Probability Density")
plt.show()
Normal Distribution
#Fig 3.8
# Creation of five different normal distributions
bins = np.linspace(-8,8,200)
mu, sigma = 0,1
y1 = 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) )
mu, sigma = 0,2
y2 = 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) )
mu, sigma = 0,1/2
y3 = 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) )
mu, sigma = 3,1
y4 = 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) )
mu, sigma = -4,1
y5 = 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (bins - mu)**2 / (2 * sigma**2) )
df=pd.DataFrame({'x': bins, 'y1': y1, 'y2': y2, 'y3': y3, 'y4': y4, 'y5': y5})
plt.figure(figsize=(12,10))
# Multiple line plot
plt.plot( 'x', 'y1', data=df, marker='', markerfacecolor='red', markersize=8, color='red'
, linewidth=4, label="$\mu = 0$ and $\sigma = 1$")
plt.plot( 'x', 'y2', data=df, marker='', color='blue', linewidth=2, label="$\mu = 0$ and $\sigma = 2$")
plt.plot( 'x', 'y3', data=df, marker='', color='black', linewidth=2, label="$\mu = 0$ and $\sigma = 1/2$")
plt.plot( 'x', 'y4', data=df, marker='', color='purple', linewidth=2, label="$\mu = 3$ and $\sigma = 1$")
plt.plot( 'x', 'y5', data=df, marker='', color='green', linewidth=2, label="$\mu = -4$ and $\sigma = 1$")
plt.legend()
plt.title("Normal Distribution N$(\mu,\sigma^2)$")
plt.xlabel("Random Variable X")
plt.ylabel("Probability Density")
# Verifying the normal equation: integrate the N(0,1) density over [-2, 2]
mu=0
sigma=1
def intg(x):
    return 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (x - mu)**2 / (2 * sigma**2) )
i = quad(intg, -2, 2)
print("The integral evaluated to:", round(i[0],6), "and the absolute error is:", i[1])
Student's t-distribution
#Fig 3.9
x = np.linspace(-4,4,200)
# scipy's t distribution does not accept infinity as the degrees-of-freedom
# parameter, so 100 million is used to approximate df = infinity.
myinf = 100000000
plt.figure(figsize=(12,10))
# Plot the t-distribution with various degrees of freedom
plt.plot(x, t(3).pdf(x),linewidth=4,color="red", label="$df=3$")
plt.plot(x, t(1).pdf(x),linewidth=2,color="blue", label="$df=1$")
plt.plot(x, t(2).pdf(x),linewidth=2,color="black", label="$df=2$")
plt.plot(x, t(6).pdf(x),linewidth=2,color="purple", label="$df=6$")
plt.plot(x, t(myinf).pdf(x),linewidth=2,color="green", label="$df=\inf$")
# Creating legend
plt.legend()
# Labeling plot
plt.xlabel("Random Variable t")
plt.ylabel("Probability density")
plt.title("Student t-distribution T$(t,df)$")
plt.show()
# Confidence interval simulation
# True mean
mu = 14
# True sd
sig = 0.3
# Sample size
n = 50
# Half-width of the 95% CI: 1.96 * standard error of the mean
d = 1.96*sig/n**(1/2)
lowerlim = mu-d
upperlim = mu+d
# Number of simulations
ksim = 10000
# Simulation counter
k=0
xbar = []
for i in range(ksim):
    x = np.mean(np.random.normal(mu,sig,n))
    xbar.append(x)
    # Count how many simulated sample means fall inside the CI
    if (x >= lowerlim) & (x <= upperlim):
        k = k+1
print("Simulation Counter:", k,"and total number of simulations:", ksim)
print("Lower CI:", round(lowerlim,2),"and upper CI:", round(upperlim,2))
#Fig 3.10
# Confidence Interval Simulation
np.random.seed(19680801)
num_bins = 51
# Create histogram with parameters
fig = plt.subplots(figsize=(12,10))
# NOTE(review): this rebinds n (previously the sample size) to the bin counts
n, bins,patches = plt.hist(xbar,num_bins, color='blue', alpha=0.5, histtype='bar', ec='black')
# Label histogram
plt.title("Histogram of the Simulated Sample Mean Temperatures")
plt.ylabel("frequency")
plt.xlabel("Temperature [$\degree$C]")
# Show the plot with extra information
print("95% Confidence Interval (13.92,14.08)")
plt.show()
#Fig 3.11
# Plot confidence intervals and tail probabilities
# Set mu and sigma
mu=0
sigma=1
# Create normal probability density function
def intg(x):
    return 1/(sigma * np.sqrt(2 * np.pi)) *np.exp( - (x - mu)**2 / (2 * sigma**2) )
# Set desired quantities
a_ , b_ = -3,3
a, b = -1.96, 1.96
a1, b1 = -1.0, 1.0 # integral limits
x = np.linspace(-3, 3)
y = intg(x)
# Set up figure
fig, ax = plt.subplots(figsize=(12,10))
ax.plot(x, y, 'black', linewidth=1)
ax.set_ylim(bottom=0)
# Make the shaded region
ix_ = np.linspace(a_,b_)
ix = np.linspace(a, b)
ix1 = np.linspace(a1, b1)
# Using Integrating function
iy_ = intg(ix_)
iy = intg(ix)
iy1 = intg(ix1)
# Creating desired vertical separations
verts_ = [(a_, 0), *zip(ix_, iy_), (b_, 0)]
verts = [(a, 0), *zip(ix, iy), (b, 0)]
verts1 = [(a1, 0), *zip(ix1, iy1), (b1, 0)]
# Creating desired polygons
poly_ = Polygon(verts_, facecolor='red', edgecolor='black', linewidth=0.5)
poly = Polygon(verts, facecolor='lightblue', edgecolor='black')
poly1 = Polygon(verts1, facecolor='white', edgecolor='black')
# Adding polygon
ax.add_patch(poly_)
ax.add_patch(poly)
ax.add_patch(poly1)
# Adding appropriate text
ax.text(0.5 * (a + b), 0.24, "Probability = 0.68",
        horizontalalignment='center', verticalalignment="center", fontsize=15, fontweight="bold")
ax.text(-1.55, .02, "SE", fontsize=15)
ax.text(1.40, .02, "SE", fontsize=15)
ax.text(-0.9, .02, "SE", fontsize=15)
ax.text(0.65, .02, "SE", fontsize=15)
ax.text(-0.05, .02, r'$\bar{x}$', fontsize=15)
# Creating desired arrows
plt.arrow(-2.2,.02,-0.5,0.05)
ax.text(-3,0.08,"Probability = \n 0.025", fontsize=15, fontweight="bold")
# Label the figure
plt.ylabel("Probability density")
plt.xlabel("True mean as a random variable")
plt.title("Confidence Intervals and Confidence Levels")
ax.set_xticks((a,a1,0,b1,b))
ax.set_xticklabels(('','','','',''))
plt.show()
Confidence interval for NOAAGlobalTemp 1880-2015
# Estimate the mean and error bar for a large sample
dat1 = pd.read_table('http://shen.sdsu.edu/data/' +
                     'aravg.mon.land_ocean.90S.90N.v4.0.1.201703.asc.txt',
                     header = None, delim_whitespace = True)
dat1 = dat1.values
# Column index 2 holds the temperature anomaly
tmean15 = dat1[:,2]
MeanEst = np.mean(tmean15)
sd1 = np.std(tmean15)
# Standard error of the mean: sd / sqrt(N)
StandErr = sd1/(len(tmean15))**(1/2)
# 95% error margin
ErrorMar = 1.96*StandErr
print("Mean Estimate:", MeanEst, "\n Lower bound:", MeanEst-ErrorMar, "\n Upper bound:", MeanEst+ErrorMar)
Statistical inference for xbar using a z-score
#Fig 3.12
# Setting mu and sigma of the standard normal distribution
mu=0
sig=1
# Creating normal probability density function
def intg(x):
    # BUG FIX: the original mixed 'sig' with a stale global 'sigma' left
    # over from an earlier cell (it only worked because sigma happened to
    # be 1); use 'sig' consistently so this cell is self-contained.
    return 1/(sig * np.sqrt(2 * np.pi)) *np.exp( - (x - mu)**2 / (2 * sig**2) )
# Creating desired quantities
a_ , b_ = -3,-2.4
a, b = -1.96, 3
x = np.linspace(-3, 3)
y = intg(x)
# Creating figure
fig, ax = plt.subplots(figsize=(12,10))
ax.plot(x, y, 'black', linewidth=1)
ax.set_ylim(bottom=0)
# Make the shaded region
ix_ = np.linspace(a_,b_)
ix = np.linspace(a, b)
# Integrating appropriate values
iy_ = intg(ix_)
iy = intg(ix)
# Creating array of vertical separations
verts_ = [(a_, 0), *zip(ix_, iy_), (b_, 0)]
verts = [(a, 0), *zip(ix, iy), (b, 0)]
# Creating shaded polygons
poly_ = Polygon(verts_, facecolor='lightblue', edgecolor='black', linewidth=0.5)
poly = Polygon(verts, facecolor='#A4C9A4', edgecolor='black')
# Adding desired polygons
ax.add_patch(poly_)
ax.add_patch(poly)
# Adding text
ax.text(-0.05, 0.1, "$H_0$ Probability 0.975",
        horizontalalignment='center', verticalalignment="center", fontsize=15)
# Creating appropriately placed arrow
plt.arrow(-2.50,.02,-0.45,0.05)
ax.text(-3,0.08,"p-value", fontsize=15)
# Label plot
plt.ylabel("Probability density")
plt.xlabel("x: Standard normal randomvariable")
plt.title("Z-Score, p-value, and significance level")
plt.ylim((0,.5))
# Set various parameters
ax.set_xticks((b_,a))
ax.set_xticklabels(('z','$z_{0.025}$'),fontsize=15)
ax.text(1.15,0.3,"Where z is your z-score \n and $z_{0.025}=-1.96$", fontsize=15)
ax.set_yticks(())
ax.set_yticklabels((""))
# Red vertical line marks the critical value z_{0.025} = -1.96
ax.axvline(x=-1.96, linewidth=2, color='r')
plt.arrow(-3,0.43,0.9,0, color="lightblue", linewidth=5, head_width=0.002, head_length=0.02)
plt.text(-3,0.46,"$H_1$ region", fontsize=15)
plt.arrow(3,0.43,-4.8,0, color='#A4C9A4', linewidth=5, head_width=0.002, head_length=0.02)
plt.text(1.75,0.46,"$H_0$ region", fontsize=15)
plt.show()
Mean of a Small Sample Size t-Test
# Hypothesis testing for NOAAGlobalTemp 2006-2015
dat1 = pd.read_table('http://shen.sdsu.edu/data/' +
                     'aravg.mon.land_ocean.90S.90N.v4.0.1.201703.asc.txt',
                     header = None, delim_whitespace = True)
dat1 = dat1.values
# BUG FIX: the original slice [127:136] selects only 9 rows while n was
# hard-coded to 10; rows 126..135 give the 10 values for 2006-2015
# assuming row 0 corresponds to 1880 — TODO confirm the row/year mapping.
tm0615 = dat1[126:136,2]
MeanEst = np.mean(tm0615)
sd1 = np.std(tm0615)
# Sample size taken from the data instead of hard-coding it
n = len(tm0615)
t_score = (MeanEst-0)/ (sd1/np.sqrt(n))
print(" Mean:", MeanEst, "\n","SD:",sd1, "\n","t-score:", t_score)
# Yields the p-value
# BUG FIX: a p-value is a tail probability, not a density; use the
# survival function t.sf (= 1 - CDF) instead of t.pdf.
t.sf(t_score,df=n-1)
# Yields the critical t-score
t.ppf(1-0.025,n-1)
Comparing Temperatures of Two Short Periods
# Hypothesis testing for Global Temp for 1981-1990 and 1991-2000
dat1 = pd.read_table('http://shen.sdsu.edu/data/' +
                     'aravg.mon.land_ocean.90S.90N.v4.0.1.201703.asc.txt',
                     header = None, delim_whitespace = True)
dat1 = dat1.values
# Separating data into 2 sets
# BUG FIX: the original slices [101:111] and [112:121] selected 10 and 9
# rows and skipped row 111; rows 101..110 and 111..120 give two full
# decades assuming row 0 = 1880 — TODO confirm the row/year mapping.
tm8190 = dat1[101:111,2]
tm9100 = dat1[111:121,2]
# Finding mean of data
barT1 = np.mean(tm8190)
barT2 = np.mean(tm9100)
# Finding standard deviation
S1sd = np.std(tm8190)
S2sd = np.std(tm9100)
# Sample sizes taken from the data (both 10)
n1 = len(tm8190)
n2 = len(tm9100)
# BUG FIX: the pooled standard deviation divides by (n1+n2-2) INSIDE the
# square root: Spool = sqrt(((n1-1)s1^2 + (n2-1)s2^2) / (n1+n2-2)).
# The original divided outside the sqrt, deflating Spool.
Spool = np.sqrt(((n1-1)*S1sd**2 + (n2-1)*S2sd**2)/(n1+n2-2))
# Yields t-score and boundary conditions for critical value
T = (barT2-barT1)/(Spool*np.sqrt(1/n1 + 1/n2))
tlow = t.ppf(0.025,n1+n2-2)
tup = t.ppf(1-0.025,n1+n2-2)
print(" t-score:", round(T,5), "\n",
      "tlow =", tlow, "\n",
      "tup=", tup)
# Yields the p-value
# BUG FIX: use the tail probability (survival function), not the density
print("p-value:",t.sf(T,n1+n2-2))
# Prints means of the separated data
print(" 1981-1990 temp =", barT1, "\n", "1991-2000 temp =", barT2)
# Yields the difference between the two decadal means
barT2-barT1
# Linear trend of the full 1880-2015 monthly anomaly series
dat1 = pd.read_table('http://shen.sdsu.edu/data/' +
                     'aravg.mon.land_ocean.90S.90N.v4.0.1.201703.asc.txt',
                     header = None, delim_whitespace = True)
dat1 = dat1.values
tm = dat1[:,2]
x = np.linspace(1880,2015,len(tm))
lm = LinearRegression()
lm.fit(x.reshape(-1, 1), tm)
# BUG FIX: predict on this cell's predictor x, not the stale global
# 'timemo' from an earlier cell (which only works if both exist and
# happen to have the same length).
predictions = lm.predict(x.reshape(-1, 1))
print("Coeff of Linear Model:", lm.coef_)
print("Intercept of Linear Model: ", lm.intercept_)
# FIX: removed the stray '$' from the printed label
print("The r-squared of Model: ", lm.score(x.reshape(-1, 1),tm))
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import rgb2hex
from matplotlib.patches import Polygon
import matplotlib.mlab as mlab
import pandas as pd
from mpl_toolkits.basemap import Basemap
from mpl_toolkits import mplot3d
from netCDF4 import Dataset as ds
from urllib import request
import scipy as sp
from scipy import stats as stt
from sklearn import datasets, linear_model
from sklearn.neighbors import KernelDensity as kd
from sklearn.linear_model import LinearRegression
from random import randint
from scipy.stats import norm
import statsmodels
import sklearn
import math
import statistics
import sympy as sy
from sympy import symbols, diff
import statsmodels.api as sm
from datetime import date
from scipy.stats import norm, kurtosis
from sklearn.linear_model import LinearRegression
import seaborn as sns
import statsmodels.api as sm
import pylab as py
from scipy.stats import norm, t
import warnings
warnings.filterwarnings("ignore")
# Creating a Matrix
# NOTE(review): np.mat/np.matrix is discouraged by NumPy in favor of
# regular ndarrays; kept as-is to match the original text.  For np.mat
# objects '*' is true matrix multiplication.
np.mat([[1,2], [3,4]])
# Matrix Subtraction
np.mat([[1,1], [1,-1]]) - np.mat([[1,2], [3,0]])
# Matrix Multiplication
np.mat([[1,1], [1,-1]]) * np.mat([[1,3], [2,4]])
# Matrix Multiplication
np.mat([[1,3], [2,4]]) * np.mat([[1,1], [1,-1]])
#Yields matrix inverse
np.linalg.inv(np.mat([[1,1], [1,-1]]))
# Verify the result of inverse
np.mat([[0.5,0.5], [0.5,-0.5]]) * np.mat([[1,1], [1,-1]])
# Yields the solved linear system
np.linalg.solve(np.mat([[1,1], [1,-1]]),[20,4])
# Creating Matrix A
A = np.mat([[1,2,3], [-1,0,1]])
A
# Yields the Covariance Matrix of A
covm = (1/A.shape[1])*A*A.T
covm
# Yields new vector in a different direction
u = np.mat([[1,-1]]).T
# Vectors u and v are in different directions
v = covm*u
v
# Yields eigen values and vectors in one output
ew = np.linalg.eig(covm)
# Yields the eigen values of covm
ew[0]
# Yields the eigen vectors
ew[1]
# Verify the eigen value and vectors
covm * ew[1] / ew[0]
# The first column corresponds with the first eigen value
# Develop a 2-by-3 space time data matrix for SVD
A = np.mat([[1,2,3],[-1,0,1]])
A
# Perform SVD calculation
msvd = np.linalg.svd(A)
# Yields the D vector in SVD
d = msvd[1]
d
# Yields the U matrix in SVD
u = msvd[0].T
u
# Yields the V matrix of SVD
v = msvd[2]
v = np.delete(v,2,0).T
v
# Verify that A=UDV', where V' is the transpose of V.
# Yields Diagonal of D
diagd = np.diag(d)
# Yields the Transpose of V
vT = v.T
# Verify the SVD result
verim = u*diagd*vT
np.matrix.round(verim)
# This is the original matrix A
# Yields the covariance of matrix A
covm = (1/A.shape[1])*A*A.T
# Yields the eign vectors and values of matrix A
eigcov = np.linalg.eig(covm)
# The first element of eigcov are the eigen values of matrix A
eigcov[0]
# Yields the eigen vectors of matrix A
eigcov[1]
# Verify eigen values with SVD
((d**2)/A.shape[1])
# Yields true eigen values of matrix A
eigcov[0]
Download pressure data for Tahiti from https://shen.sdsu.edu/data/PSTANDtahiti into your directory.
Plot the SOI and the standardized SLP data at Darwin and Tahiti.
# Set up File path
file = 'PSTANDtahiti.txt'
Pta = np.loadtxt(file,skiprows=0)
# Drop the first column (presumably the year — confirm against the data file)
# and flatten the 65x12 monthly values into a 780-month vector
PTA = np.reshape(Pta[:,1:],(780))
# Make an array of years (monthly resolution, 1951-2015) for our x-axis
xtime = np.linspace(1951,2016,(65*12)+1)[0:780]
#Fig 4.2 (a)
# Set up Figure
plt.figure(figsize= (15.,7.))
# Add data to plot
plt.plot(xtime,PTA,color='r')
# Label plot
plt.xlabel('Year')
plt.ylabel('Pressure')
plt.title('Standardized Tahiti SLP Anomalies')
plt.show()
Download pressure data for Darwin from http://shen.sdsu.edu/data/PSTANDdarwin.txt into your directory
# Upload and set up the Darwin standardized SLP data (same layout as Tahiti)
Pda = np.loadtxt('PSTANDdarwin.txt',skiprows=0)
PDA = np.reshape(Pda[:,1:],(780))
#Fig 4.2 (b)
plt.figure(figsize=(15.,7.))
plt.plot(xtime,PDA,color='b')
plt.xlabel('Year')
plt.ylabel('Pressure')
plt.title('Standardized Darwin SLP Anomalies')
plt.show()
Plot the SOI index
#Fig 4.2 (c)
# Set up figure
fig, soi = plt.subplots(figsize=(15,7))
# Plotting the SOI (Tahiti minus Darwin SLP anomalies) against time
soi.plot(xtime,PTA-PDA,color='k')
soi.set_xlabel('Year')
soi.set_ylabel('SOI Index')
# Twin axis sharing the y-axis: adds a mirrored x-axis on top
ax2 = soi.twiny()
# Plot the same series so the top axis ticks align
ax2.plot(xtime,PTA-PDA,color='k')
ax2.set_xlabel('Year')
# Twin axis sharing the x-axis: adds a mirrored y-axis on the right
ax3 = soi.twinx()
ax3.plot(xtime,PTA-PDA,color='k')
ax3.set_ylabel('SOI Index')
# Adding some text
ax3.text(1973,4.6,"Difference in Darwin and Tahiti standarized SLP data", fontsize=15)
plt.show()
CSOI and AMO time series comparison
#Fig 4.2 (d)
# Preparing variables: cumulative negative SOI
SOI = PTA-PDA
cnegsoi = -SOI.cumsum()
# Load AMO data (monthly values in columns 1-12, Fortran-order flatten
# turns the year-by-month table into one chronological vector)
amodat = np.loadtxt('AMO1951-2015.txt',skiprows=0)
amots = amodat[:,1:13].T.flatten("F")
# Set up figures
fig, ax1 = plt.subplots(figsize=(15,7))
# Plot the data against CSOI Index
ax1.set_xlabel('Year')
ax1.set_ylabel('Negative CSOI Index')
ax1.plot(xtime, cnegsoi, color="purple", label="CSOI")
# Create a second axes that shares the same x-axis
ax2 = ax1.twinx()
# Set labels for new axis
ax2.set_ylabel('AMO Index')
ax2.plot(xtime, amots, color="darkgreen", label="AMO")
# Set legends
ax1.legend(loc=(.5,.90))
ax2.legend(loc=(.5,.85))
# Yields the full view of the right y-label
fig.tight_layout()
plt.show()
Weighted SOI Computed by the SVD Method
# NOTE(review): PTA/PDA are 1-D, so .T is a no-op here; kept for parity with
# the book's R code
ptamonv = PTA.T
pdamonv = PDA.T
# Stack the two station series into a 2-row space-time matrix
ptada = [ptamonv,pdamonv]
# Compute SVD of ptada
svdptd_u, svdptd_s, svdptd_vh = np.linalg.svd(ptada,full_matrices=False)
# Verify SVD by reconstructing the data: U diag(s) Vh
recontada = np.dot(np.dot(svdptd_u,np.diag(svdptd_s)),svdptd_vh)
# Compute error of the reconstruction
svd_error = ptada - recontada
# Error maximum
svd_error.max()
# Error minimum
svd_error.min()
# Gather appropriate variables
Dd = np.diag(svdptd_s)
Uu = svdptd_u
Vv = svdptd_vh
# Set up time array
xtime = np.linspace(1951,2016,(65*12)+1)[0:780]
# Compute WSOI1 for plotting: first singular value times first PC row
wsoi1 = Dd[0,0] * (Vv)[0,:]
Plot WSOI1
#Fig 4.3 (a)
# Prepare figure
plt.figure(figsize=(15,7))
# Plot data
plt.plot(xtime,wsoi1,color='k')
plt.xlabel('Year')
plt.ylabel('Weighted SOI 1')
plt.text(1980,-3.1,"Weighted SOI1 Index", fontsize=15)
plt.show()
Plot WSOI2
#Fig 4.3 (b)
# Set up data for WSOI2: second singular value times second PC row
wsoi2 = Dd[1,1] * Vv[1,:]
# Prepare figure
fig, wsoi = plt.subplots(figsize=(15,7))
# Plot data
wsoi.plot(xtime,wsoi2,color='darkblue')
wsoi.set_xlabel('Year')
wsoi.set_ylabel('WSOI2')
# Twin axis sharing the y-axis: adds a mirrored x-axis on top
ax2 = wsoi.twiny()
ax2.plot(xtime,wsoi2,color='darkblue')
ax2.set_xlabel('Year')
ax2.text(1980,2.19,"Weighted SOI2 Index", fontsize=15)
plt.show()
Plot comparison between CWSOI1 and the smoothed AMO index
#Fig 4.3(c)
# Cumulative sum of the first weighted SOI
cwsoi1 = np.cumsum(wsoi1)
# Prepare figure
fig, plott = plt.subplots(figsize=(15,7))
# Plot cumulative sums
plott.plot(xtime,cwsoi1,color='black',label='CWSOI1')
plott.set_xlabel('Year')
plott.set_ylabel('Cumaltive Weighted SOI 1')
# Superimpose CSOI time series on this CWSOI1
cnegsoi = -np.cumsum(ptamonv-pdamonv)
plt.plot(xtime,cnegsoi,color='purple',label='CSOI')
ax2 = plott.twinx()
# Create 24-month moving average of the monthly AMO index
i = 0
window_size = 24
moving_averages = []
while i < len(amots) - window_size + 1:
this_window = amots[i : i + window_size]
window_average = sum(this_window) / window_size
moving_averages.append(window_average)
i += 1
# Time sequence trimmed to the 757 moving-average points (780 - 24 + 1)
xtime = np.linspace(1951,2016,(65*12)+1)[0:757]
# Plot the moving averages
ax2.plot(xtime, moving_averages,color='darkgreen',label='AMO Index')
ax2.set_ylabel('Smoothed AMO Index')
# Set legend
plott.legend(loc=(.5,.85))
ax2.legend(loc=(.5,.8))
# Ensures we can see all axis labels
fig.tight_layout()
plt.show()
Plot the cumulative WSOI2: CWSOI2
# Recompute the SVD; the rows of Vh are the principal components
uu, ss, vvh = np.linalg.svd(ptada,full_matrices=False)
PC = vvh
PC1 = PC[0,:]
PC2 = PC[1,:]
#Fig 4.3 (d)
# Prepare figure
plt.figure(figsize=(15,7))
# Restore the full 780-month time sequence
xtime = np.linspace(1951,2016,(65*12)+1)[0:780]
# Plot the cumulative sum of the second principal component
plt.plot(xtime,PC2.cumsum(),color='b',label='Principal Component 2')
# Label appropriately
plt.xlabel('Year')
plt.ylabel('CWSOI2')
plt.title('CWSOI2 Index: The Cumulative PC2')
plt.show()
#Fig 4.4 (a)
# Display the two ENSO modes on a world map
# Prepare the figure
plt.figure(figsize=(15., 7.))
# Using the basemap package to put the cylindrical projection map into the image
wmap2 = Basemap(projection='cyl',llcrnrlat=-90.,urcrnrlat=90.,\
llcrnrlon=0.,urcrnrlon=360.,resolution='c')
# Draw coastlines, latitudes, and longitudes on the map
wmap2.drawcoastlines(color='black', linewidth=1)
wmap2.drawparallels(np.arange(-90.,91.,30.), labels = [1,0,0,1], fontsize=15)
wmap2.drawmeridians(np.arange(00.,360.,30.), labels= [1,0,0,1], fontsize=15)
# Mark Darwin (131E, 12S) and Tahiti (220E, 20S)
plt.scatter(131.,-12.,color='b',s=150)
plt.scatter(220.,-20,color='r',s=150)
# Annotate with the mode-1 loadings
plt.text(100,-24, 'Darwin -0.79', color = 'b', fontsize = 22)
plt.text(200,-32, 'Tahiti 0.61', color = 'r', fontsize = 22)
plt.text(60,40, 'El Nino Southern Oscillation Model 1', color = 'm', fontsize = 30)
plt.show()
#Fig 4.4 (b)
# Display the two ENSO modes on a world map
# Prepare figure
plt.figure(figsize=(15, 7))
# Using the basemap package to put the cylindrical projection map into the image
wmap2 = Basemap(projection='cyl',llcrnrlat=-90.,urcrnrlat=90.,\
llcrnrlon=0.,urcrnrlon=360.,resolution='c')
# Draw coastlines, latitudes, and longitudes on the map
wmap2.drawcoastlines(color='black', linewidth=1)
wmap2.drawparallels(np.arange(-90.,91.,30.), labels = [1,0,0,1], fontsize=15)
wmap2.drawmeridians(np.arange(-180.,181.,30.), labels= [1,0,0,1], fontsize=15)
# Mark the same two stations; both loadings have the same sign in mode 2
plt.scatter(131.,-12.,color='r',s=150)
plt.scatter(220.,-20,color='r',s=150)
# Annotate with the mode-2 loadings
plt.text(105,-24, 'Darwin 0.61', color = 'r', fontsize = 22)
plt.text(200,-32, 'Tahiti 0.79', color = 'r', fontsize = 22)
plt.text(60,40, 'El Nino Southern Oscillation Model 2', color = 'm', fontsize = 30)
plt.show()
Example 4.9
# Given the coordinates of 3 points
x1 = [1,2,3]
x2 = [2,1,3]
y = [-1,2,1]
# Assemble the points column-wise: df carries all three variables,
# X_df carries only the two predictors
df = pd.DataFrame([x1, x2, y]).T
X_df = pd.DataFrame([x1, x2]).T
# Fit a linear regression of y (column 2 of df) on x1 and x2
lm = LinearRegression()
lm.fit(X_df, df[2])
# Display the fitted coefficients
lm.coef_
# Verify the 3 points determining a plane: scale each predictor by its
# (rounded) coefficient and compare
b1 = [1.667 * p for p in x1]
b2 = [1.333 * q for q in x2]
difference = [u - v for u, v in zip(b1, b2)]
difference
Example 4.10
# Multivariate regression of w (column 2) on u and v using 4 points
u = [1,2,3,1]
v = [2,4,3,-1]
w = [1,-2,3,4]
df = pd.DataFrame([u,v,w]).T
X_df = pd.DataFrame([u,v]).T
lm = LinearRegression()
lm.fit(X_df, df[2])
lm.coef_
lm.intercept_
Example 4.11
# Show a general multivariate linear regression with 3 independent random variables
# (10 rows of 4 standard-normal values; first 3 columns are predictors)
dat = np.mat([np.random.normal(0, 1, 4),np.random.normal(0, 1, 4),
np.random.normal(0, 1, 4),np.random.normal(0, 1, 4),
np.random.normal(0, 1, 4),np.random.normal(0, 1, 4),
np.random.normal(0, 1, 4),np.random.normal(0, 1, 4),
np.random.normal(0, 1, 4),np.random.normal(0, 1, 4)])
fdat = pd.DataFrame(dat)
lm = LinearRegression()
lm.fit(fdat.iloc[:,0:3], fdat.iloc[:,3])
lm.coef_
lm.intercept_
import numpy as np
import matplotlib
from matplotlib import cm
import matplotlib.pyplot as plt
from matplotlib.colors import rgb2hex
from matplotlib.patches import Polygon
import matplotlib.mlab as mlab
import pandas as pd
from mpl_toolkits.basemap import Basemap
from mpl_toolkits import mplot3d
from netCDF4 import Dataset as ds
from urllib import request
import scipy as sp
from scipy import stats as stt
from sklearn import datasets, linear_model
from sklearn.neighbors import KernelDensity as kd
from sklearn.linear_model import LinearRegression
from random import randint
from scipy.stats import norm
import statsmodels
import sklearn
import math
import statistics
import sympy as sy
from sympy import symbols, diff
import statsmodels.api as sm
from datetime import date
from scipy.stats import norm, kurtosis
from sklearn.linear_model import LinearRegression
import seaborn as sns
import statsmodels.api as sm
import pylab as py
from scipy.stats import norm, t
import warnings
import os
warnings.filterwarnings("ignore")
# NASA Diviner Data Source:
# http://pds-geosciences.wustl.edu/lro/lro-l-dlre-4-rdr-v1/lrodlr_1001/data/gcp
d19 = pd.read_csv("tbol_snapshot.pbin4d-19.out-180-0.txt", sep='\s+', header=None)
# Column 2 holds temperature; reshape to a 720 (lon) x 360 (lat) grid
m19 = np.array(d19.iloc[:,2]).reshape(720,360)
# Set up latitude and longitude sequences (grid-cell centers)
lat1 = np.linspace(-89.75, 89.75, 360)
lon1 = np.linspace(-189.75, 169.75, 720)
# Create desired color sequence
colors = ['skyblue', 'green', 'darkgreen', 'slateblue', 'indigo',
'darkviolet', 'yellow', 'orange', 'pink', 'red', 'maroon', 'purple', 'black']
# newColMap is defined earlier in this file — builds a colormap from the list
myColMap = newColMap(colors)
# Prepare data: transpose to lat x lon and clamp the extremes to [0, 400] K
mapmat = m19.T
mapmat[mapmat == mapmat.max()] = 400
mapmat[mapmat == mapmat.min()] = 0
# Create levels for contour map
clev = np.linspace(mapmat.min(), mapmat.max(), 60)
#Fig 5.2 (top panel)
plt.figure(figsize=(14,6))
contf = plt.contourf(lon1, lat1, mapmat, clev, cmap=myColMap)
colbar = plt.colorbar(contf, drawedges=True, ticks=[0, 100, 200, 300, 400], aspect = 10)
plt.title("Moon Surface Temperature Observed by \n NASA Diviner, Snapshot 19")
plt.xlabel("Longitude")
plt.ylabel("Latitude")
plt.text(190, 95, "K", size=20)
plt.grid();
plt.show()
#Fig 5.2 (middle panel)
# Plot the equator temperature for a snapshot
# Find the grid rows closest to the equator and to the 90E meridian
equ = np.where((lat1 < .5) & (lat1 > -.5))[0]
equatorial = mapmat[equ[0]]
merid = np.where((lon1 < 90.5) & (lon1 > 89.5))[0]
meridion = mapmat[:, merid[0]]
ax = plt.figure()
plt.plot(lon1, equatorial, 'r-')
plt.text(-125, 250, "Nighttime", size=20)
plt.text(50, 250, "Daytime", size=20, color='orange')
plt.xlabel("Longitude")
plt.ylabel("Temperature [K]")
plt.title("Moon's Equatorial Temperature at Snapshot 19");
plt.show()
#Fig 5.2 (bottom panel)
# Plot the noon time meridional temperature for a snapshot
ax = plt.figure()
plt.plot(lat1, meridion, 'r-')
plt.xlabel("Latitude")
plt.ylabel("Temperature [K]")
plt.title("Moon's Noon Time Meridional Temperature\nat Snapshot 19");
plt.show()
# Compute the bright side average temperature (second half of the rows).
# Fix: the np.float alias was removed in NumPy 1.20+/1.24; use builtin float.
bt = np.array(d19.iloc[129600:].astype(float))
type(bt)
# Area weights: cos(latitude) accounts for grid cells shrinking toward the poles
aw = np.cos(bt[:,1]*np.pi/180)
wbt = bt[:,2]*aw
bta = np.sum(wbt)/np.sum(aw)
print("Average temperature of bright side of moon:", round(bta,3), "degrees Kelvin")
# Compute the dark side average temperature (first half of the rows)
dt=np.array(d19.iloc[:129600].astype(float))
aw=np.cos(dt[:,1]*np.pi/180)
wdt=dt[:,2]*aw
dta=np.sum(wdt)/np.sum(aw)
print("Average temperature of dark side of moon:", round(dta,3), "degrees Kelvin")
5.1.3 EBM Prediction for the Moon Surface Temperature
from scipy import optimize as opt
# Making a function to numerically solve an EBM
def ebm_maker(a, s, l, e, si, ka, t0, he):
    """Build an Energy Balance Model residual as a function of temperature t.

    Given albedo a, solar constant s, latitude l (radians), emissivity e,
    Stefan-Boltzmann constant si, and ka/t0/he scaling the linear
    heat-exchange term, return a function of t whose root is the
    equilibrium temperature: absorbed solar input minus blackbody
    emission minus the conduction term.
    """
    def energy_balance(t):
        return (1-a)*s*np.cos(l) - e*si*(t**4) - ka*(t - t0)/he
    return energy_balance
# Equator noon temperature of the moon from an EBM
lat = 0*np.pi/180
sigma = 5.670367e-8
alpha = 0.12
S = 1368
ep = 0.98
k = 7.4e-4
h = 0.4
T0 = 260
fEBM = ebm_maker(alpha, S, lat, ep, sigma, k, T0, h)
# Numerically solve the EBM: fEBM = 0, starting from an initial guess of 400 K
res = opt.root(fEBM, x0 = 400)
# Extract the scalar equilibrium temperature from the solver result
x0 = res['x'][0]
x0
# Define a piecewise albedo function: high albedo a1 below T1 (ice),
# low albedo a2 above T2, with a linear ramp in between.
a1 = 0.7
a2 = 1 - a1
T1 = 250
T2 = 280
a = []  # kept for backward compatibility with later cells that reset it
def ab(T):
    """Piecewise-linear albedo as a function of temperature.

    T may be a scalar or an iterable of temperatures; a list of albedo
    values is returned either way. Fix: results are collected in a fresh
    local list on every call, so repeated calls no longer accumulate into
    the module-level list `a` (the old version required manually resetting
    `a = []` before each use).
    """
    vals = []
    try:
        temps = iter(T)
    except TypeError:        # scalar input
        temps = iter([T])
    for Ti in temps:
        if Ti < T1:
            vals.append(a1)
        elif Ti < T2:
            # linear interpolation between (T1, a1) and (T2, a2)
            vals.append(((a1-a2)/(T1-T2))*(Ti-T2) + a2)
        else:
            vals.append(a2)
    return vals
# Define the range of temperatures
t = np.linspace(200, 350, 1001)
#Fig 5.5
# Plot the albedo as a nonlinear function of T
a = []
plt.figure(figsize=(12,9))
plt.plot(t, ab(t), 'k-')
plt.ylim(0,1)
plt.xlabel("Surface temperature [K]")
plt.ylabel("Albedo");
plt.title("Nonlinear Albedo Feedback")
plt.show()
#Fig 5.6
# Formulate and solve an EBM graphically: plot incoming vs outgoing energy
a = []
S = 1368
ep = 0.6
sg = 5.670373e-8
# Absorbed incoming energy with temperature-dependent albedo
y1 = [(1-i)*(S/4.0) for i in ab(t)]
# Outgoing blackbody energy
y2 = ep*sg*(t**4)
y3 = np.zeros(t.size)
plt.figure(figsize=(12,8))
plt.plot(t, y1, 'r-', label="Incoming Energy")
plt.plot(t, y2, 'b-', label="Outgoing Energy")
plt.plot(t, y2 - y1, 'k-', label="Difference between outgoing and incoming energy")
plt.ylim(-10,300)
plt.axhline(y=0, alpha=.7, color='m')
plt.grid()
# Label the plot; equilibria T1, T2, T3 are where the difference crosses zero
plt.title("Simple Nonlinear EBM with Albedo Feedback: $E_{out}$ = $E_{in}$", fontsize = 18)
plt.xlabel("Surface Temperature T[K]")
plt.ylabel("Energy [Wm$^{-2}$]")
plt.text(288,275,"$E_{out}$", fontsize=20, color="b")
plt.text(310,248,"$E_{in}$", fontsize=20, color="r")
plt.text(290,100,"$E_{out} - E_{in}$", fontsize=20, color="k")
plt.text(230,7, "T1", fontsize=16)
plt.text(263,7, "T2", fontsize=16)
plt.text(285,7, "T3", fontsize=16)
plt.scatter(234,0,s=30, c = "black")
plt.scatter(263,0,s=30, c = "black")
plt.scatter(290,0,s=30, c = "black")
plt.show()
# Verify the roots of the EBM with a smooth (tanh) albedo feedback
S = 1365
ep = 0.6
sg = 5.670373e-8
roots = []  # kept for backward compatibility with any later use
# Redefine the albedo function using a lambda (smooth tanh ramp)
ab = lambda t: 0.5 - 0.2 *np.tanh((t-265)/10)
def f(T):
    """Energy imbalance: outgoing ep*sg*T^4 minus absorbed (1-albedo)*S/4.

    Fix: the old try/except version iterated arrays manually and returned
    only the LAST element (while appending to the global `roots`). NumPy
    operations vectorize, so a single expression handles both scalars and
    arrays; scalars come back as plain floats.
    """
    T = np.asarray(T, dtype=float)
    out = ep*sg*T**4 - (1 - ab(T))*(S/4.0)
    return out.item() if out.ndim == 0 else out
# Find roots of the energy-imbalance function by bisection
opt.bisect(f, 220, 240)
opt.bisect(f, 275, 295)
# NOTE(review): this repeats the first bracket [220, 240]; the middle
# equilibrium near 263 K would need a bracket such as [255, 270] — confirm intent
opt.bisect(f, 220, 240)
import numpy as np
import matplotlib
from matplotlib import cm
import matplotlib.pyplot as plt
import pandas as pd
import warnings
import os
warnings.filterwarnings("ignore")
from scipy import optimize as opt
import sympy as sm
import math as m
#Fig 6.1
# Cess' Budyko Parameters
A = 212
B = 1.6
ep = 0.6
sg = 5.670367e-8
T = np.linspace(-50,50,10*100)
S = 1365
alf = 0.3
# Plot the Budyko linear formula against the Stefan-Boltzmann law,
# its linearization, and the observed constant outgoing radiation
plt.plot(T, A+B*T, "k-", linewidth=3, label="Budyko Radiation Formula")
plt.plot(T, ep*sg*(273.15+T)**4, "b-", linewidth=3, label="Stepfan-Boltzmann Radiation Law")
plt.plot(T, 189.4+2.77*T, "m-", linewidth=3, label="Linear Approximation of SB Law")
plt.plot(T, (1-alf)*S/4*np.repeat(1,len(T)), "r-", linewidth=3, label="Observed Outgoing Radiation")
# Add line Labels
plt.text(-49,180,"Budyko Formula",size=18, color="k")
plt.text(8,350,"Stepfan-Boltzmann Law",size=18, color="b")
plt.text(-32,80,"Linear Approximation of Stefan-Boltzmann Law",size=18, color="m")
plt.text(-40,250,"E$_{out}=239$[Wm$^{-2}$]",size=18, color="r")
# Modify Plot parameters
plt.title("Radiation Law and Its Approximations")
plt.xlabel("Temperature T[$\degree$C]")
plt.ylabel("Outgoing Radiation: E$_{out}$ [Wm$^{-2}$]")
plt.ylim(0,500)
plt.legend(loc='upper left', fontsize="x-large")
plt.show()
Linear approximation of $$f(x)=(1+x)^4$$ by $$L(x)=1+4x$$
# Produce table 6.1: compare f(x) = (1+x)^4 with its linearization L(x) = 1+4x
x = np.linspace(-0.3,0.3,7)
fx = [(1 + xi) ** 4 for xi in x]
lx = [1 + 4 * xi for xi in x]
# Relative error of the linear approximation, in percent (1 decimal place)
ex = [round((fxi - lxi) / fxi * 100, 1) for fxi, lxi in zip(fx, lx)]
pd.set_option('display.max_columns', None)
my_tbl = pd.DataFrame({"X":x,"f(x)":fx, "L(x)":lx, "Error[%]":ex})
my_tbl
#Example 6.2: locate the real root of a cubic by bisection
def func(x):
    """Cubic x^3 - x - 1, whose unique real root lies inside (0, 2)."""
    return x ** 3 - x - 1
opt.bisect(func, 0, 2)
#Example 6.3: equilibrium temperature with a tanh albedo feedback
S = 1368
ep = 0.6
sg = 5.670367e-8
def f2(T):
    """Energy imbalance: outgoing ep*sg*T^4 minus absorbed (1-albedo)*S/4."""
    albedo = 0.5 - 0.2*np.tanh((T-265)/10)
    return ep*sg*T**4 - (1 - albedo) * (S/4)
opt.bisect(f2, 270, 300)
# Create Newton Function
def newton(f, tol=1e-12, x0=1,N=20):
    """Approximate a root of f by Newton's method with a numerical slope.

    The derivative is estimated by a forward difference with step 0.001.
    Iteration stops after N steps or when successive iterates differ by
    less than tol. Returns the list of all iterates produced.
    """
    h = 0.001
    iterates = []
    current = x0
    for _ in range(N):
        slope = (f(current+h)-f(current))/h
        nxt = current - (f(current)/slope)
        iterates.append(nxt)
        if abs(nxt-current) < tol:
            break
        current = nxt
    return iterates
# Create test function for input in Newton Function
def f(x):
    """Cubic with a single positive root near x = 1.36523."""
    return x**3 + 4*x**2 - 10
root = newton(f, tol=1e-12, x0=1,N=10)
root
#Fig 6.2
# Illustration of Newton's Method: the curve, two tangent lines, and the
# successive iterates x0 -> x1 -> x2
x = np.linspace(0.2,1.7,30)
def f(x): return x**3 + 4*x**2 - 10
# g is the exact derivative of f, used for the tangent lines
def g(x): return 3*x**2 + 8*x
x0 = 1
x1 = 1.4543
x2 = 1.3689
fig, ax = plt.subplots(frameon=False)
# Plot the data
plt.plot(x,f(x),"k-")
plt.plot(np.linspace(0,1.75,100),np.zeros(100), "k-")
plt.plot(x, f(x0)+g(x0)*(x-x0),"r--")
plt.plot(x, f(x1)+g(x1)*(x-x1),"r--")
plt.scatter(1,0,s=30,c="k")
plt.scatter(x1,0,s=30,c="k")
plt.scatter(x2,0,s=30,c="k")
plt.scatter(x1,f(x1), s=30, c="r")
plt.scatter(x0,f(x0), s=30, c="r")
# Creating Appropriate tick marks on X-Axis
plt.text(0.5,0,"|")
plt.text(0.47,-0.6,"0.5", fontsize=18)
plt.text(0.96,-0.6,"1.0", fontsize=18)
plt.text(1.5,0,"|")
plt.text(1.47,-0.6,"1.5", fontsize=18)
plt.text(1.7,0,"|")
plt.text(1.67,-0.6,"1.7", fontsize=18)
# Add line Labels and text
plt.text(0.25,-7.6,"$y=x^3+4x^2-10$",size=18, color="k")
plt.text(0.97,0.23,"$x_0$",size=18, color="b")
plt.text(0.87,.9,"Initial guess",size=16, color="b")
plt.text(x1-0.05,0.25,"$x_1$",size=18, color="b")
plt.text(x2-0.05,0.25,"$x_2$",size=18, color="b")
plt.text(0.35,5.2,"Follow the blue arrows from the initial guess",size=18, color="b")
plt.text(0.60,4.5,"$x_0 \Rightarrow P_0 \Rightarrow x_1 \Rightarrow P_1 \Rightarrow x_2 \Rightarrow ...$",
size=18, color="b")
plt.text(x1+0.01,f(x1)-0.3, "$P_1$", color="r",size=14)
plt.text(0.4, -11.2, "Tangent line \n through $P_0$", color="r",size=15)
plt.text(x0+0.01,f(x0)-0.3, "$P_0$", color="r",size=14)
plt.text(1.05, -7, "Tangent line \n through $P_1$", color="r",size=15)
# Add Descriptive arrows
plt.arrow(1,0,0,f(x0)+0.3,head_width=0.02, head_length=0.2,color="b")
plt.arrow(1,f(x0),0.42,4.6,head_width=0.02, head_length=0.2,color="b")
plt.arrow(x1,0,0,f(x1)-0.3,head_width=0.02, head_length=0.2,color="b")
plt.arrow(x1,f(x1),-0.07,-f(x1)+0.3,head_width=0.02, head_length=0.2,color="b")
# Modify Plot parameters
plt.title("Newton's method for finding a root:")
plt.xlim(0,1.75)
plt.ylim(-10,6)
ax.spines['top'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().set_ticks([])
plt.show()
# Apply Newton's method to (x+1)^4 = 2+x
def f(x): return (x+1)**4 - (2+x)
root = newton(f, N=10)
root
# Define Parameters for the EBM with tanh albedo feedback
S = 1365
ep = 0.6
sg = 5.670367e-8
# Define the function for energy imbalance
def f3(T): return ep*sg*T**4 - (1 - (0.5 - 0.2*np.tanh((T-265)/10))) * (S/4)
# Compute Solutions from different initial guesses (one per equilibrium)
root1 = newton(f3, x0=220)
root1
# Compute Solutions
root2 = newton(f3, x0=270)
root2
# Compute Solutions
root3 = newton(f3, x0=300)
root3
# Compute Solutions (a far-off guess, to see where the iteration goes)
root5 = newton(f3, x0=100)
root5
# Symbolic differentiation examples with sympy (imported as sm)
x = sm.Symbol("x")
sm.diff(x**2)
x = sm.Symbol("x")
# e(i) = exp(i) built from the math constant, differentiated symbolically
def e(i):
    return m.e**i
sm.diff(e(-x**2),x)
# Higher order derivatives of projectile height -g t^2/2 + v0 t + h0
x = sm.Symbol("x")
g = sm.Symbol("g")
t = sm.Symbol("t")
v0 = sm.Symbol("v0")
h0 = sm.Symbol("h0")
# First Derivative (velocity)
fst_dev = sm.diff(-g*t**2/2+v0*t+h0,t)
fst_dev
# Second Derivative (acceleration)
sec_dev = sm.diff(fst_dev,t)
sec_dev
# Third derivative (zero for a quadratic)
thr_dev = sm.diff(sec_dev,t)
thr_dev
import numpy as np
import pandas as pd
import warnings
import os
warnings.filterwarnings("ignore")
import plotly.express as px
import sympy as sm
import math as m
import os
import netCDF4 as nc
import matplotlib
from matplotlib import cm as cm1
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.patches import Polygon
import matplotlib.animation as animation
from mpl_toolkits.basemap import Basemap, cm, shiftgrid, addcyclic
from datetime import datetime
import cartopy
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from cartopy.mpl.ticker import LongitudeFormatter, LatitudeFormatter
from scipy import optimize as opt
from scipy.ndimage import gaussian_filter
from tkinter import *
import time
Two different Time Series on the Same Plot
#Set up Variables: contiguous-US annual mean temperature and total precipitation
time = np.arange(2001,2011)
Tmean = np.array([12.06, 11.78, 11.81, 11.72, 12.02,
12.36, 12.03, 11.27, 11.33, 11.66])
Prec = np.array([737.11, 737.87, 774.95, 844.55, 764.03,
757.43, 741.17, 793.50, 820.42, 796.80])
#Fig 9.1
# set up figure
fig, ax = plt.subplots(1,1,figsize=(12, 8))
# plot temperature on the left y-axis
ax.plot(time, Tmean, 'o-r', label="$T_{mean}$")
# set labels
ax.set_ylabel("Tmean [$\degree$C]", size=18)
ax.set_xlabel("Year", size=18)
ax.set_title("Contiguous U.S. Annual Mean Temperature and Total Precipitation", size=18, fontweight="bold")
plt.yticks(rotation=90, size=18)
plt.xticks(size=18)
# twin axis sharing x: precipitation goes on the right y-axis
ax1 = ax.twinx()
# plot of twin axis
ax1.plot(time, Prec, 'o-b', label="Precipitation")
ax1.set_ylabel("Precipitation [mm]")
plt.yticks(rotation=90, size=18)
# merge both axes' handles into a single legend
hand1, lab1 = ax.get_legend_handles_labels()
hand2, lab2 = ax1.get_legend_handles_labels()
ax.legend(handles=hand1+hand2, labels=lab1+lab2,
loc='upper left', prop={'size': 20});
ax1.tick_params(direction='in')
ax.tick_params(direction='in')
# show plot
plt.show()
Figure Setups: Margins, Fonts, Mathematical Symbols, and More
#Fig 9.2
x = 0.25*np.arange(-30, 31)
# indices where the sine is positive / negative, colored differently below
yPositive = np.where(np.sin(x) > 0)[0]
yNegative = np.where(np.sin(x) < 0)[0]
# set up figure
fig, ax = plt.subplots(figsize=(14,8))
plt.rcParams['hatch.color'] = 'blue'
# bar plot: red bars above zero, hatched white bars below
ax.bar(x[yPositive], np.sin(x[yPositive]), width=.1, color='red')
ax.bar(x[yNegative], np.sin(x[yNegative]), hatch='.', fc='white', color="blue")
# add annotations
ax.text(-5, .7, "$Sine\ Waves$", size=75, color='green')
ax.set_xlabel(r"Angle in Radians: $\theta - \phi_0$", color='red',
labelpad=-75)
ax.set_xticks([2*i for i in range(-3, 4)])
ax.set_xlim(x.min(), x.max())
ax.set_ylabel(r"$y = \sin(\theta - \phi_0)$", color='blue')
ax.set_yticks([-1., 0., .5, 1.])
ax.spines['bottom'].set_position(('data',-1.4))
ax.spines['bottom'].set_bounds(-6,6)
# twin axis sharing x, used only to show letter labels on the right
ax1 = ax.twinx()
# invisible plot so the twin axis picks up the data range
ax1.plot(x, np.sin(x), alpha=0)
# set labels
ax1.set_yticks([-1., -.5, 0., .5, 1.])
ax1.set_yticklabels(['A', 'B', 'C', 'D', 'E'])
ax1.tick_params(width=1, length=9)
fig.text(0.28,.95,"Text outside of the figure on side 3", size=25, fontweight="bold")
# show plot
fig.show()
#Fig 9.3
# Illustrating how to adjust font size, axis label spacing, and margins.
# create random values to plot
randVals = np.random.standard_normal(200)
x = np.linspace(0, 10, randVals.size)
# set up Figure
fig, ax = plt.subplots(1, figsize=(12,8))
fig.suptitle("Normal Random Values", fontsize=25, y=.93)
# plot data
ax.scatter(x, randVals, color = 'k', alpha=.6)
# add plot labels (sub-title placed below the axes via negative y)
ax.set_title("Sub-title: 200 Random Values", y=-.2, fontsize=20)
ax.set_ylabel("Random Values")
plt.yticks(rotation=90)
ax.set_yticks([i for i in range(-2, 3)])
ax.set_xlabel("Time")
# show plot
fig.show()
# read in the NOAA annual global mean temperature anomalies
NOAATemp = np.loadtxt("aravg.ann.land_ocean.90S.90N.v4.0.1.2016.txt",skiprows=0)
# set up variables: column 0 = year, column 1 = anomaly
x=NOAATemp[:,0]
y=NOAATemp[:,1]
# sentinel-filled array to receive the smoothed series
z = [-99] * x.size
def forz(z,y):
    """Overwrite z with a centered 5-point moving average of y.

    Fix/generalization: smooths every index with a full window
    (2 <= i <= len(y)-3) instead of the hard-coded range(2, 134), which
    was tied to one specific dataset length and crashed on shorter input.
    The first and last two entries keep their original (sentinel) values.
    Returns the mutated z.
    """
    for i in range(2, len(y) - 2):
        z[i] = np.mean(y[i-2:i+3])
    return z
zz = forz(z,y)
def n1func():
    """Return indices where the global anomaly series y is non-negative.

    Generalized: iterates over all of y instead of the hard-coded
    range(0, 137), so it works for any series length.
    """
    return [i for i, v in enumerate(y) if v >= 0]
def n2func():
    """Return indices where the global anomaly series y is negative.

    Generalized: iterates over all of y instead of the hard-coded
    range(0, 137), so it works for any series length.
    """
    return [i for i, v in enumerate(y) if v < 0]
# Split years into warm (anomaly >= 0) and cold (anomaly < 0) groups
n1 = n1func()
n2 = n2func()
x1= x[n1]
y1= y[n1]
x2 = x[n2]
y2 = y[n2]
# smoothed series restricted to the indices that were actually computed
x3 = x[2:134]
y3 = z[2:134]
#Fig 9.4
# fancy plot of the NOAAGlobalTemp time series: red/blue bars + smoothed curve
plt.bar(x1,y1,color='r',width=1.)
plt.bar(x2,y2,color='b',width=1.)
plt.plot(x3,y3,color='k')
plt.xlim(1877,2020)
plt.xlabel('Time')
plt.ylabel('Temperature [$\degree$C]')
plt.title('NOAA Global Average Annual Mean Temperature Anomalies', fontweight="bold", size=22)
plt.show()
#Fig 9.5
#Contiguous United States (a) annual mean temperature and (b) annual total precipitation
# plot the US temp and prec time series on the same figure
fig, ax = plt.subplots(2,1, figsize=(10,8))
# hide the x-axis of the upper panel (shared with the lower one)
ax[0].axes.get_xaxis().set_visible(False)
# plot first subplot data
ax[0].plot(time, Tmean, 'ro-', alpha=.6
, label="(a)")
ax[0].set_yticks([11.4, np.mean([11.4, 11.8]), 11.8
, np.mean([11.8, 12.2]), 12.2])
ax[0].set_yticklabels([11.4, None, 11.8, None, 12.2],rotation=90)
ax[0].set_ylabel("Tmean [$\degree$C]")
ax[0].text(2004,12.0,"US Annual Mean Temperature"
, fontsize = "xx-large", fontweight="bold")
ax[0].text(2001,12.3,"(a)", size=18)
# plot second subplot data
ax[1].plot(time, Prec, 'bo-', alpha=.6
, label="(b)")
ax[1].text(2004,810,"US Annual Total Precipitation"
, fontsize = "xx-large", fontweight="bold")
plt.ylabel("Prec [mm]")
plt.yticks(rotation=90)
#set labels
plt.text(2001,840,"(b)", size=18)
# negative pad pulls the two panels together
fig.tight_layout(pad=-1.5)
# show plot
fig.show()
Basic Principles for a Python Contour Plot
#9.2.1
# set up a 25x25 grid of standard normal random values
x, y = np.linspace(-1, 1, 25), np.linspace(-1, 1, 25)
z = np.random.standard_normal(size=(25,25))
#set up figure
fig, ax = plt.subplots(1,figsize=(12,12))
# plot filled contour map
mymap = ax.contourf(x,y,z, cmap=cm1.get_cmap('jet'))
# create and set tick labels (None suppresses alternate labels)
ticklabs = [-1., None, -.5, None, 0., None, .5, None, 1.]
ax.set_yticklabels(ticklabs)
ax.set_xticklabels(ticklabs)
ax.set_ylabel("$y$")
ax.set_xlabel("$x$")
ax.set_title("Filled Contour Plot of Normal Random Values")
fig.colorbar(mymap)
# show plot
fig.show()
Plot Contour Color Maps for Random Values on a Map
#Fig 9.6
# plot a 5-by-5 degree grid of global map of standard normal random values
# set up variables
lat = np.linspace(-90, 90, 37)
lon = np.linspace(0, 360, 72)
# create data to be plotted
mapmat = np.random.standard_normal(size=(lat.size, lon.size))
levels = np.linspace(-3, 3, 81)
# set up figure with a Pacific-centered cylindrical projection
fig, ax = plt.subplots(figsize=(16, 10))
ax = plt.subplot(1,1,1, projection=cartopy.crs.PlateCarree(central_longitude=180))
ax.coastlines()
ax.set_global()
ax.set_title("Standard Normal Random Values")
# create desired Colormap
colors = ["darkred","firebrick","firebrick","indianred","lightcoral","lightcoral","lightpink"
,"bisque","moccasin","yellow","yellow","greenyellow","yellowgreen","limegreen","limegreen"
,"mediumspringgreen","palegreen","honeydew","lavender","mediumpurple",
"blueviolet","blueviolet","mediumorchid","mediumvioletred","darkmagenta","indigo","black"]
colors.reverse()
# newColMap is defined earlier in this file — builds a colormap from the list
myColMap = newColMap(colors)
# plot data (longitudes shifted to the projection's central meridian)
contf = ax.contourf(lon-180, lat, mapmat, levels
, cmap=myColMap)
# add labels and colorbar
colbar = plt.colorbar(contf, drawedges=True, aspect=12)
colbar.set_ticks([i for i in range(-3, 4)])
ax.set_aspect('auto')
ax.set_xticks([0, 50, 100, 150, 200, 250, 300, 350]
,crs=cartopy.crs.PlateCarree())
ax.set_xticklabels([0, 50, 100, 150, 200, 250, 300, 350])
ax.set_xlabel("Longitude")
ax.set_yticks([-50, 0, 50], crs= cartopy.crs.PlateCarree())
ax.set_ylabel("Latitude")
# show plot
fig.show()
#Fig 9.7
# plot a 5-by-5 degree grid regional map to cover USA and Canada
# set up variables
lat3 = np.linspace(10, 70, 13)
lon3 = np.linspace(230, 295, 14)
mapdat = np.random.standard_normal(size=(lat3.size,lon3.size))
# set up figure restricted to the North-American window
fig, ax = plt.subplots(figsize=(14,10))
ax = plt.subplot(111, projection=cartopy.crs.PlateCarree())
ax.set_extent([230, 295, 10, 65])
ax.coastlines()
ax.add_feature(cartopy.feature.BORDERS)
ax.set_title("Standard Normal Random Values")
# plot data (reuses `levels` and `myColMap` from the previous figure)
contf = ax.contourf(lon3, lat3, mapdat, levels, cmap=myColMap)
# add colorbar and labels
colbar = plt.colorbar(contf, drawedges=True, aspect=12)
colbar.set_ticks([i for i in range(-3, 4)])
ax.set_aspect('auto')
ax.set_yticks([10*i for i in range(1, 8)],crs=cartopy.crs.PlateCarree())
ax.set_ylabel("Latitude")
ax.set_xticks([10*i for i in range(-13, -6)], crs=cartopy.crs.PlateCarree())
ax.set_xticklabels([10*i for i in range(23,30)])
ax.set_xlabel("Longitude")
ax.tick_params(length=9)
# show plot
fig.show()
9.2.3 Plot Contour Maps from Climate Model Data in NetCDF Files
#Import the NCEP reanalysis air temperature NetCDF file
datamat = nc.Dataset("air.mon.mean.nc")
#Preparing data for plotting
Lon = datamat.variables['lon'][:]
Lat = datamat.variables['lat'][:]
Time = datamat.variables['time']
precnc = datamat.variables['air']
#Fig 9.8
# set up figure
fig, ax = plt.subplots(1, figsize=(12,8))
# plot the first time step along longitude index 64
# (160E per the title below — confirm against the file's lon grid)
ax.plot(np.linspace(90, -90, precnc.shape[1]), precnc[0, :, 64], '-k',
linewidth=4)
# set labels and ticks
ax.set_ylabel("Temperature $[\degree$C]")
ax.set_yticks([10*i for i in range(-3, 3)])
ax.set_xlabel("Latitude")
ax.set_xticks([-50, 0, 50])
ax.set_title("90S-90N Temperature [$\degree$C] \nalong a meridional line at 160$\degree$E: January 1948")
ax.tick_params(length=9, width=1)
# show plot
fig.show()
#Fig 9.9(a)
#Compute and plot climatology and standard deviation Jan 1984 to Dec 2015
# set up variables for map
# number of colors that correspond to data values
contour_levels = np.linspace(-50,50,81)
# create color palette
myColMap = LinearSegmentedColormap.from_list(name='my_list',
colors=['black','blue','darkgreen','green','white','yellow',
'pink','red','maroon'], N=100)
# every 12th time step starting at index 0 = the January fields
JMon = precnc[12*np.arange(68)]
sdmat = np.std(precnc, axis=0)
climmat = np.mean(JMon, axis=0)
levels1 = np.linspace(-50, 50, 81)
levels2 = np.linspace(0, 20, 81)
# set up figure
fig, ax = plt.subplots(figsize=(14,8))
ax = plt.subplot(111, projection=cartopy.crs.PlateCarree(central_longitude=180))
ax.coastlines()
ax.set_title("NCEP RA 1948-2015 January Climatology [$\degree$C]")
# plot data
contf = ax.contourf(Lon-180, Lat, climmat, levels1, cmap=myColMap)
# add colorbar and labels
colbar = plt.colorbar(contf, drawedges=True, aspect=12, ticks=[])
colbar.set_ticks([20*i for i in range(-2, 3)])
ax.set_aspect('auto')
ax.set_yticks([50*i for i in range(-1, 2)],crs=cartopy.crs.PlateCarree())
ax.set_ylabel("Latitude")
ax.set_xticks([50*i for i in range(8)], crs=cartopy.crs.PlateCarree())
ax.set_xticklabels([50*i for i in range(8)])
ax.set_xlabel("Longitude")
ax.tick_params(length=9, width=1)
# show plot
fig.show()
#Fig 9.9(b)
# plot the standard deviation field computed above (sdmat)
# set up figure
fig, ax = plt.subplots(figsize=(14,8))
ax = plt.subplot(111, projection=cartopy.crs.PlateCarree(central_longitude=180))
ax.coastlines()
# ("\\degree" instead of "\degree": "\d" is an invalid escape sequence and a
# SyntaxWarning in Python 3.12+; the rendered string is unchanged)
ax.set_title("NCEP 1948-2015 Jan SAT RA Standard Deviation [$\\degree$C]")
# plot data (longitudes shifted by 180 to match the central_longitude above)
contf = ax.contourf(Lon-180, Lat, sdmat, levels2, cmap=myColMap)
# add colorbar and labels
colbar = plt.colorbar(contf, drawedges=True, aspect=12)
colbar.set_ticks([5*i for i in range(5)])
ax.set_aspect('auto')
ax.set_yticks([50*i for i in range(-1, 2)],crs=cartopy.crs.PlateCarree())
ax.set_ylabel("Latitude")
ax.set_xticks([50*i for i in range(8)], crs=cartopy.crs.PlateCarree())
ax.set_xticklabels([50*i for i in range(8)])
ax.set_xlabel("Longitude")
ax.tick_params(length=9, width=1)
# save figure
# NOTE(review): the ";" in the filename looks like a typo for ":" or "_" —
# kept as-is to preserve the output path; confirm the intended name.
fig.savefig("CH9;JanuarySTD.jpg", bbox_inches='tight');
#Plot Data for Displaying Climate Features
#Fig 9.10
# plot the January 1983 temperature using the above setup
# NOTE(review): mapmat is bound to climmat (the 1948-2015 January climatology),
# not to the January 1983 field (precnc[420]) — confirm which field the figure
# is meant to show; the title below says January 1983.
mapmat = climmat
# set up figure
fig, ax = plt.subplots(figsize=(14,8))
ax = plt.subplot(111, projection=cartopy.crs.PlateCarree(central_longitude=180))
ax.coastlines()
# ("\\degree" instead of "\degree": "\d" is an invalid escape sequence and a
# SyntaxWarning in Python 3.12+; the rendered string is unchanged)
ax.set_title("January 1983 Surface Air Temperature [$\\degree$C]")
# plot data (longitudes shifted by 180 to match the central_longitude above)
contf = ax.contourf(Lon-180, Lat, mapmat, levels1, cmap=myColMap)
# add colorbar and labels
colbar = plt.colorbar(contf, drawedges=True, aspect=12)
colbar.set_ticks([20*i for i in range(-2, 3)])
ax.set_aspect('auto')
ax.set_yticks([50*i for i in range(-1, 2)],crs=cartopy.crs.PlateCarree())
ax.set_ylabel("Latitude")
ax.set_xticks([50*i for i in range(8)], crs=cartopy.crs.PlateCarree())
ax.set_xticklabels([50*i for i in range(8)])
ax.set_xlabel("Longitude")
plt.text(197,95,"[$\\degree$C]", size=20)
ax.tick_params(length=9, width=1)
# show figure
fig.show()
#Fig 9.11
# plot the January 1983 anomaly from NCEP data
# create variables
levels1 = np.linspace(-6, 6, 81)
# time index 420 = 12*35, i.e. January 1983 (1948 + 35)
anomat = precnc[420,:,:] - climmat
# suppress values into the [-6, 6] range
# (np.clip replaces the previous hard-coded 73x144 double loop: same result,
# vectorized, and independent of the grid size)
anomat = np.clip(anomat, -6, 6)
# set up figure
fig, ax = plt.subplots(figsize=(14,8))
ax = plt.subplot(111, projection=cartopy.crs.PlateCarree(central_longitude=180))
ax.coastlines()
ax.add_feature(cartopy.feature.BORDERS, edgecolor='black')
# ("\\degree" instead of "\degree": "\d" is an invalid escape sequence and a
# SyntaxWarning in Python 3.12+; the rendered string is unchanged)
ax.set_title("January 1983 surface air temperature anomaly [$\\degree$C]", size=22)
# plot data (longitudes shifted by 180 to match the central_longitude above)
contf = ax.contourf(Lon-180, Lat, anomat, levels1, cmap=myColMap)
# add colorbar and labels
colbar = plt.colorbar(contf, drawedges=True, aspect=12)
# ticks -6..6 in steps of 2; the previous range(-4, 4) also generated an
# out-of-range -8 (silently dropped) and omitted +6's partner symmetry
colbar.set_ticks([2*i for i in range(-3, 4)])
ax.set_aspect('auto')
ax.set_yticks([50*i for i in range(-1, 2)],crs=cartopy.crs.PlateCarree())
ax.set_ylabel("Latitude")
ax.set_xticks([50*i for i in range(8)], crs=cartopy.crs.PlateCarree())
ax.set_xticklabels([50*i for i in range(8)])
ax.set_xlabel("Longitude")
plt.text(199,95,"[$\\degree$C]", size=20)
ax.tick_params(length=9, width=1)
# show figure
fig.show()