This Version 3.0 is authored by Briana Ramirez and edited by Samuel Shen. Liu Yang, Sandra Villamar, and Joaquin Stawsky contributed code to this version.
A video tutorial for the Python code is available at the following URL: https://www.youtube.com/channel/UC7D9i0kBMzPTHyEhU0h6W9g
This version is based on the previous version, described in the box below.
######################################################################################################################
# This Python code for climate science is written for the book entitled "Climate Mathematics: Theory and Applications",
# a Cambridge University Press book authored by SSP Shen and RCJ Somerville, published in July 2019.
# The Python code is based on the R code written by Samuel Shen, Distinguished Professor,
# San Diego State University, USA, and was translated from R by Louis Selstad, Stephen Shen,
# Gregori Clarke, and Dakota Newmann, and edited by Samuel Shen.
######################################################################################################################
#FIRST-TIME Python users:
#These packages need to be installed (from the terminal or the Anaconda interface) before they are imported below.
#Follow this tutorial for package installation:
# https://towardsdatascience.com/importerror-no-module-named-xyz-45e4a5339e1b
#Change your file path to the folder where your downloaded data is stored
#MAC HELP: https://support.apple.com/guide/mac-help/go-directly-to-a-specific-folder-on-mac-mchlp1236/mac
#PC HELP: https://www.sony.com/electronics/support/articles/00015251
import os
# os.chdir("/Users/sshen/climmath/data")
os.chdir('/Users/HP/Documents/sshen/climmath/data')
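# Optional sanity check (a minimal sketch): confirm the working directory now points
# at the folder that contains the downloaded data files, e.g. PSTANDtahiti.txt
print(os.getcwd())
print(os.path.isfile('PSTANDtahiti.txt'))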
%%javascript
// Prevent the Jupyter output area from auto-scrolling long outputs
IPython.OutputArea.prototype._should_scroll = function(lines) {
    return false;
}
#Style dictionary to standardize the plotting scheme across the different Python scripts
import matplotlib.pyplot as plt
styledict = {'xtick.labelsize': 20,
             'xtick.major.size': 9,
             'xtick.major.width': 1,
             'ytick.labelsize': 20,
             'ytick.major.size': 9,
             'ytick.major.width': 1,
             'legend.framealpha': 0.0,
             'legend.fontsize': 15,
             'axes.labelsize': 20,
             'axes.titlesize': 25,
             'axes.linewidth': 2,
             'figure.figsize': (12, 8),
             'savefig.format': 'jpg'}
plt.rcParams.update(styledict)
#Function that creates a personalized discrete colormap
import numpy as np
from matplotlib import cm as cm1
from matplotlib.colors import ListedColormap, to_rgba

def newColMap(colors):
    """
    Create a new discrete colormap from a list of colors given as a parameter.
    A list of at least 6 colors is recommended.
    """
    # Repeat the first and last colors so the end bands remain visible
    first = np.repeat([to_rgba(colors[0])], 2, axis=0)
    last = np.repeat([to_rgba(colors[-1])], 2, axis=0)
    # Start from a viridis map with 16 levels per interior color
    v = cm1.get_cmap('viridis', 16*(len(colors)-2))
    newcolors = v(np.linspace(0, 1, 16*(len(colors)-2)))
    # Overwrite each band of 16 levels with one of the interior colors
    for (i, col) in enumerate(colors[1:-1]):
        newcolors[16*i : 16*(i+1), :] = to_rgba(col)
    return ListedColormap(np.append(np.append(first, newcolors, axis=0), last, axis=0))
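# Example use of newColMap (a minimal sketch; the six colors below are arbitrary
# illustration choices, not the palettes used for the book's figures)
exampleColors = ['black', 'blue', 'darkgreen', 'green', 'yellow', 'red']
exampleMap = newColMap(exampleColors)
# Preview the discrete colormap as a horizontal color bar
plt.figure(figsize=(8, 1))
plt.pcolormesh(np.linspace(0, 1, 256).reshape(1, -1), cmap=exampleMap)
plt.yticks([])
plt.title('newColMap preview')
plt.show()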
#Import the other libraries used in this notebook
from matplotlib.colors import rgb2hex
from matplotlib.patches import Polygon
import matplotlib.mlab as mlab
import matplotlib.ticker as mticker
import pandas as pd
from cartopy import crs, mpl
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
from mpl_toolkits.basemap import Basemap
from mpl_toolkits import mplot3d
from netCDF4 import Dataset as ds
from urllib import request
import scipy as sp
from scipy import stats as stt
from scipy.stats import norm, kurtosis, t
from sklearn import datasets, linear_model
from sklearn.neighbors import KernelDensity as kd
from sklearn.linear_model import LinearRegression
from random import randint
import statsmodels
import statsmodels.api as sm
import sklearn
import math
import statistics
import sympy as sy
from sympy import symbols, diff
from datetime import date
import seaborn as sns
import pylab as py
import warnings
warnings.filterwarnings("ignore")
# Creating a Matrix
np.mat([[1,2], [3,4]])
matrix([[1, 2], [3, 4]])
# Matrix Subtraction
np.mat([[1,1], [1,-1]]) - np.mat([[1,2], [3,0]])
matrix([[ 0, -1], [-2, -1]])
# Matrix Multiplication
np.mat([[1,1], [1,-1]]) * np.mat([[1,3], [2,4]])
matrix([[ 3, 7], [-1, -1]])
# Matrix Multiplication
np.mat([[1,3], [2,4]]) * np.mat([[1,1], [1,-1]])
matrix([[ 4, -2], [ 6, -2]])
# Yields matrix inverse
np.linalg.inv(np.mat([[1,1], [1,-1]]))
matrix([[ 0.5, 0.5], [ 0.5, -0.5]])
# Verify the result of inverse
np.mat([[0.5,0.5], [0.5,-0.5]]) * np.mat([[1,1], [1,-1]])
matrix([[1., 0.], [0., 1.]])
# Yields the solved linear system
np.linalg.solve(np.mat([[1,1], [1,-1]]),[20,4])
array([12., 8.])
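# Optional check (a minimal sketch): multiplying the coefficient matrix by the
# solution vector [12, 8] should reproduce the right-hand side [20, 4]
np.mat([[1,1], [1,-1]]) * np.mat([[12.], [8.]])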
# Creating Matrix A
A = np.mat([[1,2,3], [-1,0,1]])
A
matrix([[ 1, 2, 3], [-1, 0, 1]])
# Yields the Covariance Matrix of A
covm = (1/A.shape[1])*A*A.T
covm
matrix([[4.66666667, 0.66666667], [0.66666667, 0.66666667]])
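# Hand check (a minimal sketch): A*A.T equals [[14, 2], [2, 2]], and dividing by
# the n = 3 columns of A gives the covariance matrix shown above
A * A.T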
# Initializing arbitrary vector u
u = np.mat([[1,-1]]).T
# Yields new vector v in a different direction
v = covm*u
v
matrix([[4.], [0.]])
# Yields eigenvalues and eigenvectors in one output
ew = np.linalg.eig(covm)
# Yields the eigenvalues of covm
ew[0]
array([4.77485177, 0.55848156])
# Yields the eigenvectors
ew[1]
matrix([[ 0.98708746, -0.16018224], [ 0.16018224, 0.98708746]])
# Verify the eigenvalue and eigenvectors
covm * ew[1] / ew[0]
# The first column corresponds to the first eigenvalue
matrix([[ 0.98708746, -0.16018224], [ 0.16018224, 0.98708746]])
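# Optional explicit check of the first eigenpair (a minimal sketch): covm times the
# first eigenvector minus the first eigenvalue times that eigenvector should be the
# zero vector, up to rounding error
covm * ew[1][:,0] - ew[0][0] * ew[1][:,0]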
# Develop a 2-by-3 space-time data matrix for SVD
A = np.mat([[1,2,3],[-1,0,1]])
A
matrix([[ 1, 2, 3], [-1, 0, 1]])
# Perform SVD calculation
msvd = np.linalg.svd(A)
# Yields the D vector in SVD
d = msvd[1]
d
array([3.78477943, 1.29438969])
# Yields the U matrix in SVD (np.linalg.svd returns U directly, so no transpose is needed)
u = msvd[0]
u
matrix([[-0.98708746, -0.16018224], [-0.16018224, 0.98708746]])
# Yields the V matrix of SVD
# np.linalg.svd returns the transpose of V, so drop the row for the zero
# singular value and transpose back to obtain the 3-by-2 matrix V
v = msvd[2]
v = np.delete(v,2,0).T
v
matrix([[-0.21848175, -0.88634026], [-0.52160897, -0.24750235], [-0.8247362 , 0.39133557]])
# Verify that A = UDV', where V' is the transpose of V.
# Build the diagonal matrix D from the singular values
diagd = np.diag(d)
# Yields the transpose of V
vT = v.T
# Verify the SVD result
verim = u*diagd*vT
np.matrix.round(verim)
# This is the original matrix A
matrix([[ 1., 2., 3.], [-1., 0., 1.]])
# Yields the covariance of matrix A
covm = (1/A.shape[1])*A*A.T
# Yields the eigenvalues and eigenvectors of the covariance matrix of A
eigcov = np.linalg.eig(covm)
# The first element of eigcov contains the eigenvalues
eigcov[0]
array([4.77485177, 0.55848156])
# Yields the eigenvectors of the covariance matrix
eigcov[1]
matrix([[ 0.98708746, -0.16018224], [ 0.16018224, 0.98708746]])
# Verify the eigenvalues using the singular values from the SVD: d**2 / n
((d**2)/A.shape[1])
array([4.77485177, 0.55848156])
# Yields the true eigenvalues of the covariance matrix for comparison
eigcov[0]
array([4.77485177, 0.55848156])
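# Optional cross-check (a minimal sketch): the left singular vectors from the SVD
# should match the covariance eigenvectors up to sign, so the difference of their
# absolute values should be numerically zero
np.abs(msvd[0]) - np.abs(eigcov[1])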
Download pressure data for Tahiti from https://shen.sdsu.edu/data/PSTANDtahiti into your directory
Plot the SOI and the standardized SLP data at Darwin and Tahiti
# Set up File path
file = 'PSTANDtahiti.txt'
Pta = np.loadtxt(file,skiprows=0)
PTA = np.reshape(Pta[:,1:],(780))
# Make an array of years for our x-axis
xtime = np.linspace(1951,2016,(65*12)+1)[0:780]
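# Optional sanity check (a minimal sketch): the Tahiti file should hold 65 years
# (1951-2015) of monthly data, so PTA and xtime should both have 780 entries
print(Pta.shape, PTA.shape, xtime.shape)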
#Fig 4.2 (a)
# Set up Figure
plt.figure(figsize = (15.,7.))
# Add data to plot
plt.plot(xtime,PTA,color='r')
# Add text to plot
plt.text(1950,2.7,'(a)', size = 20, alpha = 0.7)
# Label plot
plt.xlabel('Year')
plt.ylabel('Pressure')
plt.title('Standardized Tahiti SLP Anomalies')
plt.grid()
plt.show()
Download pressure data for Darwin from http://shen.sdsu.edu/data/PSTANDdarwin.txt into your directory
# Upload and set up data
Pda = np.loadtxt('PSTANDdarwin.txt',skiprows=0)
PDA = np.reshape(Pda[:,1:],(780))
#Fig 4.2 (b)
plt.figure(figsize = (15.,7.))
plt.plot(xtime,PDA,color='b')
plt.text(1950, 2.7, '(b)', size = 20, alpha = 0.7)
plt.xlabel('Year')
plt.ylabel('Pressure')
plt.title('Standardized Darwin SLP Anomalies')
plt.grid()
plt.show()
Plot the SOI index
#Fig 4.2 (c)
# Set up figure
fig, soi = plt.subplots(figsize=(15,7))
# Plotting the SOI data against time
soi.plot(xtime,PTA-PDA, color = 'k')
soi.text(1950, 4.11, '(c)', size = 20, alpha = 0.7)
soi.axhline(y = 0.0, color = 'k', linestyle = '-')
soi.set_xlabel('Year')
soi.set_ylabel('SOI Index')
soi.grid()
# Create a second x-axis (top) that shares the y-axis
ax2 = soi.twiny()
# Plot the Tahiti-minus-Darwin standardized SLP difference on the top axis
ax2.plot(xtime,PTA-PDA, color='k')
ax2.set_xlabel('Year')
# Create a second y-axis (right) that shares the x-axis
ax3 = soi.twinx()
ax3.plot(xtime,PTA-PDA, color='k')
ax3.set_ylabel('SOI Index')
# Adding some text
ax3.text(1960,4.6,"SOI = Tahiti Standardized SLP - Darwin Standardized SLP", fontsize=18)
plt.show()
CSOI and AMO time series comparison
#Fig 4.2 (d)
# Preparing variables
SOI = PTA-PDA
cnegsoi = -SOI.cumsum()
# Load AMO data
amodat = np.loadtxt('AMO1951-2015.txt',skiprows=0)
amots = amodat[:,1:13].T.flatten("F")
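# Optional sanity check (a minimal sketch): the flattened AMO series should line up
# with the 780 monthly time steps in xtime (Jan 1951 - Dec 2015)
print(len(amots), len(xtime))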
# Set up figures
fig, ax1 = plt.subplots(figsize=(15,7))
fig.text(0.123, 0.22, '(d)', size = 20, alpha = 0.7)
# Plot the negative cumulative SOI (CSOI) against time
ax1.set_xlabel('Year')
ax1.set_ylabel('Negative CSOI Index')
ax1.plot(xtime, cnegsoi, color="purple", label="CSOI")
ax1.grid()
# Create a second, vertical axis that shares the same x-axis
ax2 = ax1.twinx()
# Set labels for new axis
ax2.set_ylabel('AMO Index')
ax2.plot(xtime, amots, color="darkgreen", label="AMO Index")
# Set legends
# ax1.legend(loc = (.169,.89))
# ax2.legend(loc = (.169,.84))
ax1.legend(loc = (.125,.13))
ax2.legend(loc = (.125,.08))
# Ensure the right y-axis label is fully visible
fig.tight_layout()
plt.title('CSOI and AMO Index Comparison', pad = 8)
plt.show()
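# Optional follow-up (a minimal sketch): quantify the visual similarity between the
# negative CSOI and AMO curves with a simple correlation coefficient
print(np.corrcoef(cnegsoi, amots)[0,1])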