In [33]:
#libraries
%matplotlib inline
import numpy as np
import math
from matplotlib.pyplot import *
import scipy.stats as stats

Beer-Lambert Law and Calibration Curve

In [34]:
#Assemble matrices for linear regression

#Absorbance is the dependent variable; all absorbances in mAbs,
#three replicate readings per solution
A0 = [27, 29, 31]        #solution 0
A1 = [48, 50, 51]        #solution 1
A2 = [129, 130, 129]     #solution 2
A3 = [232, 234, 236]     #solution 3
Astock = [501, 503, 504] #solution 4 (stock)
Abs = [np.mean(A0), np.mean(A1), np.mean(A2), np.mean(A3), np.mean(Astock)]

#Concentration is the independent variable, in mM
Cstock = 0.01 #mM
C = [Cstock/20, Cstock/10, Cstock/4, Cstock/2, Cstock]
xaxis = np.linspace(0, Cstock, 1000)

#Linear regression: design matrix columns are [1, C], so x = [intercept, slope]
Coeff = np.vstack([np.ones(len(C)), C]).T
y = np.vstack(Abs)
#rcond=None selects the modern default and silences the numpy FutureWarning
x = np.linalg.lstsq(Coeff, y, rcond=None)[0]

#notice that constant term ~ 0. This is because the machine was zeroed.
#Revise y values by subtracting the fitted constant (noise floor).
y = y - x[0]

#plots; note that the fit only includes the coefficient of concentration (beta)
fig0 = figure(figsize=(10, 10))
fit = x[1]*xaxis
plot(xaxis, fit, 'r-', label='Regression Fit')
plot(C, Abs, 'b*', label='Original Data', markersize=10)
xlim((0, 0.01001))
grid(True)  #grid('on') is deprecated; pass a boolean
legend(loc=0)
xlabel('Concentration (mM)')
ylabel('Absorbance (mAbs)')
title('Regression Analysis and Original Data')

#Residuals against the zero-intercept (Beer-Lambert) model
e = Abs - x[1]*C
RSS = sum(e**2)
TSS = sum((Abs - np.mean(Abs))**2)
R2 = 1 - RSS/TSS
print('R squared is equal to', '%.3f' % R2, '. This shows that Beer Lambert Law holds')

#Regression confidence intervals
#Assume e (error term of regression) normally distributed
gamma = 0.05 #95% confidence interval
n = len(C)   #n data points
#P(T > t(v, alpha)) = alpha, so invert the CDF (ppf) to find the t value
tstat = stats.t.ppf(1 - gamma/2, n - 2)

s2 = (1/(n - 2)) * sum(e**2)      #estimate of the residual variance
Sxx = sum((C - np.mean(C))**2)    #spread of the concentrations
#half-width of the pointwise confidence band around the fitted line
band = tstat * np.sqrt(s2 * ((1/n) + ((C - np.mean(C))**2)/Sxx))
y1err = x[1]*C + band  #upper confidence bound
y2err = x[1]*C - band  #lower confidence bound

#95% confidence half-width of the slope estimate itself
err = tstat * np.sqrt(s2 * (1/Sxx))

print('Uncertainty in slope =', '%.3f' % err, 'compared to the slope which is', '%.3f' % x[1])

#linewidth and alpha must be numbers, not strings (strings raise in modern matplotlib)
plot(C, y1err, 'r--', linewidth=0.5)
plot(C, y2err, 'r--', linewidth=0.5)
fill_between(C, y1err, y2err, facecolor='red', alpha=0.1)
R squared is equal to 0.998 . This shows that Beer Lambert Law holds
Uncertainty in slope = 3998.436 compared to the slope which is 49658.928
Out[34]:
<matplotlib.collections.PolyCollection at 0x7fbb1af037f0>
/usr/lib/python3/dist-packages/matplotlib/collections.py:571: FutureWarning: elementwise comparison failed; returning scalar instead, but in the future will perform elementwise comparison
  if self._edgecolors == str('face'):

Reaction Data

In [42]:
#Reaction 1: Solution 2 and Stock NaOH
t1 = np.arange(30, 690, 30)  #sampling times, s
zero11 = 0  #instrument zero offsets recorded for each run (mAbs)
zero12 = 2
A11 = np.array([207, 190, 174, 161, 148, 136, 125, 115, 106, 98, 91, 84, 77, 72, 67, 62, 58, 54, 50, 47, 44, 41])
A12 = np.array([207, 187, 171, 155, 141, 128, 117, 107, 98, 90, 82, 75, 69, 64, 58, 54, 50, 46, 43, 40, 37, 34])
A1 = 0.5 * (A11 + A12)  #average of the duplicate runs, mAbs
C1 = A1/x[1] * 1e-3     #concentration in M via the calibration slope x[1]

#Reaction 2: Stock MV and Stock NaOH
t2 = np.arange(30, 270, 30)
zero21 = 0
zero22 = 0
A21 = np.array([162, 123, 94, 72, 55, 43, 34, 27])
A22 = np.array([162, 121, 89, 66, 50, 38, 29, 24])
A2 = 0.5 * (A21 + A22)
C2 = A2/x[1] * 1e-3

#Reaction 3: Solution 3 and Stock NaOH
t3 = np.arange(30, 270, 30)
zero31 = 0
zero32 = 2
A31 = np.array([76, 57, 43, 34, 26, 20, 16, 13])
A32 = np.array([85, 60, 52, 42, 34, 28, 23, 20])
A3 = 0.5 * (A31 + A32)
C3 = A3/x[1] * 1e-3

#regression analysis for first order: ln(C) = ln(C0) - k*t
Coeff11 = np.vstack([np.ones(len(t1)), t1]).T
Coeff21 = np.vstack([np.ones(len(t2)), t2]).T
Coeff31 = np.vstack([np.ones(len(t3)), t3]).T
y11 = np.vstack(np.log(C1))
y21 = np.vstack(np.log(C2))
y31 = np.vstack(np.log(C3))
#rcond=None selects the modern default and silences the numpy FutureWarning
x11 = np.linalg.lstsq(Coeff11, y11, rcond=None)[0]
x21 = np.linalg.lstsq(Coeff21, y21, rcond=None)[0]
x31 = np.linalg.lstsq(Coeff31, y31, rcond=None)[0]

#residuals and coefficients of determination for the first-order fits
e11 = (np.log(C1) - (x11[0] + x11[1]*t1))
e21 = (np.log(C2) - (x21[0] + x21[1]*t2))
e31 = (np.log(C3) - (x31[0] + x31[1]*t3))
RSS11 = sum(e11**2)
RSS21 = sum(e21**2)
RSS31 = sum(e31**2)
TSS11 = sum((np.log(C1) - np.mean(np.log(C1)))**2)
TSS21 = sum((np.log(C2) - np.mean(np.log(C2)))**2)
TSS31 = sum((np.log(C3) - np.mean(np.log(C3)))**2)
R211 = 1 - RSS11/TSS11
R221 = 1 - RSS21/TSS21
R231 = 1 - RSS31/TSS31

#number of data points per reaction (named npts* because n1/n2 are reused
#as reaction-order estimates in the next cell)
npts1 = len(C1)
npts2 = len(C2)
npts3 = len(C3)
tstat1 = stats.t.ppf(1 - gamma/2, npts1 - 2)
tstat2 = stats.t.ppf(1 - gamma/2, npts2 - 2)
tstat3 = stats.t.ppf(1 - gamma/2, npts3 - 2)
#95% confidence half-widths of the slope (pseudo rate constant) estimates
err1 = tstat1 * np.sqrt(((1/(npts1 - 2)) * sum(e11**2)) * 1/sum((t1 - np.mean(t1))**2))
err2 = tstat2 * np.sqrt(((1/(npts2 - 2)) * sum(e21**2)) * 1/sum((t2 - np.mean(t2))**2))
err3 = tstat3 * np.sqrt(((1/(npts3 - 2)) * sum(e31**2)) * 1/sum((t3 - np.mean(t3))**2))

#regression analysis for second order: 1/C = 1/C0 + k*t
Coeff12 = np.vstack([np.ones(len(t1)), t1]).T
Coeff22 = np.vstack([np.ones(len(t2)), t2]).T
#bug fix: was np.ones(len(t2)); only worked because t2 and t3 happen to
#have the same length
Coeff32 = np.vstack([np.ones(len(t3)), t3]).T
y12 = np.vstack(1/C1)
y22 = np.vstack(1/C2)
y32 = np.vstack(1/C3)
x12 = np.linalg.lstsq(Coeff12, y12, rcond=None)[0]
x22 = np.linalg.lstsq(Coeff22, y22, rcond=None)[0]
x32 = np.linalg.lstsq(Coeff32, y32, rcond=None)[0]

#residuals and coefficients of determination for the second-order fits
e12 = (1/C1 - (x12[0] + x12[1]*t1))
e22 = (1/C2 - (x22[0] + x22[1]*t2))
e32 = (1/C3 - (x32[0] + x32[1]*t3))
RSS12 = sum(e12**2)
RSS22 = sum(e22**2)
RSS32 = sum(e32**2)
TSS12 = sum(((1/C1) - np.mean(1/C1))**2)
TSS22 = sum(((1/C2) - np.mean(1/C2))**2)
TSS32 = sum(((1/C3) - np.mean(1/C3))**2)
R2s1 = 1 - RSS12/TSS12
R2s2 = 1 - RSS22/TSS22
R2s3 = 1 - RSS32/TSS32

#three stacked panels: raw C(t), first-order check ln C(t), second-order check 1/C(t)
fig = figure(figsize=(10, 10))

ax1 = fig.add_subplot(311)
ax1.plot(t1, C1*1e3, 'b*', markersize=10, label='Reaction 1')
ax1.plot(t2, C2*1e3, 'r*', markersize=10, label='Reaction 2')
ax1.plot(t3, C3*1e3, 'g*', markersize=10, label='Reaction 3')
ax1.grid(True)
ax1.legend(loc=0)
ax1.set_xlabel('Time (s)')
ax1.set_ylabel('Concentration (mM)')
ax1.set_title('Concentration vs Time')

#do not rebind ax2/ax3 to plot()'s return value (a list of Line2D, not an Axes)
ax2 = fig.add_subplot(312)
ax2.plot(t1, np.log(C1), 'bo', markersize=10, label='Reaction 1')
ax2.plot(t2, np.log(C2), 'ro', markersize=10, label='Reaction 2')
ax2.plot(t3, np.log(C3), 'go', markersize=10, label='Reaction 3')
ax2.grid(True)
ax2.legend(loc=0)
ax2.set_xlabel('Time (s)')
ax2.set_ylabel('log(Concentration)')
ax2.set_title('Log(Concentration) vs. Time')

ax3 = fig.add_subplot(313)
ax3.plot(t1, 1/(C1), 'bo', markersize=10, label='Reaction 1')
ax3.plot(t2, 1/(C2), 'ro', markersize=10, label='Reaction 2')
ax3.plot(t3, 1/(C3), 'go', markersize=10, label='Reaction 3')
ax3.grid(True)
ax3.legend(loc=0)
ax3.set_xlabel('Time (s)')
ax3.set_ylabel('1/Concentration (1/M)')
ax3.set_title('1/Concentration vs. Time')

fig.tight_layout()

print('Coefficient of determination for first order Reaction 1 regression is', '%.3f' % R211)
print('Coefficient of determination for first order Reaction 2 regression is', '%.3f' % R221)
print('Coefficient of determination for first order Reaction 3 regression is', '%.3f' % R231)

print('Coefficient of determination for second order Reaction 1 regression is', '%.3f' % R2s1)
print('Coefficient of determination for second order Reaction 2 regression is', '%.3f' % R2s2)
print('Coefficient of determination for second order Reaction 3 regression is', '%.3f' % R2s3)
Coefficient of determination for first order Reaction 1 regression is 0.998
Coefficient of determination for first order Reaction 2 regression is 0.998
Coefficient of determination for first order Reaction 3 regression is 0.996
Coefficient of determination for second order Reaction 1 regression is 0.968
Coefficient of determination for second order Reaction 2 regression is 0.957
Coefficient of determination for second order Reaction 3 regression is 0.974

Kinetics Analysis

In [75]:
#calculation of pseudo first order constants: slope of ln(C) vs t is -k_ps
k1ps = -1*x11[1]
k2ps = -1*x21[1]
k3ps = -1*x31[1]
COH1 = 0.025 #OH- concentrations for the three reactions, M
COH2 = 0.1
COH3 = 0.1
#relative volumetric (0.04/25) and stock (0.1/100) uncertainties propagated into COH1
errCOH1 = COH1 * ((0.04/25) + (0.1/100))

#calculate order n in OH- from reaction pairs (1,2) and (1,3):
#k_ps = k * C_OH^n  =>  n = ln(k1ps/k2ps) / ln(COH1/COH2)
n1 = np.log(k1ps/k2ps)/np.log(COH1/COH2)
errn1num = ((err1/abs(k1ps)) + (err2/abs(k2ps)))  #relative error of the k_ps ratio
errn1denum = (errCOH1/COH1)                       #relative error of COH1
#NOTE(review): the second term divides by the ratio COH1/COH2 rather than
#|ln(COH1/COH2)| — confirm this is the intended propagation formula
errn1 = n1 * ((errn1num)/abs((np.log(k1ps/k2ps))) + ((errn1denum)/(COH1/COH2)))

n2 = np.log(k1ps/k3ps)/np.log(COH1/COH3)
errn2num = ((err1/abs(k1ps)) + (err3/abs(k3ps)))
errn2denum = (errCOH1/COH1)
errn2 = n2 * ((errn2num)/abs((np.log(k1ps/k3ps))) + ((errn2denum)/(COH1/COH3)))

n = 0.5*(n1 + n2)     #average of the two order estimates
errn = errn1 + errn2  #conservative (additive) combined uncertainty
print('%.5f' % n, 'is the value of n and its uncertainty is', '%.5f' % errn)

#calculate true rate constants from pseudo constants: k = k_ps / C_OH^n
k1 = k1ps/(COH1**n)
k2 = k2ps/(COH2**n)
k3 = k3ps/(COH3**n)
#the original factor ((COH1**n)*n*(errCOH1/COH1))/(COH1**n) cancels to n*(errCOH1/COH1)
k1err = k1*(err1/k1ps + n*(errCOH1/COH1))
#NOTE(review): k2err/k3err omit the C_OH uncertainty term that k1err includes —
#confirm whether COH2/COH3 are treated as exact
k2err = err2 / (COH2**n)
k3err = err3 / (COH3**n)
kav = (1/3)*(k1 + k2 + k3)  #mean rate constant (computed but not printed below)
print('The pseudo rate constants are', '%.5f' % k1ps, '%.5f' % k2ps, '%.5f' % k3ps, ' and their errors are', '%.7f' % err1, '%.7f' % err2, '%.7f' % err3, 'respectively')
print('The rate constants are', '%.5f' % k1, '%.5f' % k2, '%.5f' % k3, ' and their errors are', '%.7f' % k1err, '%.7f' % k2err, '%.7f' % k3err, 'respectively')
0.79546 is the value of n and its uncertainty is 0.12221
The pseudo rate constants are 0.00271 0.00890 0.00749  and their errors are 0.0000515 0.0003794 0.0004934 respectively
The rate constants are 0.05098 0.05558 0.04676  and their errors are 0.0010733 0.0023688 0.0030807 respectively
In [ ]:
 
In [ ]:
 
In [ ]: