# GitHub Repository: braverock/portfolioanalytics
# Path: blob/master/sandbox/FFEV/HermiteGrid_CVaR_Recursion.R
# This script illustrates the discrete Newton recursion to process views on CVaR according to Entropy Pooling
# This script complements the article
# "Fully Flexible Extreme Views"
# by A. Meucci, D. Ardia, S. Keel
# available at www.ssrn.com
# The most recent version of this code is available at
# MATLAB Central - File Exchange

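# Note (added): this script relies on objects and helper functions that are not defined in
# this file; they are assumed to be provided by the accompanying FFEV / entropy-pooling code:
#   ghqx                  - column of Gauss-Hermite quadrature nodes used to build the grid X
#   integrateSubIntervals - prior probability mass of each grid node from the market cdf
#   normalizeProb         - rescales a probability vector so it sums to one
#   EntropyProg           - entropy-pooling optimizer; returns the posterior probabilities in
#                           $p_ and the attained relative entropy in $optimizationPerformance$ml
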
# Prior market model (normal) on grid
emptyMatrix = matrix( nrow=0 , ncol=0 )
market.mu = 0.0
market.sig2 = 1.0
market.pdf = function(x) dnorm( x , mean = market.mu , sd = sqrt(market.sig2) )
market.cdf = function(x) pnorm( x , mean = market.mu , sd = sqrt(market.sig2) )
market.rnd = function(x) rnorm( x , mean = market.mu , sd = sqrt(market.sig2) )
market.inv = function(x) qnorm( x , mean = market.mu , sd = sqrt(market.sig2) )
market.VaR95 = market.inv(0.05)
market.CVaR95 = integrate( function( x ) ( x * market.pdf( x ) ), -100, market.VaR95)$val / 0.05

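# Sanity check (added; not part of the original script): for a normal prior the 95% CVaR has
# the closed form  E[ X | X <= VaR95 ] = mu - sigma * dnorm( qnorm(0.05) ) / 0.05,
# about -2.063 for the standard normal, which should agree with the numerical integral above.
check.CVaR95 = market.mu - sqrt( market.sig2 ) * dnorm( qnorm( 0.05 ) ) / 0.05
stopifnot( abs( check.CVaR95 - market.CVaR95 ) < 1e-4 )
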
tmp = ( ghqx - min( ghqx ) )/( max( ghqx ) - min( ghqx ) ) # rescale GH zeros so they belong to [0,1]
epsilon = 1e-10
Lower = market.inv( epsilon )
Upper = market.inv( 1 - epsilon )
X = Lower + tmp * ( Upper - Lower ) # rescale mesh

p = integrateSubIntervals( X, market.cdf )
p = normalizeProb( p )
J = nrow( X )

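# Added sketch (an assumption about the helpers above, not the FFEV implementation):
# integrateSubIntervals() is taken to give each node the cdf mass of the sub-interval around
# it (assuming X is sorted in increasing order), and normalizeProb() to rescale the result so
# it sums to one; roughly:
midpts = ( X[-1] + X[-length(X)] ) / 2
p.sketch = diff( c( market.cdf(Lower), market.cdf(midpts), market.cdf(Upper) ) )
p.sketch = p.sketch / sum(p.sketch)   # analogue of p above
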
# Entropy posterior from extreme view on CVaR: brute-force approach

# view of the analyst
view.CVaR95 = -3.0

# Iterate over different VaR95 levels
nVaR95 = 100
VaR95 = seq(view.CVaR95, market.VaR95, len=nVaR95)
p_ = matrix(NaN, nrow = J, ncol = nVaR95 )
s_ = matrix(NaN, nrow = nVaR95, ncol = 1 )
KLdiv = matrix(NaN, nrow = nVaR95, ncol = 1)

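# Added note (assuming the EntropyProg(p, A, b, Aeq, beq) argument order, with A, b the
# inequality and Aeq, beq the equality constraints): each call below imposes
#   sum of tail probabilities   <=  0.05                 (tail = grid points with X <= VaR95[i])
#   sum of all probabilities     =  1
#   sum over tail of X * prob    =  0.05 * view.CVaR95   (pins the tail expectation to the CVaR view)
# and returns the minimum relative-entropy posterior consistent with these constraints.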
for ( i in 1:nVaR95 ) {
  idx = as.matrix( X <= VaR95[i] )
  s_[i] = sum(idx)
  posteriorEntropy = EntropyProg(p, t( idx ), as.matrix( 0.05 ), rbind( rep(1, J), t( idx * X ) ), rbind( 1, 0.05 * view.CVaR95 ) )
  p_[,i] = posteriorEntropy$p_
  KLdiv[i] = posteriorEntropy$optimizationPerformance$ml
}

# Display results
# KL divergence across candidate VaR95 levels; mark the minimum-entropy solution
plot( s_, KLdiv, xlab = "s", ylab = "KL divergence" )
dummy = min( KLdiv )
idxMin = which.min( KLdiv )
points( s_[idxMin], KLdiv[idxMin], pch = 19 )

# minimum-entropy posterior (points) against the normalized prior (line),
# with the prior and view CVaR levels marked at zero
tmp = p_[, idxMin]
tmp = tmp / sum( tmp )
plot( X, tmp )
x = seq(min(X), max(X), len = J)
tmp = market.pdf(x)
tmp = tmp / sum(tmp)
lines(x, tmp)
points(market.CVaR95, 0)
points(view.CVaR95, 0)

# Entropy posterior from extreme view on CVaR: Newton-Raphson approach

s = emptyMatrix

# initial value
idx = as.matrix( cumsum(p) <= 0.05 )
s[1] = sum(idx)
posteriorEntropy = EntropyProg(p, t( idx ), as.matrix( 0.05 ), rbind( rep(1, J), t( idx * X ) ), rbind( 1, 0.05 * view.CVaR95) )
KLdiv = as.matrix( posteriorEntropy$optimizationPerformance$ml )
p_ = posteriorEntropy$p_

# iterate
doStop = 0
i = 1
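
# Added note: the loop below treats the relative entropy E(s) as a function of the integer
# s = number of grid points in the 5% tail and takes a discrete Newton step,
#   s_new = round( s - DE / D2E ),
# where DE and D2E are first and second finite differences of E evaluated at s, s+1, s+2.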
while ( !doStop ) {
  i = i + 1

  idx = cbind( matrix(1, 1, s[i - 1] ), matrix(0, 1, J - s[i-1] ) )
  posteriorEntropy1 = EntropyProg(p, idx, as.matrix( 0.05 ), rbind( matrix(1, 1, J), t( t(idx) * X) ), rbind( 1, 0.05 * view.CVaR95 ) )
  # [dummy, KLdiv_s] = optimizeEntropy(p, [idx'; (idx .* X)'], [0.05; 0.05 * view.CVaR95], [ones(1, J); X'], [1; view.mu]);

  idx = cbind( matrix(1, 1, s[i - 1] + 1 ), matrix(0, 1, J - s[i - 1] - 1) )
  posteriorEntropy2 = EntropyProg(p, idx, as.matrix( 0.05 ), rbind( matrix(1, 1, J), t( t(idx) * X) ), rbind( 1, 0.05 * view.CVaR95 ) )
  # [dummy, KLdiv_s1] = optimizeEntropy(p, [idx'; (idx .* X)'], [0.05; 0.05 * view.CVaR95], [ones(1, J); X'], [1; view.mu]);

  idx = cbind( matrix(1, 1, s[i - 1] + 2 ), matrix(0, 1, J - s[i - 1] - 2) )
  posteriorEntropy3 = EntropyProg(p, idx, as.matrix( 0.05 ), rbind( matrix(1, 1, J), t( t(idx) * X) ), rbind( 1, 0.05 * view.CVaR95 ) )
  # [dummy, KLdiv_s2] = optimizeEntropy(p, [idx'; (idx .* X)'], [0.05; 0.05 * view.CVaR95], [ones(1, J); X'], [1; view.mu]);

  # first difference
  DE = posteriorEntropy2$optimizationPerformance$ml - posteriorEntropy1$optimizationPerformance$ml

  # second difference
  D2E = posteriorEntropy3$optimizationPerformance$ml - 2 * posteriorEntropy2$optimizationPerformance$ml + posteriorEntropy1$optimizationPerformance$ml

  # optimal s
  s = rbind( s, round( s[i - 1] - (DE / D2E) ) )

  tmp = emptyMatrix
  idx = cbind( matrix( 1, 1, s[i] ), matrix( 0, 1, J - s[i] ) )
  tempEntropy = EntropyProg(p, idx, as.matrix( 0.05 ), rbind( matrix(1, 1, J), t( t(idx) * X) ), rbind( 1, 0.05 * view.CVaR95 ) )
  # [tmp.p_, tmp.KLdiv] = optimizeEntropy(p, [idx'; (idx .* X)'], [0.05; 0.05 * view.CVaR95], [ones(1, J); X'], [1; view.mu]);
  p_ = cbind( p_, tempEntropy$p_ )
  KLdiv = rbind( KLdiv, tempEntropy$optimizationPerformance$ml ) #ok<*AGROW>

  # if change in KLdiv less than one percent, stop
  if( abs( ( KLdiv[i] - KLdiv[i - 1] ) / KLdiv[i - 1] ) < 0.01 ) { doStop = 1 }
}

# Display results

N = length(s)

# KL divergence across the Newton iterations
plot(1:N, KLdiv, xlab = "iteration", ylab = "KL divergence")

# final posterior (points) against the normalized prior (line),
# with the prior and view CVaR levels marked at zero
x = seq(min(X), max(X), len = J)
tmp = market.pdf(x)
tmp = tmp / sum(tmp)
plot( t( X ), p_[, ncol(p_)] )
lines( x, tmp )
points( market.CVaR95, 0.0 )
points( view.CVaR95, 0.0 )

# zoom here: posteriors from the first two iterations and the final one against the prior
plot( t( X ), p_[, ncol(p_)] )
points( t( X ), p_[, 1] )
points( t( X ), p_[, 2] )
lines( x, tmp )
points( market.CVaR95, 0 )
points( view.CVaR95, 0 )