# Path: blob/master/sandbox/FFEV/HermiteGrid_CaseStudy.R
# This script estimates the prior of a hedge fund return and processes extreme
# views on CVaR according to Entropy Pooling.
# This script complements the article
# "Fully Flexible Extreme Views"
# by A. Meucci, D. Ardia, S. Keel
# available at www.ssrn.com
# The most recent version of this code is available at
# MATLAB Central - File Exchange
#
# IMPORTANT - This script is about the methodology, not the input data,
# which has been modified.
#
# NOTE(review): the following names are defined elsewhere in the package and
# are not visible in this file: data, ghqx (Gauss-Hermite quadrature zeros),
# kernelbw, kernelpdf, kernelcdf, kernelinv, integrateSubIntervals,
# normalizeProb, EntropyProg.

# Returns scaled to percent; presumably column 2 of `data` holds the raw
# return series -- confirm against the data-loading code.
xi <- as.matrix(100 * data[, 2])
n  <- nrow(xi)

# Kernel bandwidth for the density estimate
bw <- kernelbw(xi)

# Exponential-decay observation weights with a half-life of n / 2 periods;
# the column reversal puts the largest weight on the most recent observation,
# and the division normalizes the weights to sum to one.
lambda <- log(2) / (n / 2)
wi <- as.matrix(exp(-lambda * (n - seq(from = n, to = 1))))
wi <- as.matrix(apply(wi, 2, rev) / sum(wi))

# Prior market model: weighted kernel density estimate (pdf/cdf/quantile)
market.mu  <- mean(xi)
market.pdf <- function(x) kernelpdf(x, xi, bw, wi)
market.cdf <- function(x) kernelcdf(x, xi, bw, wi)
market.inv <- function(x) kernelinv(x, xi, bw, wi)

# 95% VaR is the 5% quantile of the prior; CVaR95 is the tail conditional
# expectation E[x | x <= VaR95]. -100 serves as a "far left" lower bound for
# the tail integral (returns are in percent). Use the full component name
# `value`: the original `$val` relied on partial matching of list names.
market.VaR95  <- market.inv(c(0.05))
market.CVaR95 <- integrate(function(x) x * market.pdf(x),
                           -100, market.VaR95)$value / 0.05

# Numerical (Gauss-Hermite grid) prior: rescale the GH zeros so they belong
# to [0, 1], then map them onto the (epsilon, 1 - epsilon) quantile range.
tmp     <- (ghqx - min(ghqx)) / (max(ghqx) - min(ghqx))
epsilon <- 1e-10
Lower   <- market.inv(epsilon)
Upper   <- market.inv(1 - epsilon)
X <- Lower + tmp * (Upper - Lower)  # rescaled mesh

p <- integrateSubIntervals(X, market.cdf)  # prior probabilities on the mesh
p <- normalizeProb(p)
J <- nrow(X)

# Entropy posterior from an extreme view on the mean and on CVaR:
# both views sit one (percentage) point below their prior values.
view.mu     <- mean(xi) - 1.0
view.CVaR95 <- market.CVaR95 - 1.0

# Newton-Raphson search over s, the number of grid points in the 5% tail.
# Initial guess: the points whose cumulative prior probability is <= 5%.
idx <- as.matrix(cumsum(p) <= 0.05)
s   <- sum(idx)
posteriorEntropy <- EntropyProg(p, t(idx), as.matrix(0.05),
                                rbind(rep(1, J), t(X), t(idx * X)),
                                rbind(1, view.mu, 0.05 * view.CVaR95))
KLdiv <- as.matrix(posteriorEntropy$optimizationPerformance$ml)
p_    <- posteriorEntropy$p_

i <- 1
repeat {
  i <- i + 1

  # Entropy objective at s, s + 1 and s + 2 tail points, used to build
  # finite-difference approximations of the first and second derivatives.
  idx <- cbind(matrix(1, 1, s[i - 1]), matrix(0, 1, J - s[i - 1]))
  posteriorEntropy1 <- EntropyProg(p, idx, as.matrix(0.05),
                                   rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
                                   rbind(1, view.mu, 0.05 * view.CVaR95))

  idx <- cbind(matrix(1, 1, s[i - 1] + 1), matrix(0, 1, J - s[i - 1] - 1))
  posteriorEntropy2 <- EntropyProg(p, idx, as.matrix(0.05),
                                   rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
                                   rbind(1, view.mu, 0.05 * view.CVaR95))

  idx <- cbind(matrix(1, 1, s[i - 1] + 2), matrix(0, 1, J - s[i - 1] - 2))
  posteriorEntropy3 <- EntropyProg(p, idx, as.matrix(0.05),
                                   rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
                                   rbind(1, view.mu, 0.05 * view.CVaR95))

  # First difference
  DE <- posteriorEntropy2$optimizationPerformance$ml -
        posteriorEntropy1$optimizationPerformance$ml

  # Second difference
  D2E <- posteriorEntropy3$optimizationPerformance$ml -
         2 * posteriorEntropy2$optimizationPerformance$ml +
         posteriorEntropy1$optimizationPerformance$ml

  # Newton-Raphson update for the optimal s
  s <- c(s, round(s[i - 1] - (DE / D2E)))

  # Re-solve the entropy program at the updated s and record the result
  idx <- cbind(matrix(1, 1, s[i]), matrix(0, 1, J - s[i]))
  tempEntropy <- EntropyProg(p, idx, as.matrix(0.05),
                             rbind(matrix(1, 1, J), t(X), t(t(idx) * X)),
                             rbind(1, view.mu, 0.05 * view.CVaR95))
  p_    <- cbind(p_, tempEntropy$p_)
  KLdiv <- rbind(KLdiv, tempEntropy$optimizationPerformance$ml)

  # If the relative change in KL divergence is less than one percent, stop
  if (abs((KLdiv[i] - KLdiv[i - 1]) / KLdiv[i - 1]) < 0.01) break
}

# Prior probabilities and final posterior probabilities over the grid
plot(t(X), p)
plot(t(X), p_[, ncol(p_)])