Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
braverock
GitHub Repository: braverock/portfolioanalytics
Path: blob/master/sandbox/localsearch.R
1433 views
1
###############################################################################
2
# Functions to use the Rdonlp2 library to perform local maxima/minima search
3
# to refine the optimal criteria identified by brute force search.
4
#
5
# These methods take output from the functions in optimizer.R and then
6
# perform sequential quadratic programming to find better optimal portfolios.
7
###############################################################################
8
9
10
localsearch = function(R, weightgrid, from, to, names.input, names.output, names.assets, cMin,
criteria=c( "StdDev" , "SR.StdDev" ,
"GVaR", "SR.GVaR", "mVaR", "SR.mVaR",
"GES", "SR.GES", "mES", "SR.mES", ...), columns.crit, p=0.95,
lowerbound = NA, upperbound = NA, EW=F)
{ # @author Kris Boudt and Brian G. Peterson

# Description:
#
# Refines brute-force (grid-search) optimal portfolios by local SQP search.
# Performs a loop over the names.input .csv files (as many files as there are strings
# in names.input). These files must be such that each row corresponds to the portfolio
# weight vector in the respective row of weightgrid. Each column corresponds to an
# optimization criterion given in correct order in `criteria'.
#
# @todo
#
# R               matrix holding historical returns
#
# weightgrid      matrix; each row contains one weighting vector, same number of
#                 columns as your returns
#
# names.input     vector holding the names of the .csv files to be read
#                 (one per estimation period; also used as row labels of the output)
#
# names.output    vector of file names (one per criterion) for the output .csv files
#
# names.assets    column labels for the output weight files
#
# from, to        vectors defining the estimation sample for each period:
#                 period `per' uses R[from[per]:to[per], ]
#
# criteria        the criteria to be optimized (see switch() below for valid names)
#
# columns.crit    the columns of the input .csv files in which the criteria are located
#
# cMin            the number of grid-search candidates used as SQP starting points
#
# p               confidence level for the VaR/ES criteria (alpha = 1 - p)
#
# lowerbound, upperbound  vectors giving for each asset the lower and upper bounds
#                 (NA means unbounded: -Inf / +Inf)
#
# EW              optimization conditional on optimized portfolio return > EW return
#                 intuition: max modSharpe s.t. Return>EW is a good objective function
#                 because it says that an investor will prefer an optimizer portfolio
#                 only when they have a view that is better than the neutral (EW) view
#
# NOTE(review): the default EW=F uses the reassignable shorthand F; FALSE would be
# safer, left unchanged here to avoid touching the interface.
#
# Return:
# For each criterion a .csv file is saved holding for each period in `names.input'
# the best weight vector.

print("Local optimization of portfolio risk using a SQP solver")
print("Constraints: sum of weights=1 and user can specify bound constraints")
print("--------------------------------------------------------------------")
print("");

# coerce inputs to plain matrices (checkData is provided by PerformanceAnalytics)
R = checkData(R, method="matrix");
weightgrid = checkData(weightgrid, method="matrix");

# Load the function donlp2 (SQP solver from the Rdonlp2 package)
library("Rdonlp2")

# Create a matrix that will hold for each method and each period the best weights

cAssets = ncol(weightgrid);
cPeriods = length(names.input);
cCriteria = length(criteria);

out = matrix( rep(0, cCriteria*cPeriods*cAssets) , ncol= cAssets );
# first cPeriods rows correspond to criteria[1] and so on

# NA bounds mean "unbounded" for the corresponding side
if(any(is.na(lowerbound))){ lowerbound = rep(-Inf,cAssets) }
if(any(is.na(upperbound))){ upperbound = rep(Inf,cAssets) }

# downside risk tail probability
alpha = 1-p;

# Linear constraints for donlp2: without the EW condition the only linear
# constraint is full investment (sum of weights == 1). With EW, the constraint
# matrix A is rebuilt inside the period loop because it depends on mu.
if(EW){EWweights = matrix(rep(1,cAssets)/cAssets,ncol=1)}else{
A=matrix(rep(1,cAssets),nrow=1,ncol=cAssets);lin.lower=c(1);lin.upper=c(1);}

for( per in c(1:cPeriods) ){

print("-----------New estimation period ends on------------------------")
print(names.input[per]);
print("----------------------------------------------------------------")
print("")
# Estimation of the return mean vector, covariance, coskewness and cokurtosis
# matrices on the in-sample window for this period
in.sample.R = R[from[per]:to[per],] ; in.sample.T = dim(in.sample.R)[1];
mu = apply( in.sample.R , 2 , 'mean' )
sigma = cov(in.sample.R)
M3 = matrix(rep(0,cAssets^3),nrow=cAssets,ncol=cAssets^2)
M4 = matrix(rep(0,cAssets^4),nrow=cAssets,ncol=cAssets^3)
for(t in c(1:in.sample.T))
{
centret = as.numeric(matrix(in.sample.R[t,]-mu,nrow=cAssets,ncol=1))
# co-moment tensors flattened via Kronecker products (%x%)
M3 = M3 + ( centret%*%t(centret) )%x%t(centret)
M4 = M4 + ( centret%*%t(centret) )%x%t(centret)%x%t(centret)
}
M3 = 1/in.sample.T*M3
M4 = 1/in.sample.T*M4

# grid-search criterion values for this period: one row per weight vector,
# one column per criterion (selected through columns.crit)
Y = read.csv( file = paste( names.input[per],".csv",sep=""),
header = TRUE, sep = ",", na.strings = "NA", dec = ".")
Y = Y[1:dim(weightgrid)[1],columns.crit]
c=0;

# with the EW condition, add the linear constraint t(w) %*% mu >= EW return
if(EW){
A=matrix( rbind( rep(1,cAssets), mu) ,nrow=2,ncol=cAssets);
lin.lower=c(1,t(EWweights)%*%mu);lin.upper=c(1,Inf);}

for( c in c(1:cCriteria) ){
criterion = criteria[c]
print("-----------New criterion----------------------")
print(criterion);
print("----------------------------------------------")
print("")
# Each switch branch: (1) rank the grid by the criterion and take the cMin
# best rows as SQP starting points; (2) sanity-check that recomputing the
# criterion from the first weight vector matches the stored value (within 5%);
# (3) optionally start from EW weights; (4) run donlp2 from each start and
# keep the best solver result whose termination message indicates success.
# NOTE(review): `break' inside a branch exits the criteria for-loop entirely,
# skipping the remaining criteria for this period — confirm that is intended.
switch( criterion,
StdDev = {
# to be minimized
best=sort( Y[,c],index.return=T,decreasing=F)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

# portfolio standard deviation
StdDevfun = function(w){
return( sqrt( t(w)%*%sigma%*%w ))
}
if( abs( ( global.best - StdDevfun(global.best.weight) )/StdDevfun(global.best.weight) )>0.05 ){
print("error StdDev definition"); break;}
if(EW){
localoptim = donlp2( par=EWweights , fn=StdDevfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
# accept only terminations donlp2 reports as (near-)successful
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){ global.best = localoptim$fx ; global.best.weight = localoptim$par } } }
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=StdDevfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par };
}#end loop over local minima
},
SR.StdDev = {
# to be maximized; done by minimizing the negated Sharpe ratio
best=sort( Y[,c],index.return=T,decreasing=T)$ix;
bestweights = weightgrid[best[1:cMin] , ];
# global.best is stored on the negated (minimization) scale here
global.best = -Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

NegSR.StdDevfun = function(w){
return( - (t(w)%*%mu) / sqrt( t(w)%*%sigma%*%w ) )
}
if( abs( ( global.best - NegSR.StdDevfun(global.best.weight) )/NegSR.StdDevfun(global.best.weight) )>0.05 ){
print("error SR.StdDev definition"); break; }
if(EW){
localoptim = donlp2( par=EWweights , fn=NegSR.StdDevfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }}}
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=NegSR.StdDevfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
# NOTE(review): the EW branch above compares localoptim$fx < global.best and
# stores fx, while this loop compares -localoptim$fx > global.best and stores
# -fx. The two use opposite sign conventions for global.best — one of them
# looks inconsistent; the same pattern recurs in all SR.* branches. Verify.
if( -localoptim$fx > global.best){
global.best = -localoptim$fx ; global.best.weight = localoptim$par };
}#end loop over local minima
},
GVaR = {
# minimization criterion: Gaussian Value-at-Risk
best=sort( Y[,c],index.return=T,decreasing=F)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

# GVaR = -mu_p - z_alpha * sigma_p (qnorm(alpha) < 0 for alpha < 0.5)
GVaRfun = function(w){
return (- (t(w)%*%mu) - qnorm(alpha)*sqrt( t(w)%*%sigma%*%w ) )
}
if( abs( ( global.best - GVaRfun(global.best.weight) )/GVaRfun(global.best.weight) )>0.05 ){
print("error GVaR definition");break;}
if(EW){
localoptim = donlp2( par=EWweights , fn=GVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }}}
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=GVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
if( localoptim$fx < global.best){
global.best = localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
SR.GVaR = {
# to be maximized: Sharpe-type ratio with GVaR as denominator
best=sort( Y[,c],index.return=T,decreasing=T)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = -Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

NegSR.GVaRfun = function(w){
GVaR = - (t(w)%*%mu) - qnorm(alpha)*sqrt( t(w)%*%sigma%*%w )
return( - (t(w)%*%mu) / GVaR )
}
if( abs( ( global.best - NegSR.GVaRfun(global.best.weight) )/NegSR.GVaRfun(global.best.weight) )>0.05 ){
print("error SR GVaR definition");break;}
if(EW){
localoptim = donlp2( par=EWweights , fn=NegSR.GVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par } }}
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=NegSR.GVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
# see NOTE(review) in SR.StdDev about the sign convention mismatch
if( -localoptim$fx > global.best){
global.best = -localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
mVaR = {
# minimization criterion: modified (Cornish-Fisher) Value-at-Risk
best=sort( Y[,c],index.return=T,decreasing=F)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

mVaRfun = function(w){
# portfolio moments from the co-moment tensors: pm2 variance, pm3 third, pm4 fourth
pm4 = t(w)%*%M4%*%(w%x%w%x%w) ; pm3 = t(w)%*%M3%*%(w%x%w) ; pm2 = t(w)%*%sigma%*%w ;
skew = pm3 / pm2^(3/2);
exkurt = pm4 / pm2^(2) - 3; z = qnorm(alpha);
# Cornish-Fisher expansion of the alpha-quantile
h = z + (1/6)*(z^2 -1)*skew
h = h + (1/24)*(z^3 - 3*z)*exkurt - (1/36)*(2*z^3 - 5*z)*skew^2;
return (- (t(w)%*%mu) - h*sqrt( pm2 ) )
}
if( abs( ( global.best - mVaRfun(global.best.weight) )/mVaRfun(global.best.weight) )>0.05 ){
print("error mVaR definition");break;}
if(EW){
localoptim = donlp2( par=EWweights , fn=mVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par } } }
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=mVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
if( localoptim$fx < global.best){
global.best = localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
SR.mVaR = {
# to be maximized: Sharpe-type ratio with modified VaR as denominator
best=sort( Y[,c],index.return=T,decreasing=T)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = -Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

NegSR.mVaRfun = function(w){
pm4 = t(w)%*%M4%*%(w%x%w%x%w) ; pm3 = t(w)%*%M3%*%(w%x%w) ; pm2 = t(w)%*%sigma%*%w ;
skew = pm3 / pm2^(3/2);
exkurt = pm4 / pm2^(2) - 3; z = qnorm(alpha);
h = z + (1/6)*(z^2 -1)*skew
h = h + (1/24)*(z^3 - 3*z)*exkurt - (1/36)*(2*z^3 - 5*z)*skew^2;
mVaR = (- (t(w)%*%mu) - h*sqrt( pm2 ) )
return( - (t(w)%*%mu) / mVaR )
}
if( abs( ( global.best - NegSR.mVaRfun(global.best.weight) )/NegSR.mVaRfun(global.best.weight) )>0.05 ){
print("error SR mVaR definition");break}
if(EW){
localoptim = donlp2( par=EWweights , fn=NegSR.mVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }}}

for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=NegSR.mVaRfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
# see NOTE(review) in SR.StdDev about the sign convention mismatch
if( -localoptim$fx > global.best){
global.best = -localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
GES = {
# minimization criterion: Gaussian Expected Shortfall
best=sort( Y[,c],index.return=T,decreasing=F)$ix;
bestweights = weightgrid[best[1:cMin], ];
global.best = Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

GESfun = function(w){
return (- (t(w)%*%mu) + dnorm(qnorm(alpha))*sqrt(t(w)%*%sigma%*%w)/alpha )
}
if( abs( ( global.best - GESfun(global.best.weight) )/GESfun(global.best.weight) )>0.05 ){
print("error GES definition"); break; }
if(EW){
localoptim = donlp2( par=EWweights , fn=GESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }}}
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=GESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
if( localoptim$fx < global.best){
global.best = localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
SR.GES = {
# to be maximized: Sharpe-type ratio with Gaussian ES as denominator
best=sort( Y[,c],index.return=T,decreasing=T)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = -Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

NegSR.GESfun = function(w){
GES = - (t(w)%*%mu) + dnorm(qnorm(alpha))*sqrt(t(w)%*%sigma%*%w)/alpha
return( - (t(w)%*%mu) / GES )
}
if( abs( ( global.best - NegSR.GESfun(global.best.weight) )/NegSR.GESfun(global.best.weight) )>0.05 ){
print("error SR GES definition"); break; }
if(EW){
localoptim = donlp2( par=EWweights , fn=NegSR.GESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }}}
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=NegSR.GESfun, par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
# see NOTE(review) in SR.StdDev about the sign convention mismatch
if( -localoptim$fx > global.best){
global.best = -localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
mES = {
# minimization criterion: modified (Cornish-Fisher) Expected Shortfall
best=sort( Y[,c],index.return=T,decreasing=F)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

# Ipower: partial expectation integrals of h^power * dnorm used by the
# second-order Cornish-Fisher ES expansion; even and odd powers recurse
# differently, hence the two branches.
Ipower = function(power,h){
fullprod = 1;
if( (power%%2)==0 ) #even number: number mod is zero
{
pstar = power/2;
for(j in c(1:pstar)){
fullprod = fullprod*(2*j) }
I = fullprod*dnorm(h);

for(i in c(1:pstar) )
{
prod = 1;
for(j in c(1:i) ){
prod = prod*(2*j) }
I = I + (fullprod/prod)*(h^(2*i))*dnorm(h)
}
}else{
pstar = (power-1)/2
for(j in c(0:pstar) ) {
fullprod = fullprod*( (2*j)+1 ) }
I = -fullprod*pnorm(h);
for(i in c(0:pstar) ){
prod = 1;
for(j in c(0:i) ){
prod = prod*( (2*j) + 1 )}
I = I + (fullprod/prod)*(h^( (2*i) + 1))*dnorm(h) }
}
return(I) }

mESfun = function(w){
pm4 = t(w)%*%M4%*%(w%x%w%x%w) ; pm3 = t(w)%*%M3%*%(w%x%w) ; pm2 = t(w)%*%sigma%*%w ;
skew = pm3 / pm2^(3/2);
exkurt = pm4 / pm2^(2) - 3; z = qnorm(alpha);
h = z + (1/6)*(z^2 -1)*skew
h = h + (1/24)*(z^3 - 3*z)*exkurt - (1/36)*(2*z^3 - 5*z)*skew^2;

# expected value in the alpha-tail, built from the Ipower integrals
E = dnorm(h)
E = E + (1/24)*( Ipower(4,h) - 6*Ipower(2,h) + 3*dnorm(h) )*exkurt
E = E + (1/6)*( Ipower(3,h) - 3*Ipower(1,h) )*skew;
E = E + (1/72)*( Ipower(6,h) -15*Ipower(4,h)+ 45*Ipower(2,h) - 15*dnorm(h) )*(skew^2)
E = E/alpha

# min(-E, h) caps the tail expectation at the quantile so mES >= mVaR
return (- (t(w)%*%mu) - sqrt(pm2)*min(-E,h) )
}
if( abs( ( global.best - mESfun(global.best.weight) )/mESfun(global.best.weight) )>0.05 ){
print("error mES definition"); break; }
if(EW){
localoptim = donlp2( par=EWweights , fn=mESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }} }
for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=mESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
if( localoptim$fx < global.best){
global.best = localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima
},
SR.mES = {
# to be maximized: Sharpe-type ratio with modified ES as denominator
best=sort( Y[,c],index.return=T,decreasing=T)$ix;
bestweights = weightgrid[best[1:cMin] , ];
global.best = -Y[best[1],c] ; global.best.weight = as.numeric(matrix(bestweights[1,],ncol=1));

# duplicate of the Ipower helper defined in the mES branch above
Ipower = function(power,h){
fullprod = 1;
if( (power%%2)==0 ) #even number: number mod is zero
{
pstar = power/2;
for(j in c(1:pstar)){
fullprod = fullprod*(2*j) }
I = fullprod*dnorm(h);

for(i in c(1:pstar) )
{
prod = 1;
for(j in c(1:i) ){
prod = prod*(2*j) }
I = I + (fullprod/prod)*(h^(2*i))*dnorm(h)
}
}else{
pstar = (power-1)/2
for(j in c(0:pstar) ) {
fullprod = fullprod*( (2*j)+1 ) }
I = -fullprod*pnorm(h);
for(i in c(0:pstar) ){
prod = 1;
for(j in c(0:i) ){
prod = prod*( (2*j) + 1 )}
I = I + (fullprod/prod)*(h^( (2*i) + 1))*dnorm(h) }
}
return(I)
}

NegSR.mESfun = function(w){
pm4 = t(w)%*%M4%*%(w%x%w%x%w) ; pm3 = t(w)%*%M3%*%(w%x%w) ; pm2 = t(w)%*%sigma%*%w ;
skew = pm3 / pm2^(3/2);
exkurt = pm4 / pm2^(2) - 3; z = qnorm(alpha);
h = z + (1/6)*(z^2 -1)*skew
h = h + (1/24)*(z^3 - 3*z)*exkurt - (1/36)*(2*z^3 - 5*z)*skew^2;

E = dnorm(h)
E = E + (1/24)*( Ipower(4,h) - 6*Ipower(2,h) + 3*dnorm(h) )*exkurt
E = E + (1/6)*( Ipower(3,h) - 3*Ipower(1,h) )*skew;
E = E + (1/72)*( Ipower(6,h) -15*Ipower(4,h)+ 45*Ipower(2,h) - 15*dnorm(h) )*(skew^2)
E = E/alpha
mES = - (t(w)%*%mu) - sqrt(pm2)*min(-E,h)
return ( - (t(w)%*%mu) / mES )
}
if( abs( ( global.best - NegSR.mESfun(global.best.weight) )/NegSR.mESfun(global.best.weight) )>0.05 ){
print("error SR mES definition"); break; }
if(EW){
localoptim = donlp2( par=EWweights , fn=NegSR.mESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message == "KT-conditions satisfied, no further correction computed") |
(localoptim$message == "computed correction small, regular case") |
(localoptim$message == "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){
if( localoptim$fx < global.best){
global.best = localoptim$fx ; global.best.weight = localoptim$par }} }

for( k in c(1:cMin) ){
localoptim = donlp2( par=bestweights[k,] , fn=NegSR.mESfun , par.upper = upperbound , par.lower = lowerbound,
A=A,lin.lower=lin.lower,lin.upper=lin.upper)
if( (localoptim$message != "KT-conditions satisfied, no further correction computed") &
(localoptim$message != "computed correction small, regular case") &
(localoptim$message != "stepsizeselection: x (almost) feasible, dir. deriv. very small" ) ){next;}
# see NOTE(review) in SR.StdDev about the sign convention mismatch
if( -localoptim$fx > global.best){
global.best = -localoptim$fx; global.best.weight = localoptim$par };
}#end loop over local minima, indexed by k=1,...,cMin
}
)#end function that finds out which criterion to optimize and does the optimization

print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!")
print(c("end optimization of criterion",criterion,"for period",names.input[per]))
print("Local search has improved the objective function from");
print(Y[best[1],c] );
print("to");
print( global.best );

out[ ((c-1)*cPeriods + per), ] = as.vector(global.best.weight)

# with the EW condition, fall back to equal weights when the optimized
# portfolio's in-sample mean return does not beat the EW portfolio's
if ( EW ){
if ( (t(global.best.weight)%*%mu) <= (t(EWweights)%*%mu) ){ out[ ((c-1)*cPeriods + per), ] = as.vector(EWweights) } }
# first cPeriods rows correspond to criteria[1] and so on

}#end loop over optimization criteria; indexed by c=1,...,cCriteria

}#end loop over .csv files containing the years; indexed by per=1,...,cPeriods

# Output save: one .csv per criterion, rows = periods, columns = assets
for( i in c(1:cCriteria) ){
criterion = criteria[i];
output = as.data.frame( out[c( ((i-1)*cPeriods+1) : (i*cPeriods) ),] );
rownames(output) = names.input;
colnames(output) = names.assets;
write.table( output , file = paste(names.output[i],".csv",sep=""),
append = FALSE, quote = TRUE, sep = ",",
eol = "\n", na = "NA", dec = ".", row.names = TRUE,
col.names = TRUE, qmethod = "escape")
}
}
544
545
MonthlyReturns.LocalSearch =
function (R, criteria, from =37, to=120, by=12, method = c("compound") )
{ # @author Brian G. Peterson, Kris Boudt

# Computes the out-of-sample portfolio return series obtained by rebalancing,
# every `by' rows of R, into the optimal weight vectors stored in the
# <criterion>.csv files written by localsearch().
#
# R          data structure of component returns
#
# criteria   vector containing the names of the csv files holding the portfolio
#            weights (one row of weights per rebalancing date)
#
# from, to   monthly returns are computed for R[from:to,]
#
# by         frequency (in rows of R) of changing portfolio weights
#
# method     kept for interface compatibility; both compound and simple
#            returns are always computed and saved
#
# Side effects: saves LSmonthlyportcompoundreturns to "compoundreturns.Rdata"
# and LSmonthlyportsimplereturns to "simplereturns.Rdata".

# Setup:
result=NULL
resultcols=NULL

# data type conditionals
# unwrap a timeSeries before handing off to checkData; inherits() is the
# robust way to test the class (the old class(R)=="timeSeries" comparison
# fails for objects with multiple classes)
if (inherits(R, "timeSeries")) {
R = R@Data
}
R=checkData(R,method="zoo")
# number of rebalancing periods; use `by' instead of a hard-coded 12 so the
# function also works for non-annual rebalancing (identical for by=12)
cRebalancing = ceiling( (to-from+1)/by );

result.compound=c()
result.simple=c()
# Loop over the different optimisation criteria

for(criterion in criteria){
pcompoundreturn=psimplereturn=c();
weights = read.csv( file = paste( criterion,".csv",sep=""),
header = TRUE, sep = ",", na.strings = "NA", dec = ".")
if (ncol(weights) != ncol(R)) stop ("The Weighting Vector and Return Collection do not have the same number of Columns.")
portfoliovalue = 1;
for (row in seq_len(cRebalancing)){
start = from+(row-1)*by;
end = min(from + row*by - 1,to);
wealthindex = cumvalue( R[start:end,], weights=weights[row,] )
n = length(wealthindex)
compoundreturns = portfoliovalue*wealthindex - 1;
pcompoundreturn=c(pcompoundreturn, compoundreturns )
# carry the end-of-period value forward; use the last element rather than
# a hard-coded index 12 so a final partial period does not yield NA
portfoliovalue = portfoliovalue*wealthindex[n]
# period-by-period simple returns from the wealth index, again sized by
# n instead of assuming exactly 12 observations per rebalancing period
if (n > 1) {
simplereturns = c(wealthindex[1], wealthindex[2:n]/wealthindex[1:(n-1)] ) - 1;
} else {
simplereturns = wealthindex[1] - 1;
}
psimplereturn = c( psimplereturn , simplereturns );
}
result.compound = cbind(result.compound, pcompoundreturn)
result.simple = cbind(result.simple, psimplereturn)
}
colnames(result.compound)= colnames(result.simple) = criteria
rownames(result.compound)= rownames(result.simple) = rownames(R)[from:to]
LSmonthlyportcompoundreturns = result.compound
LSmonthlyportsimplereturns = result.simple

save(LSmonthlyportcompoundreturns, file="compoundreturns.Rdata" )
# bug fix: this previously saved LSmonthlyportcompoundreturns again, so
# "simplereturns.Rdata" never actually contained the simple returns
save(LSmonthlyportsimplereturns, file="simplereturns.Rdata" )
}
606
607
cumvalue <- function (R, weights=NULL)
{ # @author Brian G. Peterson,Kris Boudt

# Computes the cumulative wealth index of a portfolio whose initial weights
# are applied, buy-and-hold, to the per-asset wealth indices of R.
#
# R        returns, coerced to zoo via checkData
# weights  a single row of portfolio weights (one column per asset);
#          defaults to an equal-weighted portfolio
#
# Returns: numeric vector with one wealth-index value per row of R.

# Setup:
R=checkData(R,method="zoo")

if (is.null(weights)){
# set up an equal weighted portfolio (row vector: 1 x ncol(R))
weights = t(rep(1/ncol(R), ncol(R)))
}

if (ncol(weights) != ncol(R)) stop ("The Weighting Vector and Return Collection do not have the same number of Columns.")

# per-asset wealth index: cumulative product of gross returns
wealthindex.assets= cumprod( 1+R )

# build a structure for our weighted results
wealthindex.weighted = matrix(nrow=nrow(R),ncol=ncol(R))
colnames(wealthindex.weighted)=colnames(wealthindex.assets)
rownames(wealthindex.weighted)=rownames(wealthindex.assets)

# weight the results; seq_len() instead of 1:ncol() guards against the
# degenerate zero-column case (1:0 would iterate over c(1, 0))
for (col in seq_len(ncol(weights))){
wealthindex.weighted[,col]=weights[,col]*wealthindex.assets[,col]
}
# portfolio wealth index = row-wise sum of the weighted asset indices
wealthindex=apply(wealthindex.weighted,1,sum)

return(wealthindex)
} # end
634
635
###############################################################################
636
# R (http://r-project.org/) Numeric Methods for Optimization of Portfolios
637
#
638
# Copyright (c) 2004-2014 Kris Boudt, Peter Carl and Brian G. Peterson
639
#
640
# This library is distributed under the terms of the GNU Public License (GPL)
641
# for full details see the file COPYING
642
#
643
# $Id$
644
#
645
###############################################################################
646
# $Log: not supported by cvs2svn $
647
# Revision 1.14 2009-09-22 21:22:48 peter
648
# - added licensing details
649
#
650
# Revision 1.13 2008-01-31 13:51:11 kris
651
# Corrected portfolio return calculations
652
#
653
# Revision 1.10 2008/01/22 18:51:15 kris
654
# Corrected mistake in GVaR definition of localsearch.R and added row and columnnames to output
655
#
656
# Revision 1.9 2008/01/21 11:13:39 kris
657
# Fixed mistakes, built in checks that verify compatibility between grid of weights, the criteria values obtained by the grid search and the local search code
658
#
659
# Revision 1.7 2008/01/19 23:53:33 brian
660
# - fix checkData for weightgrid
661
#
662
# Revision 1.6 2008/01/19 16:00:05 kris
663
# *** empty log message ***
664
#
665
# Revision 1.4 2008/01/19 13:40:38 Kris
666
# - add checkData functionality
667
# Revision 1.4 2008/01/17 13:40:38 Kris
668
# - fix some bugs
669
# - check on real data
670
# Revision 1.3 2008/01/16 13:40:38 brian
671
# - add CVS header and footer
672
# - add introduction and Copyright notice
673
#
674
###############################################################################
675