QuantLib: a free/open-source library for quantitative finance
fully annotated source code - version 1.34
binomiallossmodel.hpp
/* -*- mode: c++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */

/*
 Copyright (C) 2014 Jose Aparicio

 This file is part of QuantLib, a free-software/open-source library
 for financial quantitative analysts and developers - http://quantlib.org/

 QuantLib is free software: you can redistribute it and/or modify it
 under the terms of the QuantLib license.  You should have received a
 copy of the license along with this program; if not, please email
 <quantlib-dev@lists.sf.net>. The license is also available online at
 <http://quantlib.org/license.shtml>.

 This program is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 FOR A PARTICULAR PURPOSE.  See the license for more details.
*/

#ifndef quantlib_binomial_loss_model_hpp
#define quantlib_binomial_loss_model_hpp

#include <ql/experimental/credit/basket.hpp>
#include <ql/experimental/credit/constantlosslatentmodel.hpp>
#include <ql/experimental/credit/defaultlossmodel.hpp>
#include <ql/handle.hpp>
#include <algorithm>
#include <numeric>
#include <utility>

namespace QuantLib {

    /*! Binomial Defaultable Basket Loss Model \par
        Models the portfolio loss distribution by approximating it with an
        adjusted binomial: the first two moments of the loss distribution are
        fitted through an adapted binomial approximation. This simple model
        allows for portfolio inhomogeneity at no excessive cost over the
        LHP. \par
        See: \par
        <b>Approximating Independent Loss Distributions with an Adjusted
        Binomial Distribution</b>, Dominic O'Kane, 2007, EDHEC Risk and Asset
        Management Research Centre. \par
        <b>Modelling Single-name and Multi-name Credit Derivatives</b>,
        Chapter 18.5.2, Dominic O'Kane, Wiley Finance, 2008. \par
        The version presented here is adapted to the multifactor case by
        computing a conditional binomial approximation; notice that the
        binomial approximation is stable. This way the model can also be used
        in risk management rather than only in pricing. The copula is also
        left undefined/arbitrary. \par
        LLM: Loss Latent Model template parameter able to model default and
        loss. \par
        The model allows an arbitrary copula, although it was initially
        designed for a Gaussian setup. If these exotic versions were not
        needed, the template parameter could be dropped, but the use of random
        recoveries would then have to be added in some other way.

        \todo untested/wip for the random recovery models.
        \todo integrate with the previously computed probability inversions of
            the cumulative functions.
    */
    template<class LLM>
    class BinomialLossModel : public DefaultLossModel {
    public:
        typedef typename LLM::copulaType copulaType;
        explicit BinomialLossModel(ext::shared_ptr<LLM> copula)
        : copula_(std::move(copula)) {}

    private:
        void resetModel() override {
            /* say there are defaults and these haven't settled... and this is
            the engine to compute them.... is this the wrong place?: */
            attachAmount_ = basket_->remainingAttachmentAmount();
            detachAmount_ = basket_->remainingDetachmentAmount();

            copula_->resetBasket(basket_.currentLink()); // forces interface
        }

    protected:
        /*! Returns the probability of the default loss values given by the
            method lossPoints.
        */
        std::vector<Real> expectedDistribution(const Date& date) const {
            // precompute date-conditional magnitudes:
            std::vector<Real> notionals = basket_->remainingNotionals(date);
            std::vector<Probability> invProbs =
                basket_->remainingProbabilities(date);
            for(Size iName=0; iName<invProbs.size(); iName++)
                invProbs[iName] =
                    copula_->inverseCumulativeY(invProbs[iName], iName);

            return copula_->integratedExpectedValueV(
                [&](const std::vector<Real>& v1) {
                    return lossProbability(date, notionals, invProbs, v1);
                });
        }
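        // Note (a reading of the code above, not additional behaviour): the
        // vector returned by expectedDistribution() is the loss density on the
        // grid produced by lossPoints(), obtained by integrating the
        // market-factor-conditional density lossProbability() against the
        // copula's factor distribution via integratedExpectedValueV().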
        //! attainable loss points this model provides
        std::vector<Real> lossPoints(const Date&) const;
        //! Returns the cumulative full loss distribution
        std::map<Real, Probability> lossDistribution(const Date& d) const override;
        //! Loss level for this percentile
        Real percentile(const Date& d, Real percentile) const override;
        Real expectedShortfall(const Date& d, Real percentile) const override;
        Real expectedTrancheLoss(const Date& d) const override;

        // Model internal workings ----------------
        //! Average loss per credit.
        Real averageLoss(const Date&, const std::vector<Real>& reminingNots,
                         const std::vector<Real>&) const;
        Real condTrancheLoss(const Date&, const std::vector<Real>& lossVals,
                             const std::vector<Real>& bsktNots,
                             const std::vector<Probability>& uncondDefProbs,
                             const std::vector<Real>&) const;
        // expected as in time-value, not average, see literature
        std::vector<Real> expConditionalLgd(const Date& d,
                                            const std::vector<Real>& mktFactors) const
        {
            std::vector<Real> condLgds;
            const std::vector<Size>& evalDateLives = basket_->liveList();
            condLgds.reserve(evalDateLives.size());
            for (unsigned long evalDateLive : evalDateLives)
                condLgds.push_back(1. - copula_->conditionalRecovery(d, evalDateLive, mktFactors));
            return condLgds;
        }

        //! Loss probability density conditional on the market factor value.
        // Here's where the burden of the algorithm setup lies.
        std::vector<Real> lossProbability(
            const Date& date,
            // expected exposures at the passed date; no wrong-way risk means
            // no dependence of the exposure on the mkt factor
            const std::vector<Real>& bsktNots,
            const std::vector<Real>& uncondDefProbInv,
            const std::vector<Real>& mktFactor) const;

        const ext::shared_ptr<LLM> copula_;

        // cached arguments:
        // remaining basket magnitudes:
        mutable Real attachAmount_, detachAmount_;
    };

    //-------------------------------------------------------------------------

    /* The algorithm to compute the probability of n defaults in the basket is
       recursive. For this reason there is no point in returning the
       probability of a given number of defaults on its own.
    */
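    /* A summary of the moment matching carried out in lossProbability() below
       (a reading of the code, following O'Kane's adjusted binomial; the
       symbols are introduced here only for exposition). Conditional on the
       market factor, with conditional default probabilities p_j and
       conditional LGD amounts l_j for the N live names:
           \bar{l} = \frac{1}{N}\sum_j l_j,
           \bar{p} = \frac{\sum_j p_j l_j}{N\bar{l}},   m = N\bar{p},
           \nu_A = \bar{p}(1-\bar{p})/N                       (binomial variance),
           \nu_E = \frac{\sum_j p_j(1-p_j) l_j^2}{N^2\bar{l}^2}   (exact variance).
       The plain binomial density B(k; N, \bar{p}) is scaled by
           \alpha = \frac{N\nu_E + S}{N\nu_A + S},
       with S the 'sumAves' correction term, and the remaining mass 1-\alpha is
       re-added at floor(m) and ceil(m) (the epsilon terms in the code) so that
       the total probability and the conditional mean m are restored.
    */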
    template<class LLM>
    std::vector<Real> BinomialLossModel<LLM>::lossProbability(
        const Date& date,
        const std::vector<Real>& bsktNots,
        const std::vector<Real>& uncondDefProbInv,
        const std::vector<Real>& mktFactors) const
    {   // the model as it is does not model the exposures conditional on the
        //   mkt factor, otherwise this needs revision
        /// the model does not take the unconditional rr
        Size bsktSize = basket_->remainingSize();
        /* The conditional loss per unit notional of each name at time 'date'.
           The spot recovery model is returning, for all i's:
           \frac{\int_0^t [1-rr_i(\tau; \xi)] P_{def-i}(0, \tau; \xi) d\tau}
                {P_{def-i}(0,t;\xi)}
           and the constant recovery model is simply returning:
           1-RR_i
        */
        // conditional fractional LGD expected as given by the recovery model
        // for the remaining (live) names at the current eval date.
        std::vector<Real> fractionalEL = expConditionalLgd(date, mktFactors);
        std::vector<Real> lgdsLeft;
        std::transform(fractionalEL.begin(), fractionalEL.end(),
                       bsktNots.begin(), std::back_inserter(lgdsLeft),
                       std::multiplies<>());
        Real avgLgd =
            std::accumulate(lgdsLeft.begin(), lgdsLeft.end(), Real(0.)) /
                bsktSize;

        std::vector<Probability> condDefProb(bsktSize, 0.);
        for(Size j=0; j<bsktSize; j++) // transform
            condDefProb[j] =
                copula_->conditionalDefaultProbabilityInvP(uncondDefProbInv[j],
                                                           j, mktFactors);
        // of the full portfolio:
        Real avgProb = avgLgd <= QL_EPSILON ? Real(0.) : // only if all are 0
            std::inner_product(condDefProb.begin(),
                               condDefProb.end(), lgdsLeft.begin(), Real(0.))
                / (avgLgd * bsktSize);
        // model parameters:
        Real m = avgProb * bsktSize;
        Real floorAveProb = std::min(Real(bsktSize-1), std::floor(Real(m)));
        Real ceilAveProb = floorAveProb + 1.;
        // nu_A
        Real varianceBinom = avgProb * (1. - avgProb)/bsktSize;
        // nu_E
        std::vector<Probability> oneMinusDefProb; //: 1.-condDefProb[j]
        std::transform(condDefProb.begin(), condDefProb.end(),
                       std::back_inserter(oneMinusDefProb),
                       [](Real x) -> Real { return 1.0-x; });

        // overwrites condDefProb and lgdsLeft to spare memory
        std::transform(condDefProb.begin(), condDefProb.end(),
                       oneMinusDefProb.begin(), condDefProb.begin(),
                       std::multiplies<>());
        std::transform(lgdsLeft.begin(), lgdsLeft.end(),
                       lgdsLeft.begin(), lgdsLeft.begin(), std::multiplies<>());
        Real variance = std::inner_product(condDefProb.begin(),
            condDefProb.end(), lgdsLeft.begin(), Real(0.));

        variance = avgLgd <= QL_EPSILON ? Real(0.) :
            variance / (bsktSize * bsktSize * avgLgd * avgLgd);
        Real sumAves = -std::pow(ceilAveProb-m, 2)
            - (std::pow(floorAveProb-m, 2) - std::pow(ceilAveProb,2.))
                * (ceilAveProb-m);
        Real alpha = (variance * bsktSize + sumAves)
            / (varianceBinom * bsktSize + sumAves);
        // Full distribution:
        // .... DO SOMETHING CHEAPER: at least go up to the loss tranche limit.
        std::vector<Probability> lossProbDensity(bsktSize+1, 0.);
        if(avgProb >= 1.-QL_EPSILON) {
            lossProbDensity[bsktSize] = 1.;
        } else if(avgProb <= QL_EPSILON) {
            lossProbDensity[0] = 1.;
        } else {
            /* FIX ME: With high default probabilities one only gets tiny
               values at the end and the sum of probabilities in the
               conditional distribution does not add up to one. It might be
               because the recursion should be done in the other direction, as
               pointed out in the book. This is numerical.
            */
            Probability probsRatio = avgProb/(1.-avgProb);
            lossProbDensity[0] = std::pow(1.-avgProb,
                static_cast<Real>(bsktSize));
            for(Size i=1; i<bsktSize+1; i++) // recursive to avoid factorial
                lossProbDensity[i] = lossProbDensity[i-1] * probsRatio
                    * (bsktSize-i+1.)/i;
            // redistribute probability:
            for(Size i=0; i<bsktSize+1; i++)
                lossProbDensity[i] *= alpha;
            // adjust average
            Real epsilon = (1.-alpha)*(ceilAveProb-m);
            Real epsilonPlus = 1.-alpha-epsilon;
            lossProbDensity[static_cast<Size>(floorAveProb)] += epsilon;
            lossProbDensity[static_cast<Size>(ceilAveProb)] += epsilonPlus;
        }
        return lossProbDensity;
    }
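    /* Consistency note on the block above (readable off the code, not an
       extra algorithm step): the scaled binomial carries total mass alpha, and
       epsilon + epsilonPlus = 1 - alpha, so the returned density still sums to
       one; moreover epsilon*floor(m) + epsilonPlus*ceil(m) = (1-alpha)*m, so
       the conditional expected number of defaults m is preserved exactly.
       The recursion
           P(i) = P(i-1) * [p/(1-p)] * (N-i+1)/i,   P(0) = (1-p)^N,
       is just the ratio of consecutive binomial probabilities and avoids
       evaluating factorials.
    */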

    //-------------------------------------------------------------------------

    template<class LLM>
    Real BinomialLossModel<LLM>::averageLoss(
        const Date& d,
        const std::vector<Real>& reminingNots,
        const std::vector<Real>& mktFctrs) const
    {
        Size bsktSize = basket_->remainingSize();
        /* The conditional loss per unit notional of each name at time 'date'.
           The spot recovery model is returning, for all i's:
           \frac{\int_0^t [1-rr_i(\tau; \xi)] P_{def-i}(0, \tau; \xi) d\tau}
                {P_{def-i}(0,t;\xi)}
           and the constant recovery model is simply returning:
           1-RR_i
        */
        std::vector<Real> fractionalEL = expConditionalLgd(d, mktFctrs);
        Real notBskt = std::accumulate(reminingNots.begin(),
                                       reminingNots.end(), Real(0.));
        std::vector<Real> lgdsLeft;
        std::transform(fractionalEL.begin(), fractionalEL.end(),
                       reminingNots.begin(), std::back_inserter(lgdsLeft),
                       std::multiplies<>());
        return std::accumulate(lgdsLeft.begin(), lgdsLeft.end(), Real(0.))
            / (bsktSize*notBskt);
    }

    template<class LLM>
    std::vector<Real> BinomialLossModel<LLM>::lossPoints(const Date& d) const
    {
        std::vector<Real> notionals = basket_->remainingNotionals(d);

        Real aveLossFrct = copula_->integratedExpectedValue(
            [&](const std::vector<Real>& v1) {
                return averageLoss(d, notionals, v1);
            });

        std::vector<Real> data;
        Size dataSize = basket_->remainingSize() + 1;
        data.reserve(dataSize);
        // use std::algorithm
        Real outsNot = basket_->remainingNotional(d);
        for(Size i=0; i<dataSize; i++)
            data.push_back(i * aveLossFrct * outsNot);
        return data;
    }
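    /* Note (a reading of lossPoints() above): the attainable loss grid is
       uniform, loss_k = k * (average loss fraction) * (outstanding notional)
       for k = 0,...,N, where the average loss fraction is the
       copula-integrated averageLoss(); index k then plays the role of the
       number of defaulted names in the adjusted binomial density.
    */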

    template<class LLM>
    Real BinomialLossModel<LLM>::condTrancheLoss(
        const Date& d,
        const std::vector<Real>& lossVals,
        const std::vector<Real>& bsktNots,
        const std::vector<Real>& uncondDefProbsInv,
        const std::vector<Real>& mkf) const {

        std::vector<Real> condLProb =
            lossProbability(d, bsktNots, uncondDefProbsInv, mkf);
        // \todo move to a do-while over attach to detach
        Real suma = 0.;
        for(Size i=0; i<lossVals.size(); i++) {
            suma += condLProb[i] *
                std::min(std::max(lossVals[i]
                    - attachAmount_, 0.), detachAmount_ - attachAmount_);
        }
        return suma;
    }

    template<class LLM>
    Real BinomialLossModel<LLM>::expectedTrancheLoss(const Date& d) const {
        std::vector<Real> lossVals = lossPoints(d);
        std::vector<Real> notionals = basket_->remainingNotionals(d);
        std::vector<Probability> invProbs =
            basket_->remainingProbabilities(d);
        for(Size iName=0; iName<invProbs.size(); iName++)
            invProbs[iName] =
                copula_->inverseCumulativeY(invProbs[iName], iName);

        return copula_->integratedExpectedValue(
            [&](const std::vector<Real>& v1) {
                return condTrancheLoss(d, lossVals, notionals, invProbs, v1);
            });
    }


    template<class LLM>
    std::map<Real, Probability> BinomialLossModel<LLM>::lossDistribution(const Date& d) const
    {
        std::map<Real, Probability> distrib;
        std::vector<Real> lossPts = lossPoints(d);
        std::vector<Real> values = expectedDistribution(d);
        Real sum = 0.;
        for(Size i=0; i<lossPts.size(); i++) {
            distrib.insert(std::make_pair(lossPts[i],
                // capped; some situations give a probability slightly over 1
                std::min(sum+values[i], 1.)
                ));
            sum += values[i];
        }
        return distrib;
    }

    template<class LLM>
    Real BinomialLossModel<LLM>::percentile(const Date& d, Real perc) const {
        std::map<Real, Probability> dist = lossDistribution(d);
        // \todo Use some of the library interpolators instead
        if(// included in test below-> (dist.begin()->second >= 1.) ||
            (dist.begin()->second >= perc)) return dist.begin()->first;

        // deterministic case (e.g. the date requested is today's date)
        if(dist.size() == 1) return dist.begin()->first;

        if(perc == 1.) return dist.rbegin()->first;
        if(perc == 0.) return dist.begin()->first;
        std::map<Real, Probability>::const_iterator itdist = dist.begin();
        while (itdist->second <= perc) ++itdist;
        Real valPlus = itdist->second;
        Real xPlus = itdist->first;
        --itdist; // we're never first or last, because of the tests above
        Real valMin = itdist->second;
        Real xMin = itdist->first;

        Real portfLoss = xPlus-(xPlus-xMin)*(valPlus-perc)/(valPlus-valMin);

        return
            std::min(std::max(portfLoss - attachAmount_, 0.),
                detachAmount_ - attachAmount_);
    }

    template<class LLM>
    Real BinomialLossModel<LLM>::expectedShortfall(const Date& d,
        Real perctl) const
    {
        // taken from the recursive model since we have the distribution in
        // both cases.
        if(d == Settings::instance().evaluationDate()) return 0.;
        std::map<Real, Probability> distrib = lossDistribution(d);

        std::map<Real, Probability>::iterator
            itNxt, itDist = distrib.begin();
        for(; itDist != distrib.end(); ++itDist)
            if(itDist->second >= perctl) break;
        itNxt = itDist;
        --itDist;

        // \todo one could linearly interpolate the exact point and get
        //   extra precision on the first (broken) period.
        if(itNxt != distrib.end()) {
            Real lossNxt = std::min(std::max(itNxt->first - attachAmount_,
                0.), detachAmount_ - attachAmount_);
            Real lossHere = std::min(std::max(itDist->first - attachAmount_,
                0.), detachAmount_ - attachAmount_);

            Real val = lossNxt - (itNxt->second - perctl) *
                (lossNxt - lossHere) / (itNxt->second - itDist->second);
            Real suma = (itNxt->second - perctl) * (lossNxt + val) * .5;
            ++itDist; ++itNxt;
            do {
                lossNxt = std::min(std::max(itNxt->first - attachAmount_,
                    0.), detachAmount_ - attachAmount_);
                lossHere = std::min(std::max(itDist->first - attachAmount_,
                    0.), detachAmount_ - attachAmount_);
                suma += .5 * (lossHere + lossNxt)
                    * (itNxt->second - itDist->second);
                ++itDist; ++itNxt;
            } while(itNxt != distrib.end());
            return suma / (1.-perctl);
        }
        QL_FAIL("Binomial model fails to calculate ESF.");
    }
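    /* Note (a reading of expectedShortfall() above): the tranche loss L(p)
       implied by the cumulative distribution is integrated over the tail with
       the trapezoid rule, so the result approximates
           ESF(q) = \frac{1}{1-q}\int_q^1 L(p)\,dp,
       i.e. the average tranche loss beyond the requested percentile q
       ('perctl' in the code); the first, broken trapezoid uses the loss
       linearly interpolated at the percentile itself.
    */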

    // The standard use:
    typedef BinomialLossModel<GaussianConstantLossLM> GaussianBinomialLossModel;
    typedef BinomialLossModel<TConstantLossLM> TBinomialLossModel;

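    /* A minimal usage sketch (illustrative only: 'factorWeights',
       'recoveries', 'basket' and 'testDate' are assumed to exist elsewhere,
       and the latent-model constructor arguments are elided; see
       constantlosslatentmodel.hpp and basket.hpp for the exact interfaces):

       // build the Gaussian constant-loss latent model (arguments elided)
       ext::shared_ptr<GaussianConstantLossLM> lm(
           new GaussianConstantLossLM(factorWeights, recoveries, ...));
       // wrap it in the adjusted-binomial loss model and attach it
       ext::shared_ptr<DefaultLossModel> model =
           ext::make_shared<GaussianBinomialLossModel>(lm);
       basket->setLossModel(model);
       Real el = basket->expectedTrancheLoss(testDate);
    */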
}

#endif