Logo
Fully annotated reference manual - version 1.8.12
Loading...
Searching...
No Matches
ad.cpp
Go to the documentation of this file.
1/*
2 Copyright (C) 2019 Quaternion Risk Management Ltd
3 All rights reserved.
4
5 This file is part of ORE, a free-software/open-source library
6 for transparent pricing and risk analysis - http://opensourcerisk.org
7
8 ORE is free software: you can redistribute it and/or modify it
9 under the terms of the Modified BSD License. You should have received a
10 copy of the license along with this program.
11 The license is also available online at <http://opensourcerisk.org>
12
13 This program is distributed on the basis that it will form a useful
14 contribution to risk analytics and model standardisation, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the license for more details.
17*/
18
#include "toplevelfixture.hpp"

// NOTE(review): the qle/ad and qle/math includes below were lost in the doc
// extraction (Doxygen internal lines 21-23, 25); restored from the symbols the
// file uses (forwardEvaluation, backwardDerivatives, forwardDerivatives,
// ComputationGraph, RandomVariable, getRandomVariableOps) -- verify paths.
#include <qle/ad/backwardderivatives.hpp>
#include <qle/ad/computationgraph.hpp>
#include <qle/ad/forwardderivatives.hpp>
#include <qle/ad/forwardevaluation.hpp>
#include <qle/ad/ssaform.hpp>

#include <qle/math/randomvariable.hpp>
#include <qle/math/randomvariable_ops.hpp>

#include <ql/math/distributions/normaldistribution.hpp>
#include <ql/math/randomnumbers/inversecumulativerng.hpp>
#include <ql/math/randomnumbers/mt19937uniformrng.hpp>

#include <boost/test/unit_test.hpp>
32
33using namespace QuantExt;
34
35BOOST_FIXTURE_TEST_SUITE(QuantExtTestSuite, qle::test::TopLevelFixture)
36
37BOOST_AUTO_TEST_SUITE(AdTest)
38
39BOOST_AUTO_TEST_CASE(testForwardEvaluation) {
40
41 constexpr Real tol = 1E-14;
42
43 // z = x+y, z = ux = (x+y)x = x^2+yx
45 auto x = cg_var(g, "x", ComputationGraph::VarDoesntExist::Create);
46 auto y = cg_var(g, "y", ComputationGraph::VarDoesntExist::Create);
47 auto u = cg_add(g, x, y, "u");
48 auto z = cg_mult(g, u, x, "z");
49
50 BOOST_TEST_MESSAGE("SSA Form:\n" + ssaForm(g, getRandomVariableOpLabels()));
51
52 std::vector<RandomVariable> values(g.size(), RandomVariable(1, 0.0));
53 values[x] = RandomVariable(1, 2.0);
54 values[y] = RandomVariable(1, 3.0);
55
57
58 // BOOST_CHECK_CLOSE(values[x][0], 2.0, tol); // deleted
59 // BOOST_CHECK_CLOSE(values[y][0], 3.0, tol);
60 // BOOST_CHECK_CLOSE(values[u][0], 5.0, tol);
61 BOOST_CHECK_CLOSE(values[z][0], 10.0, tol);
62}
63
64BOOST_AUTO_TEST_CASE(testBackwardDerivatives) {
65
66 constexpr Real tol = 1E-14;
67
68 // z = x+y, z = ux = (x+y)x = x^2+yx
70 auto x = cg_var(g, "x", ComputationGraph::VarDoesntExist::Create);
71 auto y = cg_var(g, "y", ComputationGraph::VarDoesntExist::Create);
72 auto u = cg_add(g, x, y, "u");
73 auto z = cg_mult(g, u, x, "z");
74
75 BOOST_TEST_MESSAGE("SSA Form:\n" + ssaForm(g, getRandomVariableOpLabels()));
76
77 // forward evaluation
78
79 std::vector<RandomVariable> values(g.size(), RandomVariable(1, 0.0));
80 values[x] = RandomVariable(1, 2.0);
81 values[y] = RandomVariable(1, 3.0);
82
85
86 // BOOST_CHECK_CLOSE(values[x][0], 2.0, tol); // deleted
87 // BOOST_CHECK_CLOSE(values[y][0], 3.0, tol);
88 // BOOST_CHECK_CLOSE(values[u][0], 5.0, tol);
89 BOOST_CHECK_CLOSE(values[z][0], 10.0, tol);
90
91 // backward derivatives
92
93 std::vector<RandomVariable> derivativesBwd(g.size(), RandomVariable(1, 0.0));
94 derivativesBwd[z] = RandomVariable(1, 1.0);
95
96 std::vector<bool> keep(g.size(), false);
97 keep[x] = true;
98 keep[y] = true;
99 keep[z] = true;
100 keep[u] = true;
101
102 backwardDerivatives(g, values, derivativesBwd, getRandomVariableGradients(1), RandomVariable::deleter, keep);
103
104 // dz/dx = 2x+y
105 BOOST_CHECK_CLOSE(derivativesBwd[x][0], 7.0, tol);
106 // dz/dy = x
107 BOOST_CHECK_CLOSE(derivativesBwd[y][0], 2.0, tol);
108 // dz/du = x
109 BOOST_CHECK_CLOSE(derivativesBwd[u][0], 2.0, tol);
110 // dz/dz = 1
111 BOOST_CHECK_CLOSE(derivativesBwd[z][0], 1.0, tol);
112}
113
114BOOST_AUTO_TEST_CASE(testForwardDerivatives) {
115
116 constexpr Real tol = 1E-14;
117
118 // z = x+y, z = ux = (x+y)x = x^2+yx
120 auto x = cg_var(g, "x", ComputationGraph::VarDoesntExist::Create);
121 auto y = cg_var(g, "y", ComputationGraph::VarDoesntExist::Create);
122 auto u = cg_add(g, x, y, "u");
123 auto z = cg_mult(g, u, x, "z");
124
125 BOOST_TEST_MESSAGE("SSA Form:\n" + ssaForm(g, getRandomVariableOpLabels()));
126
127 // forward evaluation
128
129 std::vector<RandomVariable> values(g.size(), RandomVariable(1, 0.0));
130 values[x] = RandomVariable(1, 2.0);
131 values[y] = RandomVariable(1, 3.0);
132
135
136 // forward derivatives
137
138 std::vector<RandomVariable> derivativesFwdX(g.size(), RandomVariable(1, 0.0));
139 std::vector<RandomVariable> derivativesFwdY(g.size(), RandomVariable(1, 0.0));
140 derivativesFwdX[x] = RandomVariable(1, 1.0);
141 derivativesFwdY[y] = RandomVariable(1, 1.0);
142
145
146 // dz/dx = 2x+y
147 BOOST_CHECK_CLOSE(derivativesFwdX[z][0], 7.0, tol);
148 // dz/dy = x
149 BOOST_CHECK_CLOSE(derivativesFwdY[z][0], 2.0, tol);
150}
151
152BOOST_AUTO_TEST_CASE(testIndicatorDerivative) {
153 BOOST_TEST_MESSAGE("Testing indicator derivative...");
154
155 Size n = 5000000; // number of samples
156 Real epsilon = 0.05; // indcator derivative bandwidth
157
159 auto z = cg_var(g, "z", ComputationGraph::VarDoesntExist::Create); // z ~ N(z0,1)
160 auto y = cg_indicatorGt(g, z, cg_const(g, 0.0)); // ind = 1_{z>0}
161
162 InverseCumulativeRng<MersenneTwisterUniformRng, InverseCumulativeNormal> normal(MersenneTwisterUniformRng(42));
163
164 Real tol = 30.0E-4;
165
166 Real z0 = -3.0;
167 while (z0 < 3.01) {
168 std::vector<RandomVariable> values(g.size(), RandomVariable(n)), derivatives(g.size(), RandomVariable(n));
169 BOOST_TEST_MESSAGE("z0=" << z0 << ":");
170 for (Size i = 0; i < n; ++i) {
171 values[z].set(i, z0 + normal.next().value);
172 }
174 Real av = expectation(values[y])[0];
175 Real refAv = 1.0 - CumulativeNormalDistribution()(-z0);
176 BOOST_TEST_MESSAGE("E( 1_{z>0} ) = " << av << ", reference " << refAv << ", diff " << refAv - av);
177 BOOST_CHECK_SMALL(av - refAv, tol);
178
179 derivatives[y] = RandomVariable(n, 1.0);
180 backwardDerivatives(g, values, derivatives,
181 getRandomVariableGradients(1, 2, QuantLib::LsmBasisSystem::Monomial, epsilon));
182 Real dav = expectation(derivatives[z])[0];
183 Real refDav = NormalDistribution()(-z0);
184 // it is dz / dz0 = 1, so we are really computing d/dz0 E(1_{z>0})
185 // and we have E(1_{z>0}) = 1 - \Phi(-z0), so d/dz0 E(1_{z>0}) = phi(-z0)
186 BOOST_TEST_MESSAGE("E( d/dz 1_{z>0} ) = " << dav << ", reference " << refDav << ", diff " << refDav - dav);
187 BOOST_CHECK_SMALL(dav - refDav, tol);
188
189 z0 += 0.5;
190 }
191}
192
193BOOST_AUTO_TEST_SUITE_END()
194
195BOOST_AUTO_TEST_SUITE_END()
BOOST_AUTO_TEST_CASE(testForwardEvaluation)
Definition: ad.cpp:39
backward derivatives computation
forward derivatives computation
forward evaluation
std::vector< RandomVariableGrad > getRandomVariableGradients(const Size size, const Size regressionOrder, const QuantLib::LsmBasisSystem::PolynomialType polynomType, const double eps, const Real regressionVarianceCutoff)
std::size_t cg_const(ComputationGraph &g, const double value)
void forwardDerivatives(const ComputationGraph &g, const std::vector< T > &values, std::vector< T > &derivatives, const std::vector< std::function< std::vector< T >(const std::vector< const T * > &, const T *)> > &grad, std::function< void(T &)> deleter={}, const std::vector< bool > &keepNodes={}, const std::size_t conditionalExpectationOpId=0, const std::function< T(const std::vector< const T * > &)> &conditionalExpectation={})
std::vector< RandomVariableOpNodeRequirements > getRandomVariableOpNodeRequirements()
RandomVariable expectation(const RandomVariable &r)
std::size_t cg_mult(ComputationGraph &g, const std::size_t a, const std::size_t b, const std::string &label)
std::size_t cg_add(ComputationGraph &g, const std::size_t a, const std::size_t b, const std::string &label)
std::vector< std::string > getRandomVariableOpLabels()
std::string ssaForm(const ComputationGraph &g, const std::vector< std::string > &opCodeLabels, const std::vector< T > &values, const std::vector< T > &values2)
Definition: ssaform.cpp:43
void backwardDerivatives(const ComputationGraph &g, std::vector< T > &values, std::vector< T > &derivatives, const std::vector< std::function< std::vector< T >(const std::vector< const T * > &, const T *)> > &grad, std::function< void(T &)> deleter={}, const std::vector< bool > &keepNodes={}, const std::vector< std::function< T(const std::vector< const T * > &)> > &fwdOps={}, const std::vector< std::function< std::pair< std::vector< bool >, bool >(const std::size_t)> > &fwdOpRequiresNodesForDerivatives={}, const std::vector< bool > &fwdKeepNodes={}, const std::size_t conditionalExpectationOpId=0, const std::function< T(const std::vector< const T * > &)> &conditionalExpectation={})
void forwardEvaluation(const ComputationGraph &g, std::vector< T > &values, const std::vector< std::function< T(const std::vector< const T * > &)> > &ops, std::function< void(T &)> deleter={}, bool keepValuesForDerivatives=true, const std::vector< std::function< std::pair< std::vector< bool >, bool >(const std::size_t)> > &opRequiresNodesForDerivatives={}, const std::vector< bool > &keepNodes={}, const std::size_t startNode=0, const std::size_t endNode=ComputationGraph::nan, const bool redBlockReconstruction=false)
std::size_t cg_var(ComputationGraph &g, const std::string &name, const ComputationGraph::VarDoesntExist v)
std::vector< RandomVariableOp > getRandomVariableOps(const Size size, const Size regressionOrder, QuantLib::LsmBasisSystem::PolynomialType polynomType, const double eps, QuantLib::Real regressionVarianceCutoff)
std::size_t cg_indicatorGt(ComputationGraph &g, const std::size_t a, const std::size_t b, const std::string &label)
ops for type randomvariable
convert cg to ssa form
static std::function< void(RandomVariable &)> deleter
Fixture that can be used at top level.