#if defined(QL_EXTRA_SAFETY_CHECKS)
// ensure that the input matrix is square and symmetric
void checkSymmetry(const Matrix& matrix) {
    Size size = matrix.rows();
    QL_REQUIRE(size == matrix.columns(),
               "non square matrix: " << size << " rows, " <<
               matrix.columns() << " columns");
    for (Size i=0; i<size; ++i)
        for (Size j=0; j<i; ++j)
            QL_REQUIRE(close(matrix[i][j], matrix[j][i]),
                       "non symmetric matrix: " <<
                       "[" << i << "][" << j << "]=" << matrix[i][j] <<
                       ", [" << j << "][" << i << "]=" << matrix[j][i]);
}
#endif
// rescale each row of the pseudo-root so that the diagonal of
// pseudo*transpose(pseudo) matches the diagonal of the target matrix
void normalizePseudoRoot(const Matrix& matrix,
                         Matrix& pseudo) {
    Size size = matrix.rows();
    QL_REQUIRE(size == pseudo.rows(),
               "matrix/pseudo mismatch: matrix rows are " << size <<
               " while pseudo rows are " << pseudo.rows());
    Size pseudoCols = pseudo.columns();

    // row normalization: after the loop, sum_j pseudo[i][j]^2 == matrix[i][i]
    for (Size i=0; i<size; ++i) {
        Real norm = 0.0;
        for (Size j=0; j<pseudoCols; ++j)
            norm += pseudo[i][j]*pseudo[i][j];
        if (norm > 0.0) {
            Real normAdj = std::sqrt(matrix[i][i]/norm);
            for (Size j=0; j<pseudoCols; ++j)
                pseudo[i][j] *= normAdj;
        }
    }
}
// cost function for the hypersphere and lower-diagonal algorithms below
class HypersphereCostFunction : public CostFunction {
  private:
    Size size_;
    bool lowerDiagonal_;
    Matrix targetMatrix_;
    Array targetVariance_;
    mutable Matrix currentRoot_, tempMatrix_, currentMatrix_;
  public:
    HypersphereCostFunction(const Matrix& targetMatrix,
                            const Array& targetVariance,
                            bool lowerDiagonal)
    : size_(targetMatrix.rows()), lowerDiagonal_(lowerDiagonal),
      targetMatrix_(targetMatrix), targetVariance_(targetVariance),
      currentRoot_(size_, size_), tempMatrix_(size_, size_),
      currentMatrix_(size_, size_) {}
    Array values(const Array&) const override {
        QL_FAIL("values method not implemented");
    }
    Real value(const Array& x) const override {
        Size i,j,k;
        std::fill(currentRoot_.begin(), currentRoot_.end(), 1.0);
        if (lowerDiagonal_) {
            for (i=0; i<size_; i++) {
                for (k=0; k<size_; k++) {
                    if (k>i) {
                        currentRoot_[i][k]=0;
                    } else {
                        for (j=0; j<=k; j++) {
                            if (j == k && k!=i)
                                currentRoot_[i][k] *= std::cos(x[i*(i-1)/2+j]);
                            else if (j!=i)
                                currentRoot_[i][k] *= std::sin(x[i*(i-1)/2+j]);
                        }
                    }
                }
            }
        } else {
            for (i=0; i<size_; i++) {
                for (k=0; k<size_; k++) {
                    for (j=0; j<=k; j++) {
                        if (j == k && k!=size_-1)
                            currentRoot_[i][k] *= std::cos(x[j*size_+i]);
                        else if (j!=size_-1)
                            currentRoot_[i][k] *= std::sin(x[j*size_+i]);
                    }
                }
            }
        }
        // squared Frobenius distance from the target matrix
        Real temp, error=0;
        tempMatrix_ = transpose(currentRoot_);
        currentMatrix_ = currentRoot_ * tempMatrix_;
        for (i=0; i<size_; i++) {
            for (j=0; j<size_; j++) {
                temp = currentMatrix_[i][j]*targetVariance_[i]
                     *targetVariance_[j]-targetMatrix_[i][j];
                error += temp*temp;
            }
        }
        return error;
    }
};
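// A brief note on the parametrization used by the cost function above:
// each row of the candidate root is a point on the unit hypersphere
// written in spherical coordinates (a run of sines closed by a cosine),
// so every row has unit Euclidean norm and the candidate product
// B*transpose(B) has a unit diagonal before the variance rescaling.
// For one row of a 3x3 root the entries read (notation illustrative,
// not taken from the original comments):
//
//     b1 = cos(theta1)
//     b2 = sin(theta1)*cos(theta2)
//     b3 = sin(theta1)*sin(theta2)
//
// so that b1^2 + b2^2 + b3^2 = 1 by construction.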
// optimization of the pseudo-root via the hypersphere (or lower-diagonal)
// angle parametrization
Matrix hypersphereOptimize(const Matrix& targetMatrix,
                           const Matrix& currentRoot,
                           const bool lowerDiagonal) {
    Size i,j,k,size = targetMatrix.rows();
    Matrix result(currentRoot);
    Array variance(size);
    for (i=0; i<size; i++){
        variance[i]=std::sqrt(targetMatrix[i][i]);
    }
    if (lowerDiagonal) {
        Matrix approxMatrix(result*transpose(result));
        result = CholeskyDecomposition(approxMatrix, true);
        for (i=0; i<size; i++) {
            for (j=0; j<size; j++) {
                result[i][j]/=std::sqrt(approxMatrix[i][i]);
            }
        }
    } else {
        for (i=0; i<size; i++) {
            for (j=0; j<size; j++) {
                result[i][j]/=variance[i];
            }
        }
    }

    ConjugateGradient optimize;
    EndCriteria endCriteria(100, 10, 1e-8, 1e-8, 1e-8);
    HypersphereCostFunction costFunction(targetMatrix, variance,
                                         lowerDiagonal);
    NoConstraint constraint;

    // hypersphere vector optimization
    if (lowerDiagonal) {
        Array theta(size * (size-1)/2);
        const Real eps=1e-16;
        // initial guess: convert the rescaled root into spherical angles
        for (i=1; i<size; i++) {
            for (j=0; j<i; j++) {
                theta[i*(i-1)/2+j]=result[i][j];
                if (theta[i*(i-1)/2+j]>1-eps)
                    theta[i*(i-1)/2+j]=1-eps;
                if (theta[i*(i-1)/2+j]<-1+eps)
                    theta[i*(i-1)/2+j]=-1+eps;
                for (k=0; k<j; k++) {
                    theta[i*(i-1)/2+j] /= std::sin(theta[i*(i-1)/2+k]);
                    if (theta[i*(i-1)/2+j]>1-eps)
                        theta[i*(i-1)/2+j]=1-eps;
                    if (theta[i*(i-1)/2+j]<-1+eps)
                        theta[i*(i-1)/2+j]=-1+eps;
                }
                theta[i*(i-1)/2+j] = std::acos(theta[i*(i-1)/2+j]);
                if (j==i-1) {
                    if (result[i][i]<0)
                        theta[i*(i-1)/2+j]=-theta[i*(i-1)/2+j];
                }
            }
        }
        Problem p(costFunction, constraint, theta);
        optimize.minimize(p, endCriteria);
        theta = p.currentValue();
        // rebuild the lower-diagonal root from the optimized angles
        std::fill(result.begin(),result.end(),1.0);
        for (i=0; i<size; i++) {
            for (k=0; k<size; k++) {
                if (k>i) {
                    result[i][k]=0;
                } else {
                    for (j=0; j<=k; j++) {
                        if (j == k && k!=i)
                            result[i][k] *= std::cos(theta[i*(i-1)/2+j]);
                        else if (j!=i)
                            result[i][k] *= std::sin(theta[i*(i-1)/2+j]);
                    }
                }
            }
        }
    } else {
        Array theta(size * (size-1));
        const Real eps=1e-16;
        // initial guess: convert the rescaled root into spherical angles
        for (i=0; i<size; i++) {
            for (j=0; j<size-1; j++) {
                theta[j*size+i]=result[i][j];
                if (theta[j*size+i]>1-eps)
                    theta[j*size+i]=1-eps;
                if (theta[j*size+i]<-1+eps)
                    theta[j*size+i]=-1+eps;
                for (k=0; k<j; k++) {
                    theta[j*size+i] /= std::sin(theta[k*size+i]);
                    if (theta[j*size+i]>1-eps)
                        theta[j*size+i]=1-eps;
                    if (theta[j*size+i]<-1+eps)
                        theta[j*size+i]=-1+eps;
                }
                theta[j*size+i] = std::acos(theta[j*size+i]);
                if (j==size-2) {
                    if (result[i][j+1]<0)
                        theta[j*size+i]=-theta[j*size+i];
                }
            }
        }
        Problem p(costFunction, constraint, theta);
        optimize.minimize(p, endCriteria);
        theta=p.currentValue();
        // rebuild the full root from the optimized angles
        std::fill(result.begin(),result.end(),1.0);
        for (i=0; i<size; i++) {
            for (k=0; k<size; k++) {
                for (j=0; j<=k; j++) {
                    if (j == k && k!=size-1)
                        result[i][k] *= std::cos(theta[j*size+i]);
                    else if (j!=size-1)
                        result[i][k] *= std::sin(theta[j*size+i]);
                }
            }
        }
    }

    // scale the rows back by the target standard deviations
    for (i=0; i<size; i++) {
        for (j=0; j<size; j++) {
            result[i][j]*=variance[i];
        }
    }
    return result;
}
// matrix infinity norm, i.e. the maximum absolute row sum
Real normInf(const Matrix& M) {
    Size rows = M.rows();
    Size cols = M.columns();
    Real norm = 0.0;
    for (Size i=0; i<rows; ++i) {
        Real colSum = 0.0;
        for (Size j=0; j<cols; ++j)
            colSum += std::fabs(M[i][j]);
        norm = std::max(norm, colSum);
    }
    return norm;
}
// set all the diagonal entries of a (square) matrix to 1.0
Matrix projectToUnitDiagonalMatrix(const Matrix& M) {
    Size size = M.rows();
    QL_REQUIRE(size == M.columns(),
               "matrix not square");

    Matrix result(M);
    for (Size i=0; i<size; ++i)
        result[i][i] = 1.0;

    return result;
}
// clamp the eigenvalues of a (square, symmetric) matrix to be non-negative
Matrix projectToPositiveSemidefiniteMatrix(Matrix& M) {
    Size size = M.rows();
    QL_REQUIRE(size == M.columns(),
               "matrix not square");

    Matrix diagonal(size, size, 0.0);
    SymmetricSchurDecomposition jd(M);
    for (Size i=0; i<size; ++i)
        diagonal[i][i] = std::max<Real>(jd.eigenvalues()[i], 0.0);

    Matrix result =
        jd.eigenvectors()*diagonal*transpose(jd.eigenvectors());
    return result;
}
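// Note on the projection above: writing the symmetric input as
// M = V*D*transpose(V), replacing D with max(D, 0) yields the closest
// positive-semidefinite matrix to M in the Frobenius norm; this is the
// projection the Higham iteration below relies on.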
// alternating-projections algorithm (with a Dykstra-style correction
// term, deltaS) for the nearest correlation matrix
Matrix highamImplementation(const Matrix& A,
                            const Size maxIterations,
                            const Real& tolerance) {
    Size size = A.rows();
    Matrix R, Y(A), X(A), deltaS(size, size, 0.0);
    Matrix lastX(X), lastY(Y);

    for (Size i=0; i<maxIterations; ++i) {
        R = Y - deltaS;
        X = projectToPositiveSemidefiniteMatrix(R);
        deltaS = X - R;
        Y = projectToUnitDiagonalMatrix(X);

        // convergence test
        if (std::max(normInf(X-lastX)/normInf(X),
                     std::max(normInf(Y-lastY)/normInf(Y),
                              normInf(Y-X)/normInf(Y))) <= tolerance)
            break;
        lastX = X;
        lastY = Y;
    }

    // ensure we return a symmetric matrix
    for (Size i=0; i<size; ++i)
        for (Size j=0; j<i; ++j)
            Y[i][j] = Y[j][i];

    return Y;
}
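// One sweep of the loop above, restated in formula form (notation mine):
//
//     R  = Y - dS        (back out the previous correction)
//     X  = P_psd(R)      (project onto positive-semidefinite matrices)
//     dS = X - R         (Dykstra-style correction term)
//     Y  = P_unit(X)     (restore the unit diagonal)
//
// The iteration stops once the relative changes of X and Y, measured in
// the infinity norm, all drop below the given tolerance.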
Matrix pseudoSqrt(const Matrix& matrix, SalvagingAlgorithm::Type sa) {
    Size size = matrix.rows();

    #if defined(QL_EXTRA_SAFETY_CHECKS)
    checkSymmetry(matrix);
    #else
    QL_REQUIRE(size == matrix.columns(),
               "non square matrix: " << size << " rows, " <<
               matrix.columns() << " columns");
    #endif

    // spectral (a.k.a principal component) analysis
    SymmetricSchurDecomposition jd(matrix);
    Matrix diagonal(size, size, 0.0);

    // salvaging algorithm
    Matrix result(size, size);
    bool negative;
    switch (sa) {
      case SalvagingAlgorithm::None:
        // eigenvalues are sorted in decreasing order
        QL_REQUIRE(jd.eigenvalues()[size-1] >= -1e-16,
                   "negative eigenvalue(s) ("
                   << std::scientific << jd.eigenvalues()[size-1] << ")");
        result = CholeskyDecomposition(matrix, true);
        break;
      case SalvagingAlgorithm::Spectral:
        // negative eigenvalues set to zero
        for (Size i=0; i<size; i++)
            diagonal[i][i] = std::sqrt(std::max<Real>(jd.eigenvalues()[i], 0.0));
        result = jd.eigenvectors() * diagonal;
        normalizePseudoRoot(matrix, result);
        break;
      case SalvagingAlgorithm::Hypersphere:
        // negative eigenvalues set to zero
        negative = false;
        for (Size i=0; i<size; ++i){
            diagonal[i][i] = std::sqrt(std::max<Real>(jd.eigenvalues()[i], 0.0));
            if (jd.eigenvalues()[i] < 0.0) negative = true;
        }
        result = jd.eigenvectors() * diagonal;
        normalizePseudoRoot(matrix, result);
        if (negative)
            result = hypersphereOptimize(matrix, result, false);
        break;
      case SalvagingAlgorithm::LowerDiagonal:
        // negative eigenvalues set to zero
        negative = false;
        for (Size i=0; i<size; ++i){
            diagonal[i][i] = std::sqrt(std::max<Real>(jd.eigenvalues()[i], 0.0));
            if (jd.eigenvalues()[i] < 0.0) negative = true;
        }
        result = jd.eigenvectors() * diagonal;
        normalizePseudoRoot(matrix, result);
        if (negative)
            result = hypersphereOptimize(matrix, result, true);
        break;
      case SalvagingAlgorithm::Higham: {
          int maxIterations = 40;
          Real tol = 1e-6;
          result = highamImplementation(matrix, maxIterations, tol);
          result = CholeskyDecomposition(result, true);
        }
        break;
      case SalvagingAlgorithm::Principal: {
          QL_REQUIRE(jd.eigenvalues()[0] >= 0.0,
                     "negative eigenvalue(s) ("
                     << std::scientific << jd.eigenvalues()[size-1] << ")");
          // negative eigenvalues are cropped to zero
          Array sqrtEigenvalues(size);
          std::transform(jd.eigenvalues().begin(), jd.eigenvalues().end(),
                         sqrtEigenvalues.begin(),
                         [](Real lambda) { return std::sqrt(std::max<Real>(lambda, 0.0)); });
          // build V*D^(1/2), then the symmetric (principal) root V*D^(1/2)*V^T
          for (Size i=0; i < size; ++i)
              std::transform(sqrtEigenvalues.begin(), sqrtEigenvalues.end(),
                             jd.eigenvectors().row_begin(i),
                             result.row_begin(i), std::multiplies<Real>());
          result = result * transpose(jd.eigenvectors());
          // enforce exact symmetry against numerical noise
          result = 0.5*(result + transpose(result));
        }
        break;
      default:
        QL_FAIL("unknown salvaging algorithm");
    }

    return result;
}
Matrix rankReducedSqrt(const Matrix& matrix,
                       Size maxRank,
                       Real componentRetainedPercentage,
                       SalvagingAlgorithm::Type sa) {
    Size size = matrix.rows();

    #if defined(QL_EXTRA_SAFETY_CHECKS)
    checkSymmetry(matrix);
    #else
    QL_REQUIRE(size == matrix.columns(),
               "non square matrix: " << size << " rows, " <<
               matrix.columns() << " columns");
    #endif

    QL_REQUIRE(componentRetainedPercentage > 0.0, "no eigenvalues retained");
    QL_REQUIRE(componentRetainedPercentage <= 1.0, "percentage to be retained > 100%");
    QL_REQUIRE(maxRank >= 1, "max rank required < 1");

    // spectral (a.k.a principal component) analysis
    SymmetricSchurDecomposition jd(matrix);
    Array eigenValues = jd.eigenvalues();

    // salvaging algorithm
    switch (sa) {
      case SalvagingAlgorithm::None:
        // eigenvalues are sorted in decreasing order
        QL_REQUIRE(eigenValues[size-1] >= -1e-16,
                   "negative eigenvalue(s) ("
                   << std::scientific << eigenValues[size-1] << ")");
        break;
      case SalvagingAlgorithm::Spectral:
        // negative eigenvalues set to zero
        for (Size i=0; i<size; ++i)
            eigenValues[i] = std::max<Real>(eigenValues[i], 0.0);
        break;
      case SalvagingAlgorithm::Higham: {
          int maxIterations = 40;
          Real tolerance = 1e-6;
          Matrix adjustedMatrix = highamImplementation(matrix, maxIterations, tolerance);
          jd = SymmetricSchurDecomposition(adjustedMatrix);
          eigenValues = jd.eigenvalues();
        }
        break;
      default:
        QL_FAIL("unknown or invalid salvaging algorithm");
    }

    // factor reduction
    Real enough = componentRetainedPercentage *
                  std::accumulate(eigenValues.begin(), eigenValues.end(), Real(0.0));
    if (componentRetainedPercentage == 1.0) {
        // numerical glitches might cause some factors to be discarded
        enough *= 1.1;
    }
    // retain at least one factor
    Real components = eigenValues[0];
    Size retainedFactors = 1;
    for (Size i=1; components<enough && i<size; ++i) {
        components += eigenValues[i];
        retainedFactors++;
    }
    // output is granted to have rank <= maxRank
    retainedFactors = std::min(retainedFactors, maxRank);

    Matrix diagonal(size, retainedFactors, 0.0);
    for (Size i=0; i<retainedFactors; ++i)
        diagonal[i][i] = std::sqrt(eigenValues[i]);
    Matrix result(jd.eigenvectors() * diagonal);

    normalizePseudoRoot(matrix, result);
    return result;
}
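// Illustrative usage (same made-up matrix m as in the pseudoSqrt example
// above): keep only the leading factors covering at least 99% of the
// summed eigenvalues, capped at two factors. The returned root r has
// m.rows() rows and at most two columns, and its rows are rescaled so
// that the diagonal of r*transpose(r) matches the diagonal of m.
//
//     Matrix r = rankReducedSqrt(m, 2, 0.99, SalvagingAlgorithm::Spectral);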