Logo ROOT   6.30.04
Reference Guide
 All Namespaces Files Pages
GSLSimAnMinimizer.cxx
Go to the documentation of this file.
1 // @(#)root/mathmore:$Id$
2 // Author: L. Moneta Wed Dec 20 17:16:32 2006
3 
4 /**********************************************************************
5  * *
6  * Copyright (c) 2006 LCG ROOT Math Team, CERN/PH-SFT *
7  * *
8  * *
9  **********************************************************************/
10 
11 // Implementation file for class GSLSimAnMinimizer
12 
13 #include "Math/GSLSimAnMinimizer.h"
15 #include "Math/Error.h"
16 
18 #include "Math/MultiNumGradFunction.h" // needed to use transformation function
19 #include "Math/FitMethodFunction.h"
20 #include "Math/GenAlgoOptions.h"
21 
22 #include <iostream>
23 #include <cassert>
24 
25 namespace ROOT {
26 
27  namespace Math {
28 
29 
30 
31 // GSLSimAnMinimizer implementation
32 
33 GSLSimAnMinimizer::GSLSimAnMinimizer( int /* ROOT::Math::EGSLSimAnMinimizerType type */ ) :
34  BasicMinimizer()
35 {
36  // Constructor implementation : create GSLMultiFit wrapper object
37 
38  SetMaxIterations(100);
39  SetPrintLevel(0);
40 }
41 
42 GSLSimAnMinimizer::~GSLSimAnMinimizer () {
43 }
44 
45 
bool GSLSimAnMinimizer::Minimize() {
   // Run simulated annealing on the objective function previously set via the
   // BasicMinimizer interface. On success the minimum value and the final
   // parameter values are stored in the base class and true is returned;
   // returns false when no function was set or the solver reports an error.
   int debugLevel = PrintLevel();

   if (debugLevel >=1 ) std::cout <<"Minimize using GSLSimAnMinimizer " << std::endl;

   const ROOT::Math::IMultiGenFunction * function = ObjFunction();
   if (function == 0) {
      MATH_ERROR_MSG("GSLSimAnMinimizer::Minimize","Function has not been set");
      return false;
   }

   // vector of internal values (copied by default)
   unsigned int npar = NPar();
   std::vector<double> xvar;
   std::vector<double> steps(StepSizes(),StepSizes()+npar);

   // Wrap the objective in a numerical-gradient adapter; the transformation
   // machinery below requires it. SetOwnership() hands lifetime management of
   // the wrapped function copy to the adapter (see MultiNumGradFunction) —
   // NOTE(review): gradFunc itself is presumably adopted by
   // CreateTransformation / the base class; confirm no leak on early paths.
   MultiNumGradFunction * gradFunc = new MultiNumGradFunction( *function );
   gradFunc->SetOwnership();

   // Install the (optional) parameter transformation (bounds / fixed
   // parameters) and fill xvar with the internal starting values.
   MinimTransformFunction * trFunc = CreateTransformation(xvar, gradFunc );
   // ObjFunction() will return now the new transformed function

   if (trFunc) {
      // transform also the step sizes to internal coordinates and shrink the
      // step vector to the internal dimension
      trFunc->InvStepTransformation(X(), StepSizes(), &steps[0]);
      steps.resize( trFunc->NDim() );
   }

   // internal starting point and step vector must agree in dimension
   assert (xvar.size() == steps.size() );


#ifdef DEBUG
   for (unsigned int i = 0; i < npar ; ++i) {
      std::cout << "x = " << xvar[i] << " steps " << steps[i] << " x " << X()[i] << std::endl;
   }
   std::cout << "f(x) = " << (*ObjFunction())(&xvar.front() ) << std::endl;
   std::cout << "f(x) not transf = " << (*function)( X() ) << std::endl;
   if (trFunc) std::cout << "ftrans(x) = " << (*trFunc) (&xvar.front() ) << std::endl;
#endif

   // output vector (internal coordinates)
   std::vector<double> xmin(xvar.size() );

   // run the annealing; a return code of 0 signals success
   int iret = fSolver.Solve(*ObjFunction(), &xvar.front(), &steps.front(), &xmin[0], (debugLevel > 1) );

   // evaluate and store the function value at the found minimum
   SetMinValue( (*ObjFunction())(&xmin.front() ) );

   // store the final (internal) values; X() then reports external coordinates
   SetFinalValues(&xmin.front());


   if (debugLevel >=1 ) {
      if (iret == 0)
         std::cout << "GSLSimAnMinimizer: Minimum Found" << std::endl;
      else
         std::cout << "GSLSimAnMinimizer: Error in solving" << std::endl;

      // print FVAL with full double precision, then restore the stream state
      int pr = std::cout.precision(18);
      std::cout << "FVAL = " << MinValue() << std::endl;
      std::cout.precision(pr);
      for (unsigned int i = 0; i < NDim(); ++i)
         std::cout << VariableName(i) << "\t = " << X()[i] << std::endl;
   }


   return ( iret == 0) ? true : false;
}
115 
116 
117 unsigned int GSLSimAnMinimizer::NCalls() const {
118  // return number of function calls
119  const ROOT::Math::MinimTransformFunction * tfunc = dynamic_cast<const ROOT::Math::MinimTransformFunction *>(ObjFunction());
120  const ROOT::Math::MultiNumGradFunction * f = 0;
121  if (tfunc) f = dynamic_cast<const ROOT::Math::MultiNumGradFunction *>(tfunc->OriginalFunction());
122  else
123  f = dynamic_cast<const ROOT::Math::MultiNumGradFunction *>(ObjFunction());
124  if (f) return f->NCalls();
125  return 0;
126 }
127 
128 ROOT::Math::MinimizerOptions GSLSimAnMinimizer::Options() const {
129  ROOT::Math::MinimizerOptions opt;
130  opt.SetMinimizerType("GSLSimAn");
131  // set dummy values since those are not used
132  opt.SetTolerance(-1);
133  opt.SetPrintLevel(0);
134  opt.SetMaxIterations(-1);
135  opt.SetMaxFunctionCalls(0);
136  opt.SetStrategy(-1);
137  opt.SetErrorDef(0);
138  opt.SetPrecision(0);
139  opt.SetMinimizerAlgorithm("");
140 
141  const GSLSimAnParams & params = MinimizerParameters();
142 
143  ROOT::Math::GenAlgoOptions simanOpt;
144  simanOpt.SetValue("n_tries",params.n_tries);
145  simanOpt.SetValue("iters_fixed_T",params.iters_fixed_T);
146  simanOpt.SetValue("step_size",params.step_size);
147  simanOpt.SetValue("k",params.k);
148  simanOpt.SetValue("t_initial",params.t_initial);
149  simanOpt.SetValue("mu_t",params.mu_t);
150  simanOpt.SetValue("t_min",params.t_min);
151 
152  opt.SetExtraOptions(simanOpt);
153  return opt;
154 }
155 
156 void GSLSimAnMinimizer::SetOptions(const ROOT::Math::MinimizerOptions & opt) {
157 
158  // get the specific siman options
159  const ROOT::Math::IOptions * simanOpt = opt.ExtraOptions();
160  if (!simanOpt) {
161  MATH_WARN_MSG("GSLSimAnMinimizer::SetOptions", "No specific sim. annealing minimizer options are provided. No options are set");
162  return;
163  }
164  GSLSimAnParams params;
165  simanOpt->GetValue("n_tries",params.n_tries);
166  simanOpt->GetValue("iters_fixed_T",params.iters_fixed_T);
167  simanOpt->GetValue("step_size",params.step_size);
168  simanOpt->GetValue("k",params.k);
169  simanOpt->GetValue("t_initial",params.t_initial);
170  simanOpt->GetValue("mu_t",params.mu_t);
171  simanOpt->GetValue("t_min",params.t_min);
172 
173  SetParameters(params);
174 }
175 
176 
177  } // end namespace Math
178 
179 } // end namespace ROOT
180