Logo ROOT   6.30.04
Reference Guide
 All Namespaces Files Pages
TActivationReLU.h
Go to the documentation of this file.
1 // @(#)root/tmva $Id$
2 // Author: Matt Jachowski
3 
4 /**********************************************************************************
5  * Project: TMVA - a Root-integrated toolkit for multivariate data analysis *
6  * Package: TMVA *
7  * Class : TMVA::TActivationReLU *
8  * Web : http://tmva.sourceforge.net *
9  * *
10  * Description: *
 11  * Rectified Linear Unit (ReLU) activation function for TNeuron *
12  * *
13  * Authors (alphabetical): *
14  * Matt Jachowski <jachowski@stanford.edu> - Stanford University, USA *
15  * *
16  * Copyright (c) 2005: *
17  * CERN, Switzerland *
18  * *
19  * Redistribution and use in source and binary forms, with or without *
20  * modification, are permitted according to the terms listed in LICENSE *
21  * (http://tmva.sourceforge.net/LICENSE) *
22  **********************************************************************************/
23 
24 #ifndef ROOT_TMVA_TActivationReLU
25 #define ROOT_TMVA_TActivationReLU
26 
27 //////////////////////////////////////////////////////////////////////////
28 // //
29 // TActivationReLU //
30 // //
31 // Rectified Linear Unit activation function for TNeuron //
32 // //
33 //////////////////////////////////////////////////////////////////////////
34 
35 #include "TString.h"
36 
37 #include "TMVA/TActivation.h"
38 
39 namespace TMVA {
40 
41  class TActivationReLU : public TActivation {
42 
43  public:
44  TActivationReLU() {}
45  ~TActivationReLU() {}
46 
47  // evaluate the activation function
48  Double_t Eval(Double_t arg) { return arg>0 ? arg : 0;}
49 
50  // evaluate the derivative of the activation function
51  Double_t EvalDerivative(Double_t arg) { return arg>0 ? 1 : 0;}
52 
53  // minimum of the range of the activation function
54  Double_t GetMin() { return -1; }
55 
56  // maximum of the range of the activation function
57  Double_t GetMax() { return 1; }
58 
59  // expression for the activation function
60  TString GetExpression();
61 
62  // writer of function code
63  virtual void MakeFunction(std::ostream& fout, const TString& fncName);
64 
65  private:
66  ClassDef(TActivationReLU, 0); // Rectified Linear Unit activation function for TNeuron
67  };
68 
69 } // namespace TMVA
70 
71 #endif