#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#include <numpy/arrayobject.h>
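// RAII helper: acquires the Python GIL on construction and releases it on destruction.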
   PyGILState_STATE m_GILState;
   PyGILRAII() : m_GILState(PyGILState_Ensure()) {}
   ~PyGILRAII() { PyGILState_Release(m_GILState); }
REGISTER_METHOD(PyKeras)

ClassImp(MethodPyKeras);
MethodPyKeras::MethodPyKeras(const TString &jobName, const TString &methodTitle, DataSetInfo &dsi, const TString &theOption)
   : PyMethodBase(jobName, Types::kPyKeras, methodTitle, dsi, theOption) {
   fContinueTraining = false;
   fTriesEarlyStopping = -1;
   fLearningRateSchedule = "";
   fFilenameTrainedModel = "";
MethodPyKeras::MethodPyKeras(DataSetInfo &theData, const TString &theWeightFile)
   : PyMethodBase(Types::kPyKeras, theData, theWeightFile) {
   fContinueTraining = false;
   fTriesEarlyStopping = -1;
   fLearningRateSchedule = "";
   fFilenameTrainedModel = "";
MethodPyKeras::~MethodPyKeras() {
Bool_t MethodPyKeras::HasAnalysisType(Types::EAnalysisType type, UInt_t numberClasses, UInt_t) {
   if (type == Types::kRegression) return kTRUE;
   if (type == Types::kClassification && numberClasses == 2) return kTRUE;
   if (type == Types::kMulticlass && numberClasses >= 2) return kTRUE;
   return kFALSE;
void MethodPyKeras::DeclareOptions() {
   DeclareOptionRef(fFilenameModel, "FilenameModel", "Filename of the initial Keras model");
   DeclareOptionRef(fFilenameTrainedModel, "FilenameTrainedModel", "Filename of the trained output Keras model");
   DeclareOptionRef(fBatchSize, "BatchSize", "Training batch size");
   DeclareOptionRef(fNumEpochs, "NumEpochs", "Number of training epochs");
   DeclareOptionRef(fNumThreads, "NumThreads", "Number of CPU threads (only for TensorFlow backend)");
   DeclareOptionRef(fGpuOptions, "GpuOptions", "GPU options for TensorFlow, such as allow_growth");
   DeclareOptionRef(fVerbose, "Verbose", "Keras verbosity during training");
   DeclareOptionRef(fContinueTraining, "ContinueTraining", "Load weights from previous training");
   DeclareOptionRef(fSaveBestOnly, "SaveBestOnly", "Store only weights with smallest validation loss");
   DeclareOptionRef(fTriesEarlyStopping, "TriesEarlyStopping",
                    "Number of epochs with no improvement in validation loss after which training will be stopped. The default or a negative number deactivates this option.");
   DeclareOptionRef(fLearningRateSchedule, "LearningRateSchedule",
                    "Set new learning rate during training at specific epochs, e.g., \"50,0.01;70,0.005\"");
   DeclareOptionRef(fTensorBoard, "TensorBoard",
                    "Write a log during training to visualize and monitor the training performance with TensorBoard");
   DeclareOptionRef(fNumValidationString = "20%", "ValidationSize",
                    "Part of the training data to use for validation. "
                    "Specify as 0.2 or 20% to use a fifth of the data set as validation set. "
                    "Specify as 100 to use exactly 100 events. (Default: 20%)");
UInt_t TMVA::MethodPyKeras::GetNumValidationSamples()
{
   Int_t nValidationSamples = 0;
   UInt_t trainingSetSize = GetEventCollection(Types::kTraining).size();
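   // The ValidationSize string can be given as a percentage ("20%"), a fraction ("0.2"),
   // or an absolute number of events ("100"); parse it accordingly.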
   if (fNumValidationString.EndsWith("%")) {
      // Relative specification, e.g. "20%"
      TString intValStr = TString(fNumValidationString.Strip(TString::kTrailing, '%'));
      if (intValStr.IsFloat()) {
         Double_t valSizeAsDouble = fNumValidationString.Atof() / 100.0;
         nValidationSamples = GetEventCollection(Types::kTraining).size() * valSizeAsDouble;
      } else {
         Log() << kFATAL << "Cannot parse number \"" << fNumValidationString
               << "\". Expected string like \"20%\" or \"20.0%\"." << Endl;
      }
   } else if (fNumValidationString.IsFloat()) {
      Double_t valSizeAsDouble = fNumValidationString.Atof();
      if (valSizeAsDouble < 1.0) {
         // Fraction of the training set
         nValidationSamples = GetEventCollection(Types::kTraining).size() * valSizeAsDouble;
      } else {
         // Absolute number of events
         nValidationSamples = valSizeAsDouble;
      }
   } else {
      Log() << kFATAL << "Cannot parse number \"" << fNumValidationString
            << "\". Expected string like \"0.2\" or \"100\"." << Endl;
   }
   if (nValidationSamples < 0) {
      Log() << kFATAL << "Validation size \"" << fNumValidationString << "\" is negative." << Endl;
   }

   if (nValidationSamples == 0) {
      Log() << kFATAL << "Validation size \"" << fNumValidationString << "\" is zero." << Endl;
   }

   if (nValidationSamples >= (Int_t)trainingSetSize) {
      Log() << kFATAL << "Validation size \"" << fNumValidationString
            << "\" is larger than or equal in size to training set (size=\"" << trainingSetSize << "\")." << Endl;
   }

   return nValidationSamples;
void MethodPyKeras::ProcessOptions() {
   // Set a default filename for the trained model if the option was not given
   if (fFilenameTrainedModel.IsNull()) {
      fFilenameTrainedModel = GetWeightFileDir() + "/TrainedModel_" + GetName() + ".h5";
   }
   if (GetKerasBackend() == kTensorFlow) {
      Log() << kINFO << "Using TensorFlow backend - setting special configuration options" << Endl;
      PyRunString("import tensorflow as tf");
      PyRunString("from keras.backend import tensorflow_backend as K");

      // Determine the major version of the installed TensorFlow
      PyRunString("tf_major_version = int(tf.__version__.split('.')[0])");
      PyObject *pyTfVersion = PyDict_GetItemString(fLocalNS, "tf_major_version");
      int tfVersion = PyLong_AsLong(pyTfVersion);
      Log() << kINFO << "Using TensorFlow version " << tfVersion << Endl;
      TString configProto = (tfVersion >= 2) ? "tf.compat.v1.ConfigProto" : "tf.ConfigProto";
      TString session = (tfVersion >= 2) ? "tf.compat.v1.Session" : "tf.Session";

      // Optionally limit the number of CPU threads used by the TensorFlow session
      int num_threads = fNumThreads;
      if (num_threads > 0) {
         Log() << kINFO << "Setting the CPU number of threads = " << num_threads << Endl;
         PyRunString(TString::Format(
            "session_conf = %s(intra_op_parallelism_threads=%d,inter_op_parallelism_threads=%d)",
            configProto.Data(), num_threads, num_threads));
      } else {
         PyRunString(TString::Format("session_conf = %s()", configProto.Data()));
      }
      // Apply GPU options such as allow_growth, which avoids allocating all GPU memory up front
      if (!fGpuOptions.IsNull()) {
         TObjArray * optlist = fGpuOptions.Tokenize(",");
         for (int item = 0; item < optlist->GetEntries(); ++item) {
            Log() << kINFO << "Applying GPU option: gpu_options." << optlist->At(item)->GetName() << Endl;
            PyRunString(TString::Format("session_conf.gpu_options.%s", optlist->At(item)->GetName()));
         }
      }
      PyRunString(TString::Format("sess = %s(config=session_conf)", session.Data()));

      if (tfVersion < 2) {
         PyRunString("K.set_session(sess)");
      } else {
         // In TensorFlow 2 the session has to be set via tf.compat.v1
         PyRunString("tf.compat.v1.keras.backend.set_session(sess)");
      }
   } else {
      if (fNumThreads > 0)
         Log() << kWARNING << "Cannot set the given " << fNumThreads
               << " threads when not using tensorflow as backend" << Endl;
      if (!fGpuOptions.IsNull()) {
         Log() << kWARNING << "Cannot set the given GPU option " << fGpuOptions
               << " when not using tensorflow as backend" << Endl;
      }
   }

   if (fContinueTraining) Log() << kINFO << "Continue training with trained model" << Endl;
   SetupKerasModel(fContinueTraining);
void MethodPyKeras::SetupKerasModel(bool loadTrainedModel) {
   // Load either the initial model or the already trained model
   TString filenameLoadModel;
   if (loadTrainedModel) {
      filenameLoadModel = fFilenameTrainedModel;
   } else {
      filenameLoadModel = fFilenameModel;
   }
   PyRunString("model = keras.models.load_model('" + filenameLoadModel + "')",
               "Failed to load Keras model from file: " + filenameLoadModel);
   Log() << kINFO << "Load model from file: " << filenameLoadModel << Endl;
   fNVars = GetNVariables();
   if (GetAnalysisType() == Types::kClassification || GetAnalysisType() == Types::kMulticlass) fNOutputs = DataInfo().GetNClasses();
   else if (GetAnalysisType() == Types::kRegression) fNOutputs = DataInfo().GetNTargets();
   else Log() << kFATAL << "Selected analysis type is not implemented" << Endl;
   fVals = new float[fNVars];
   npy_intp dimsVals[2] = {(npy_intp)1, (npy_intp)fNVars};
   PyArrayObject* pVals = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsVals, NPY_FLOAT, (void*)fVals);
   PyDict_SetItemString(fLocalNS, "vals", (PyObject*)pVals);

   fOutput.resize(fNOutputs);
   npy_intp dimsOutput[2] = {(npy_intp)1, (npy_intp)fNOutputs};
   PyArrayObject* pOutput = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsOutput, NPY_FLOAT, (void*)&fOutput[0]);
   PyDict_SetItemString(fLocalNS, "output", (PyObject*)pOutput);
   fModelIsSetup = true;
void MethodPyKeras::Init() {
   TMVA::Internal::PyGILRAII raii;

   if (!PyIsInitialized()) {
      Log() << kFATAL << "Python is not initialized" << Endl;
   }

   PyRunString("import sys; sys.argv = ['']", "Set sys.argv failed");
   PyRunString("import keras", "Import Keras failed");

   fModelIsSetup = false;
void MethodPyKeras::Train() {
   if (!fModelIsSetup) Log() << kFATAL << "Model is not setup for training" << Endl;
   UInt_t nAllEvents = Data()->GetNTrainingEvents();
   UInt_t nValEvents = GetNumValidationSamples();
   UInt_t nTrainingEvents = nAllEvents - nValEvents;

   Log() << kINFO << "Split TMVA training data in " << nTrainingEvents << " training events and "
         << nValEvents << " validation events" << Endl;
   float* trainDataX = new float[nTrainingEvents*fNVars];
   float* trainDataY = new float[nTrainingEvents*fNOutputs];
   float* trainDataWeights = new float[nTrainingEvents];
   for (UInt_t i=0; i<nTrainingEvents; i++) {
      const TMVA::Event* e = GetTrainingEvent(i);
      // Fill variables
      for (UInt_t j=0; j<fNVars; j++) {
         trainDataX[j + i*fNVars] = e->GetValue(j);
      }
      // Fill targets: for (multi)classification, convert the class number into a one-hot vector
      if (GetAnalysisType() == Types::kClassification || GetAnalysisType() == Types::kMulticlass) {
         for (UInt_t j=0; j<fNOutputs; j++) {
            trainDataY[j + i*fNOutputs] = 0;
         }
         trainDataY[e->GetClass() + i*fNOutputs] = 1;
      }
      else if (GetAnalysisType() == Types::kRegression) {
         for (UInt_t j=0; j<fNOutputs; j++) {
            trainDataY[j + i*fNOutputs] = e->GetTarget(j);
         }
      }
      else Log() << kFATAL << "Can not fill target vector because analysis type is not known" << Endl;
      // Fill per-event weights
      trainDataWeights[i] = e->GetWeight();
   }
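   // Wrap the C++ buffers as numpy arrays (no copy) and publish them in the local Python
   // namespace, so model.fit can access them as 'trainX', 'trainY' and 'trainWeights'.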
   npy_intp dimsTrainX[2] = {(npy_intp)nTrainingEvents, (npy_intp)fNVars};
   npy_intp dimsTrainY[2] = {(npy_intp)nTrainingEvents, (npy_intp)fNOutputs};
   npy_intp dimsTrainWeights[1] = {(npy_intp)nTrainingEvents};
   PyArrayObject* pTrainDataX = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsTrainX, NPY_FLOAT, (void*)trainDataX);
   PyArrayObject* pTrainDataY = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsTrainY, NPY_FLOAT, (void*)trainDataY);
   PyArrayObject* pTrainDataWeights = (PyArrayObject*)PyArray_SimpleNewFromData(1, dimsTrainWeights, NPY_FLOAT, (void*)trainDataWeights);
   PyDict_SetItemString(fLocalNS, "trainX", (PyObject*)pTrainDataX);
   PyDict_SetItemString(fLocalNS, "trainY", (PyObject*)pTrainDataY);
   PyDict_SetItemString(fLocalNS, "trainWeights", (PyObject*)pTrainDataWeights);
   float* valDataX = new float[nValEvents*fNVars];
   float* valDataY = new float[nValEvents*fNOutputs];
   float* valDataWeights = new float[nValEvents];
   for (UInt_t i=0; i<nValEvents; i++) {
      UInt_t ievt = nTrainingEvents + i;
      const TMVA::Event* e = GetTrainingEvent(ievt);
      // Fill variables
      for (UInt_t j=0; j<fNVars; j++) {
         valDataX[j + i*fNVars] = e->GetValue(j);
      }
      // Fill targets
      if (GetAnalysisType() == Types::kClassification || GetAnalysisType() == Types::kMulticlass) {
         for (UInt_t j=0; j<fNOutputs; j++) {
            valDataY[j + i*fNOutputs] = 0;
         }
         valDataY[e->GetClass() + i*fNOutputs] = 1;
      }
      else if (GetAnalysisType() == Types::kRegression) {
         for (UInt_t j=0; j<fNOutputs; j++) {
            valDataY[j + i*fNOutputs] = e->GetTarget(j);
         }
      }
      else Log() << kFATAL << "Can not fill target vector because analysis type is not known" << Endl;
      // Fill per-event weights
      valDataWeights[i] = e->GetWeight();
   }
   npy_intp dimsValX[2] = {(npy_intp)nValEvents, (npy_intp)fNVars};
   npy_intp dimsValY[2] = {(npy_intp)nValEvents, (npy_intp)fNOutputs};
   npy_intp dimsValWeights[1] = {(npy_intp)nValEvents};
   PyArrayObject* pValDataX = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsValX, NPY_FLOAT, (void*)valDataX);
   PyArrayObject* pValDataY = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsValY, NPY_FLOAT, (void*)valDataY);
   PyArrayObject* pValDataWeights = (PyArrayObject*)PyArray_SimpleNewFromData(1, dimsValWeights, NPY_FLOAT, (void*)valDataWeights);
   PyDict_SetItemString(fLocalNS, "valX", (PyObject*)pValDataX);
   PyDict_SetItemString(fLocalNS, "valY", (PyObject*)pValDataY);
   PyDict_SetItemString(fLocalNS, "valWeights", (PyObject*)pValDataWeights);
   Log() << kINFO << "Training Model Summary" << Endl;
   PyRunString("model.summary()");
   PyObject* pBatchSize = PyLong_FromLong(fBatchSize);
   PyObject* pNumEpochs = PyLong_FromLong(fNumEpochs);
   PyObject* pVerbose = PyLong_FromLong(fVerbose);
   PyDict_SetItemString(fLocalNS, "batchSize", pBatchSize);
   PyDict_SetItemString(fLocalNS, "numEpochs", pNumEpochs);
   PyDict_SetItemString(fLocalNS, "verbose", pVerbose);
   PyRunString("callbacks = []");

   // Callback: store only the weights with the smallest validation loss
   if (fSaveBestOnly) {
      PyRunString("callbacks.append(keras.callbacks.ModelCheckpoint('"+fFilenameTrainedModel+"', monitor='val_loss', verbose=verbose, save_best_only=True, mode='auto'))",
                  "Failed to setup training callback: SaveBestOnly");
      Log() << kINFO << "Option SaveBestOnly: Only model weights with smallest validation loss will be stored" << Endl;
   }
   // Callback: stop training early if the validation loss does not improve
   if (fTriesEarlyStopping >= 0) {
      TString tries;
      tries.Form("%i", fTriesEarlyStopping);
      PyRunString("callbacks.append(keras.callbacks.EarlyStopping(monitor='val_loss', patience="+tries+", verbose=verbose, mode='auto'))",
                  "Failed to setup training callback: TriesEarlyStopping");
      Log() << kINFO << "Option TriesEarlyStopping: Training will stop after " << tries << " epochs with no improvement of validation loss" << Endl;
   }
   // Callback: learning rate scheduler
   if (fLearningRateSchedule != "") {
      // Build a python dictionary {epoch: learning rate} from the schedule string
      PyRunString("strScheduleSteps = '"+fLearningRateSchedule+"'\n"
                  "schedulerSteps = {}\n"
                  "for c in strScheduleSteps.split(';'):\n"
                  " x = c.split(',')\n"
                  " schedulerSteps[int(x[0])] = float(x[1])\n",
                  "Failed to setup steps for scheduler function from string: "+fLearningRateSchedule,
                  Py_file_input);
      // Piecewise schedule: use the configured rate at the given epochs, otherwise keep the current one
      PyRunString("def schedule(epoch, model=model, schedulerSteps=schedulerSteps):\n"
                  " if epoch in schedulerSteps: return float(schedulerSteps[epoch])\n"
                  " else: return float(model.optimizer.lr.get_value())\n",
                  "Failed to setup scheduler function with string: "+fLearningRateSchedule,
                  Py_file_input);
      PyRunString("callbacks.append(keras.callbacks.LearningRateScheduler(schedule))",
                  "Failed to setup training callback: LearningRateSchedule");
      Log() << kINFO << "Option LearningRateSchedule: Set learning rate during training: " << fLearningRateSchedule << Endl;
   }
   // Callback: TensorBoard logging
   if (fTensorBoard != "") {
      TString logdir = TString("'") + fTensorBoard + TString("'");
      PyRunString(
         "callbacks.append(keras.callbacks.TensorBoard(log_dir=" + logdir +
         ", histogram_freq=0, batch_size=batchSize, write_graph=True, write_grads=False, write_images=False))",
         "Failed to setup training callback: TensorBoard");
      Log() << kINFO << "Option TensorBoard: Log files for training monitoring are stored in: " << logdir << Endl;
   }
   PyRunString("history = model.fit(trainX, trainY, sample_weight=trainWeights, batch_size=batchSize, epochs=numEpochs, verbose=verbose, validation_data=(valX, valY, valWeights), callbacks=callbacks)",
               "Failed to train model");
   std::vector<float> fHistory;
   fHistory.resize(fNumEpochs);
   npy_intp dimsHistory[1] = { (npy_intp)fNumEpochs };
   PyArrayObject* pHistory = (PyArrayObject*)PyArray_SimpleNewFromData(1, dimsHistory, NPY_FLOAT, (void*)&fHistory[0]);
   PyDict_SetItemString(fLocalNS, "HistoryOutput", (PyObject*)pHistory);
   PyRunString("number_of_keys=len(history.history.keys())");
   PyObject* PyNkeys=PyDict_GetItemString(fLocalNS, "number_of_keys");
   int nkeys=PyLong_AsLong(PyNkeys);
   for (int iHis=0; iHis<nkeys; iHis++) {
      PyRunString(TString::Format("copy_string=str(list(history.history.keys())[%d])",iHis));
      PyObject* stra=PyDict_GetItemString(fLocalNS, "copy_string");
#if PY_MAJOR_VERSION < 3   // for Python2
      const char *stra_name = PyBytes_AsString(stra);
      // add string delimiters, needed when inserting the name into the Python line below
      TString sname = TString::Format("'%s'", stra_name);
      const char * name = sname.Data();
#else   // for Python3
      PyObject* repr = PyObject_Repr(stra);
      PyObject* str = PyUnicode_AsEncodedString(repr, "utf-8", "~E~");
      const char *name = PyBytes_AsString(str);
#endif
      Log() << kINFO << "Getting training history for item:" << iHis << " name = " << name << Endl;
      PyRunString(TString::Format("for i,p in enumerate(history.history[%s]):\n HistoryOutput[i]=p\n", name),
                  TString::Format("Failed to get %s from training history", name));
      for (size_t i=0; i<fHistory.size(); i++)
         fTrainHistory.AddValue(name, i+1, fHistory[i]);
   }
   // Store the trained model to file (only when 'SaveBestOnly' is off, otherwise the best
   // checkpoint written by the ModelCheckpoint callback would be overwritten)
   if (!fSaveBestOnly) {
      PyRunString("model.save('"+fFilenameTrainedModel+"', overwrite=True)",
                  "Failed to save trained model: "+fFilenameTrainedModel);
      Log() << kINFO << "Trained model written to file: " << fFilenameTrainedModel << Endl;
   }

   // Clean up the temporary training and validation buffers
   delete[] trainDataX;
   delete[] trainDataY;
   delete[] trainDataWeights;
   delete[] valDataX;
   delete[] valDataY;
   delete[] valDataWeights;
void MethodPyKeras::TestClassification() {
   MethodBase::TestClassification();
Double_t MethodPyKeras::GetMvaValue(Double_t *errLower, Double_t *errUpper) {
   // Cannot determine error
   NoErrorCalc(errLower, errUpper);

   // The model may not be set up yet, because ProcessOptions is not called again during evaluation
   if (!fModelIsSetup) {
      SetupKerasModel(true);
   }

   // Get the signal probability for the current event
   const TMVA::Event* e = GetEvent();
   for (UInt_t i=0; i<fNVars; i++) fVals[i] = e->GetValue(i);
   PyRunString("for i,p in enumerate(model.predict(vals)): output[i]=p\n",
               "Failed to get predictions");

   return fOutput[TMVA::Types::kSignal];
std::vector<Double_t> MethodPyKeras::GetMvaValues(Long64_t firstEvt, Long64_t lastEvt, Bool_t logProgress) {
   if (!fModelIsSetup) {
      SetupKerasModel(true);
   }

   Long64_t nEvents = Data()->GetNEvents();
   if (firstEvt > lastEvt || lastEvt > nEvents) lastEvt = nEvents;
   if (firstEvt < 0) firstEvt = 0;
   nEvents = lastEvt - firstEvt;
   Timer timer( nEvents, GetName(), kTRUE );

   Log() << kHEADER << Form("[%s] : ", DataInfo().GetName())
         << "Evaluation of " << GetMethodName() << " on "
         << (Data()->GetCurrentType() == Types::kTraining ? "training" : "testing")
         << " sample (" << nEvents << " events)" << Endl;
   float* data = new float[nEvents*fNVars];
   for (UInt_t i=0; i<nEvents; i++) {
      Data()->SetCurrentEvent(i);
      const TMVA::Event *e = GetEvent();
      for (UInt_t j=0; j<fNVars; j++) {
         data[j + i*fNVars] = e->GetValue(j);
      }
   }
   npy_intp dimsData[2] = {(npy_intp)nEvents, (npy_intp)fNVars};
   PyArrayObject* pDataMvaValues = (PyArrayObject*)PyArray_SimpleNewFromData(2, dimsData, NPY_FLOAT, (void*)data);
   if (pDataMvaValues==0) Log() << "Failed to load data to Python array" << Endl;

   // Get predictions for all events by calling model.predict directly on the numpy array
   PyObject* pModel = PyDict_GetItemString(fLocalNS, "model");
   if (pModel==0) Log() << kFATAL << "Failed to get model Python object" << Endl;
   PyArrayObject* pPredictions = (PyArrayObject*) PyObject_CallMethod(pModel, (char*)"predict", (char*)"O", pDataMvaValues);
   if (pPredictions==0) Log() << kFATAL << "Failed to get predictions" << Endl;
   delete[] data;
   std::vector<double> mvaValues(nEvents);
   float* predictionsData = (float*) PyArray_DATA(pPredictions);
   for (UInt_t i=0; i<nEvents; i++) {
      mvaValues[i] = (double) predictionsData[i*fNOutputs + TMVA::Types::kSignal];
   }
   if (logProgress) {
      Log() << kINFO
            << "Elapsed time for evaluation of " << nEvents << " events: "
            << timer.GetElapsedTime() << " " << Endl;
   }

   return mvaValues;
std::vector<Float_t>& MethodPyKeras::GetRegressionValues() {
   // The model may not be set up yet, because ProcessOptions is not called again during evaluation
   if (!fModelIsSetup) {
      SetupKerasModel(true);
   }

   // Get regression values for the current event
   const TMVA::Event* e = GetEvent();
   for (UInt_t i=0; i<fNVars; i++) fVals[i] = e->GetValue(i);
   PyRunString("for i,p in enumerate(model.predict(vals)): output[i]=p\n",
               "Failed to get predictions");

   // Apply the inverse target transformation to get the final regression values
   Event * eTrans = new Event(*e);
   for (UInt_t i=0; i<fNOutputs; ++i) {
      eTrans->SetTarget(i, fOutput[i]);
   }

   const Event* eTrans2 = GetTransformationHandler().InverseTransform(eTrans);
   for (UInt_t i=0; i<fNOutputs; ++i) {
      fOutput[i] = eTrans2->GetTarget(i);
   }

   return fOutput;
std::vector<Float_t>& MethodPyKeras::GetMulticlassValues() {
   // The model may not be set up yet, because ProcessOptions is not called again during evaluation
   if (!fModelIsSetup) {
      SetupKerasModel(true);
   }

   // Get class probabilities for the current event
   const TMVA::Event* e = GetEvent();
   for (UInt_t i=0; i<fNVars; i++) fVals[i] = e->GetValue(i);
   PyRunString("for i,p in enumerate(model.predict(vals)): output[i]=p\n",
               "Failed to get predictions");

   return fOutput;
void MethodPyKeras::ReadModelFromFile() {
void MethodPyKeras::GetHelpMessage() const {
   Log() << "Keras is a high-level API for the Theano and TensorFlow packages." << Endl;
   Log() << "This method wraps the training and prediction steps of the Keras" << Endl;
   Log() << "Python package for TMVA, so that data loading, preprocessing and" << Endl;
   Log() << "evaluation can be done within the TMVA system. To use this Keras" << Endl;
   Log() << "interface, you have to generate a model with Keras first. Then," << Endl;
   Log() << "this model can be loaded and trained in TMVA." << Endl;
MethodPyKeras::EBackendType MethodPyKeras::GetKerasBackend() {
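   // Query keras.backend.backend() in the Python namespace and map the reported backend
   // name onto the EBackendType enum.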
   PyRunString("keras_backend_is_set = keras.backend.backend() == \"tensorflow\"");
   PyObject * keras_backend = PyDict_GetItemString(fLocalNS, "keras_backend_is_set");
   if (keras_backend != nullptr && keras_backend == Py_True)
      return kTensorFlow;

   PyRunString("keras_backend_is_set = keras.backend.backend() == \"theano\"");
   keras_backend = PyDict_GetItemString(fLocalNS, "keras_backend_is_set");
   if (keras_backend != nullptr && keras_backend == Py_True)
      return kTheano;

   PyRunString("keras_backend_is_set = keras.backend.backend() == \"cntk\"");
   keras_backend = PyDict_GetItemString(fLocalNS, "keras_backend_is_set");
   if (keras_backend != nullptr && keras_backend == Py_True)
      return kCNTK;
TString MethodPyKeras::GetKerasBackendName() {
   EBackendType type = GetKerasBackend();
   if (type == kTensorFlow) return "TensorFlow";
   if (type == kTheano) return "Theano";
   if (type == kCNTK) return "CNTK";