preferences: adjust for Path preference
Change-Id: Icf1d0bccec77320ad4b66f90d2999f7210627629
diff --git a/GreenScreen/TFInference.cpp b/GreenScreen/TFInference.cpp
index f7053f5..678bc94 100644
--- a/GreenScreen/TFInference.cpp
+++ b/GreenScreen/TFInference.cpp
@@ -56,7 +56,7 @@
bool
TensorflowInference::isAllocated() const
{
- return allocated;
+ return allocated_;
}
#ifdef TFLITE
@@ -108,7 +108,7 @@
std::runtime_error("Failed to allocate tensors!");
} else {
Plog::log(Plog::LogPriority::INFO, "TENSOR", "TENSORS ALLOCATED" );
- allocated = true;
+ allocated_ = true;
}
}
@@ -226,7 +226,7 @@
tensorflow::GraphDef graph_def;
tensorflow::Status load_graph_status = tensorflow::ReadBinaryProto(tensorflow::Env::Default(), tfModel.modelPath, &graph_def);
if (!load_graph_status.ok()) {
- allocated = false;
+ allocated_ = false;
Plog::log(Plog::LogPriority::INFO, "LOAD GRAPH", "A problem occured when loading the graph");
return ;
}
@@ -259,12 +259,12 @@
tensorflow::Status session_create_status = session->Create(graph_def);
if (!session_create_status.ok()) {
Plog::log(Plog::LogPriority::INFO, "INIT SESSION", "A problem occured when initializating session");
- allocated = true;
+ allocated_ = true;
return ;
}
Plog::log(Plog::LogPriority::INFO, "INIT SESSION", "session initialized");
- allocated = true;
+ allocated_ = true;
}
void