From be4122149bf6de228c060b1be5c8e53789579496 Mon Sep 17 00:00:00 2001
From: Donghak PARK <donghak.park@samsung.com>
Date: Mon, 23 Dec 2024 17:44:46 +0900
Subject: [PATCH] [hotfix] Fix if-else condition for FSU

Currently, only the forwarding code is distinguished when Train and Swap
are enabled. I also added a case where Swap is turned off and only
Inference is turned on.

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Donghak PARK <donghak.park@samsung.com>
---
 nntrainer/models/neuralnet.cpp | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/nntrainer/models/neuralnet.cpp b/nntrainer/models/neuralnet.cpp
index bff18c2ddd..9ed65bf0a9 100644
--- a/nntrainer/models/neuralnet.cpp
+++ b/nntrainer/models/neuralnet.cpp
@@ -350,13 +350,13 @@ sharedConstTensors NeuralNetwork::forwarding(
     PROFILE_MEM_ANNOTATE("Forwarding for layer: " + node->getName());

     auto f = std::get<0>(node->getExecutionOrder());
-
+    bool swap_mode = std::get<props::MemorySwap>(model_flex_props);
     // temperally remain. when we evaluate all for asynch mode, we weill remove
-    if (exec_mode == ExecutionMode::TRAIN) {
+    if (exec_mode == ExecutionMode::TRAIN or
+        (exec_mode == ExecutionMode::INFERENCE and !swap_mode)) {
       model_graph.flushCacheExcept(f);
       node->forwarding(training);
     } else {
-      /** currently, it supports FSU asynch mode for inference. The prcedure of FSU is below,