From a1598999bfd0e1942d79a737ae311c688f1ab275 Mon Sep 17 00:00:00 2001
From: Donghak PARK <donghak.park@samsung.com>
Date: Mon, 23 Dec 2024 13:09:55 +0900
Subject: [PATCH] [Unittest] FSU Unittest with Simple FC Model

Add Unittest for FSU
- Use Simple FC Model that consist with 6-FC layer
- FP16-FP16 / Lookahead 1

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Donghak PARK <donghak.park@samsung.com>
---
 .../integration_test_fsu.cpp                  | 132 ++++++++++++++++++
 test/unittest/integration_tests/meson.build   |   6 +
 2 files changed, 138 insertions(+)
 create mode 100644 test/unittest/integration_tests/integration_test_fsu.cpp

diff --git a/test/unittest/integration_tests/integration_test_fsu.cpp b/test/unittest/integration_tests/integration_test_fsu.cpp
new file mode 100644
index 0000000000..f4c1a32adc
--- /dev/null
+++ b/test/unittest/integration_tests/integration_test_fsu.cpp
@@ -0,0 +1,132 @@
+// SPDX-License-Identifier: Apache-2.0
+/**
+ * Copyright (C) 2024 Donghak Park <donghak.park@samsung.com>
+ *
+ * @file integration_test_fsu.cpp
+ * @date 20 Dec 2024
+ * @brief Unit Test for Asynch FSU
+ * @see https://github.com/nnstreamer/nntrainer
+ * @author Donghak Park <donghak.park@samsung.com>
+ * @bug No known bugs except for NYI items
+ */
+
+#include <array>
+#include <chrono>
+#include <ctime>
+#include <gtest/gtest.h>
+#include <iostream>
+#include <memory>
+#include <sstream>
+#include <vector>
+
+#include <app_context.h>
+#include <layer.h>
+#include <model.h>
+#include <optimizer.h>
+
+using LayerHandle = std::shared_ptr<ml::train::Layer>;
+using ModelHandle = std::unique_ptr<ml::train::Model>;
+
+template <typename T>
+static std::string withKey(const std::string &key, const T &value) {
+  std::stringstream ss;
+  ss << key << "=" << value;
+  return ss.str();
+}
+
+template <typename T>
+static std::string withKey(const std::string &key,
+                           std::initializer_list<T> value) {
+  if (std::empty(value)) {
+    throw std::invalid_argument("empty data cannot be converted");
+  }
+
+  std::stringstream ss;
+  ss << key << "=";
+
+  auto iter = value.begin();
+  for (; iter != value.end() - 1; ++iter) {
+    ss << *iter << ',';
+  }
+  ss << *iter;
+
+  return ss.str();
+}
+
+TEST(fsu, simple_fc) {
+
+  std::unique_ptr<ml::train::Model> model = ml::train::createModel(
+    ml::train::ModelType::NEURAL_NET, {withKey("loss", "mse")});
+
+  model->addLayer(
+    ml::train::createLayer("input", {withKey("name", "input0"),
+                                     withKey("input_shape", "1:1:320")}));
+  model->addLayer(
+    ml::train::createLayer("fully_connected",
+                           {withKey("unit", 1000),
+                            withKey("weight_initializer", "xavier_uniform"),
+                            withKey("bias_initializer", "zeros")}));
+  model->addLayer(
+    ml::train::createLayer("fully_connected",
+                           {withKey("unit", 1000),
+                            withKey("weight_initializer", "xavier_uniform"),
+                            withKey("bias_initializer", "zeros")}));
+  model->addLayer(
+    ml::train::createLayer("fully_connected",
+                           {withKey("unit", 1000),
+                            withKey("weight_initializer", "xavier_uniform"),
+                            withKey("bias_initializer", "zeros")}));
+  model->addLayer(
+    ml::train::createLayer("fully_connected",
+                           {withKey("unit", 1000),
+                            withKey("weight_initializer", "xavier_uniform"),
+                            withKey("bias_initializer", "zeros")}));
+  model->addLayer(
+    ml::train::createLayer("fully_connected",
+                           {withKey("unit", 1000),
+                            withKey("weight_initializer", "xavier_uniform"),
+                            withKey("bias_initializer", "zeros")}));
+  model->addLayer(
+    ml::train::createLayer("fully_connected",
+                           {withKey("unit", 100),
+                            withKey("weight_initializer", "xavier_uniform"),
+                            withKey("bias_initializer", "zeros")}));
+
+  model->setProperty({withKey("batch_size", 1), withKey("epochs", 1),
+                      withKey("memory_swap", "true"),
+                      withKey("memory_swap_lookahead", "1"),
+                      withKey("model_tensor_type", "FP16-FP16")});
+
+  auto optimizer = ml::train::createOptimizer("sgd", {"learning_rate=0.001"});
+  model->setOptimizer(std::move(optimizer));
+
+  int status = model->compile(ml::train::ExecutionMode::INFERENCE);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  status = model->initialize(ml::train::ExecutionMode::INFERENCE);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  model->save("simplefc_weight_fp16_fp16_100.bin",
+              ml::train::ModelFormat::MODEL_FORMAT_BIN);
+  model->load("./simplefc_weight_fp16_fp16_100.bin");
+
+  uint feature_size = 320;
+
+  float input[320];
+  float label[1];
+
+  for (uint j = 0; j < feature_size; ++j)
+    input[j] = j;
+
+  std::vector<float *> in;
+  std::vector<float *> l;
+  std::vector<float *> answer;
+
+  in.push_back(input);
+  l.push_back(label);
+
+  answer = model->inference(1, in, l);
+
+  in.clear();
+  l.clear();
+}
diff --git a/test/unittest/integration_tests/meson.build b/test/unittest/integration_tests/meson.build
index 19e6fa95e6..8071a860fc 100644
--- a/test/unittest/integration_tests/meson.build
+++ b/test/unittest/integration_tests/meson.build
@@ -11,8 +11,14 @@ mixed_precision_targets = [
   'integration_test_mixed_precision.cpp',
 ]
 
+fsu_targets = [
+  model_util_path / 'models_test_utils.cpp',
+  'integration_test_fsu.cpp',
+]
+
 if get_option('enable-fp16')
   test_target += mixed_precision_targets
+  test_target += fsu_targets
 endif
 
 exe = executable(