From 178fd569bb2c46cf9aec5914eacf3c451c738ffb Mon Sep 17 00:00:00 2001
From: Eunju Yang
Date: Fri, 27 Dec 2024 14:52:37 +0900
Subject: [PATCH] [ Subgraph ] Example application for subgraph use case

- This commit contains an example application that builds a large neural
  network out of a repeated subgraph.
- The application adds each subgraph instance with the
  `is_shared_subgraph=true` option.
- The summarization result shows that the corresponding layers across
  subgraph instances share their weights.

Signed-off-by: Eunju Yang
---
 Applications/Subgraph/jni/main.cpp        |  56 ++---
 Applications/Subgraph/jni/meson.build     |   2 +-
 Applications/Subgraph/jni/model_util.h    |   2 +-
 Applications/Subgraph/jni/out_summary.txt | 241 ----------------------
 nntrainer/compiler/subgraph_realizer.cpp  |   2 +-
 5 files changed, 23 insertions(+), 280 deletions(-)
 delete mode 100644 Applications/Subgraph/jni/out_summary.txt

diff --git a/Applications/Subgraph/jni/main.cpp b/Applications/Subgraph/jni/main.cpp
index 43ab555f9..597645226 100644
--- a/Applications/Subgraph/jni/main.cpp
+++ b/Applications/Subgraph/jni/main.cpp
@@ -4,7 +4,7 @@
  *
  * @file main.cpp
  * @date 27 Dec 2024
- * @brief Test Application for shared_from
+ * @brief Test Application for subgraph weight sharing
  * @see https://github.com/nnstreamer/nntrainer
  * @author Eunju Yang
  * @bug No known bugs except for NYI items
@@ -27,44 +27,24 @@ using LayerHandle = std::shared_ptr<ml::train::Layer>;
 using ModelHandle = std::unique_ptr<ml::train::Model>;
 using UserDataType = std::unique_ptr<nntrainer::util::DataLoader>;
 
-/**
- * @brief tain data callback
- */
-int trainData_cb(float **input, float **label, bool *last, void *user_data) {
-  auto data = reinterpret_cast<nntrainer::util::DataLoader *>(user_data);
-
-  data->next(input, label, last);
-  return 0;
-}
-
 /**
  * @brief Create subgraph
  * @return vector of layers that contain subgraph
  */
-std::vector<LayerHandle> createSubGraph(const std::string &scope,
-                                        int subgraph_idx) {
+std::vector<LayerHandle> createSubGraph(const std::string &scope) {
 
   using ml::train::createLayer;
 
   std::vector<LayerHandle> layers;
 
-  layers.push_back(createLayer(
-    "fully_connected",
-    {withKey("name", scope + "/fc_in" + std::to_string(subgraph_idx)),
-     withKey("unit", 320),
-     withKey("input_layers", "input/" + std::to_string(subgraph_idx)),
-     withKey("shared_from", scope + "/fc_in0")}));
-  layers.push_back(createLayer(
-    "fully_connected",
-    {
-      withKey("name", scope + "/fc_out" + std::to_string(subgraph_idx)),
-      withKey("unit", 320),
-      withKey("input_layers", scope + "/fc_in" + std::to_string(subgraph_idx)),
-      withKey("shared_from", scope + "/fc_out0"),
-    }));
-  layers.push_back(createLayer(
-    "identity",
-    {withKey("name", "input/" + std::to_string(subgraph_idx + 1))}));
+  layers.push_back(createLayer("fully_connected", {
+                                 withKey("name", "fc_in"),
+                                 withKey("unit", 320),
+                               }));
+  layers.push_back(createLayer("fully_connected", {
+                                 withKey("name", "fc_out"),
+                                 withKey("unit", 320),
+                               }));
 
   return layers;
 }
@@ -79,12 +59,16 @@ int main(int argc, char *argv[]) {
 
   /** add input layer */
   model->addLayer(
-    ml::train::createLayer("input", {"name=input/0", "input_shape=1:1:320"}));
+    ml::train::createLayer("input", {"name=input", "input_shape=1:1:320"}));
+
+  /** create a subgraph structure */
+  auto subgraph = createSubGraph("subgraph");
 
-  /** add subgraphs with shared_from */
-  for (auto idx_sg = 0; idx_sg < n_sg; ++idx_sg) {
-    for (auto &layer : createSubGraph(std::string("subgraph"), idx_sg))
-      model->addLayer(layer);
+  for (unsigned int idx_sg = 0; idx_sg < n_sg; ++idx_sg) {
+    model->addWithReferenceLayers(
+      subgraph, "subgraph", {}, {"fc_in"}, {"fc_out"},
+      ml::train::ReferenceLayersType::SUBGRAPH,
+      {withKey("subgraph_idx", idx_sg), withKey("is_shared_subgraph", "true")});
   }
 
   auto optimizer = ml::train::createOptimizer("sgd", {"learning_rate=0.001"});
@@ -102,4 +86,4 @@
 
   /** check weight sharing from summary */
   model->summarize(std::cout, ML_TRAIN_SUMMARY_TENSOR);
-}
\ No newline at end of file
+}
diff --git a/Applications/Subgraph/jni/meson.build b/Applications/Subgraph/jni/meson.build
index dfcf8f90a..c8009f8d8 100644
--- a/Applications/Subgraph/jni/meson.build
+++ b/Applications/Subgraph/jni/meson.build
@@ -1,4 +1,4 @@
-subgraph_source = ['main.cpp', cifar_path / 'cifar_dataloader.cpp']
+subgraph_source = ['main.cpp']
 subgraph_dependencies = [app_utils_dep, nntrainer_dep, nntrainer_ccapi_dep]
 
 e = executable(
diff --git a/Applications/Subgraph/jni/model_util.h b/Applications/Subgraph/jni/model_util.h
index 92b8fcb46..60b307569 100644
--- a/Applications/Subgraph/jni/model_util.h
+++ b/Applications/Subgraph/jni/model_util.h
@@ -32,4 +32,4 @@ static std::string withKey(const std::string &key,
     ss << *iter;
 
   return ss.str();
-}
\ No newline at end of file
+}
diff --git a/Applications/Subgraph/jni/out_summary.txt b/Applications/Subgraph/jni/out_summary.txt
deleted file mode 100644
index 272cd43ae..000000000
--- a/Applications/Subgraph/jni/out_summary.txt
+++ /dev/null
@@ -1,241 +0,0 @@
-reorg created
-reorg deleted
-================================================================================
-          Layer name          Layer type    Output dimension         Input layer
-================================================================================
-             input/0               input           1:1:1:320
---------------------------------------------------------------------------------
-     subgraph/fc_in0     fully_connected           1:1:1:320             input/0
---------------------------------------------------------------------------------
-    subgraph/fc_out0     fully_connected           1:1:1:320     subgraph/fc_in0
---------------------------------------------------------------------------------
-             input/1            identity           1:1:1:320    subgraph/fc_out0
---------------------------------------------------------------------------------
-     subgraph/fc_in1     fully_connected           1:1:1:320             input/1
---------------------------------------------------------------------------------
-    subgraph/fc_out1     fully_connected           1:1:1:320     subgraph/fc_in1
---------------------------------------------------------------------------------
-             input/2            identity           1:1:1:320    subgraph/fc_out1
---------------------------------------------------------------------------------
-     subgraph/fc_in2     fully_connected           1:1:1:320             input/2
---------------------------------------------------------------------------------
-    subgraph/fc_out2     fully_connected           1:1:1:320     subgraph/fc_in2
---------------------------------------------------------------------------------
-             input/3            identity           1:1:1:320    subgraph/fc_out2
-================================================================================
-===================
-Layer Type: input
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-normalization: false
-standardization: false
-======properties:
-======weights:
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: fully_connected
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-weight Shape: 1:1:320:320 [ FP32 : NCHW ]
-weight Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-weight_regularizer: none
-weight_regularizer_constant: 1.000000
-weight_initializer: xavier_uniform
-weight_decay: 0.000000
-bias_decay: 0.000000
-bias_initializer: zeros
-disable_bias: false
-print: false
-unit: 320
-======properties:
-======weights:
-
-data addr: 0x7808e3ebd010
-Shape: 1:1:320:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-
-data addr: 0x7808e3f21010
-Shape: 1:1:1:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: fully_connected
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-weight Shape: 1:1:320:320 [ FP32 : NCHW ]
-weight Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-weight_regularizer: none
-weight_regularizer_constant: 1.000000
-weight_initializer: xavier_uniform
-weight_decay: 0.000000
-bias_decay: 0.000000
-bias_initializer: zeros
-disable_bias: false
-print: false
-unit: 320
-======properties:
-======weights:
-
-data addr: 0x7808e3f21510
-Shape: 1:1:320:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-
-data addr: 0x7808e3f85510
-Shape: 1:1:1:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: identity
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-======properties:
-======weights:
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: fully_connected
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-weight Shape: 1:1:320:320 [ FP32 : NCHW ]
-weight Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-weight_regularizer: none
-weight_regularizer_constant: 1.000000
-weight_initializer: xavier_uniform
-weight_decay: 0.000000
-bias_decay: 0.000000
-bias_initializer: zeros
-disable_bias: false
-print: false
-unit: 320
-======properties:
-======weights:
-
-data addr: 0x7808e3ebd010
-Shape: 1:1:320:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-
-data addr: 0x7808e3f21010
-Shape: 1:1:1:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: fully_connected
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-weight Shape: 1:1:320:320 [ FP32 : NCHW ]
-weight Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-weight_regularizer: none
-weight_regularizer_constant: 1.000000
-weight_initializer: xavier_uniform
-weight_decay: 0.000000
-bias_decay: 0.000000
-bias_initializer: zeros
-disable_bias: false
-print: false
-unit: 320
-======properties:
-======weights:
-
-data addr: 0x7808e3f21510
-Shape: 1:1:320:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-
-data addr: 0x7808e3f85510
-Shape: 1:1:1:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: identity
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-======properties:
-======weights:
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: fully_connected
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-weight Shape: 1:1:320:320 [ FP32 : NCHW ]
-weight Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-weight_regularizer: none
-weight_regularizer_constant: 1.000000
-weight_initializer: xavier_uniform
-weight_decay: 0.000000
-bias_decay: 0.000000
-bias_initializer: zeros
-disable_bias: false
-print: false
-unit: 320
-======properties:
-======weights:
-
-data addr: 0x7808e3ebd010
-Shape: 1:1:320:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-
-data addr: 0x7808e3f21010
-Shape: 1:1:1:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: fully_connected
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-weight Shape: 1:1:320:320 [ FP32 : NCHW ]
-weight Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-weight_regularizer: none
-weight_regularizer_constant: 1.000000
-weight_initializer: xavier_uniform
-weight_decay: 0.000000
-bias_decay: 0.000000
-bias_initializer: zeros
-disable_bias: false
-print: false
-unit: 320
-======properties:
-======weights:
-
-data addr: 0x7808e3f21510
-Shape: 1:1:320:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-
-data addr: 0x7808e3f85510
-Shape: 1:1:1:320 [ FP32 : NCHW ]
-[0 0 0 ... 0 0 0]
-======metrics:
-Layer loss value: 0
-===================
-Layer Type: identity
-======shape information:
-input Shape: 1:1:1:320 [ FP32 : NCHW ]
-output Shape: 1:1:1:320 [ FP32 : NCHW ]
-======meta properties:
-======properties:
-======weights:
-======metrics:
-Layer loss value: 0
diff --git a/nntrainer/compiler/subgraph_realizer.cpp b/nntrainer/compiler/subgraph_realizer.cpp
index aa9a83b22..904988f9f 100644
--- a/nntrainer/compiler/subgraph_realizer.cpp
+++ b/nntrainer/compiler/subgraph_realizer.cpp
@@ -79,4 +79,4 @@ SubgraphRealizer::realize(const GraphRepresentation &reference) {
   return subgraph_realizer(reference);
 }
 
-} // namespace nntrainer
\ No newline at end of file
+} // namespace nntrainer
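
For reference, below is a condensed, standalone sketch of the usage pattern the
hunks above introduce: build one subgraph once, then instantiate it several
times through addWithReferenceLayers with is_shared_subgraph=true so that all
instances reuse the same weights. The ccapi calls, layer names, and properties
mirror the patch; the headers, the hard-coded n_sg value, and the plain
"key=value" property strings (used here instead of the withKey() helper from
model_util.h) are assumptions for illustration only.

// sketch.cpp - shared-subgraph usage pattern (assumptions noted above)
#include <iostream>
#include <memory>
#include <string>
#include <vector>

#include <layer.h>
#include <model.h>
#include <optimizer.h>

int main() {
  auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);

  /// input layer feeding the first subgraph instance
  model->addLayer(
    ml::train::createLayer("input", {"name=input", "input_shape=1:1:320"}));

  /// reusable subgraph: two stacked fully-connected layers
  std::vector<std::shared_ptr<ml::train::Layer>> subgraph = {
    ml::train::createLayer("fully_connected", {"name=fc_in", "unit=320"}),
    ml::train::createLayer("fully_connected", {"name=fc_out", "unit=320"})};

  const unsigned int n_sg = 3; /**< number of subgraph instances (assumed) */
  for (unsigned int idx_sg = 0; idx_sg < n_sg; ++idx_sg) {
    /// is_shared_subgraph=true makes every instance reuse the same weights
    model->addWithReferenceLayers(
      subgraph, "subgraph", {}, {"fc_in"}, {"fc_out"},
      ml::train::ReferenceLayersType::SUBGRAPH,
      {"subgraph_idx=" + std::to_string(idx_sg), "is_shared_subgraph=true"});
  }

  model->setOptimizer(
    ml::train::createOptimizer("sgd", {"learning_rate=0.001"}));
  model->compile();
  model->initialize();

  /// identical weight data addresses in the tensor summary confirm sharing
  model->summarize(std::cout, ML_TRAIN_SUMMARY_TENSOR);
  return 0;
}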