Skip to content

Commit

Permalink
[ Test/App ] App to test subgraph function-related functions
Browse files Browse the repository at this point in the history
- This commit writes an app to test functionality to enable subgraph
  feature.
- This commit only tests the `shared_from` property.

Self evaluation:

Build test: [X]Passed [ ]Failed [ ]Skipped
Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Eunju Yang <[email protected]>
  • Loading branch information
EunjuYang committed Dec 26, 2024
1 parent ea8307c commit ba7e166
Show file tree
Hide file tree
Showing 5 changed files with 393 additions and 0 deletions.
105 changes: 105 additions & 0 deletions Applications/Subgraph/jni/main.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2024 Eunju Yang <[email protected]>
*
* @file main.cpp
* @date 27 Dec 2024
* @brief Test Application for shared_from
* @see https://github.com/nnstreamer/nntrainer
* @author Eunju Yang <[email protected]>
* @bug No known bugs except for NYI items
*/

#include <cifar_dataloader.h>
#include <layer.h>
#include <model.h>
#include <optimizer.h>

#include <array>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

#include <model_util.h>

using LayerHandle = std::shared_ptr<ml::train::Layer>;
using ModelHandle = std::unique_ptr<ml::train::Model>;
using UserDataType = std::unique_ptr<nntrainer::util::DataLoader>;

/**
 * @brief train data callback
 *
 * Fetches the next batch from the DataLoader passed via @p user_data.
 *
 * @param input buffer(s) to be filled with the next input batch
 * @param label buffer(s) to be filled with the next label batch
 * @param last set to true by the loader when the dataset is exhausted
 * @param user_data opaque pointer; must point to a nntrainer::util::DataLoader
 * @return int always 0 (success)
 */
int trainData_cb(float **input, float **label, bool *last, void *user_data) {
  // static_cast is the correct (and greppable) cast for void* -> T*;
  // reinterpret_cast is unnecessary here
  auto data = static_cast<nntrainer::util::DataLoader *>(user_data);

  data->next(input, label, last);
  return 0;
}

/**
 * @brief Create one subgraph: fc_in -> fc_out -> identity.
 *
 * Every subgraph's FC layers declare `shared_from` pointing at the
 * corresponding layers of subgraph 0, so all subgraphs reuse one weight set.
 *
 * @param scope name prefix for the layers of this subgraph
 * @param subgraph_idx index of the subgraph being created
 * @return vector of layers that contain subgraph
 */
std::vector<LayerHandle> createSubGraph(const std::string &scope,
                                        int subgraph_idx) {

  using ml::train::createLayer;

  const std::string idx = std::to_string(subgraph_idx);
  const std::string fc_in_name = scope + "/fc_in" + idx;
  const std::string fc_out_name = scope + "/fc_out" + idx;

  std::vector<LayerHandle> layers;

  /// first FC layer: consumes this subgraph's input, shares weights with fc_in0
  layers.push_back(
    createLayer("fully_connected",
                {withKey("name", fc_in_name), withKey("unit", 320),
                 withKey("input_layers", "input/" + idx),
                 withKey("shared_from", scope + "/fc_in0")}));

  /// second FC layer: shares weights with fc_out0
  layers.push_back(
    createLayer("fully_connected",
                {withKey("name", fc_out_name), withKey("unit", 320),
                 withKey("input_layers", fc_in_name),
                 withKey("shared_from", scope + "/fc_out0")}));

  /// identity layer exposing the output under the next subgraph's input name
  layers.push_back(createLayer(
    "identity",
    {withKey("name", "input/" + std::to_string(subgraph_idx + 1))}));

  return layers;
}

/**
 * @brief entry point: builds a model of @c n_sg chained subgraphs whose
 *        fully-connected layers share weights via the `shared_from` property,
 *        then prints a tensor-level summary so weight sharing can be verified.
 * @return 0 on success, 1 on failure
 */
int main(int argc, char *argv[]) {

  /** model */
  ModelHandle model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);

  /** number of subgraphs */
  const int n_sg = 3;

  /** add input layer */
  model->addLayer(
    ml::train::createLayer("input", {"name=input/0", "input_shape=1:1:320"}));

  /** add subgraphs with shared_from */
  for (int idx_sg = 0; idx_sg < n_sg; ++idx_sg) {
    for (auto &layer : createSubGraph(std::string("subgraph"), idx_sg))
      model->addLayer(layer);
  }

  auto optimizer = ml::train::createOptimizer("sgd", {"learning_rate=0.001"});
  model->setOptimizer(std::move(optimizer));

  /** model compilation: report failure via the exit code instead of
   *  letting an exception escape main() */
  if (model->compile(ml::train::ExecutionMode::INFERENCE)) {
    std::cerr << "model compilation failed!" << std::endl;
    return 1;
  }

  /** model initialization */
  if (model->initialize(ml::train::ExecutionMode::INFERENCE)) {
    std::cerr << "model initialization failed!" << std::endl;
    return 1;
  }

  /** check weight sharing from summary */
  model->summarize(std::cout, ML_TRAIN_SUMMARY_TENSOR);

  return 0;
}
11 changes: 11 additions & 0 deletions Applications/Subgraph/jni/meson.build
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Sources for the subgraph test app; reuses the shared CIFAR data loader.
subgraph_source = ['main.cpp', cifar_path / 'cifar_dataloader.cpp']
# Needs app utils, the nntrainer core, and its C++ (ccapi) bindings.
subgraph_dependencies = [app_utils_dep, nntrainer_dep, nntrainer_ccapi_dep]

# Build (and optionally install) the test executable.
e = executable(
'nntrainer_subgraph',
subgraph_source,
include_directories: [include_directories('.'), cifar_include_dir],
dependencies: subgraph_dependencies,
install: get_option('install-app'),
install_dir: application_install_dir,
)
35 changes: 35 additions & 0 deletions Applications/Subgraph/jni/model_util.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#include <initializer_list>
#include <iterator>
#include <sstream>
#include <stdexcept>
#include <string>

/**
 * @brief make "key=value" from key and value
 *
 * @tparam T type of a value (must be streamable to std::ostream)
 * @param key key
 * @param value value
 * @return std::string with "key=value"
 */
template <typename T>
static std::string withKey(const std::string &key, const T &value) {
  std::ostringstream buf;
  buf << key << '=' << value;
  return buf.str();
}

/**
 * @brief make "key=v1,v2,..." from key and a list of values
 *
 * @tparam T element type of the list
 * @param key key
 * @param value list of values, joined with ','
 * @return std::string with "key=v1,v2,..."
 * @throws std::invalid_argument when @p value is empty
 */
template <typename T>
static std::string withKey(const std::string &key,
                           std::initializer_list<T> value) {
  if (std::empty(value)) {
    throw std::invalid_argument("empty data cannot be converted");
  }

  std::ostringstream out;
  out << key << "=";

  // join elements with ',' using a first-element flag instead of
  // iterator arithmetic
  bool first = true;
  for (const auto &v : value) {
    if (!first) {
      out << ',';
    }
    out << v;
    first = false;
  }

  return out.str();
}
241 changes: 241 additions & 0 deletions Applications/Subgraph/jni/out_summary.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,241 @@
reorg created
reorg deleted
================================================================================
Layer name Layer type Output dimension Input layer
================================================================================
input/0 input 1:1:1:320
--------------------------------------------------------------------------------
subgraph/fc_in0 fully_connected 1:1:1:320 input/0
--------------------------------------------------------------------------------
subgraph/fc_out0 fully_connected 1:1:1:320 subgraph/fc_in0
--------------------------------------------------------------------------------
input/1 identity 1:1:1:320 subgraph/fc_out0
--------------------------------------------------------------------------------
subgraph/fc_in1 fully_connected 1:1:1:320 input/1
--------------------------------------------------------------------------------
subgraph/fc_out1 fully_connected 1:1:1:320 subgraph/fc_in1
--------------------------------------------------------------------------------
input/2 identity 1:1:1:320 subgraph/fc_out1
--------------------------------------------------------------------------------
subgraph/fc_in2 fully_connected 1:1:1:320 input/2
--------------------------------------------------------------------------------
subgraph/fc_out2 fully_connected 1:1:1:320 subgraph/fc_in2
--------------------------------------------------------------------------------
input/3 identity 1:1:1:320 subgraph/fc_out2
================================================================================
===================<input/0>
Layer Type: input
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
normalization: false
standardization: false
======properties:
======weights:
======metrics:
Layer loss value: 0
===================<subgraph/fc_in0>
Layer Type: fully_connected
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
weight Shape: 1:1:320:320 [ FP32 : NCHW ]
weight Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
weight_regularizer: none
weight_regularizer_constant: 1.000000
weight_initializer: xavier_uniform
weight_decay: 0.000000
bias_decay: 0.000000
bias_initializer: zeros
disable_bias: false
print: false
unit: 320
======properties:
======weights:
<N9nntrainer6TensorE at 0x5cfcfe77df70>
data addr: 0x7808e3ebd010
Shape: 1:1:320:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
<N9nntrainer6TensorE at 0x5cfcfe77e340>
data addr: 0x7808e3f21010
Shape: 1:1:1:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
======metrics:
Layer loss value: 0
===================<subgraph/fc_out0>
Layer Type: fully_connected
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
weight Shape: 1:1:320:320 [ FP32 : NCHW ]
weight Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
weight_regularizer: none
weight_regularizer_constant: 1.000000
weight_initializer: xavier_uniform
weight_decay: 0.000000
bias_decay: 0.000000
bias_initializer: zeros
disable_bias: false
print: false
unit: 320
======properties:
======weights:
<N9nntrainer6TensorE at 0x5cfcfe77fb40>
data addr: 0x7808e3f21510
Shape: 1:1:320:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
<N9nntrainer6TensorE at 0x5cfcfe77fcf0>
data addr: 0x7808e3f85510
Shape: 1:1:1:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
======metrics:
Layer loss value: 0
===================<input/1>
Layer Type: identity
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
======properties:
======weights:
======metrics:
Layer loss value: 0
===================<subgraph/fc_in1>
Layer Type: fully_connected
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
weight Shape: 1:1:320:320 [ FP32 : NCHW ]
weight Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
weight_regularizer: none
weight_regularizer_constant: 1.000000
weight_initializer: xavier_uniform
weight_decay: 0.000000
bias_decay: 0.000000
bias_initializer: zeros
disable_bias: false
print: false
unit: 320
======properties:
======weights:
<N9nntrainer6TensorE at 0x5cfcfe77df70>
data addr: 0x7808e3ebd010
Shape: 1:1:320:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
<N9nntrainer6TensorE at 0x5cfcfe77e340>
data addr: 0x7808e3f21010
Shape: 1:1:1:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
======metrics:
Layer loss value: 0
===================<subgraph/fc_out1>
Layer Type: fully_connected
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
weight Shape: 1:1:320:320 [ FP32 : NCHW ]
weight Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
weight_regularizer: none
weight_regularizer_constant: 1.000000
weight_initializer: xavier_uniform
weight_decay: 0.000000
bias_decay: 0.000000
bias_initializer: zeros
disable_bias: false
print: false
unit: 320
======properties:
======weights:
<N9nntrainer6TensorE at 0x5cfcfe77fb40>
data addr: 0x7808e3f21510
Shape: 1:1:320:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
<N9nntrainer6TensorE at 0x5cfcfe77fcf0>
data addr: 0x7808e3f85510
Shape: 1:1:1:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
======metrics:
Layer loss value: 0
===================<input/2>
Layer Type: identity
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
======properties:
======weights:
======metrics:
Layer loss value: 0
===================<subgraph/fc_in2>
Layer Type: fully_connected
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
weight Shape: 1:1:320:320 [ FP32 : NCHW ]
weight Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
weight_regularizer: none
weight_regularizer_constant: 1.000000
weight_initializer: xavier_uniform
weight_decay: 0.000000
bias_decay: 0.000000
bias_initializer: zeros
disable_bias: false
print: false
unit: 320
======properties:
======weights:
<N9nntrainer6TensorE at 0x5cfcfe77df70>
data addr: 0x7808e3ebd010
Shape: 1:1:320:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
<N9nntrainer6TensorE at 0x5cfcfe77e340>
data addr: 0x7808e3f21010
Shape: 1:1:1:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
======metrics:
Layer loss value: 0
===================<subgraph/fc_out2>
Layer Type: fully_connected
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
weight Shape: 1:1:320:320 [ FP32 : NCHW ]
weight Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
weight_regularizer: none
weight_regularizer_constant: 1.000000
weight_initializer: xavier_uniform
weight_decay: 0.000000
bias_decay: 0.000000
bias_initializer: zeros
disable_bias: false
print: false
unit: 320
======properties:
======weights:
<N9nntrainer6TensorE at 0x5cfcfe77fb40>
data addr: 0x7808e3f21510
Shape: 1:1:320:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
<N9nntrainer6TensorE at 0x5cfcfe77fcf0>
data addr: 0x7808e3f85510
Shape: 1:1:1:320 [ FP32 : NCHW ]
[0 0 0 ... 0 0 0]
======metrics:
Layer loss value: 0
===================<input/3>
Layer Type: identity
======shape information:
input Shape: 1:1:1:320 [ FP32 : NCHW ]
output Shape: 1:1:1:320 [ FP32 : NCHW ]
======meta properties:
======properties:
======weights:
======metrics:
Layer loss value: 0
Loading

0 comments on commit ba7e166

Please sign in to comment.