Skip to content

Commit

Permalink
[Layer] add "sub layer"
Browse files Browse the repository at this point in the history
- added "sub layer"

**Self evaluation:**
1. Build test:   [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
  • Loading branch information
baek2sm committed Oct 11, 2024
1 parent 8c0ca15 commit 7746b95
Show file tree
Hide file tree
Showing 3 changed files with 225 additions and 0 deletions.
94 changes: 94 additions & 0 deletions nntrainer/layers/sub_layer.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,94 @@
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2024 SeungBaek Hong <[email protected]>
*
* @file sub_layer.cpp
* @date 26 August 2024
* @see https://github.com/nnstreamer/nntrainer
* @author SeungBaek Hong <[email protected]>
* @bug No known bugs except for NYI items
* @brief This is sub layer class (operation layer)
*
*/

#include <nntrainer_error.h>
#include <nntrainer_log.h>
#include <node_exporter.h>
#include <sub_layer.h>
#include <util_func.h>

#include <layer_context.h>

namespace nntrainer {

static constexpr size_t SINGLE_INOUT_IDX = 0;

/**
 * @brief Finalize the sub layer: the element-wise subtraction preserves shape,
 *        so the single output tensor takes the dimension of the first input.
 *        NOTE(review): assumes both inputs have identical dimensions — no
 *        check is performed here; confirm upstream validation covers this.
 */
void SubLayer::finalize(InitLayerContext &context) {
  context.setOutputDimensions({context.getInputDimensions()[0]});
}

/**
 * @brief Forward pass of the sub layer: output = input0 - input1,
 *        computed element-wise directly into the output tensor.
 */
void SubLayer::forwarding(RunLayerContext &context, bool training) {
  const Tensor &minuend = context.getInput(0);
  const Tensor &subtrahend = context.getInput(1);

  Tensor &output = context.getOutput(SINGLE_INOUT_IDX);
  minuend.subtract(subtrahend, output);
}

/**
 * @brief Incremental forward pass of the sub layer. Subtracts a single-step
 *        slice of input1 from input0 into the output, one batch at a time.
 *
 * @note When @p from is non-zero, only a step size of exactly 1 is supported;
 *       the [from, to) window is then remapped to [0, 1) within each batch's
 *       feature block.
 */
void SubLayer::incremental_forwarding(RunLayerContext &context,
                                      unsigned int from, unsigned int to,
                                      bool training) {
  Tensor &hidden_ = context.getOutput(SINGLE_INOUT_IDX);
  TensorDim hidden_dim = hidden_.getDim();
  TensorDim hidden_step_dim = hidden_dim;

  if (from) {
    NNTR_THROW_IF(to - from != 1, std::invalid_argument)
      << "incremental step size is not 1";
    from = 0;
    to = 1;
  }

  hidden_step_dim.batch(1);
  hidden_step_dim.height(to - from);

  /// the inputs and their per-step dimension do not depend on the batch
  /// index, so fetch and compute them once, outside the loop
  const Tensor &input0 = context.getInput(0);
  const Tensor &input1 = context.getInput(1);

  TensorDim input_dim = input0.getDim();
  TensorDim input_step_dim = input_dim;
  input_step_dim.batch(1);
  input_step_dim.height(to - from);

  for (unsigned int b = 0; b < hidden_.batch(); ++b) {
    /// shared (non-owning) views into the b-th batch's step slice
    Tensor hidden_step = hidden_.getSharedDataTensor(
      hidden_step_dim, b * hidden_dim.getFeatureLen(), true);

    Tensor input0_step = input0.getSharedDataTensor(
      input_step_dim, b * input_dim.getFeatureLen(), true);

    Tensor input1_step = input1.getSharedDataTensor(
      input_step_dim, b * input_dim.getFeatureLen(), true);

    input0_step.subtract(input1_step, hidden_step);
  }
}

/**
 * @brief Backward pass of the sub layer. For output = a - b:
 *        d(out)/da = 1  -> pass the incoming derivative straight through,
 *        d(out)/db = -1 -> negate the incoming derivative.
 */
void SubLayer::calcDerivative(RunLayerContext &context) {
  const Tensor &incoming = context.getIncomingDerivative(SINGLE_INOUT_IDX);

  context.getOutgoingDerivative(0).copy(incoming);

  /// negate directly into the outgoing derivative via the out-parameter
  /// overload, avoiding the temporary produced by multiply-then-copy
  incoming.multiply(-1.0f, context.getOutgoingDerivative(1));
}

/**
 * @brief Set the properties of the sub layer.
 *
 * @param values string-encoded properties to apply
 * @throws exception::not_supported if any property is not recognized
 */
void SubLayer::setProperty(const std::vector<std::string> &values) {
  auto remain_props = loadProperties(values, sub_props);
  if (!remain_props.empty()) {
    /// report the number of *unrecognized* properties, not the total number
    /// of properties that were passed in
    std::string msg = "[SubLayer] Unknown Layer Properties count " +
                      std::to_string(remain_props.size());
    throw exception::not_supported(msg);
  }
}
} /* namespace nntrainer */
103 changes: 103 additions & 0 deletions nntrainer/layers/sub_layer.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2024 SeungBaek Hong <[email protected]>
*
* @file sub_layer.h
* @date 26 August 2024
* @see https://github.com/nnstreamer/nntrainer
* @author SeungBaek Hong <[email protected]>
* @bug No known bugs except for NYI items
* @brief This is sub layer class (operation layer)
*
*/

#ifndef __SUB_LAYER_H__
#define __SUB_LAYER_H__
#ifdef __cplusplus

#include <common_properties.h>
#include <layer_devel.h>

namespace nntrainer {

/**
* @class Sub Layer
* @brief Sub Layer
*/
class SubLayer : public Layer {
public:
  /**
   * @brief Constructor of Sub Layer
   */
  SubLayer() : Layer(), sub_props(props::Print()) {}

  /**
   * @brief Destructor of Sub Layer
   */
  ~SubLayer() = default;

  /**
   * @brief Move constructor of Sub Layer.
   * @param[in] rhs SubLayer to be moved.
   */
  SubLayer(SubLayer &&rhs) noexcept = default;

  /**
   * @brief Move assignment operator.
   * @param[in] rhs SubLayer to be moved.
   */
  SubLayer &operator=(SubLayer &&rhs) noexcept = default;

  /**
   * @copydoc Layer::finalize(InitLayerContext &context)
   */
  void finalize(InitLayerContext &context) override;

  /**
   * @copydoc Layer::forwarding(RunLayerContext &context, bool training)
   */
  void forwarding(RunLayerContext &context, bool training) override;

  /**
   * @copydoc Layer::incremental_forwarding(RunLayerContext &context, unsigned
   * int from, unsigned int to, bool training)
   */
  void incremental_forwarding(RunLayerContext &context, unsigned int from,
                              unsigned int to, bool training) override;

  /**
   * @copydoc Layer::calcDerivative(RunLayerContext &context)
   */
  void calcDerivative(RunLayerContext &context) override;

  /**
   * @copydoc bool supportBackwarding() const
   */
  bool supportBackwarding() const override { return true; }

  /**
   * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
   * method)
   */
  void exportTo(Exporter &exporter,
                const ml::train::ExportMethods &method) const override {}

  /**
   * @copydoc Layer::setProperty(const std::vector<std::string> &values)
   */
  void setProperty(const std::vector<std::string> &values) override;

  /**
   * @copydoc Layer::getType()
   */
  const std::string getType() const override { return SubLayer::type; }

  std::tuple<props::Print> sub_props; /**< sub layer properties */

  inline static const std::string type = "sub"; /**< layer type string */
};

} // namespace nntrainer

#endif /* __cplusplus */
#endif /* __SUB_LAYER_H__ */
28 changes: 28 additions & 0 deletions test/unittest/layers/unittest_layers_sub.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
// SPDX-License-Identifier: Apache-2.0
/**
* Copyright (C) 2024 SeungBaek Hong <[email protected]>
*
* @file unittest_layers_sub.cpp
* @date 26 August 2024
* @brief Sub Layer Test
* @see https://github.com/nnstreamer/nntrainer
* @author SeungBaek Hong <[email protected]>
* @bug No known bugs except for NYI items
*/
#include <tuple>

#include <gtest/gtest.h>

#include <layers_common_tests.h>
#include <sub_layer.h>

// Semantic test parameters for the sub layer.
// NOTE(review): the trailing integer argument (1 vs 2) presumably selects the
// number of input tensors wired up by the harness — confirm against the
// LayerSemanticsParamType declaration in layers_common_tests.h.
auto semantic_sub = LayerSemanticsParamType(
  nntrainer::createLayer<nntrainer::SubLayer>, nntrainer::SubLayer::type, {},
  LayerCreateSetPropertyOptions::AVAILABLE_FROM_APP_CONTEXT, false, 1);

// Same semantic checks, second configuration (see note above semantic_sub).
auto semantic_sub_multi = LayerSemanticsParamType(
  nntrainer::createLayer<nntrainer::SubLayer>, nntrainer::SubLayer::type, {},
  LayerCreateSetPropertyOptions::AVAILABLE_FROM_APP_CONTEXT, false, 2);

// Instantiate the shared LayerSemantics suite for both configurations.
GTEST_PARAMETER_TEST(Sub, LayerSemantics,
                     ::testing::Values(semantic_sub, semantic_sub_multi));

0 comments on commit 7746b95

Please sign in to comment.