From 4c3af20187869298fd0f86b19af0df90d08ce9b2 Mon Sep 17 00:00:00 2001 From: Seungbaek Hong Date: Thu, 14 Nov 2024 14:22:12 +0900 Subject: [PATCH] [Graph] add inplace setting through layer property - now you can set in-place flag through layer property of tensor operation layers. - rename "initializeInPlaceType" func to "initializeInPlace". now "is_inplace" property is set in that function, too. - in some layers, support_backwarding flag may be changed by the in-place setting. **Self evaluation:** 1. Build test: [X]Passed [ ]Failed [ ]Skipped 2. Run test: [X]Passed [ ]Failed [ ]Skipped Signed-off-by: Seungbaek Hong --- nntrainer/graph/network_graph.cpp | 2 +- nntrainer/layers/add_layer.cpp | 1 + nntrainer/layers/add_layer.h | 22 ++++++++++++-- nntrainer/layers/bn_layer.h | 14 ++++++--- nntrainer/layers/cl_layers/reshape_cl.h | 8 +++-- nntrainer/layers/common_properties.h | 10 ++++++ nntrainer/layers/divide_layer.h | 30 ++++++++++++++++-- nntrainer/layers/dropout.h | 11 +------ nntrainer/layers/flatten_layer.h | 10 +++--- nntrainer/layers/identity_layer.h | 15 +++------ nntrainer/layers/input_layer.cpp | 4 +-- nntrainer/layers/input_layer.h | 9 ++++-- nntrainer/layers/layer_devel.h | 9 ++++-- nntrainer/layers/layer_node.cpp | 6 ++-- nntrainer/layers/layer_node.h | 4 +-- nntrainer/layers/layer_normalization_layer.h | 14 ++++++--- nntrainer/layers/loss/loss_layer.h | 4 --- nntrainer/layers/multiout_layer.h | 15 +++------ nntrainer/layers/multiply_layer.h | 32 +++++++++++++++++--- nntrainer/layers/reshape_layer.h | 15 +++------ nntrainer/layers/subtract_layer.h | 23 ++++++++++++-- nntrainer/layers/time_dist.h | 5 --- nntrainer/utils/node_exporter.h | 1 + 23 files changed, 168 insertions(+), 96 deletions(-) diff --git a/nntrainer/graph/network_graph.cpp b/nntrainer/graph/network_graph.cpp index dedd69fe60..a918db36f7 100644 --- a/nntrainer/graph/network_graph.cpp +++ b/nntrainer/graph/network_graph.cpp @@ -655,7 +655,7 @@ NetworkGraph::canExecuteInPlace(const 
std::shared_ptr &lnode) { return InPlaceType::RESTRICTING; } - InPlaceType inplace_type = lnode->initializeInPlaceType(); + InPlaceType inplace_type = lnode->initializeInPlace(); /** Set inplace_type based on the input connections */ switch (inplace_type) { /** A case where it cannot operate in-place */ diff --git a/nntrainer/layers/add_layer.cpp b/nntrainer/layers/add_layer.cpp index 6396c7cc5f..3320d2aeb6 100644 --- a/nntrainer/layers/add_layer.cpp +++ b/nntrainer/layers/add_layer.cpp @@ -11,6 +11,7 @@ * */ +#include "common_properties.h" #include #include #include diff --git a/nntrainer/layers/add_layer.h b/nntrainer/layers/add_layer.h index a9c1a53b7d..97e58f71b1 100644 --- a/nntrainer/layers/add_layer.h +++ b/nntrainer/layers/add_layer.h @@ -35,7 +35,8 @@ class AddLayer : public BinaryOperationLayer { /** * @brief Constructor of Add Layer */ - AddLayer() : BinaryOperationLayer(), add_props(props::Print()) {} + AddLayer() : + BinaryOperationLayer(), add_props(props::Print(), props::InPlaceProp()) {} /** * @brief Move constructor of Add Layer. 
@@ -74,6 +75,23 @@ class AddLayer : public BinaryOperationLayer { */ bool supportBackwarding() const final { return true; }; + /** + * @brief Initialize the in-place settings of the layer + * @return InPlaceType + */ + InPlaceType initializeInPlace() final { + if (std::get(add_props).empty() || + std::get(add_props).get()) { + is_inplace = true; + } else { + is_inplace = false; + } + if (!supportInPlace()) + return InPlaceType::NONE; + else + return InPlaceType::NON_RESTRICTING; + } + /** * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods * method) @@ -91,7 +109,7 @@ class AddLayer : public BinaryOperationLayer { */ const std::string getType() const final { return AddLayer::type; } - std::tuple add_props; + std::tuple add_props; inline static const std::string type = "add"; }; diff --git a/nntrainer/layers/bn_layer.h b/nntrainer/layers/bn_layer.h index f8d611cd9d..c7638edc71 100644 --- a/nntrainer/layers/bn_layer.h +++ b/nntrainer/layers/bn_layer.h @@ -100,6 +100,15 @@ class BatchNormalizationLayer : public Layer { */ bool supportBackwarding() const override { return true; } + /** + * @brief Initialize the in-place settings of the layer + * @return InPlaceType + */ + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::NON_RESTRICTING; + } + using Layer::setProperty; /** @@ -108,11 +117,6 @@ class BatchNormalizationLayer : public Layer { */ void setProperty(const std::vector &values) override; - /** - * @copydoc Layer::supportInPlace() - */ - bool supportInPlace() const override { return true; } - /** * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch) */ diff --git a/nntrainer/layers/cl_layers/reshape_cl.h b/nntrainer/layers/cl_layers/reshape_cl.h index 3d19a0e0b6..d4895aeae7 100644 --- a/nntrainer/layers/cl_layers/reshape_cl.h +++ b/nntrainer/layers/cl_layers/reshape_cl.h @@ -87,9 +87,13 @@ class ReshapeLayerCl : public Layer { bool supportBackwarding() const override { return false; }; /** - 
* @copydoc Layer::supportInPlace() + * @brief Initialize the in-place settings of the layer + * @return InPlaceType */ - bool supportInPlace() const override { return true; } + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::NON_RESTRICTING; + } /** * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods diff --git a/nntrainer/layers/common_properties.h b/nntrainer/layers/common_properties.h index cc22e7cc56..baad9ae650 100644 --- a/nntrainer/layers/common_properties.h +++ b/nntrainer/layers/common_properties.h @@ -124,6 +124,16 @@ class TensorDimension : public TensorDimProperty { using prop_tag = dimension_prop_tag; /**< property type */ }; +/** + * @brief Inplace operation property + * + */ +class InPlaceProp : public nntrainer::Property { +public: + static constexpr const char *key = "inplace"; /**< unique key to access */ + using prop_tag = bool_prop_tag; /**< property type */ +}; + /** * @brief trainable property, use this to set and check how if certain layer is * trainable diff --git a/nntrainer/layers/divide_layer.h b/nntrainer/layers/divide_layer.h index 9a2bc85c91..8bfad3b7aa 100644 --- a/nntrainer/layers/divide_layer.h +++ b/nntrainer/layers/divide_layer.h @@ -30,7 +30,10 @@ class DivideLayer : public BinaryOperationLayer { /** * @brief Constructor of Divide Layer */ - DivideLayer() : BinaryOperationLayer(), divide_props(props::Print()) {} + DivideLayer() : + BinaryOperationLayer(), + divide_props(props::Print(), props::InPlaceProp()), + support_backwarding(true) {} /** * @brief Destructor of Divide Layer @@ -72,7 +75,27 @@ class DivideLayer : public BinaryOperationLayer { /** * @copydoc bool supportBackwarding() const */ - bool supportBackwarding() const final { return true; }; + bool supportBackwarding() const final { return support_backwarding; }; + + /** + * @brief Initialize the in-place settings of the layer + * @return InPlaceType + */ + InPlaceType initializeInPlace() final { + if 
(std::get(divide_props).empty() || + !std::get(divide_props).get()) { + is_inplace = false; + support_backwarding = true; + } else { + is_inplace = true; + support_backwarding = false; + } + + if (!supportInPlace()) + return InPlaceType::NONE; + else + return InPlaceType::NON_RESTRICTING; + } /** * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods @@ -91,7 +114,8 @@ class DivideLayer : public BinaryOperationLayer { */ const std::string getType() const final { return DivideLayer::type; }; - std::tuple divide_props; + std::tuple divide_props; + bool support_backwarding; /**< support backwarding */ inline static const std::string type = "divide"; }; diff --git a/nntrainer/layers/dropout.h b/nntrainer/layers/dropout.h index 1425c008df..2e71432b7d 100644 --- a/nntrainer/layers/dropout.h +++ b/nntrainer/layers/dropout.h @@ -30,9 +30,7 @@ class DropOutLayer : public Layer { * @brief Constructor of DropOut Layer */ DropOutLayer(float dropout = 0.0) : - Layer(), - dropout_rate(props::DropOutRate(dropout)), - epsilon(1e-3) {} + Layer(), dropout_rate(props::DropOutRate(dropout)), epsilon(1e-3) {} /** * @brief Destructor of DropOut Layer @@ -89,13 +87,6 @@ class DropOutLayer : public Layer { */ void setProperty(const std::vector &values) override; - /** - * @copydoc Layer::supportInPlace() - * - * @todo Enable in-place support once supported by manager - */ - bool supportInPlace() const override { return false; } - inline static const std::string type = "dropout"; private: diff --git a/nntrainer/layers/flatten_layer.h b/nntrainer/layers/flatten_layer.h index e954697a09..a39e1dd2a0 100644 --- a/nntrainer/layers/flatten_layer.h +++ b/nntrainer/layers/flatten_layer.h @@ -60,14 +60,12 @@ class FlattenLayer : public ReshapeLayer { void setProperty(const std::vector &values) override; /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @return InPlaceType */ - InPlaceType initializeInPlaceType() 
final { - if (!supportInPlace()) - return InPlaceType::NONE; - else - return InPlaceType::RESTRICTING; + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::RESTRICTING; } /** diff --git a/nntrainer/layers/identity_layer.h b/nntrainer/layers/identity_layer.h index 780ad41b7a..1a2ebcf80e 100644 --- a/nntrainer/layers/identity_layer.h +++ b/nntrainer/layers/identity_layer.h @@ -70,19 +70,12 @@ class IdentityLayer final : public Layer { bool supportBackwarding() const override { return true; }; /** - * @copydoc Layer::supportInPlace() - */ - bool supportInPlace() const override { return true; } - - /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @return InPlaceType */ - InPlaceType initializeInPlaceType() final { - if (!supportInPlace()) - return InPlaceType::NONE; - else - return InPlaceType::RESTRICTING; + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::RESTRICTING; } /** diff --git a/nntrainer/layers/input_layer.cpp b/nntrainer/layers/input_layer.cpp index ad55284bb0..8ec370ec99 100644 --- a/nntrainer/layers/input_layer.cpp +++ b/nntrainer/layers/input_layer.cpp @@ -33,9 +33,7 @@ namespace nntrainer { static constexpr size_t SINGLE_INOUT_IDX = 0; InputLayer::InputLayer() : - Layer(), - input_props(props::Normalization(), props::Standardization()), - is_inplace(true) {} + Layer(), input_props(props::Normalization(), props::Standardization()) {} void InputLayer::setProperty(const std::vector &values) { auto remain_props = loadProperties(values, input_props); diff --git a/nntrainer/layers/input_layer.h b/nntrainer/layers/input_layer.h index cfab71333b..2c4899f45e 100644 --- a/nntrainer/layers/input_layer.h +++ b/nntrainer/layers/input_layer.h @@ -80,9 +80,13 @@ class InputLayer : public Layer { bool supportBackwarding() const override { return false; }; /** - * @copydoc Layer::supportInPlace() + * @brief Initialize the in-place settings 
of the layer + * @return InPlaceType */ - bool supportInPlace() const override { return is_inplace; } + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::NON_RESTRICTING; + } /** * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods @@ -105,7 +109,6 @@ class InputLayer : public Layer { private: std::tuple input_props; - bool is_inplace = true; }; } // namespace nntrainer diff --git a/nntrainer/layers/layer_devel.h b/nntrainer/layers/layer_devel.h index ec2c2e590e..73a4f99293 100644 --- a/nntrainer/layers/layer_devel.h +++ b/nntrainer/layers/layer_devel.h @@ -258,10 +258,10 @@ class Layer { * @details all layers default to out of place execution * @note all layers default to out of place execution */ - virtual bool supportInPlace() const { return false; } + virtual bool supportInPlace() const { return is_inplace; } /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @details If it is a layer that supports in-place, the default in-place type * is NONE_RESTRICTING, but if there is a RESTRICTING type among the input * layers, it is set to NONE in the network_graph.cpp. @@ -269,7 +269,7 @@ class Layer { * override this function. 
* @return InPlaceType */ - virtual InPlaceType initializeInPlaceType() { + virtual InPlaceType initializeInPlace() { if (!supportInPlace()) return InPlaceType::NONE; else @@ -292,6 +292,9 @@ class Layer { * @return true if supports backwarding, else false */ virtual bool supportBackwarding() const = 0; + +protected: + bool is_inplace = false; /**< whether this layer is in-place or not */ }; /// @todo Decide where to put and how to implement(#986) diff --git a/nntrainer/layers/layer_node.cpp b/nntrainer/layers/layer_node.cpp index 557b1c1976..17a5de9436 100644 --- a/nntrainer/layers/layer_node.cpp +++ b/nntrainer/layers/layer_node.cpp @@ -931,11 +931,11 @@ bool LayerNode::supportInPlace() const { } /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @return InPlaceType */ -InPlaceType LayerNode::initializeInPlaceType() { - inplace_type = layer->initializeInPlaceType(); +InPlaceType LayerNode::initializeInPlace() { + inplace_type = layer->initializeInPlace(); return inplace_type; } diff --git a/nntrainer/layers/layer_node.h b/nntrainer/layers/layer_node.h index b3532ea242..3a3754e286 100644 --- a/nntrainer/layers/layer_node.h +++ b/nntrainer/layers/layer_node.h @@ -354,7 +354,7 @@ class LayerNode final : public ml::train::Layer, public GraphNode { bool supportInPlace() const; /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @details If it is a layer that supports in-place, the default in-place type * is NONE_RESTRICTING, but if there is a RESTRICTING type among the input * layers, it is set to NONE in the network_graph.cpp. @@ -362,7 +362,7 @@ class LayerNode final : public ml::train::Layer, public GraphNode { * override this function. 
* @return InPlaceType */ - InPlaceType initializeInPlaceType(); + InPlaceType initializeInPlace(); /** * @brief Notify that this layer will execute in-place * diff --git a/nntrainer/layers/layer_normalization_layer.h b/nntrainer/layers/layer_normalization_layer.h index 7511df0871..c368e16d46 100644 --- a/nntrainer/layers/layer_normalization_layer.h +++ b/nntrainer/layers/layer_normalization_layer.h @@ -99,6 +99,15 @@ class LayerNormalizationLayer : public Layer { */ bool supportBackwarding() const override { return true; } + /** + * @brief Initialize the in-place settings of the layer + * @return InPlaceType + */ + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::NON_RESTRICTING; + } + using Layer::setProperty; /** @@ -106,11 +115,6 @@ class LayerNormalizationLayer : public Layer { */ void setProperty(const std::vector &values) override; - /** - * @copydoc Layer::supportInPlace() - */ - bool supportInPlace() const override { return true; } - /** * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch) */ diff --git a/nntrainer/layers/loss/loss_layer.h b/nntrainer/layers/loss/loss_layer.h index b643d5013d..581e9477a8 100644 --- a/nntrainer/layers/loss/loss_layer.h +++ b/nntrainer/layers/loss/loss_layer.h @@ -47,8 +47,6 @@ class LossLayer : public Layer { */ virtual bool supportBackwarding() const override { return true; } - bool supportInPlace() const override { return is_inplace; } - /** * @copydoc Layer::requireLabel() */ @@ -71,8 +69,6 @@ class LossLayer : public Layer { Tensor l; /**< loss tensor to store intermediate value to calculate loss value */ - - bool is_inplace = true; }; } // namespace nntrainer diff --git a/nntrainer/layers/multiout_layer.h b/nntrainer/layers/multiout_layer.h index dc18d532eb..7bdd837cb9 100644 --- a/nntrainer/layers/multiout_layer.h +++ b/nntrainer/layers/multiout_layer.h @@ -74,21 +74,14 @@ class MultiOutLayer : public Layer { bool supportBackwarding() const override { return true; 
}; /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @return InPlaceType */ - InPlaceType initializeInPlaceType() final { - if (!supportInPlace()) - return InPlaceType::NONE; - else - return InPlaceType::RESTRICTING; + InPlaceType initializeInPlace() final { + is_inplace = true; + return InPlaceType::RESTRICTING; } - /** - * @copydoc Layer::supportInPlace() - */ - bool supportInPlace() const override { return true; } - /** * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods * method) diff --git a/nntrainer/layers/multiply_layer.h b/nntrainer/layers/multiply_layer.h index 634ae95f83..dfd3daaedc 100644 --- a/nntrainer/layers/multiply_layer.h +++ b/nntrainer/layers/multiply_layer.h @@ -30,7 +30,10 @@ class MultiplyLayer : public BinaryOperationLayer { /** * @brief Constructor of Multiply Layer */ - MultiplyLayer() : BinaryOperationLayer(), multiply_props(props::Print()) {} + MultiplyLayer() : + BinaryOperationLayer(), + multiply_props(props::Print(), props::InPlaceProp()), + support_backwarding(true) {} /** * @brief Destructor of Multiply Layer @@ -70,9 +73,29 @@ class MultiplyLayer : public BinaryOperationLayer { void calcDerivative(RunLayerContext &context) final; /** - * @copydoc bool supportBackwarding() const + * @copydoc bool supportBackwarding() */ - bool supportBackwarding() const final { return true; }; + bool supportBackwarding() const final { return support_backwarding; }; + + /** + * @brief Initialize the in-place settings of the layer + * @return InPlaceType + */ + InPlaceType initializeInPlace() final { + if (std::get(multiply_props).empty() || + !std::get(multiply_props).get()) { + is_inplace = false; + support_backwarding = true; + } else { + is_inplace = true; + support_backwarding = false; + } + + if (!supportInPlace()) + return InPlaceType::NONE; + else + return InPlaceType::NON_RESTRICTING; + } /** * @copydoc Layer::exportTo(Exporter &exporter, 
ml::train::ExportMethods @@ -91,7 +114,8 @@ class MultiplyLayer : public BinaryOperationLayer { */ const std::string getType() const final { return MultiplyLayer::type; }; - std::tuple<props::Print> multiply_props; + std::tuple<props::Print, props::InPlaceProp> multiply_props; + bool support_backwarding; /**< support backwarding */ inline static const std::string type = "multiply"; }; diff --git a/nntrainer/layers/reshape_layer.h b/nntrainer/layers/reshape_layer.h index e2c6d13c10..4391b48d6f 100644 --- a/nntrainer/layers/reshape_layer.h +++ b/nntrainer/layers/reshape_layer.h @@ -74,19 +74,12 @@ class ReshapeLayer : public Layer { bool supportBackwarding() const override { return true; }; /** - * @copydoc Layer::supportInPlace() - */ - bool supportInPlace() const override { return true; } - - /** - * @brief Initialize the in-place type of the layer + * @brief Initialize the in-place settings of the layer * @return InPlaceType */ - InPlaceType initializeInPlaceType() override { - if (!supportInPlace()) - return InPlaceType::NONE; - else - return InPlaceType::RESTRICTING; + InPlaceType initializeInPlace() override { + is_inplace = true; + return InPlaceType::RESTRICTING; } /** diff --git a/nntrainer/layers/subtract_layer.h b/nntrainer/layers/subtract_layer.h index 851bfb271c..7c3a75211a 100644 --- a/nntrainer/layers/subtract_layer.h +++ b/nntrainer/layers/subtract_layer.h @@ -30,7 +30,9 @@ class SubtractLayer : public BinaryOperationLayer { /** * @brief Constructor of Subtract Layer */ - SubtractLayer() : BinaryOperationLayer(), subtract_props(props::Print()) {} + SubtractLayer() : + BinaryOperationLayer(), + subtract_props(props::Print(), props::InPlaceProp()) {} /** * @brief Destructor of Sub Layer @@ -74,6 +76,23 @@ class SubtractLayer : public BinaryOperationLayer { */ bool supportBackwarding() const final { return true; }; + /** + * @brief Initialize the in-place settings of the layer + * @return InPlaceType + */ + InPlaceType initializeInPlace() final { + if (std::get<props::InPlaceProp>(subtract_props).empty() || 
std::get<props::InPlaceProp>(subtract_props).get()) { is_inplace = true; } else { is_inplace = false; } if (!supportInPlace()) return InPlaceType::NONE; else return InPlaceType::NON_RESTRICTING; } + /** * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods * method) @@ -91,7 +110,7 @@ class SubtractLayer : public BinaryOperationLayer { */ const std::string getType() const final { return SubtractLayer::type; }; - std::tuple<props::Print> subtract_props; + std::tuple<props::Print, props::InPlaceProp> subtract_props; inline static const std::string type = "subtract"; }; diff --git a/nntrainer/layers/time_dist.h b/nntrainer/layers/time_dist.h index e5e440c230..98f754699d 100644 --- a/nntrainer/layers/time_dist.h +++ b/nntrainer/layers/time_dist.h @@ -111,11 +111,6 @@ class TimeDistLayer : public Layer { dist_layer->setProperty(values); } - /** - * @copydoc Layer::supportInPlace() - */ - virtual bool supportInPlace() const override { return false; } - /** * @copydoc Layer::requireLabel() */ diff --git a/nntrainer/utils/node_exporter.h b/nntrainer/utils/node_exporter.h index de29cf77d9..cc950bbe0f 100644 --- a/nntrainer/utils/node_exporter.h +++ b/nntrainer/utils/node_exporter.h @@ -235,6 +235,7 @@ class Activation; class BatchNormalization; class Packed; class LossScaleForMixed; +class InPlaceProp; } // namespace props class LayerNode;