Commit
[Graph] add inplace direction setting through layer property
Add in-place direction setting for binary tensor operation layers.

**Self evaluation:**
1. Build test:   [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
baek2sm committed Dec 5, 2024
1 parent d924ca2 commit d0a0d01
Showing 10 changed files with 154 additions and 12 deletions.
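Usage sketch (not part of the diff below): with this change, a binary operation layer can be told which of its two inputs the in-place output should overwrite through the new `inplace_direction` property. The snippet assumes the ccapi `ml::train::createLayer` factory and an `inplace` key for the existing boolean in-place property; every name other than `inplace_direction` is an assumption for illustration.

```cpp
// Hypothetical usage sketch; only "inplace_direction" is a key introduced by
// this commit, the other property keys are assumed for illustration.
#include <layer.h> // ccapi: ml::train::createLayer
#include <memory>

int main() {
  // Create a binary "add" layer whose in-place output reuses the memory of
  // its second (right) input instead of the default first (left) input.
  std::unique_ptr<ml::train::Layer> add0 = ml::train::createLayer(
    "add", {"name=add0",
            "inplace=true",              // assumed key of props::InPlaceProp
            "inplace_direction=right"}); // key added in common_properties.h
  return 0;
}
```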
14 changes: 14 additions & 0 deletions nntrainer/graph/network_graph.cpp
@@ -768,6 +768,9 @@ NetworkGraph::finalizeContext(const std::shared_ptr<LayerNode> &lnode,
if (lnode->getType() == IdentityLayer::type) {
s.variable_spec.reference_name = inputs[i]->getName();
s.variable_spec.dim.setFormat(inputs[i]->getDim().getFormat());
} else if (lnode->getInPlaceDirection() == InPlaceDirection::RIGHT) {
s.variable_spec.reference_name = inputs[1]->getName();
s.variable_spec.dim.setFormat(inputs[1]->getDim().getFormat());
} else {
s.variable_spec.reference_name = inputs[0]->getName();
s.variable_spec.dim.setFormat(inputs[0]->getDim().getFormat());
@@ -779,6 +782,9 @@ NetworkGraph::finalizeContext(const std::shared_ptr<LayerNode> &lnode,
if (lnode->getType() == IdentityLayer::type) {
s.gradient_spec->reference_name = inputs[i]->getGradientName();
s.gradient_spec->dim.setFormat(inputs[i]->getDim().getFormat());
} else if (lnode->getInPlaceDirection() == InPlaceDirection::RIGHT) {
s.gradient_spec->reference_name = inputs[1]->getGradientName();
s.gradient_spec->dim.setFormat(inputs[1]->getDim().getFormat());
} else {
s.gradient_spec->reference_name = inputs[0]->getGradientName();
s.gradient_spec->dim.setFormat(inputs[0]->getDim().getFormat());
@@ -924,6 +930,8 @@ NetworkGraph::refinalizeContext(const std::shared_ptr<LayerNode> &lnode,
TensorSpecV2::RequestType::READ_ONLY_VIEW;
if (lnode->getType() == IdentityLayer::type) {
s.variable_spec.reference_name = inputs[i]->getName();
} else if (lnode->getInPlaceDirection() == InPlaceDirection::RIGHT) {
s.variable_spec.reference_name = inputs[1]->getName();
} else {
s.variable_spec.reference_name = inputs[0]->getName();
}
@@ -933,6 +941,12 @@ NetworkGraph::refinalizeContext(const std::shared_ptr<LayerNode> &lnode,
TensorSpecV2::RequestType::READ_ONLY_VIEW;
if (lnode->getType() == IdentityLayer::type) {
s.gradient_spec->reference_name = inputs[i]->getGradientName();
} else if (lnode->getInPlaceDirection() == InPlaceDirection::RIGHT) {
// @note With binary inputs, inputs[0] represents the left input
// tensor while inputs[1] represents the right input tensor. As a
// result, if the in-place direction is set to right, the in-place
// memory is assigned to inputs[1].
s.gradient_spec->reference_name = inputs[1]->getGradientName();
} else {
s.gradient_spec->reference_name = inputs[0]->getGradientName();
}
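Restating the selection logic added above in isolation: when a layer reports `InPlaceDirection::RIGHT`, the output tensor spec references the second input; in every other case it keeps referencing the first. A minimal, self-contained sketch with stand-in types (not nntrainer's actual classes):

```cpp
#include <cassert>
#include <string>
#include <vector>

// Stand-in for the enum added to layer_devel.h.
enum class InPlaceDirection { NONE, LEFT, RIGHT };

// Mirrors the branch added to finalizeContext()/refinalizeContext():
// choose which input the in-place output tensor should reference.
std::string pickInPlaceReference(const std::vector<std::string> &input_names,
                                 InPlaceDirection direction) {
  assert(!input_names.empty());
  if (direction == InPlaceDirection::RIGHT && input_names.size() > 1)
    return input_names[1]; // right operand backs the output
  return input_names[0];   // default: left operand backs the output
}
```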
23 changes: 21 additions & 2 deletions nntrainer/layers/add_layer.h
@@ -36,7 +36,9 @@ class AddLayer : public BinaryOperationLayer {
* @brief Constructor of Add Layer
*/
AddLayer() :
BinaryOperationLayer(), add_props(props::Print(), props::InPlaceProp()) {}
BinaryOperationLayer(),
add_props(props::Print(), props::InPlaceProp(),
props::InPlaceDirectionProp()) {}

/**
* @brief Move constructor of Add Layer.
@@ -75,6 +77,22 @@ class AddLayer : public BinaryOperationLayer {
*/
bool supportBackwarding() const final { return true; };

/**
* @brief Get the inplace direction for the tensor operation layer
*
* @return InPlaceDirection
*/
InPlaceDirection getInPlaceDirection() override {
if (!supportInPlace())
return InPlaceDirection::NONE;
if (std::get<props::InPlaceDirectionProp>(add_props).empty() ||
(std::get<props::InPlaceDirectionProp>(add_props).get() == "left")) {
return InPlaceDirection::LEFT;
} else {
return InPlaceDirection::RIGHT;
}
};

/**
* @brief Initialize the in-place settings of the layer
* @return InPlaceType
@@ -109,7 +127,8 @@ class AddLayer : public BinaryOperationLayer {
*/
const std::string getType() const final { return AddLayer::type; }

std::tuple<props::Print, props::InPlaceProp> add_props;
std::tuple<props::Print, props::InPlaceProp, props::InPlaceDirectionProp>
add_props;

inline static const std::string type = "add";
};
11 changes: 11 additions & 0 deletions nntrainer/layers/common_properties.h
@@ -134,6 +134,17 @@ class InPlaceProp : public nntrainer::Property<bool> {
using prop_tag = bool_prop_tag; /**< property type */
};

/**
* @brief Inplace direction property
*
*/
class InPlaceDirectionProp : public nntrainer::Property<std::string> {
public:
static constexpr const char *key =
"inplace_direction"; /**< unique key to access */
using prop_tag = str_prop_tag; /**< property type */
};

/**
* @brief trainable property, use this to set and check how if certain layer is
* trainable
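Because `InPlaceDirectionProp` is a plain string property keyed `inplace_direction`, the operation layers map its value to the enum themselves, as shown in add_layer.h above: an empty value or "left" means LEFT, anything else means RIGHT, and a layer that is not in-place reports NONE regardless of the property. A standalone sketch of that mapping (the enum is redeclared locally so the snippet compiles on its own):

```cpp
#include <string>

// Local redeclaration of the enum added to layer_devel.h in this commit.
enum class InPlaceDirection { NONE, LEFT, RIGHT };

// Resolve the direction the same way the binary operation layers do.
InPlaceDirection resolveDirection(bool is_inplace,
                                  const std::string &direction_prop) {
  if (!is_inplace)
    return InPlaceDirection::NONE;
  if (direction_prop.empty() || direction_prop == "left")
    return InPlaceDirection::LEFT;
  return InPlaceDirection::RIGHT;
}
```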
22 changes: 20 additions & 2 deletions nntrainer/layers/divide_layer.h
@@ -32,7 +32,8 @@ class DivideLayer : public BinaryOperationLayer {
*/
DivideLayer() :
BinaryOperationLayer(),
divide_props(props::Print(), props::InPlaceProp()),
divide_props(props::Print(), props::InPlaceProp(),
props::InPlaceDirectionProp()),
support_backwarding(true) {}

/**
@@ -77,6 +78,22 @@ class DivideLayer : public BinaryOperationLayer {
*/
bool supportBackwarding() const final { return support_backwarding; };

/**
* @brief Get the inplace direction for the tensor operation layer
*
* @return InPlaceDirection
*/
InPlaceDirection getInPlaceDirection() override {
if (!supportInPlace())
return InPlaceDirection::NONE;
if (std::get<props::InPlaceDirectionProp>(divide_props).empty() ||
(std::get<props::InPlaceDirectionProp>(divide_props).get() == "left")) {
return InPlaceDirection::LEFT;
} else {
return InPlaceDirection::RIGHT;
}
};

/**
* @brief Initialize the in-place settings of the layer
* @return InPlaceType
@@ -114,7 +131,8 @@ class DivideLayer : public BinaryOperationLayer {
*/
const std::string getType() const final { return DivideLayer::type; };

std::tuple<props::Print, props::InPlaceProp> divide_props;
std::tuple<props::Print, props::InPlaceProp, props::InPlaceDirectionProp>
divide_props;
bool support_backwarding; /**< support backwarding */

inline static const std::string type = "divide";
27 changes: 27 additions & 0 deletions nntrainer/layers/layer_devel.h
@@ -53,6 +53,24 @@ enum class InPlaceType {
layers ahead of it to be in-place */
};

/**
* @brief Enum class for the direction of the in-place operation
*
* @details When the In-Place option is enabled and the layer has binary inputs,
* you can specify the direction of the in-place operation using this
* enumeration. For instance, if a layer is in-place with the direction set to
* LEFT, the output of the layer is written directly into its first input
* tensor ('input[0]'); conversely, if the direction is set to RIGHT, the
* output is written directly into its second input tensor ('input[1]').
*/
enum class InPlaceDirection {
NONE, /**< default. It will be set to LEFT or RIGHT only when the type of the
operation layer is binary and the is_inplace setting is true */
LEFT, /**< left side of the layer is in-place */
RIGHT, /**< right side of the layer is in-place */
};

/**
* @class Layer Base class for layers
* @brief Base class for all layers
@@ -255,6 +273,15 @@ class Layer {
*/
virtual bool supportInPlace() const { return is_inplace; }

/**
* @brief Get the inplace direction for the tensor operation layer
*
* @return InPlaceDirection
*/
virtual InPlaceDirection getInPlaceDirection() {
return InPlaceDirection::NONE;
};

/**
* @brief Initialize the in-place settings of the layer
* @details If it is a layer that supports in-place, the default in-place type
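To make the LEFT/RIGHT semantics above concrete: an in-place binary layer gives its output no buffer of its own; the output aliases one of the two input buffers, chosen by the direction. A toy illustration with raw vectors (purely illustrative, no nntrainer types):

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

int main() {
  std::vector<float> left{1.f, 2.f, 3.f};     // input[0]
  std::vector<float> right{10.f, 20.f, 30.f}; // input[1]

  // InPlaceDirection::RIGHT: the layer's output is written into input[1]'s
  // memory, so `right` doubles as the output buffer of this add operation.
  std::vector<float> &out = right;
  for (std::size_t i = 0; i < out.size(); ++i)
    out[i] = left[i] + right[i];

  for (float v : out)
    std::printf("%.1f ", v); // prints: 11.0 22.0 33.0
  return 0;
}
```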
7 changes: 7 additions & 0 deletions nntrainer/layers/layer_node.cpp
@@ -930,6 +930,13 @@ bool LayerNode::supportInPlace() const {
return layer->supportInPlace();
}

/**
* @brief Get the inplace direction for the layer
*/
InPlaceDirection LayerNode::getInPlaceDirection() const {
return layer->getInPlaceDirection();
};

/**
* @brief Initialize the in-place settings of the layer
* @return InPlaceType
13 changes: 10 additions & 3 deletions nntrainer/layers/layer_node.h
@@ -380,12 +380,19 @@ class LayerNode final : public ml::train::Layer, public GraphNode {
*/
InPlaceType getInPlaceType() const { return inplace_type; }

/**
* @brief Get the inplace direction for the layer
*
* @return InPlaceDirection
*/
InPlaceDirection getInPlaceDirection() const;

/**
* @brief check if this layer requires label to be passed
* @return true if requires a label when training, else false
* @note if requireLabel() == true means, for now, that it is endpoint of a
* graph(numOutlayers == 0). label will be fed to the gradient of hidden if
* requireLabel is true
* @note if requireLabel() == true means, for now, that it is endpoint of
* a graph(numOutlayers == 0). label will be fed to the gradient of hidden
* if requireLabel is true
*/
bool requireLabel() const;

23 changes: 21 additions & 2 deletions nntrainer/layers/multiply_layer.h
@@ -32,7 +32,8 @@ class MultiplyLayer : public BinaryOperationLayer {
*/
MultiplyLayer() :
BinaryOperationLayer(),
multiply_props(props::Print(), props::InPlaceProp()),
multiply_props(props::Print(), props::InPlaceProp(),
props::InPlaceDirectionProp()),
support_backwarding(true) {}

/**
@@ -77,6 +78,23 @@ class MultiplyLayer : public BinaryOperationLayer {
*/
bool supportBackwarding() const final { return support_backwarding; };

/**
* @brief Get the inplace direction for the tensor operation layer
*
* @return InPlaceDirection
*/
InPlaceDirection getInPlaceDirection() override {
if (!supportInPlace())
return InPlaceDirection::NONE;
if (std::get<props::InPlaceDirectionProp>(multiply_props).empty() ||
(std::get<props::InPlaceDirectionProp>(multiply_props).get() ==
"left")) {
return InPlaceDirection::LEFT;
} else {
return InPlaceDirection::RIGHT;
}
};

/**
* @brief Initialize the in-place settings of the layer
* @return InPlaceType
@@ -114,7 +132,8 @@ class MultiplyLayer : public BinaryOperationLayer {
*/
const std::string getType() const final { return MultiplyLayer::type; };

std::tuple<props::Print, props::InPlaceProp> multiply_props;
std::tuple<props::Print, props::InPlaceProp, props::InPlaceDirectionProp>
multiply_props;
bool support_backwarding; /**< support backwarding */

inline static const std::string type = "multiply";
23 changes: 21 additions & 2 deletions nntrainer/layers/subtract_layer.h
@@ -32,7 +32,8 @@ class SubtractLayer : public BinaryOperationLayer {
*/
SubtractLayer() :
BinaryOperationLayer(),
subtract_props(props::Print(), props::InPlaceProp()) {}
subtract_props(props::Print(), props::InPlaceProp(),
props::InPlaceDirectionProp()) {}

/**
* @brief Destructor of Sub Layer
@@ -76,6 +77,23 @@ class SubtractLayer : public BinaryOperationLayer {
*/
bool supportBackwarding() const final { return true; };

/**
* @brief Get the inplace direction for the tensor operation layer
*
* @return InPlaceDirection
*/
InPlaceDirection getInPlaceDirection() override {
if (!supportInPlace())
return InPlaceDirection::NONE;
if (std::get<props::InPlaceDirectionProp>(subtract_props).empty() ||
(std::get<props::InPlaceDirectionProp>(subtract_props).get() ==
"left")) {
return InPlaceDirection::LEFT;
} else {
return InPlaceDirection::RIGHT;
}
};

/**
* @brief Initialize the in-place settings of the layer
* @return InPlaceType
@@ -110,7 +128,8 @@ class SubtractLayer : public BinaryOperationLayer {
*/
const std::string getType() const final { return SubtractLayer::type; };

std::tuple<props::Print, props::InPlaceProp> subtract_props;
std::tuple<props::Print, props::InPlaceProp, props::InPlaceDirectionProp>
subtract_props;

inline static const std::string type = "subtract";
};
3 changes: 2 additions & 1 deletion nntrainer/utils/node_exporter.h
@@ -235,7 +235,8 @@ class Activation;
class BatchNormalization;
class Packed;
class LossScaleForMixed;
class InplaceProp;
class InPlaceProp;
class InPlaceDirectionProp;
} // namespace props

class LayerNode;
