[Graph] add inplace setting through layer property @open sesame 12/02 14:37 #2796

Merged 1 commit on Dec 2, 2024
nntrainer/graph/network_graph.cpp (21 changes: 9 additions & 12 deletions)

@@ -642,8 +642,10 @@ void NetworkGraph::addLayer(std::shared_ptr<LayerNode> layer) {
 
 InPlaceType
 NetworkGraph::canExecuteInPlace(const std::shared_ptr<LayerNode> &lnode) {
-  if (!lnode->supportInPlace()) {
-    return InPlaceType::NONE;
+  InPlaceType inplace_type = lnode->initializeInPlace();
+
+  if (inplace_type == InPlaceType::NONE) {
+    return inplace_type;
   }
 
   if (lnode->getType() == InputLayer::type &&
@@ -655,34 +657,29 @@ NetworkGraph::canExecuteInPlace(const std::shared_ptr<LayerNode> &lnode) {
     return InPlaceType::RESTRICTING;
   }
 
-  InPlaceType inplace_type = lnode->initializeInPlaceType();
   /** Set inplace_type based on the input connections */
-  switch (inplace_type) {
-  /** A case where it cannot operate in-place */
-  case InPlaceType::NONE:
-    return InPlaceType::NONE;
   /** A case where it can operate in-place even if there is a multi-out type
    * input connection. */
-  case InPlaceType::RESTRICTING:
+  if (inplace_type == InPlaceType::RESTRICTING) {
     for (size_t i = 0, num_node = lnode->getNumInputConnections(); i < num_node;
          ++i) {
       const std::string &input_name = lnode->getInputConnectionName(i);
       if (getLayerNode(input_name)->getInPlaceType() ==
           InPlaceType::RESTRICTING)
-        return InPlaceType::RESTRICTING;
+        return inplace_type;
     }
     return InPlaceType::NON_RESTRICTING;
+  }
   /** A case where it cannot operate in-place if there is a multi-out type
    * input connection. */
-  default:
+  else { /** condition: NON_RESTRICTING */
     for (size_t i = 0, num_node = lnode->getNumInputConnections(); i < num_node;
          ++i) {
       const std::string &input_name = lnode->getInputConnectionName(i);
       if (getLayerNode(input_name)->getInPlaceType() ==
           InPlaceType::RESTRICTING)
         return InPlaceType::NONE;
     }
-    return InPlaceType::NON_RESTRICTING;
+    return inplace_type;
   }
 }

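Note on the flow above: the graph no longer starts from a boolean supportInPlace() check; it first asks the layer for its own requested type via initializeInPlace(), then reconciles that request with the resolved types of the input connections. A standalone sketch of the reconciliation rule with simplified names (an illustration, not nntrainer code; it skips the special-cased input and multi-out layers handled earlier in the function):

#include <vector>

enum class InPlaceType { NONE, RESTRICTING, NON_RESTRICTING };

// requested: what the layer asked for via initializeInPlace().
// inputs: resolved in-place types of the node's input connections.
InPlaceType resolve(InPlaceType requested,
                    const std::vector<InPlaceType> &inputs) {
  if (requested == InPlaceType::NONE)
    return InPlaceType::NONE;
  for (InPlaceType in : inputs) {
    if (in != InPlaceType::RESTRICTING)
      continue;
    // A RESTRICTING input keeps a RESTRICTING consumer in-place but
    // forces a NON_RESTRICTING consumer out of place.
    return requested == InPlaceType::RESTRICTING ? InPlaceType::RESTRICTING
                                                 : InPlaceType::NONE;
  }
  // No RESTRICTING inputs: the node may run in-place without restrictions.
  return InPlaceType::NON_RESTRICTING;
}

The only asymmetry is how a RESTRICTING input is handled: it keeps a RESTRICTING consumer in-place but pushes a NON_RESTRICTING consumer out of place.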
nntrainer/layers/add_layer.cpp (1 change: 1 addition & 0 deletions)

@@ -11,6 +11,7 @@
  *
  */
 
+#include "common_properties.h"
 #include <add_layer.h>
 #include <nntrainer_error.h>
 #include <nntrainer_log.h>
nntrainer/layers/add_layer.h (22 changes: 20 additions & 2 deletions)

@@ -35,7 +35,8 @@ class AddLayer : public BinaryOperationLayer {
   /**
    * @brief Constructor of Add Layer
    */
-  AddLayer() : BinaryOperationLayer(), add_props(props::Print()) {}
+  AddLayer() :
+    BinaryOperationLayer(), add_props(props::Print(), props::InPlaceProp()) {}
 
   /**
    * @brief Move constructor of Add Layer.
@@ -74,6 +75,23 @@
    */
   bool supportBackwarding() const final { return true; };
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    if (std::get<props::InPlaceProp>(add_props).empty() ||
+        std::get<props::InPlaceProp>(add_props).get()) {
+      is_inplace = true;
+    } else {
+      is_inplace = false;
+    }
+    if (!supportInPlace())
+      return InPlaceType::NONE;
+    else
+      return InPlaceType::NON_RESTRICTING;
+  }
+
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
    * method)
@@ -91,7 +109,7 @@
    */
   const std::string getType() const final { return AddLayer::type; }
 
-  std::tuple<props::Print> add_props;
+  std::tuple<props::Print, props::InPlaceProp> add_props;
 
   inline static const std::string type = "add";
 };
nntrainer/layers/bn_layer.h (14 changes: 9 additions & 5 deletions)

@@ -100,6 +100,15 @@ class BatchNormalizationLayer : public Layer {
    */
   bool supportBackwarding() const override { return true; }
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
+
   using Layer::setProperty;
 
   /**
@@ -108,11 +117,6 @@ class BatchNormalizationLayer : public Layer {
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Layer::supportInPlace()
-   */
-  bool supportInPlace() const override { return true; }
-
   /**
    * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
    */
nntrainer/layers/cl_layers/reshape_cl.h (8 changes: 6 additions & 2 deletions)

@@ -87,9 +87,13 @@ class ReshapeLayerCl : public Layer {
   bool supportBackwarding() const override { return false; };
 
   /**
-   * @copydoc Layer::supportInPlace()
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
    */
-  bool supportInPlace() const override { return true; }
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
 
   /**
    * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
nntrainer/layers/common_properties.h (10 changes: 10 additions & 0 deletions)

@@ -124,6 +124,16 @@ class TensorDimension : public TensorDimProperty {
   using prop_tag = dimension_prop_tag; /**< property type */
 };
 
+/**
+ * @brief Inplace operation property
+ *
+ */
+class InPlaceProp : public nntrainer::Property<bool> {
+public:
+  static constexpr const char *key = "inplace"; /**< unique key to access */
+  using prop_tag = bool_prop_tag;               /**< property type */
+};
+
 /**
  * @brief trainable property, use this to set and check how if certain layer is
  * trainable
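Since InPlaceProp is a plain bool property keyed on "inplace", it can be passed to a layer like any other key=value string. A hedged usage sketch against the existing ml::train C++ API (layer names, shapes, and wiring are illustrative, not taken from this PR):

#include <layer.h>
#include <model.h>

int main() {
  // Illustrative wiring: two inputs feeding an add layer that is asked to
  // run out of place via the new property. Omitting "inplace" leaves the
  // property empty, so each layer keeps its own default behavior.
  auto model = ml::train::createModel(ml::train::ModelType::NEURAL_NET);
  model->addLayer(ml::train::createLayer(
    "input", {"name=in0", "input_shape=1:1:4"}));
  model->addLayer(ml::train::createLayer(
    "input", {"name=in1", "input_shape=1:1:4"}));
  model->addLayer(ml::train::createLayer(
    "add", {"name=add0", "input_layers=in0,in1", "inplace=false"}));
  return 0;
}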
nntrainer/layers/divide_layer.h (30 changes: 27 additions & 3 deletions)

@@ -30,7 +30,10 @@ class DivideLayer : public BinaryOperationLayer {
   /**
    * @brief Constructor of Divide Layer
    */
-  DivideLayer() : BinaryOperationLayer(), divide_props(props::Print()) {}
+  DivideLayer() :
+    BinaryOperationLayer(),
+    divide_props(props::Print(), props::InPlaceProp()),
+    support_backwarding(true) {}
 
   /**
    * @brief Destructor of Divide Layer
@@ -72,7 +75,27 @@
   /**
    * @copydoc bool supportBackwarding() const
   */
-  bool supportBackwarding() const final { return true; };
+  bool supportBackwarding() const final { return support_backwarding; };
+
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    if (std::get<props::InPlaceProp>(divide_props).empty() ||
+        !std::get<props::InPlaceProp>(divide_props).get()) {
+      is_inplace = false;
+      support_backwarding = true;
+    } else {
+      is_inplace = true;
+      support_backwarding = false;
+    }
+
+    if (!supportInPlace())
+      return InPlaceType::NONE;
+    else
+      return InPlaceType::NON_RESTRICTING;
+  }
 
   /**
   * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
@@ -91,7 +114,8 @@
    */
   const std::string getType() const final { return DivideLayer::type; };
 
-  std::tuple<props::Print> divide_props;
+  std::tuple<props::Print, props::InPlaceProp> divide_props;
+  bool support_backwarding; /**< support backwarding */
 
   inline static const std::string type = "divide";
 };
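The coupling between the property and support_backwarding is the notable part here: when in-place execution is requested, DivideLayer stops advertising backward support, presumably because an in-place forward overwrites an operand the backward pass still needs (for y = a / b, dL/da = dL/dy / b and dL/db = -dL/dy * a / b^2, both involving the original operands). Also worth noting: AddLayer and DivideLayer implement the same default rule, "an explicit user setting wins, otherwise keep the layer's historical default" (in-place for add, out-of-place for divide). A standalone sketch of that rule, with std::optional<bool> standing in for nntrainer::Property<bool>:

#include <optional>

// Sketch of the shared default rule; an empty optional plays the role of
// an unset "inplace" property (the user never passed "inplace=...").
bool resolveIsInplace(std::optional<bool> inplace_prop, bool layer_default) {
  // AddLayer would pass layer_default = true, DivideLayer false.
  return inplace_prop.value_or(layer_default);
}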
nntrainer/layers/dropout.h (11 changes: 1 addition & 10 deletions)

@@ -30,9 +30,7 @@ class DropOutLayer : public Layer {
    * @brief Constructor of DropOut Layer
    */
   DropOutLayer(float dropout = 0.0) :
-    Layer(),
-    dropout_rate(props::DropOutRate(dropout)),
-    epsilon(1e-3) {}
+    Layer(), dropout_rate(props::DropOutRate(dropout)), epsilon(1e-3) {}
 
   /**
    * @brief Destructor of DropOut Layer
@@ -89,13 +87,6 @@
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Layer::supportInPlace()
-   *
-   * @todo Enable in-place support once supported by manager
-   */
-  bool supportInPlace() const override { return false; }
-
   inline static const std::string type = "dropout";
 
 private:
nntrainer/layers/identity_layer.h (15 changes: 4 additions & 11 deletions)

@@ -70,19 +70,12 @@ class IdentityLayer final : public Layer {
   bool supportBackwarding() const override { return true; };
 
   /**
-   * @copydoc Layer::supportInPlace()
-   */
-  bool supportInPlace() const override { return true; }
-
-  /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @return InPlaceType
    */
-  InPlaceType initializeInPlaceType() final {
-    if (!supportInPlace())
-      return InPlaceType::NONE;
-    else
-      return InPlaceType::RESTRICTING;
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::RESTRICTING;
   }
 
   /**
nntrainer/layers/input_layer.cpp (4 changes: 1 addition & 3 deletions)

@@ -33,9 +33,7 @@ namespace nntrainer {
 static constexpr size_t SINGLE_INOUT_IDX = 0;
 
 InputLayer::InputLayer() :
-  Layer(),
-  input_props(props::Normalization(), props::Standardization()),
-  is_inplace(true) {}
+  Layer(), input_props(props::Normalization(), props::Standardization()) {}
 
 void InputLayer::setProperty(const std::vector<std::string> &values) {
   auto remain_props = loadProperties(values, input_props);
nntrainer/layers/input_layer.h (9 changes: 6 additions & 3 deletions)

@@ -80,9 +80,13 @@
   bool supportBackwarding() const override { return false; };
 
   /**
-   * @copydoc Layer::supportInPlace()
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
    */
-  bool supportInPlace() const override { return is_inplace; }
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
 
   /**
   * @copydoc Layer::exportTo(Exporter &exporter, ml::train::ExportMethods
@@ -105,7 +109,6 @@
 
 private:
   std::tuple<props::Normalization, props::Standardization> input_props;
-  bool is_inplace;
 };
 } // namespace nntrainer
nntrainer/layers/layer_devel.h (9 changes: 6 additions & 3 deletions)

@@ -253,18 +253,18 @@ class Layer {
    * @details all layers default to out of place execution
    * @note all layers default to out of place execution
    */
-  virtual bool supportInPlace() const { return false; }
+  virtual bool supportInPlace() const { return is_inplace; }
 
   /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @details If it is a layer that supports in-place, the default in-place type
    * is NONE_RESTRICTING, but if there is a RESTRICTING type among the input
    * layers, it is set to NONE in the network_graph.cpp.
    * Layers with exceptional behavior such as No-Operation layers should
    * override this function.
    * @return InPlaceType
    */
-  virtual InPlaceType initializeInPlaceType() {
+  virtual InPlaceType initializeInPlace() {
     if (!supportInPlace())
       return InPlaceType::NONE;
     else
@@ -287,6 +287,9 @@
    * @return true if supports backwarding, else false
    */
   virtual bool supportBackwarding() const = 0;
+
+protected:
+  bool is_inplace = false; /**< whether this layer is in-place or not */
 };
 
 /// @todo Decide where to put and how to implement(#986)
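This base-class change is what lets most layers shrink to a two-line override: they set the inherited is_inplace flag and return their type, while the default implementation keeps non-overriding layers out of place. A hedged mock of the contract (simplified types, not the actual nntrainer hierarchy):

// Mock of the new base-class contract: the base resolves its answer from
// is_inplace, and subclasses flip the flag inside initializeInPlace().
enum class InPlaceType { NONE, RESTRICTING, NON_RESTRICTING };

struct Layer {
  virtual ~Layer() = default;

  virtual bool supportInPlace() const { return is_inplace; }

  // Default: out of place unless a subclass has set is_inplace.
  virtual InPlaceType initializeInPlace() {
    return supportInPlace() ? InPlaceType::NON_RESTRICTING : InPlaceType::NONE;
  }

protected:
  bool is_inplace = false;
};

// A no-op style layer (cf. IdentityLayer above) restricts its consumers.
struct NoOpLayer : Layer {
  InPlaceType initializeInPlace() override {
    is_inplace = true;
    return InPlaceType::RESTRICTING;
  }
};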
nntrainer/layers/layer_node.cpp (6 changes: 3 additions & 3 deletions)

@@ -931,11 +931,11 @@ bool LayerNode::supportInPlace() const {
 }
 
 /**
- * @brief Initialize the in-place type of the layer
+ * @brief Initialize the in-place settings of the layer
  * @return InPlaceType
  */
-InPlaceType LayerNode::initializeInPlaceType() {
-  inplace_type = layer->initializeInPlaceType();
+InPlaceType LayerNode::initializeInPlace() {
+  inplace_type = layer->initializeInPlace();
   return inplace_type;
 }
nntrainer/layers/layer_node.h (4 changes: 2 additions & 2 deletions)

@@ -352,15 +352,15 @@ class LayerNode final : public ml::train::Layer, public GraphNode {
   bool supportInPlace() const;
 
   /**
-   * @brief Initialize the in-place type of the layer
+   * @brief Initialize the in-place settings of the layer
    * @details If it is a layer that supports in-place, the default in-place type
    * is NONE_RESTRICTING, but if there is a RESTRICTING type among the input
    * layers, it is set to NONE in the network_graph.cpp.
    * Layers with exceptional behavior such as No-Operation layers should
    * override this function.
    * @return InPlaceType
    */
-  InPlaceType initializeInPlaceType();
+  InPlaceType initializeInPlace();
   /**
    * @brief Notify that this layer will execute in-place
    *
nntrainer/layers/layer_normalization_layer.h (14 changes: 9 additions & 5 deletions)

@@ -99,18 +99,22 @@
    */
   bool supportBackwarding() const override { return true; }
 
+  /**
+   * @brief Initialize the in-place settings of the layer
+   * @return InPlaceType
+   */
+  InPlaceType initializeInPlace() final {
+    is_inplace = true;
+    return InPlaceType::NON_RESTRICTING;
+  }
+
   using Layer::setProperty;
 
   /**
    * @copydoc Layer::setProperty(const std::vector<std::string> &values)
    */
   void setProperty(const std::vector<std::string> &values) override;
 
-  /**
-   * @copydoc Layer::supportInPlace()
-   */
-  bool supportInPlace() const override { return true; }
-
   /**
    * @copydoc Layer::setBatch(RunLayerContext &context, unsigned int batch)
    */