[ Prop ] Update bn layer properties to follow naming convention
- This commit renames the BN layer's property classes to follow the
  naming convention:
    - BNPARAMS_GAMMA_INIT -> GammaInitializer
    - BNPARAMS_MU_INIT -> MuInitializer
    - BNPARAMS_VAR_INIT -> VarInitializer
    - BNPARAMS_BETA_INIT -> BetaInitializer
- This commit also replaces all uses of RMS_NORM_GAMMA_INIT with
  GammaInitializer and removes the now-redundant class; both register the
  same `gamma_initializer` key, so user-facing property strings are
  unchanged (see the sketch below).
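
Only the C++ class names change; the string keys the properties register (`moving_mean_initializer`, `moving_variance_initializer`, `gamma_initializer`, `beta_initializer`) stay the same, as the diffs below show. A minimal standalone sketch of the pattern these classes follow — the types here are illustrative stand-ins, not actual nntrainer code:

```cpp
// Standalone illustration: each property is its own type carrying a string
// `key` and a default value, stored in a std::tuple and retrieved by type
// with std::get -- the pattern the renamed classes follow.
#include <iostream>
#include <tuple>

enum class Initializer { ZEROS, ONES };

// Hypothetical stand-ins for props::GammaInitializer / props::BetaInitializer.
struct GammaInitializer {
  static constexpr const char *key = "gamma_initializer";
  Initializer value = Initializer::ONES; // default matches the diff below
};

struct BetaInitializer {
  static constexpr const char *key = "beta_initializer";
  Initializer value = Initializer::ZEROS; // default matches the diff below
};

int main() {
  std::tuple<GammaInitializer, BetaInitializer> props;
  auto &gamma = std::get<GammaInitializer>(props); // type-based lookup
  std::cout << gamma.key << " defaults to "
            << (gamma.value == Initializer::ONES ? "ones" : "zeros") << '\n';
  return 0;
}
```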

**Self evaluation:**

Build test: [X] Passed [ ] Failed [ ] Skipped
Run test: [X] Passed [ ] Failed [ ] Skipped

Signed-off-by: Eunju Yang <[email protected]>
EunjuYang committed Dec 2, 2024
1 parent 8702596 commit 78907d7
Showing 10 changed files with 48 additions and 62 deletions.
16 changes: 8 additions & 8 deletions nntrainer/layers/bn_layer.cpp
@@ -50,10 +50,10 @@ enum BNParams {
BatchNormalizationLayer::BatchNormalizationLayer() :
Layer(),
divider(0),
-bn_props(props::Epsilon(), props::BNPARAMS_MU_INIT(),
-props::BNPARAMS_VAR_INIT(), props::BNPARAMS_BETA_INIT(),
-props::BNPARAMS_GAMMA_INIT(), props::Momentum(), props::Axis(),
-props::WeightDecay(), props::BiasDecay()) {
+bn_props(props::Epsilon(), props::MuInitializer(), props::VarInitializer(),
+props::BetaInitializer(), props::GammaInitializer(),
+props::Momentum(), props::Axis(), props::WeightDecay(),
+props::BiasDecay()) {
wt_idx.fill(std::numeric_limits<unsigned>::max());
}

@@ -62,10 +62,10 @@ void BatchNormalizationLayer::finalize(InitLayerContext &context) {
NNTR_THROW_IF(context.getNumInputs() != 1, std::invalid_argument)
<< "Only one input is allowed for batch normalization layer";

-auto &bnparams_mu = std::get<props::BNPARAMS_MU_INIT>(bn_props);
-auto &bnparams_var = std::get<props::BNPARAMS_VAR_INIT>(bn_props);
-auto &bnparams_beta = std::get<props::BNPARAMS_BETA_INIT>(bn_props);
-auto &bnparams_gamma = std::get<props::BNPARAMS_GAMMA_INIT>(bn_props);
+auto &bnparams_mu = std::get<props::MuInitializer>(bn_props);
+auto &bnparams_var = std::get<props::VarInitializer>(bn_props);
+auto &bnparams_beta = std::get<props::BetaInitializer>(bn_props);
+auto &bnparams_gamma = std::get<props::GammaInitializer>(bn_props);
auto &weight_decay = std::get<props::WeightDecay>(bn_props);
auto &bias_decay = std::get<props::BiasDecay>(bn_props);

6 changes: 3 additions & 3 deletions nntrainer/layers/bn_layer.h
@@ -126,9 +126,9 @@ class BatchNormalizationLayer : public Layer {
std::vector<unsigned int> axes_to_reduce; /**< target axes to reduce */
std::array<unsigned int, 11>
wt_idx; /**< indices of the weights and tensors */
-std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT, props::BNPARAMS_VAR_INIT,
-props::BNPARAMS_BETA_INIT, props::BNPARAMS_GAMMA_INIT,
-props::Momentum, props::Axis, props::WeightDecay, props::BiasDecay>
+std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
+props::BetaInitializer, props::GammaInitializer, props::Momentum,
+props::Axis, props::WeightDecay, props::BiasDecay>
bn_props;
};

2 changes: 1 addition & 1 deletion nntrainer/layers/cl_layers/rmsnorm_layer_cl.cpp
@@ -96,7 +96,7 @@ RMSNormLayerCl::RMSNormLayerCl() : LayerImplCl() { wt_idx.fill(0); }
void RMSNormLayerCl::finalize(InitLayerContext &context) {
std::vector<TensorDim> dim = context.getInputDimensions();
context.setOutputDimensions(dim);
-auto &rmsparams_gamma = std::get<props::RMS_NORM_GAMMA_INIT>(rmsnorm_props);
+auto &rmsparams_gamma = std::get<props::GammaInitializer>(rmsnorm_props);

TensorDim gamma_dim(
1, 1, 1, dim[0].width(),
2 changes: 1 addition & 1 deletion nntrainer/layers/cl_layers/rmsnorm_layer_cl.h
@@ -135,7 +135,7 @@ class RMSNormLayerCl : public LayerImplCl {
private:
std::array<unsigned int, 1> wt_idx;

-std::tuple<props::RMS_NORM_GAMMA_INIT, props::Epsilon>
+std::tuple<props::GammaInitializer, props::Epsilon>
rmsnorm_props; /**< rmsnorm layer properties */

inline static std::vector<ClContext::SharedPtrClKernel>
8 changes: 4 additions & 4 deletions nntrainer/layers/common_properties.cpp
@@ -314,13 +314,13 @@ WeightInitializer::WeightInitializer(Initializer value) { set(value); }

BiasInitializer::BiasInitializer(Initializer value) { set(value); }

-BNPARAMS_MU_INIT::BNPARAMS_MU_INIT(Initializer value) { set(value); }
+MuInitializer::MuInitializer(Initializer value) { set(value); }

-BNPARAMS_VAR_INIT::BNPARAMS_VAR_INIT(Initializer value) { set(value); }
+VarInitializer::VarInitializer(Initializer value) { set(value); }

-BNPARAMS_GAMMA_INIT::BNPARAMS_GAMMA_INIT(Initializer value) { set(value); }
+GammaInitializer::GammaInitializer(Initializer value) { set(value); }

-BNPARAMS_BETA_INIT::BNPARAMS_BETA_INIT(Initializer value) { set(value); }
+BetaInitializer::BetaInitializer(Initializer value) { set(value); }

BasicRegularizer::BasicRegularizer(nntrainer::WeightRegularizer value) {
set(value);
45 changes: 16 additions & 29 deletions nntrainer/layers/common_properties.h
@@ -1020,74 +1020,61 @@ class BiasInitializer final : public EnumProperty<InitializerInfo> {
};

/**
- * @brief BNPARAMS_MU_INIT Initialization Enumeration Information
+ * @brief MuInitializer Initialization Enumeration Information
*
*/
-class BNPARAMS_MU_INIT final : public EnumProperty<InitializerInfo> {
+class MuInitializer final : public EnumProperty<InitializerInfo> {
public:
/**
- * @brief Construct a BNPARAMS_MU_INIT object
+ * @brief Construct a MuInitializer object
*/
-BNPARAMS_MU_INIT(Initializer value = Initializer::ZEROS);
+MuInitializer(Initializer value = Initializer::ZEROS);
using prop_tag = enum_class_prop_tag;
static constexpr const char *key = "moving_mean_initializer";
};

/**
- * @brief BNPARAMS_VAR_INIT Initialization Enumeration Information
+ * @brief VarInitializer Initialization Enumeration Information
*
*/
-class BNPARAMS_VAR_INIT final : public EnumProperty<InitializerInfo> {
+class VarInitializer final : public EnumProperty<InitializerInfo> {
public:
/**
- * @brief Construct a BNPARAMS_VAR_INIT object
+ * @brief Construct a VarInitializer object
*/
-BNPARAMS_VAR_INIT(Initializer value = Initializer::ONES);
+VarInitializer(Initializer value = Initializer::ONES);
using prop_tag = enum_class_prop_tag;
static constexpr const char *key = "moving_variance_initializer";
};

/**
- * @brief BNPARAMS_GAMMA_INIT Initialization Enumeration Information
+ * @brief GammaInitializer Initialization Enumeration Information
*
*/
-class BNPARAMS_GAMMA_INIT final : public EnumProperty<InitializerInfo> {
+class GammaInitializer final : public EnumProperty<InitializerInfo> {
public:
/**
- * @brief Construct a BNPARAMS_GAMMA_INIT object
+ * @brief Construct a GammaInitializer object
*/
-BNPARAMS_GAMMA_INIT(Initializer value = Initializer::ONES);
+GammaInitializer(Initializer value = Initializer::ONES);
using prop_tag = enum_class_prop_tag;
static constexpr const char *key = "gamma_initializer";
};

/**
- * @brief BNPARAMS_BETA_INIT Initialization Enumeration Information
+ * @brief BetaInitializer Initialization Enumeration Information
*
*/
-class BNPARAMS_BETA_INIT final : public EnumProperty<InitializerInfo> {
+class BetaInitializer final : public EnumProperty<InitializerInfo> {
public:
/**
- * @brief Construct a BNPARAMS_BETA_INIT object
+ * @brief Construct a BetaInitializer object
*/
-BNPARAMS_BETA_INIT(Initializer value = Initializer::ZEROS);
+BetaInitializer(Initializer value = Initializer::ZEROS);
using prop_tag = enum_class_prop_tag;
static constexpr const char *key = "beta_initializer";
};

-/**
- * @brief RMS_NORM_GAMMA_INIT Initialization Enumeration Information
- */
-class RMS_NORM_GAMMA_INIT final : public EnumProperty<InitializerInfo> {
-public:
-/**
- * @brief Construct a RMS_NORM_GAMMA_INIT object
- */
-RMS_NORM_GAMMA_INIT(Initializer value = Initializer::ONES) { set(value); };
-using prop_tag = enum_class_prop_tag;
-static constexpr const char *key = "gamma_initializer";
-};

/**
* @brief Enumeration of tensor regularization type
*/
10 changes: 5 additions & 5 deletions nntrainer/layers/layer_normalization_layer.cpp
@@ -38,9 +38,9 @@ enum LNParams {

LayerNormalizationLayer::LayerNormalizationLayer() :
Layer(),
-layer_normalization_props(
-std::vector<props::Axis>(), props::Epsilon(), props::BNPARAMS_GAMMA_INIT(),
-props::BNPARAMS_BETA_INIT(), props::WeightDecay(), props::BiasDecay()) {
+layer_normalization_props(std::vector<props::Axis>(), props::Epsilon(),
+props::GammaInitializer(), props::BetaInitializer(),
+props::WeightDecay(), props::BiasDecay()) {
wt_idx.fill(std::numeric_limits<unsigned>::max());
}

@@ -51,9 +51,9 @@ void LayerNormalizationLayer::finalize(InitLayerContext &context) {
}

auto gamma_initializer =
-std::get<props::BNPARAMS_GAMMA_INIT>(layer_normalization_props).get();
+std::get<props::GammaInitializer>(layer_normalization_props).get();
auto beta_initializer =
-std::get<props::BNPARAMS_BETA_INIT>(layer_normalization_props).get();
+std::get<props::BetaInitializer>(layer_normalization_props).get();
auto weight_decay = std::get<props::WeightDecay>(layer_normalization_props);
auto bias_decay = std::get<props::BiasDecay>(layer_normalization_props);

5 changes: 2 additions & 3 deletions nntrainer/layers/layer_normalization_layer.h
@@ -124,9 +124,8 @@ class LayerNormalizationLayer : public Layer {
remain_axes; /**< remained axes (exclusive with normalize axes) */

std::array<unsigned int, 7> wt_idx;
-std::tuple<std::vector<props::Axis>, props::Epsilon,
-props::BNPARAMS_GAMMA_INIT, props::BNPARAMS_BETA_INIT,
-props::WeightDecay, props::BiasDecay>
+std::tuple<std::vector<props::Axis>, props::Epsilon, props::GammaInitializer,
+props::BetaInitializer, props::WeightDecay, props::BiasDecay>
layer_normalization_props;
};

8 changes: 4 additions & 4 deletions nntrainer/utils/node_exporter.cpp
@@ -144,10 +144,10 @@ void Exporter::saveTflResult(const std::tuple<props::Activation> &props,

template <>
void Exporter::saveTflResult(
-const std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT,
-props::BNPARAMS_VAR_INIT, props::BNPARAMS_BETA_INIT,
-props::BNPARAMS_GAMMA_INIT, props::Momentum, props::Axis,
-props::WeightDecay, props::BiasDecay> &props,
+const std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
+props::BetaInitializer, props::GammaInitializer,
+props::Momentum, props::Axis, props::WeightDecay,
+props::BiasDecay> &props,
const BatchNormalizationLayer *self) {
createIfNull(tf_node);

8 changes: 4 additions & 4 deletions nntrainer/utils/node_exporter.h
@@ -258,10 +258,10 @@ class BatchNormalizationLayer;
*/
template <>
void Exporter::saveTflResult(
-const std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT,
-props::BNPARAMS_VAR_INIT, props::BNPARAMS_BETA_INIT,
-props::BNPARAMS_GAMMA_INIT, props::Momentum, props::Axis,
-props::WeightDecay, props::BiasDecay> &props,
+const std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
+props::BetaInitializer, props::GammaInitializer,
+props::Momentum, props::Axis, props::WeightDecay,
+props::BiasDecay> &props,
const BatchNormalizationLayer *self);

class LayerImpl;
