diff --git a/nntrainer/layers/bn_layer.cpp b/nntrainer/layers/bn_layer.cpp
index c5802291d8..bd0b2f7fce 100644
--- a/nntrainer/layers/bn_layer.cpp
+++ b/nntrainer/layers/bn_layer.cpp
@@ -50,10 +50,10 @@ enum BNParams {
 BatchNormalizationLayer::BatchNormalizationLayer() :
   Layer(),
   divider(0),
-  bn_props(props::Epsilon(), props::BNPARAMS_MU_INIT(),
-           props::BNPARAMS_VAR_INIT(), props::BNPARAMS_BETA_INIT(),
-           props::BNPARAMS_GAMMA_INIT(), props::Momentum(), props::Axis(),
-           props::WeightDecay(), props::BiasDecay()) {
+  bn_props(props::Epsilon(), props::MuInitializer(), props::VarInitializer(),
+           props::BetaInitializer(), props::GammaInitializer(),
+           props::Momentum(), props::Axis(), props::WeightDecay(),
+           props::BiasDecay()) {
   wt_idx.fill(std::numeric_limits<unsigned>::max());
 }
 
@@ -62,10 +62,10 @@ void BatchNormalizationLayer::finalize(InitLayerContext &context) {
   NNTR_THROW_IF(context.getNumInputs() != 1, std::invalid_argument)
     << "Only one input is allowed for batch normalization layer";
 
-  auto &bnparams_mu = std::get<props::BNPARAMS_MU_INIT>(bn_props);
-  auto &bnparams_var = std::get<props::BNPARAMS_VAR_INIT>(bn_props);
-  auto &bnparams_beta = std::get<props::BNPARAMS_BETA_INIT>(bn_props);
-  auto &bnparams_gamma = std::get<props::BNPARAMS_GAMMA_INIT>(bn_props);
+  auto &bnparams_mu = std::get<props::MuInitializer>(bn_props);
+  auto &bnparams_var = std::get<props::VarInitializer>(bn_props);
+  auto &bnparams_beta = std::get<props::BetaInitializer>(bn_props);
+  auto &bnparams_gamma = std::get<props::GammaInitializer>(bn_props);
   auto &weight_decay = std::get<props::WeightDecay>(bn_props);
   auto &bias_decay = std::get<props::BiasDecay>(bn_props);
 
diff --git a/nntrainer/layers/bn_layer.h b/nntrainer/layers/bn_layer.h
index f8d611cd9d..22b1ea3ef0 100644
--- a/nntrainer/layers/bn_layer.h
+++ b/nntrainer/layers/bn_layer.h
@@ -126,9 +126,9 @@ class BatchNormalizationLayer : public Layer {
   std::vector<unsigned int> axes_to_reduce; /**< target axes to reduce */
   std::array<unsigned int, 9> wt_idx; /**< indices of the weights and tensors */
 
-  std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT, props::BNPARAMS_VAR_INIT,
-             props::BNPARAMS_BETA_INIT, props::BNPARAMS_GAMMA_INIT,
-             props::Momentum, props::Axis, props::WeightDecay, props::BiasDecay>
+  std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
+             props::BetaInitializer, props::GammaInitializer, props::Momentum,
+             props::Axis, props::WeightDecay, props::BiasDecay>
     bn_props;
 };
 
diff --git a/nntrainer/layers/cl_layers/rmsnorm_layer_cl.cpp b/nntrainer/layers/cl_layers/rmsnorm_layer_cl.cpp
index d71acbe386..fe05129959 100644
--- a/nntrainer/layers/cl_layers/rmsnorm_layer_cl.cpp
+++ b/nntrainer/layers/cl_layers/rmsnorm_layer_cl.cpp
@@ -96,7 +96,7 @@ RMSNormLayerCl::RMSNormLayerCl() : LayerImplCl() { wt_idx.fill(0); }
 void RMSNormLayerCl::finalize(InitLayerContext &context) {
   std::vector<TensorDim> dim = context.getInputDimensions();
   context.setOutputDimensions(dim);
-  auto &rmsparams_gamma = std::get<props::RMS_NORM_GAMMA_INIT>(rmsnorm_props);
+  auto &rmsparams_gamma = std::get<props::GammaInitializer>(rmsnorm_props);
 
   TensorDim gamma_dim(
     1, 1, 1, dim[0].width(),
diff --git a/nntrainer/layers/cl_layers/rmsnorm_layer_cl.h b/nntrainer/layers/cl_layers/rmsnorm_layer_cl.h
index 89a0600c4b..d6f3225603 100644
--- a/nntrainer/layers/cl_layers/rmsnorm_layer_cl.h
+++ b/nntrainer/layers/cl_layers/rmsnorm_layer_cl.h
@@ -135,7 +135,7 @@ class RMSNormLayerCl : public LayerImplCl {
 private:
   std::array<unsigned int, 1> wt_idx;
-  std::tuple<props::RMS_NORM_GAMMA_INIT, props::Epsilon>
+  std::tuple<props::GammaInitializer, props::Epsilon>
     rmsnorm_props; /**< rmsnorm layer properties */
 
   inline static std::vector<ClContext::SharedPtrClKernel>
diff --git a/nntrainer/layers/common_properties.cpp b/nntrainer/layers/common_properties.cpp
index e1eac32986..c38700bca6 100644
--- a/nntrainer/layers/common_properties.cpp
+++ b/nntrainer/layers/common_properties.cpp
@@ -314,13 +314,13 @@ WeightInitializer::WeightInitializer(Initializer value) { set(value); }
 
 BiasInitializer::BiasInitializer(Initializer value) { set(value); }
 
-BNPARAMS_MU_INIT::BNPARAMS_MU_INIT(Initializer value) { set(value); }
+MuInitializer::MuInitializer(Initializer value) { set(value); }
 
-BNPARAMS_VAR_INIT::BNPARAMS_VAR_INIT(Initializer value) { set(value); }
+VarInitializer::VarInitializer(Initializer value) { set(value); }
 
-BNPARAMS_GAMMA_INIT::BNPARAMS_GAMMA_INIT(Initializer value) { set(value); }
+GammaInitializer::GammaInitializer(Initializer value) { set(value); }
 
-BNPARAMS_BETA_INIT::BNPARAMS_BETA_INIT(Initializer value) { set(value); }
+BetaInitializer::BetaInitializer(Initializer value) { set(value); }
 
 BasicRegularizer::BasicRegularizer(nntrainer::WeightRegularizer value) {
   set(value);
diff --git a/nntrainer/layers/common_properties.h b/nntrainer/layers/common_properties.h
index e67bda4def..ff59a88a4d 100644
--- a/nntrainer/layers/common_properties.h
+++ b/nntrainer/layers/common_properties.h
@@ -1020,74 +1020,61 @@ class BiasInitializer final : public EnumProperty<InitializerInfo> {
 };
 
 /**
- * @brief BNPARAMS_MU_INIT Initialization Enumeration Information
+ * @brief MuInitializer Initialization Enumeration Information
  *
  */
-class BNPARAMS_MU_INIT final : public EnumProperty<InitializerInfo> {
+class MuInitializer final : public EnumProperty<InitializerInfo> {
 public:
   /**
-   * @brief Construct a BNPARAMS_MU_INIT object
+   * @brief Construct a MuInitializer object
    */
-  BNPARAMS_MU_INIT(Initializer value = Initializer::ZEROS);
+  MuInitializer(Initializer value = Initializer::ZEROS);
   using prop_tag = enum_class_prop_tag;
   static constexpr const char *key = "moving_mean_initializer";
 };
 
 /**
- * @brief BNPARAMS_VAR_INIT Initialization Enumeration Information
+ * @brief VarInitializer Initialization Enumeration Information
  *
  */
-class BNPARAMS_VAR_INIT final : public EnumProperty<InitializerInfo> {
+class VarInitializer final : public EnumProperty<InitializerInfo> {
 public:
   /**
-   * @brief Construct a BNPARAMS_VAR_INIT object
+   * @brief Construct a VarInitializer object
    */
-  BNPARAMS_VAR_INIT(Initializer value = Initializer::ONES);
+  VarInitializer(Initializer value = Initializer::ONES);
   using prop_tag = enum_class_prop_tag;
   static constexpr const char *key = "moving_variance_initializer";
 };
 
 /**
- * @brief BNPARAMS_GAMMA_INIT Initialization Enumeration Information
+ * @brief GammaInitializer Initialization Enumeration Information
  *
  */
-class BNPARAMS_GAMMA_INIT final : public EnumProperty<InitializerInfo> {
+class GammaInitializer final : public EnumProperty<InitializerInfo> {
 public:
   /**
-   * @brief Construct a BNPARAMS_GAMMA_INIT object
+   * @brief Construct a GammaInitializer object
    */
-  BNPARAMS_GAMMA_INIT(Initializer value = Initializer::ONES);
+  GammaInitializer(Initializer value = Initializer::ONES);
   using prop_tag = enum_class_prop_tag;
   static constexpr const char *key = "gamma_initializer";
 };
 
 /**
- * @brief BNPARAMS_BETA_INIT Initialization Enumeration Information
+ * @brief BetaInitializer Initialization Enumeration Information
  *
  */
-class BNPARAMS_BETA_INIT final : public EnumProperty<InitializerInfo> {
+class BetaInitializer final : public EnumProperty<InitializerInfo> {
 public:
   /**
-   * @brief Construct a BNPARAMS_BETA_INIT object
+   * @brief Construct a BetaInitializer object
    */
-  BNPARAMS_BETA_INIT(Initializer value = Initializer::ZEROS);
+  BetaInitializer(Initializer value = Initializer::ZEROS);
   using prop_tag = enum_class_prop_tag;
   static constexpr const char *key = "beta_initializer";
 };
 
-/**
- * @brief RMS_NORM_GAMMA_INIT Initialization Enumeration Information
- */
-class RMS_NORM_GAMMA_INIT final : public EnumProperty<InitializerInfo> {
-public:
-  /**
-   * @brief Construct a RMS_NORM_GAMMA_INIT object
-   */
-  RMS_NORM_GAMMA_INIT(Initializer value = Initializer::ONES) { set(value); };
-  using prop_tag = enum_class_prop_tag;
-  static constexpr const char *key = "gamma_initializer";
-};
-
 /**
  * @brief Enumeration of tensor regularization type
  */
diff --git a/nntrainer/layers/layer_normalization_layer.cpp b/nntrainer/layers/layer_normalization_layer.cpp
index a115e82b62..17b732ea81 100644
--- a/nntrainer/layers/layer_normalization_layer.cpp
+++ b/nntrainer/layers/layer_normalization_layer.cpp
@@ -38,9 +38,9 @@ enum LNParams {
 LayerNormalizationLayer::LayerNormalizationLayer() :
   Layer(),
-  layer_normalization_props(
-    std::vector<props::Axis>(), props::Epsilon(), props::BNPARAMS_GAMMA_INIT(),
-    props::BNPARAMS_BETA_INIT(), props::WeightDecay(), props::BiasDecay()) {
+  layer_normalization_props(std::vector<props::Axis>(), props::Epsilon(),
+                            props::GammaInitializer(), props::BetaInitializer(),
+                            props::WeightDecay(), props::BiasDecay()) {
   wt_idx.fill(std::numeric_limits<unsigned>::max());
 }
 
@@ -51,9 +51,9 @@ void LayerNormalizationLayer::finalize(InitLayerContext &context) {
   }
 
   auto gamma_initializer =
-    std::get<props::BNPARAMS_GAMMA_INIT>(layer_normalization_props).get();
+    std::get<props::GammaInitializer>(layer_normalization_props).get();
   auto beta_initializer =
-    std::get<props::BNPARAMS_BETA_INIT>(layer_normalization_props).get();
+    std::get<props::BetaInitializer>(layer_normalization_props).get();
   auto weight_decay = std::get<props::WeightDecay>(layer_normalization_props);
   auto bias_decay = std::get<props::BiasDecay>(layer_normalization_props);
 
diff --git a/nntrainer/layers/layer_normalization_layer.h b/nntrainer/layers/layer_normalization_layer.h
index 7511df0871..ba52b6caa8 100644
--- a/nntrainer/layers/layer_normalization_layer.h
+++ b/nntrainer/layers/layer_normalization_layer.h
@@ -124,9 +124,8 @@ class LayerNormalizationLayer : public Layer {
     remain_axes; /**< remained axes (exclusive with normalize axes) */
 
   std::array<unsigned int, 6> wt_idx;
-  std::tuple<std::vector<props::Axis>, props::Epsilon,
-             props::BNPARAMS_GAMMA_INIT, props::BNPARAMS_BETA_INIT,
-             props::WeightDecay, props::BiasDecay>
+  std::tuple<std::vector<props::Axis>, props::Epsilon, props::GammaInitializer,
+             props::BetaInitializer, props::WeightDecay, props::BiasDecay>
     layer_normalization_props;
 };
 
diff --git a/nntrainer/utils/node_exporter.cpp b/nntrainer/utils/node_exporter.cpp
index 031d2c2fbf..40cb945cda 100644
--- a/nntrainer/utils/node_exporter.cpp
+++ b/nntrainer/utils/node_exporter.cpp
@@ -144,10 +144,10 @@ void Exporter::saveTflResult(const std::tuple<> &props,
 
 template <>
 void Exporter::saveTflResult(
-  const std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT,
-                   props::BNPARAMS_VAR_INIT, props::BNPARAMS_BETA_INIT,
-                   props::BNPARAMS_GAMMA_INIT, props::Momentum, props::Axis,
-                   props::WeightDecay, props::BiasDecay> &props,
+  const std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
+                   props::BetaInitializer, props::GammaInitializer,
+                   props::Momentum, props::Axis, props::WeightDecay,
+                   props::BiasDecay> &props,
   const BatchNormalizationLayer *self) {
   createIfNull(tf_node);
 
diff --git a/nntrainer/utils/node_exporter.h b/nntrainer/utils/node_exporter.h
index de29cf77d9..8669017ced 100644
--- a/nntrainer/utils/node_exporter.h
+++ b/nntrainer/utils/node_exporter.h
@@ -258,10 +258,10 @@ class BatchNormalizationLayer;
  */
 template <>
 void Exporter::saveTflResult(
-  const std::tuple<props::Epsilon, props::BNPARAMS_MU_INIT,
-                   props::BNPARAMS_VAR_INIT, props::BNPARAMS_BETA_INIT,
-                   props::BNPARAMS_GAMMA_INIT, props::Momentum, props::Axis,
-                   props::WeightDecay, props::BiasDecay> &props,
+  const std::tuple<props::Epsilon, props::MuInitializer, props::VarInitializer,
+                   props::BetaInitializer, props::GammaInitializer,
+                   props::Momentum, props::Axis, props::WeightDecay,
+                   props::BiasDecay> &props,
   const BatchNormalizationLayer *self);
 
 class LayerImpl;
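
Reviewer note (not part of the patch): the rename only touches the C++ property
types. Every user-facing string key (moving_mean_initializer,
moving_variance_initializer, gamma_initializer, beta_initializer) is unchanged,
and the duplicate RMS_NORM_GAMMA_INIT class (same "gamma_initializer" key and
ONES default as the shared GammaInitializer) is dropped, so existing model
descriptions keep working. Below is a minimal standalone sketch of the
tuple-of-properties pattern the rename relies on; the structs are hypothetical
stand-ins, not nntrainer code. Properties are selected from the tuple by type,
so renaming a property class only requires updating the tuple declaration and
the std::get<T> call sites:

#include <iostream>
#include <string>
#include <tuple>

// Hypothetical stand-ins for props::GammaInitializer / props::Epsilon.
struct GammaInitializer {
  static constexpr const char *key = "gamma_initializer"; // user-facing key
  std::string value = "ones";
};
struct Epsilon {
  static constexpr const char *key = "epsilon";
  float value = 1.0e-3f;
};

int main() {
  // Mirrors rmsnorm_props: std::tuple<props::GammaInitializer, props::Epsilon>.
  std::tuple<GammaInitializer, Epsilon> props;
  // Select the element by *type*, like std::get<props::GammaInitializer>(
  // rmsnorm_props) above; the string key is untouched by the type rename.
  auto &gamma = std::get<GammaInitializer>(props);
  std::cout << gamma.key << " = " << gamma.value << '\n';
  return 0;
}

Because lookup is by type rather than index, any call site missed by the rename
fails to compile instead of silently reading the wrong tuple element.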