[GPU/OpenCL] Initial version of RMSNorm Layer
Added a naive OpenCL implementation of the RMSNorm layer.
Incorporated a kernel for the ops used.
Added a unit test for rmsnorm_layer_cl.
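
For reference, RMSNorm normalizes each feature vector by its root mean square, out_i = gamma_i * in_i / sqrt(mean(in^2) + epsilon), where gamma is a learned per-feature scale. Below is a minimal sketch of what a naive per-row kernel might look like, written as a C++ string constant in the style the repository uses for OpenCL kernels; the kernel name, signature, and work-item granularity are illustrative assumptions, not the exact kernel from this commit.

#include <string>

// Hypothetical sketch of a naive RMSNorm kernel: one work-item normalizes one
// row of `width` elements. A tuned version would parallelize the reduction.
static const std::string rmsnorm_cl_kernel_sketch =
  R"(__kernel void rmsnorm_cl(__global const float *input,
                              __global float *output,
                              __global const float *gamma,
                              const float epsilon, const int width) {
      int row = get_global_id(0);
      __global const float *in = input + row * width;
      __global float *out = output + row * width;

      /* mean of squares over the row */
      float sum_sq = 0.0f;
      for (int i = 0; i < width; ++i)
        sum_sq += in[i] * in[i];
      float inv_rms = rsqrt(sum_sq / width + epsilon);

      /* scale by 1/RMS and the learned per-feature gamma */
      for (int i = 0; i < width; ++i)
        out[i] = in[i] * inv_rms * gamma[i];
    })";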

Signed-off-by: ThummalaPallavi <[email protected]>
pallaviNNT committed Jun 20, 2024
1 parent 25b5117 commit 75f76e9
Showing 11 changed files with 679 additions and 1 deletion.
10 changes: 10 additions & 0 deletions api/ccapi/include/layer.h
@@ -98,6 +98,7 @@ enum LayerType {
LAYER_REDUCE_MEAN, /**< Reduce mean Layer type */
LAYER_LOSS_CONSTANT_DERIVATIVE, /**< Synthetic loss layer to feed constant
derivative */
LAYER_RMSNORM = ML_TRAIN_LAYER_TYPE_RMSNORM, /**< RMSNorm Layer type */
LAYER_UNKNOWN = ML_TRAIN_LAYER_TYPE_UNKNOWN /**< Unknown */
};

@@ -295,6 +296,15 @@ inline std::unique_ptr<Layer> FullyConnected(
return createLayer(LayerType::LAYER_FC, properties, compute_engine);
}

/**
* @brief Helper function to create RMS normalization layer for GPU
*/
inline std::unique_ptr<Layer> RMSNormCl(
const std::vector<std::string> &properties = {},
const LayerComputeEngine &compute_engine = LayerComputeEngine::GPU) {
return createLayer(LayerType::LAYER_RMSNORM, properties, compute_engine);
}

/**
* @brief Helper function to create batch normalization layer
*/
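A usage sketch for the new helper, assuming the ml::train namespace used by the surrounding ccapi creators; the "epsilon" property name and value are illustrative assumptions, not confirmed by this diff:

#include <layer.h>
#include <memory>

void build_model_sketch() {
  // Create the GPU RMSNorm layer via the new helper; properties are the usual
  // "key=value" strings. "epsilon" here is a hypothetical example property.
  std::unique_ptr<ml::train::Layer> rmsnorm =
    ml::train::RMSNormCl({"name=rms_norm0", "epsilon=0.001"});
}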
1 change: 1 addition & 0 deletions api/nntrainer-api-common.h
@@ -75,6 +75,7 @@ typedef enum {
Sigmoid Loss Layer type (Since 6.5) */
ML_TRAIN_LAYER_TYPE_LOSS_CROSS_ENTROPY_SOFTMAX = 502, /**< Cross Entropy with
Softmax Loss Layer type (Since 6.5) */
ML_TRAIN_LAYER_TYPE_RMSNORM = 503, /**< RMSNorm Layer type */
ML_TRAIN_LAYER_TYPE_UNKNOWN = 999 /**< Unknown Layer */
} ml_train_layer_type_e;

4 changes: 3 additions & 1 deletion nntrainer/cl_context.cpp
@@ -14,7 +14,7 @@

#include <cl_context.h>
#include <fc_layer_cl.h>

#include <rmsnorm_layer_cl.h>
namespace nntrainer {

std::mutex cl_factory_mutex;
@@ -26,6 +26,8 @@ static void add_default_object(ClContext &cc) {
cc.registerFactory(nntrainer::createLayer<FullyConnectedLayerCl>,
FullyConnectedLayerCl::type,
ml::train::LayerType::LAYER_FC);
cc.registerFactory(nntrainer::createLayer<RMSNormLayerCl>,
RMSNormLayerCl::type, ml::train::LayerType::LAYER_RMSNORM);
}

static void registerer(ClContext &cc) noexcept {
1 change: 1 addition & 0 deletions nntrainer/layers/cl_layers/meson.build
@@ -1,6 +1,7 @@
cl_layer_sources = [
'fc_layer_cl.cpp',
'blas_kernels.cpp',
'rmsnorm_layer_cl.cpp'
]

if get_option('enable-fp16')
Expand Down