Commit
[ Context ] Add Engine Class to manage Contexts
In order to extend the Context more easily, this PR adds an Engine class
to manage the Contexts. It also adds a Context class, which is the base
class of all Contexts.
 . Add Engine class
 . Add Context class
 . Set the default Context to the app context (the "cpu" AppContext)
 . Add pluggable Context support to the Engine
 . Some further code optimization; additional test code is still required.

**Self evaluation:**
1. Build test:	 [X]Passed [ ]Failed [ ]Skipped
2. Run test:	 [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <[email protected]>
jijoongmoon committed Jan 2, 2025
1 parent 869d348 commit dbbde82
Showing 36 changed files with 898 additions and 262 deletions.
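The recurring change in the application diffs below is that user code no longer grabs the AppContext singleton directly; it asks the global Engine for the registered "cpu" context and registers custom factories through the returned pointer. A minimal before/after sketch of that pattern, condensed from the LayerClient diff (the register_custom_layers wrapper is hypothetical; error handling is elided):

```cpp
#include <app_context.h>
#include <engine.h>

#include <pow.h> // custom::PowLayer from the LayerClient example

// Hypothetical helper condensing the registration pattern used in the diffs.
void register_custom_layers() {
  // Before this commit, the AppContext itself was the global registry:
  //   auto &app_context = nntrainer::AppContext::Global();
  //   app_context.registerFactory(nntrainer::createLayer<custom::PowLayer>);

  // After this commit, the Engine manages the contexts; the default one is
  // the "cpu" AppContext.
  auto &ct_engine = nntrainer::Engine::Global();
  auto app_context = static_cast<nntrainer::AppContext *>(
    ct_engine.getRegisteredContext("cpu"));
  app_context->registerFactory(nntrainer::createLayer<custom::PowLayer>);
}
```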
9 changes: 6 additions & 3 deletions Applications/Custom/LayerClient/jni/main.cpp
@@ -20,6 +20,7 @@

/// @todo Migrate this to api
#include <app_context.h>
+#include <engine.h>

#include <mae_loss.h>
#include <pow.h>
@@ -195,13 +196,15 @@ int main(int argc, char *argv[]) {
}

try {
-auto &app_context = nntrainer::AppContext::Global();
+auto &ct_engine = nntrainer::Engine::Global();
+auto app_context = static_cast<nntrainer::AppContext *>(
+ct_engine.getRegisteredContext("cpu"));
/// registering custom layer here
/// registerFactory excepts a function that returns unique_ptr<Layer> from
/// std::vector<std::string> ml::train::createLayer<T> is a templated
/// function for generic usage
-app_context.registerFactory(nntrainer::createLayer<custom::PowLayer>);
-app_context.registerFactory(nntrainer::createLayer<custom::MaeLossLayer>);
+app_context->registerFactory(nntrainer::createLayer<custom::PowLayer>);
+app_context->registerFactory(nntrainer::createLayer<custom::MaeLossLayer>);
} catch (std::invalid_argument &e) {
std::cerr << "failed to register factory, reason: " << e.what()
<< std::endl;
10 changes: 6 additions & 4 deletions Applications/LLaMA/jni/main.cpp
@@ -26,6 +26,7 @@

#include <app_context.h>
#include <custom_multi_head_attention_layer.h>
+#include <engine.h>
#include <rms_norm.h>
#include <rotary_embedding.h>
#include <swiglu.h>
@@ -723,18 +724,19 @@ int main(int argc, char *argv[]) {
#else
std::string text = "This is smaple input for LLaMA.";
#endif

-auto &app_context = nntrainer::AppContext::Global();
+auto &ct_engine = nntrainer::Engine::Global();
+auto app_context =
+static_cast<nntrainer::AppContext *>(ct_engine.getRegisteredContext("cpu"));
try {
-app_context.registerFactory(nntrainer::createLayer<custom::SwiGLULayer>);
+app_context->registerFactory(nntrainer::createLayer<custom::SwiGLULayer>);
} catch (std::invalid_argument &e) {
std::cerr << "failed to register factory, reason: " << e.what()
<< std::endl;
return 1;
}

try {
-app_context.registerFactory(nntrainer::createLayer<custom::RMSNormLayer>);
+app_context->registerFactory(nntrainer::createLayer<custom::RMSNormLayer>);
} catch (std::invalid_argument &e) {
std::cerr << "failed to register factory, reason: " << e.what()
<< std::endl;
7 changes: 5 additions & 2 deletions Applications/SimpleShot/task_runner.cpp
@@ -18,6 +18,7 @@
#include <unistd.h>

#include <app_context.h>
+#include <engine.h>
#include <model.h>
#include <nntrainer-api-common.h>

@@ -183,7 +184,9 @@ std::unique_ptr<ml::train::Model> createModel(const std::string &backbone,
* @return int
*/
int main(int argc, char **argv) {
-auto &app_context = nntrainer::AppContext::Global();
+auto &ct_engine = nntrainer::Engine::Global();
+auto app_context =
+static_cast<nntrainer::AppContext *>(ct_engine.getRegisteredContext("cpu"));

if (argc != 6 && argc != 5) {
std::cout
Expand Down Expand Up @@ -221,7 +224,7 @@ int main(int argc, char **argv) {
std::string val_path = app_path + "/tasks/" + argv[4];

try {
-app_context.registerFactory(
+app_context->registerFactory(
nntrainer::createLayer<simpleshot::layers::CenteringLayer>);
} catch (std::exception &e) {
std::cerr << "registering factory failed: " << e.what();
(file name not shown)
@@ -39,7 +39,7 @@
#include <stdlib.h>
#include <time.h>

-#include <app_context.h>
+#include <engine.h>
#include <neuralnet.h>
#include <tensor.h>

@@ -375,7 +375,7 @@ int main(int argc, char *argv[]) {

/// @todo add api version of this
try {
-nntrainer::AppContext::Global().setWorkingDirectory(data_path);
+nntrainer::Engine::Global().setWorkingDirectory(data_path);
} catch (std::invalid_argument &e) {
std::cerr << "setting data_path failed, pwd is used instead";
}
(file name not shown)
@@ -39,7 +39,7 @@

#include <bitmap_helpers.h>

-#include <app_context.h>
+#include <engine.h>
#define TRAINING true

/**
@@ -254,7 +254,7 @@ int main(int argc, char *argv[]) {

/// @todo add api version of this
try {
-nntrainer::AppContext::Global().setWorkingDirectory(data_path);
+nntrainer::Engine::Global().setWorkingDirectory(data_path);
} catch (std::invalid_argument &e) {
std::cerr << "setting data_path failed, pwd is used instead";
}
11 changes: 8 additions & 3 deletions Applications/YOLOv2/jni/main.cpp
@@ -21,6 +21,7 @@

#include <app_context.h>
#include <det_dataloader.h>
+#include <engine.h>
#include <layer.h>
#include <model.h>
#include <optimizer.h>
@@ -285,9 +286,13 @@ int main(int argc, char *argv[]) {
<< std::endl;

try {
-auto &app_context = nntrainer::AppContext::Global();
-app_context.registerFactory(nntrainer::createLayer<custom::ReorgLayer>);
-app_context.registerFactory(
+auto &ct_engine = nntrainer::Engine::Global();
+
+auto app_context = static_cast<nntrainer::AppContext *>(
+ct_engine.getRegisteredContext("cpu"));
+
+app_context->registerFactory(nntrainer::createLayer<custom::ReorgLayer>);
+app_context->registerFactory(
nntrainer::createLayer<custom::YoloV2LossLayer>);
} catch (std::invalid_argument &e) {
std::cerr << "failed to register factory, reason: " << e.what()
14 changes: 10 additions & 4 deletions Applications/YOLOv3/jni/main.cpp
@@ -20,6 +20,7 @@

#include <app_context.h>
#include <det_dataloader.h>
+#include <engine.h>
#include <layer.h>
#include <model.h>
#include <optimizer.h>
@@ -402,17 +403,22 @@ int main(int argc, char *argv[]) {
<< std::endl;

try {
-auto &app_context = nntrainer::AppContext::Global();
-app_context.registerFactory(nntrainer::createLayer<custom::UpsampleLayer>);
+auto &ct_engine = nntrainer::Engine::Global();
+auto app_context = static_cast<nntrainer::AppContext *>(
+ct_engine.getRegisteredContext("cpu"));
+
+app_context->registerFactory(nntrainer::createLayer<custom::UpsampleLayer>);
} catch (std::invalid_argument &e) {
std::cerr << "failed to register factory, reason: " << e.what()
<< std::endl;
return 1;
}

try {
-auto &app_context = nntrainer::AppContext::Global();
-app_context.registerFactory(
+auto &ct_engine = nntrainer::Engine::Global();
+auto app_context = static_cast<nntrainer::AppContext *>(
+ct_engine.getRegisteredContext("cpu"));
+app_context->registerFactory(
nntrainer::createLayer<custom::YoloV3LossLayer>);
} catch (std::invalid_argument &e) {
std::cerr << "failed to register yolov3 loss, reason: " << e.what()
10 changes: 5 additions & 5 deletions api/ccapi/src/factory.cpp
@@ -15,9 +15,9 @@
#include <string>
#include <vector>

-#include <app_context.h>
#include <databuffer.h>
#include <databuffer_factory.h>
+#include <engine.h>
#include <layer.h>
#include <model.h>
#include <neuralnet.h>
@@ -127,8 +127,8 @@ createDataset(DatasetType type, datagen_cb cb, void *user_data,
std::unique_ptr<ml::train::LearningRateScheduler>
createLearningRateScheduler(const LearningRateSchedulerType &type,
const std::vector<std::string> &properties) {
-auto &ac = nntrainer::AppContext::Global();
-return ac.createObject<ml::train::LearningRateScheduler>(type, properties);
+auto &eg = nntrainer::Engine::Global();
+return eg.createLearningRateSchedulerObject(type, properties);
}

/**
Expand All @@ -137,8 +137,8 @@ createLearningRateScheduler(const LearningRateSchedulerType &type,
std::unique_ptr<ml::train::LearningRateScheduler>
createLearningRateScheduler(const std::string &type,
const std::vector<std::string> &properties) {
-auto &ac = nntrainer::AppContext::Global();
-return ac.createObject<ml::train::LearningRateScheduler>(type, properties);
+auto &eg = nntrainer::Engine::Global();
+return eg.createLearningRateSchedulerObject(type, properties);
}

std::string getVersion() {
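The ccapi signatures above are unchanged; only the delegation target moves from AppContext::createObject<T>() to the Engine's typed createLearningRateSchedulerObject(), so existing callers keep working. A hedged usage sketch — the <lr_scheduler.h> include path and the "constant" type and property strings are illustrative assumptions, not taken from this diff:

```cpp
#include <lr_scheduler.h> // ccapi header assumed to declare the factory below

#include <memory>

int main() {
  // After this commit the call resolves through
  // Engine::Global().createLearningRateSchedulerObject(); the call site
  // itself does not change.
  std::unique_ptr<ml::train::LearningRateScheduler> lrs =
    ml::train::createLearningRateScheduler("constant", {"learning_rate=0.01"});
  return lrs == nullptr;
}
```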
2 changes: 2 additions & 0 deletions debian/nntrainer-dev.install
@@ -31,6 +31,8 @@
/usr/include/nntrainer/loss_layer.h
# custom layer kits
/usr/include/nntrainer/app_context.h
+/usr/include/nntrainer/context.h
+/usr/include/nntrainer/engine.h
# logger
/usr/include/nntrainer/nntrainer_log.h
/usr/include/nntrainer/nntrainer_logger.h
8 changes: 2 additions & 6 deletions nntrainer/app_context.cpp
@@ -409,12 +409,8 @@ static void registerer(AppContext &ac) noexcept {
};

AppContext &AppContext::Global() {
-static AppContext instance;
-/// in g++ there is a bug that hangs up if caller throws,
-/// so registerer is noexcept although it'd better not
-/// https://gcc.gnu.org/bugzilla/show_bug.cgi?id=70298
-std::call_once(global_app_context_init_flag, registerer, std::ref(instance));
-return instance;
+registerer(*this);
+return *this;
}

void AppContext::setWorkingDirectory(const std::string &base) {
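With the function-local static and call_once removed, AppContext::Global() only runs the registerer on the current instance, which implies that the Engine now owns and hands out the context objects. A rough sketch of that implied relationship — Engine::Global(), getRegisteredContext() and setWorkingDirectory() appear in the diffs above, while registerContext() and the map layout are assumptions, since engine.h is not part of this excerpt:

```cpp
// Sketch only; the real class lives in nntrainer/engine.h.
#include <string>
#include <unordered_map>

namespace sketch {

class Context; // base class of all contexts; see the reconstruction further below

class Engine {
public:
  static Engine &Global(); // process-wide engine, as used in the diffs

  // Assumed hook behind the "pluggable support" mentioned in the commit message.
  void registerContext(const std::string &name, Context *ctx) {
    contexts[name] = ctx;
  }

  // Used throughout the diffs to fetch the default "cpu" AppContext.
  Context *getRegisteredContext(const std::string &name) {
    return contexts.at(name);
  }

  // Moved up from AppContext (see the setWorkingDirectory diffs above).
  void setWorkingDirectory(const std::string &base);

private:
  std::unordered_map<std::string, Context *> contexts; // ownership model unknown
};

} // namespace sketch
```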
72 changes: 47 additions & 25 deletions nntrainer/app_context.h
@@ -31,6 +31,7 @@
#include <optimizer.h>
#include <optimizer_devel.h>

+#include <context.h>
#include <nntrainer_error.h>

namespace nntrainer {
@@ -42,45 +43,26 @@ namespace {} // namespace
* @class AppContext contains user-dependent configuration
* @brief App
*/
-class AppContext {
+class AppContext : public Context {
public:
-using PropsType = std::vector<std::string>;
-
-template <typename T> using PtrType = std::unique_ptr<T>;
-
-template <typename T>
-using FactoryType = std::function<PtrType<T>(const PropsType &)>;
-
-template <typename T>
-using PtrFactoryType = PtrType<T> (*)(const PropsType &);
-template <typename T>
-using StrIndexType = std::unordered_map<std::string, FactoryType<T>>;
-
-/** integer to string key */
-using IntIndexType = std::unordered_map<int, std::string>;
-
/**
-* This type contains tuple of
-* 1) integer -> string index
-* 2) string -> factory index
+* @brief Default constructor
*/
-template <typename T>
-using IndexType = std::tuple<StrIndexType<T>, IntIndexType>;

-template <typename... Ts> using FactoryMap = std::tuple<IndexType<Ts>...>;
+AppContext() = default;

/**
-* @brief Default constructor
+* @brief Default destructor
*/
-AppContext() = default;
+~AppContext() override = default;

/**
*
* @brief Get Global app context.
*
* @return AppContext&
*/
-static AppContext &Global();
+AppContext &Global();

/**
* @brief Set Working Directory for a relative path. working directory is set
Expand Down Expand Up @@ -201,6 +183,44 @@ class AppContext {
const std::string &key = "",
const int int_key = -1);

+std::unique_ptr<nntrainer::Layer>
+createLayerObject(const std::string &type,
+const std::vector<std::string> &properties = {}) override {
+return createObject<nntrainer::Layer>(type, properties);
+}
+
+std::unique_ptr<nntrainer::Optimizer> createOptimizerObject(
+const std::string &type,
+const std::vector<std::string> &properties = {}) override {
+return createObject<nntrainer::Optimizer>(type, properties);
+}
+
+std::unique_ptr<ml::train::LearningRateScheduler>
+createLearningRateSchedulerObject(
+const std::string &type,
+const std::vector<std::string> &properties = {}) override {
+return createObject<ml::train::LearningRateScheduler>(type, properties);
+}
+
+std::unique_ptr<nntrainer::Layer>
+createLayerObject(const int int_key,
+const std::vector<std::string> &properties = {}) override {
+return createObject<nntrainer::Layer>(int_key, properties);
+}
+
+std::unique_ptr<nntrainer::Optimizer> createOptimizerObject(
+const int int_key,
+const std::vector<std::string> &properties = {}) override {
+return createObject<nntrainer::Optimizer>(int_key, properties);
+}
+
+std::unique_ptr<ml::train::LearningRateScheduler>
+createLearningRateSchedulerObject(
+const int int_key,
+const std::vector<std::string> &properties = {}) override {
+return createObject<ml::train::LearningRateScheduler>(int_key, properties);
+}

/**
* @brief Create an Object from the integer key
*
Expand Down Expand Up @@ -271,6 +291,8 @@ class AppContext {
throw std::invalid_argument("cannot create unknown object");
}

+std::string getName() override { return "cpu"; }

private:
FactoryMap<nntrainer::Optimizer, nntrainer::Layer,
ml::train::LearningRateScheduler>
(Diffs for the remaining changed files are not shown.)
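The override methods added to app_context.h outline the interface of the new Context base class. A hedged reconstruction from those overrides alone — context.h is not included in this excerpt, so the exact declarations (and whether the base methods are pure virtual) may differ:

```cpp
// Inferred from the `override` specifiers in app_context.h; not the verbatim
// contents of context.h.
#include <memory>
#include <string>
#include <vector>

namespace nntrainer {
class Layer;
class Optimizer;
} // namespace nntrainer

namespace ml::train {
class LearningRateScheduler;
} // namespace ml::train

namespace sketch {

class Context {
public:
  virtual ~Context() = default;

  /// factories keyed by string type
  virtual std::unique_ptr<nntrainer::Layer>
  createLayerObject(const std::string &type,
                    const std::vector<std::string> &properties = {}) = 0;
  virtual std::unique_ptr<nntrainer::Optimizer>
  createOptimizerObject(const std::string &type,
                        const std::vector<std::string> &properties = {}) = 0;
  virtual std::unique_ptr<ml::train::LearningRateScheduler>
  createLearningRateSchedulerObject(
    const std::string &type,
    const std::vector<std::string> &properties = {}) = 0;

  /// factories keyed by integer enum value
  virtual std::unique_ptr<nntrainer::Layer>
  createLayerObject(const int int_key,
                    const std::vector<std::string> &properties = {}) = 0;
  virtual std::unique_ptr<nntrainer::Optimizer>
  createOptimizerObject(const int int_key,
                        const std::vector<std::string> &properties = {}) = 0;
  virtual std::unique_ptr<ml::train::LearningRateScheduler>
  createLearningRateSchedulerObject(
    const int int_key,
    const std::vector<std::string> &properties = {}) = 0;

  /// backend name used as the Engine lookup key; AppContext returns "cpu"
  virtual std::string getName() = 0;
};

} // namespace sketch
```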
