Skip to content

Commit

Permalink
[Application] Bug fix about meson setting
Browse files Browse the repository at this point in the history
Currently, the PicoGPT and LLaMA applications append to the `extra_defines` meson option on the application side.

However, even though this code is executed during the build, the definition is never reflected when the app actually runs.

This is because the application directories are configured after `extra_defines` has already been passed to `add_project_arguments`, so appending to `extra_defines` from an application's meson.build has no effect.

In addition, `add_project_arguments` cannot be called after the configuration step has finished, so adding entries to `extra_defines` at that point in the build is structurally wrong.

PicoGPT and LLaMA only added these `extra_defines` because the JSON-related encoder script does not run on Tizen. Therefore, a single JSON-related option (`-DENABLE_JSON`) is now added in the root meson.build for non-Tizen platforms, and the per-application options have been removed.

**Self evaluation:**
1. Build test:     [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
  • Loading branch information
baek2sm committed Jun 7, 2024
1 parent d50487a commit 85cc5e3
Show file tree
Hide file tree
Showing 5 changed files with 12 additions and 12 deletions.
12 changes: 6 additions & 6 deletions Applications/LLaMA/jni/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
#include <swiglu.h>
#include <transpose_layer.h>

#if defined(ENABLE_ENCODER2)
#if defined(ENABLE_JSON)
#include "json.hpp"
#include <codecvt>
#include <encoder.hpp>
Expand Down Expand Up @@ -572,7 +572,7 @@ void run(std::string text, bool apply_temperature) {

unsigned int init_len;

#if defined(ENABLE_ENCODER2)
#if defined(ENABLE_JSON)
std::string vocab_file_name = "../Applications/LLaMA/jni/vocab.json";
std::string merge_file_name = "../Applications/LLaMA/jni/merges.txt";

Expand Down Expand Up @@ -619,7 +619,7 @@ void run(std::string text, bool apply_temperature) {
std::cout << " Progress Reading: 100 % " << std::endl;
std::cout << std::endl << "### Output : " << std::endl;
if (init_len < INIT_SEQ_LEN) {
#if defined(ENABLE_ENCODER2)
#if defined(ENABLE_JSON)
auto decoded_str = tokenizer.decode({static_cast<int64_t>(ids)});
std::cout << decoded_str << " ";
std::cout.flush();
Expand All @@ -635,7 +635,7 @@ void run(std::string text, bool apply_temperature) {
input_sample[0] = static_cast<float>(init_input[i]);
} else {
input_sample[0] = static_cast<float>(ids);
#if defined(ENABLE_ENCODER2)
#if defined(ENABLE_JSON)
auto decoded_str = tokenizer.decode({static_cast<int64_t>(ids)});
std::cout << decoded_str << " ";
std::cout.flush();
Expand Down Expand Up @@ -684,7 +684,7 @@ void createAndRun(unsigned int epochs, unsigned int batch_size) {
g_model->load(weight_path);
}

#if defined(ENABLE_ENCODER2)
#if defined(ENABLE_JSON)
std::wstring decodeUnicodeEscape(const std::wstring &input) {
std::wstringstream result;

Expand Down Expand Up @@ -712,7 +712,7 @@ int main(int argc, char *argv[]) {
// Setting locale
std::locale::global(std::locale("ko_KR.UTF-8"));

#if defined(ENABLE_ENCODER2)
#if defined(ENABLE_JSON)
// Getting arguments From terminal
std::wstring input;
std::getline(std::wcin, input);
Expand Down
1 change: 0 additions & 1 deletion Applications/LLaMA/jni/meson.build
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ transpose_dep = declare_dependency(
)

if get_option('platform') != 'tizen'
extra_defines += '-DENABLE_ENCODER2=1'
run_command(meson.source_root() / 'jni' / 'prepare_encoder.sh', meson.build_root(), '0.2' ,check: true)
endif

Expand Down
8 changes: 4 additions & 4 deletions Applications/PicoGPT/jni/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
#include <string.h>
#include <tensor.h>

#if defined(ENABLE_ENCODER)
#if defined(ENABLE_JSON)
#include "encoder.hpp"
#endif

Expand All @@ -43,7 +43,7 @@ bool optimize = false;
// bool optimize = true;
bool optimize_attention = false;

#if defined(ENABLE_ENCODER)
#if defined(ENABLE_JSON)
template <typename T>
T unwrap(std::optional<T> &&value, const std::string &error_msg) {
if (value.has_value()) {
Expand Down Expand Up @@ -318,7 +318,7 @@ int main(int argc, char *argv[]) {

std::vector<int64_t> init_input;

#if defined(ENABLE_ENCODER)
#if defined(ENABLE_JSON)

std::string vocab_file_name = "../Applications/PicoGPT/jni/vocab.json";
std::string merge_file_name = "../Applications/PicoGPT/jni/merges.txt";
Expand Down Expand Up @@ -372,7 +372,7 @@ int main(int argc, char *argv[]) {

((uint *)(wpe_input))[0] = i;

#if defined(ENABLE_ENCODER)
#if defined(ENABLE_JSON)
std::vector<int64_t> token_ids;
for (auto element : ids) {
token_ids.push_back(static_cast<int64_t>(element));
Expand Down
1 change: 0 additions & 1 deletion Applications/PicoGPT/jni/meson.build
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ nntr_pico_gpt_resdir = nntr_app_resdir / 'PicoGPT'
run_command('cp', '-lr', res_path, nntr_pico_gpt_resdir)

if get_option('platform') != 'tizen'
extra_defines += '-DENABLE_ENCODER=1'
run_command(meson.source_root() / 'jni' / 'prepare_encoder.sh', meson.build_root(), '0.1' , check: true)
endif

Expand Down
2 changes: 2 additions & 0 deletions meson.build
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@ if get_option('platform') == 'tizen'
if get_option('enable-tizen-feature-check')
add_project_arguments('-D__FEATURE_CHECK_SUPPORT__', language: ['c', 'cpp'])
endif
else
add_project_arguments('-DENABLE_JSON=1', language: ['c', 'cpp'])
endif

warning_flags = [
Expand Down

0 comments on commit 85cc5e3

Please sign in to comment.