[Application] Bug fix about meson setting
Currently, the PicoGPT and LLaMA applications append the encoder define to the `extra_defines` meson variable on the application side.

However, even though this code is executed during the build, the definition is never reflected when the app is actually built and run.

This is because the application area is configured after `extra_defines` has already been applied through `add_project_arguments` in the root meson file, so appending to `extra_defines` from an application's meson.build has no effect.

In addition, `add_project_arguments` cannot be called once build targets have been declared, so adding to `extra_defines` at that point in the build process is structurally wrong.

The reason PicoGPT and LLaMA added `extra_defines` in the first place is that the encoder preparation script does not run on Tizen. The encoder-related option and the script invocation have therefore been moved to the root meson file, and the application-side options have been removed.
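
As a rough illustration of the intended pattern, a minimal sketch is shown below. It is not the exact content of the project files; the `platform` option, the `ENABLE_ENCODER` define, and the `prepare_encoder.sh` path are simply taken from this change.

    # Root meson.build: project-wide defines must be added here,
    # before any build target is declared.
    if get_option('platform') != 'tizen'
      # Every C/C++ target in the project now sees this macro.
      add_project_arguments('-DENABLE_ENCODER=1', language: ['c', 'cpp'])
      # Fetch the encoder headers once, at configure time.
      run_command(meson.source_root() / 'jni' / 'prepare_encoder.sh', meson.build_root(), check: true)
    endif

    # Applications/<app>/jni/meson.build: the old, broken pattern.
    # By the time this file is processed, the root meson file has already
    # passed extra_defines to add_project_arguments, so appending here is a no-op.
    # extra_defines += '-DENABLE_ENCODER=1'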

**Self evaluation:**
1. Build test:     [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <[email protected]>
baek2sm committed Jun 11, 2024
1 parent d50487a commit ffa3177
Showing 6 changed files with 18 additions and 29 deletions.
12 changes: 6 additions & 6 deletions Applications/LLaMA/jni/main.cpp
@@ -30,7 +30,7 @@
#include <swiglu.h>
#include <transpose_layer.h>

-#if defined(ENABLE_ENCODER2)
+#if defined(DENABLE_ENCODER)
#include "json.hpp"
#include <codecvt>
#include <encoder.hpp>
@@ -572,7 +572,7 @@ void run(std::string text, bool apply_temperature) {

unsigned int init_len;

-#if defined(ENABLE_ENCODER2)
+#if defined(DENABLE_ENCODER)
std::string vocab_file_name = "../Applications/LLaMA/jni/vocab.json";
std::string merge_file_name = "../Applications/LLaMA/jni/merges.txt";

@@ -619,7 +619,7 @@ void run(std::string text, bool apply_temperature) {
std::cout << " Progress Reading: 100 % " << std::endl;
std::cout << std::endl << "### Output : " << std::endl;
if (init_len < INIT_SEQ_LEN) {
-#if defined(ENABLE_ENCODER2)
+#if defined(DENABLE_ENCODER)
auto decoded_str = tokenizer.decode({static_cast<int64_t>(ids)});
std::cout << decoded_str << " ";
std::cout.flush();
@@ -635,7 +635,7 @@
input_sample[0] = static_cast<float>(init_input[i]);
} else {
input_sample[0] = static_cast<float>(ids);
-#if defined(ENABLE_ENCODER2)
+#if defined(DENABLE_ENCODER)
auto decoded_str = tokenizer.decode({static_cast<int64_t>(ids)});
std::cout << decoded_str << " ";
std::cout.flush();
@@ -684,7 +684,7 @@ void createAndRun(unsigned int epochs, unsigned int batch_size) {
g_model->load(weight_path);
}

-#if defined(ENABLE_ENCODER2)
+#if defined(DENABLE_ENCODER)
std::wstring decodeUnicodeEscape(const std::wstring &input) {
std::wstringstream result;

@@ -712,7 +712,7 @@ int main(int argc, char *argv[]) {
// Setting locale
std::locale::global(std::locale("ko_KR.UTF-8"));

-#if defined(ENABLE_ENCODER2)
+#if defined(DENABLE_ENCODER)
// Getting arguments From terminal
std::wstring input;
std::getline(std::wcin, input);
5 changes: 0 additions & 5 deletions Applications/LLaMA/jni/meson.build
@@ -12,11 +12,6 @@ transpose_dep = declare_dependency(
include_directories: include_directories('./')
)

-if get_option('platform') != 'tizen'
-  extra_defines += '-DENABLE_ENCODER2=1'
-  run_command(meson.source_root() / 'jni' / 'prepare_encoder.sh', meson.build_root(), '0.2', check: true)
-endif

rms_norm_src = files('rms_norm.cpp')
rms_norm_layer = shared_library('rms_norm',
rms_norm_src,
8 changes: 4 additions & 4 deletions Applications/PicoGPT/jni/main.cpp
@@ -17,7 +17,7 @@
#include <string.h>
#include <tensor.h>

-#if defined(ENABLE_ENCODER)
+#if defined(DENABLE_ENCODER)
#include "encoder.hpp"
#endif

@@ -43,7 +43,7 @@ bool optimize = false;
// bool optimize = true;
bool optimize_attention = false;

-#if defined(ENABLE_ENCODER)
+#if defined(DENABLE_ENCODER)
template <typename T>
T unwrap(std::optional<T> &&value, const std::string &error_msg) {
if (value.has_value()) {
@@ -318,7 +318,7 @@ int main(int argc, char *argv[]) {

std::vector<int64_t> init_input;

-#if defined(ENABLE_ENCODER)
+#if defined(DENABLE_ENCODER)

std::string vocab_file_name = "../Applications/PicoGPT/jni/vocab.json";
std::string merge_file_name = "../Applications/PicoGPT/jni/merges.txt";
@@ -372,7 +372,7 @@ int main(int argc, char *argv[]) {

((uint *)(wpe_input))[0] = i;

-#if defined(ENABLE_ENCODER)
+#if defined(DENABLE_ENCODER)
std::vector<int64_t> token_ids;
for (auto element : ids) {
token_ids.push_back(static_cast<int64_t>(element));
5 changes: 0 additions & 5 deletions Applications/PicoGPT/jni/meson.build
@@ -2,11 +2,6 @@ res_path = meson.current_source_dir() / '..' / 'res'
nntr_pico_gpt_resdir = nntr_app_resdir / 'PicoGPT'
run_command('cp', '-lr', res_path, nntr_pico_gpt_resdir)

-if get_option('platform') != 'tizen'
-  extra_defines += '-DENABLE_ENCODER=1'
-  run_command(meson.source_root() / 'jni' / 'prepare_encoder.sh', meson.build_root(), '0.1', check: true)
-endif

pico_gpt_sources = [
'main.cpp',
]
14 changes: 5 additions & 9 deletions jni/prepare_encoder.sh
@@ -43,15 +43,11 @@ function _untar_encoder {
tar -zxvf ${TAR_NAME} -C ${TARGET}
rm -f ${TAR_NAME}

-if [ ${TARGET_VERSION} == "0.1" ]; then
-  mv -f ctre-unicode.hpp json.hpp encoder.hpp ../Applications/PicoGPT/jni/
-  echo "[Encoder] Finish moving encoder to PicoGPT\n"
-fi
-
-if [ ${TARGET_VERSION} == "0.2" ]; then
-  mv -f ctre-unicode.hpp json.hpp encoder.hpp ../Applications/LLaMA/jni/
-  echo "[Encoder] Finish moving encoder to LLaMA\n"
-fi
+cp -f ctre-unicode.hpp json.hpp encoder.hpp ../Applications/PicoGPT/jni/
+echo "[Encoder] Finish moving encoder to PicoGPT\n"
+
+mv -f ctre-unicode.hpp json.hpp encoder.hpp ../Applications/LLaMA/jni/
+echo "[Encoder] Finish moving encoder to LLaMA\n"

}

3 changes: 3 additions & 0 deletions meson.build
@@ -34,6 +34,9 @@ if get_option('platform') == 'tizen'
if get_option('enable-tizen-feature-check')
add_project_arguments('-D__FEATURE_CHECK_SUPPORT__', language: ['c', 'cpp'])
endif
+else
+  add_project_arguments('-DENABLE_ENCODER=1', language: ['c', 'cpp'])
+  run_command(meson.source_root() / 'jni' / 'prepare_encoder.sh', meson.build_root(), check: true)
endif

warning_flags = [
