# meson_options.txt (forked from nnstreamer/nntrainer)
option('platform', type: 'combo', choices: ['none', 'tizen', 'yocto', 'android'], value: 'none')
option('enable-app', type: 'boolean', value: true)
option('install-app', type: 'boolean', value: true)
option('use_gym', type: 'boolean', value: false)
option('enable-capi', type: 'feature', value: 'auto')
option('enable-ccapi', type: 'boolean', value: true)
option('enable-test', type: 'boolean', value: true)
option('enable-logging', type: 'boolean', value: true)
option('enable-tizen-feature-check', type: 'boolean', value: true)
option('enable-nnstreamer-backbone', type: 'boolean', value: false)
option('enable-tflite-backbone', type: 'boolean', value: true)
option('enable-profile', type: 'boolean', value: false)
option('enable-trace', type: 'boolean', value: false)
option('enable-debug', type: 'boolean', value: false)
option('enable-tflite-interpreter', type: 'boolean', value: true)
option('enable-memory-swap', type: 'boolean', value: false)
option('memory-swap-path', type: 'string', value: '')
option('test-timeout', type: 'integer', value: 60)
option('opencl-kernel-path', type: 'string', value: 'nntrainer_opencl_kernels')
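# Usage sketch (not part of the upstream file): the defaults above can be
# overridden at configure time with -D flags; "build" is only an example
# build-directory name, e.g.
#   meson setup build -Dplatform=tizen -Denable-debug=true
#   meson configure build -Dtest-timeout=120 -Denable-profile=true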
# dependency conflict resolution
option('capi-ml-inference-actual', type: 'string', value: 'capi-ml-inference',
       description: 'backward compatible dependency name of capi-ml-inference. ignored if ml-api-support is disabled.')
option('capi-ml-common-actual', type: 'string', value: 'capi-ml-common',
       description: 'backward compatible dependency name of capi-ml-common. ignored if ml-api-support is disabled.')
option('tizen-version-major', type: 'integer', min : 4, max : 9999, value: 9999) # 9999 means "not Tizen"
option('tizen-version-minor', type: 'integer', min : 0, max : 9999, value: 0)
option('openblas-num-threads', type: 'integer', min : 0, max : 9999, value: 0)
# This is for multi-threading in nntrainer
option('nntr-num-threads', type: 'integer', min : 0, max : 9999, value: 1)
option('omp-num-threads', type: 'integer', min : 0, max : 9999, value: 1)
option('hgemm-experimental-kernel', type: 'boolean', value: false)
# test related option
option('reduce-tolerance', type: 'boolean', value: true)
option('enable-long-test', type: 'boolean', value: false)
# backend options
option('enable-blas', type: 'boolean', value: true)
option('enable-fp16', type: 'boolean', value: false)
option('enable-cublas', type: 'boolean', value: false)
option('enable-openmp', type: 'boolean', value: true)
option('enable-neon', type: 'boolean', value: false)
option('enable-avx', type: 'boolean', value: true)
option('enable-opencl', type: 'boolean', value: false)
option('enable-biqgemm', type: 'boolean', value: false)
option('enable-benchmarks', type: 'boolean', value : false)
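# Example combination (a sketch, not an upstream recommendation): enabling the
# OpenCL backend together with FP16 support might look like
#   meson configure build -Denable-opencl=true -Denable-fp16=true
# Whether a given combination builds depends on the toolchain and hardware.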
# ml-api dependency (to enable, install capi-inference from github.com/nnstreamer/api)
# To interoperate with nnstreamer and ML-API packages, you need to enable this.
# If this is disabled, the related options (capi-ml-*) are ignored; see the example after this block.
option('ml-api-support', type: 'feature', value: 'auto')
# @todo : make them use 'feature' and depend on ml-api-support
option('enable-nnstreamer-tensor-filter', type: 'feature', value: 'auto')
option('enable-nnstreamer-tensor-trainer', type: 'feature', value: 'auto')
option('nnstreamer-subplugin-install-path', type: 'string', value: '/usr/lib/nnstreamer') # where nnstreamer subplugin should be installed
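# Usage note (sketch): unlike the boolean options, feature-type options take
# enabled/disabled/auto rather than true/false, so forcing ML-API integration
# on could look like the following ("build" is only an example directory name):
#   meson setup build -Dml-api-support=enabled -Denable-nnstreamer-tensor-filter=enabled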
# application related options
option('enable_encoder', type: 'boolean', value: false)