From edececa2171ed2d0cb7d3c202aaac36b64aa6be1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?M=C3=A5ns=20Nilsson?= Date: Thu, 9 Nov 2023 14:09:46 +0100 Subject: [PATCH] Add 4-bit weight support to depthwise conv (#83) Unit tests are aligned with existing fully connected int4 unit tests. --- ARM.CMSIS-NN.pdsc | 4 + Include/arm_nnfunctions.h | 180 +++- Include/arm_nnsupportfunctions.h | 45 +- README.md | 31 +- Source/ConvolutionFunctions/CMakeLists.txt | 5 +- .../arm_depthwise_conv_get_buffer_sizes_s4.c | 82 ++ .../arm_depthwise_conv_get_buffer_sizes_s8.c | 7 +- .../arm_depthwise_conv_s4.c | 276 ++++++ .../arm_depthwise_conv_s4_opt.c | 534 ++++++++++ .../arm_depthwise_conv_wrapper_s4.c | 98 ++ Tests/UnitTest/CMakeLists.txt | 2 + .../Common/dw_s4_weights_template.json | 131 +++ .../TestData/depthwise_dilation/biases_data.h | 6 +- .../TestData/depthwise_dilation/config_data.h | 36 +- .../TestData/depthwise_dilation/input_data.h | 14 +- .../depthwise_dilation/output_mult_data.h | 7 +- .../depthwise_dilation/output_ref_data.h | 12 +- .../depthwise_dilation/output_shift_data.h | 6 +- .../TestData/depthwise_dilation/test_data.h | 4 +- .../depthwise_dilation/weights_data.h | 12 +- .../depthwise_eq_in_out_ch/biases_data.h | 40 +- .../depthwise_eq_in_out_ch/config_data.h | 6 +- .../depthwise_eq_in_out_ch/input_data.h | 926 +++++++++--------- .../depthwise_eq_in_out_ch/output_mult_data.h | 60 +- .../depthwise_eq_in_out_ch/output_ref_data.h | 766 +++++++-------- .../output_shift_data.h | 26 +- .../depthwise_eq_in_out_ch/test_data.h | 4 +- .../depthwise_eq_in_out_ch/weights_data.h | 110 +-- .../TestData/depthwise_int4_1/biases_data.h | 6 + .../TestData/depthwise_int4_1/config_data.h | 25 + .../TestData/depthwise_int4_1/input_data.h | 33 + .../depthwise_int4_1/output_mult_data.h | 9 + .../depthwise_int4_1/output_ref_data.h | 27 + .../depthwise_int4_1/output_shift_data.h | 7 + .../TestData/depthwise_int4_1/test_data.h | 9 + .../TestData/depthwise_int4_1/weights_data.h | 8 + .../TestData/depthwise_int4_2/biases_data.h | 6 + .../TestData/depthwise_int4_2/config_data.h | 25 + .../TestData/depthwise_int4_2/input_data.h | 42 + .../depthwise_int4_2/output_mult_data.h | 24 + .../depthwise_int4_2/output_ref_data.h | 10 + .../depthwise_int4_2/output_shift_data.h | 7 + .../TestData/depthwise_int4_2/test_data.h | 9 + .../TestData/depthwise_int4_2/weights_data.h | 18 + .../TestData/depthwise_int4_3/biases_data.h | 6 + .../TestData/depthwise_int4_3/config_data.h | 25 + .../TestData/depthwise_int4_3/input_data.h | 6 + .../depthwise_int4_3/output_mult_data.h | 6 + .../depthwise_int4_3/output_ref_data.h | 6 + .../depthwise_int4_3/output_shift_data.h | 6 + .../TestData/depthwise_int4_3/test_data.h | 9 + .../TestData/depthwise_int4_3/weights_data.h | 6 + .../TestData/depthwise_int4_4/biases_data.h | 6 + .../TestData/depthwise_int4_4/config_data.h | 25 + .../TestData/depthwise_int4_4/input_data.h | 9 + .../depthwise_int4_4/output_mult_data.h | 6 + .../depthwise_int4_4/output_ref_data.h | 6 + .../depthwise_int4_4/output_shift_data.h | 6 + .../TestData/depthwise_int4_4/test_data.h | 9 + .../TestData/depthwise_int4_4/weights_data.h | 6 + .../depthwise_int4_generic/biases_data.h | 6 + .../depthwise_int4_generic/config_data.h | 25 + .../depthwise_int4_generic/input_data.h | 33 + .../depthwise_int4_generic/output_mult_data.h | 7 + .../depthwise_int4_generic/output_ref_data.h | 17 + .../output_shift_data.h | 6 + .../depthwise_int4_generic/test_data.h | 9 + .../depthwise_int4_generic/weights_data.h | 19 + 
.../depthwise_int4_generic_2/biases_data.h | 7 + .../depthwise_int4_generic_2/config_data.h | 25 + .../depthwise_int4_generic_2/input_data.h | 18 + .../output_mult_data.h | 7 + .../output_ref_data.h | 20 + .../output_shift_data.h | 6 + .../depthwise_int4_generic_2/test_data.h | 9 + .../depthwise_int4_generic_2/weights_data.h | 13 + .../depthwise_int4_generic_3/biases_data.h | 7 + .../depthwise_int4_generic_3/config_data.h | 25 + .../depthwise_int4_generic_3/input_data.h | 24 + .../output_mult_data.h | 7 + .../output_ref_data.h | 6 + .../output_shift_data.h | 6 + .../depthwise_int4_generic_3/test_data.h | 9 + .../depthwise_int4_generic_3/weights_data.h | 11 + .../depthwise_int4_generic_4/biases_data.h | 6 + .../depthwise_int4_generic_4/config_data.h | 25 + .../depthwise_int4_generic_4/input_data.h | 12 + .../output_mult_data.h | 6 + .../output_ref_data.h | 13 + .../output_shift_data.h | 6 + .../depthwise_int4_generic_4/test_data.h | 9 + .../depthwise_int4_generic_4/weights_data.h | 8 + .../fully_connected_int4/biases_data.h | 6 +- .../fully_connected_int4/config_data.h | 4 +- .../fully_connected_int4/input_data.h | 6 +- .../fully_connected_int4/output_ref_data.h | 6 +- .../TestData/fully_connected_int4/test_data.h | 4 +- .../fully_connected_int4/weights_data.h | 6 +- .../test_arm_depthwise_conv_s4/CMakeLists.txt | 23 + .../Unity/unity_test_arm_depthwise_conv_s4.c | 50 + .../test_arm_depthwise_conv_s4.c | 400 ++++++++ .../CMakeLists.txt | 23 + .../unity_test_arm_depthwise_conv_s4_opt.c | 50 + .../test_arm_depthwise_conv_s4_opt.c | 490 +++++++++ .../test_arm_fully_connected_s8.c | 2 +- Tests/UnitTest/conv_settings.py | 234 +++-- Tests/UnitTest/fully_connected_settings.py | 163 +-- Tests/UnitTest/generate_test_data.py | 186 +++- Tests/UnitTest/test_settings.py | 18 +- 109 files changed, 4693 insertions(+), 1223 deletions(-) create mode 100644 Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s4.c create mode 100644 Source/ConvolutionFunctions/arm_depthwise_conv_s4.c create mode 100644 Source/ConvolutionFunctions/arm_depthwise_conv_s4_opt.c create mode 100644 Source/ConvolutionFunctions/arm_depthwise_conv_wrapper_s4.c create mode 100644 Tests/UnitTest/TestCases/Common/dw_s4_weights_template.json create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_1/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_2/test_data.h create mode 100644 
Tests/UnitTest/TestCases/TestData/depthwise_int4_2/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_3/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_4/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_shift_data.h create mode 100644 
Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/weights_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/biases_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/config_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/input_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_mult_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_ref_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_shift_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/test_data.h create mode 100644 Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/weights_data.h create mode 100644 Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/CMakeLists.txt create mode 100644 Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/Unity/unity_test_arm_depthwise_conv_s4.c create mode 100644 Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/test_arm_depthwise_conv_s4.c create mode 100644 Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/CMakeLists.txt create mode 100644 Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/Unity/unity_test_arm_depthwise_conv_s4_opt.c create mode 100644 Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/test_arm_depthwise_conv_s4_opt.c diff --git a/ARM.CMSIS-NN.pdsc b/ARM.CMSIS-NN.pdsc index 0c5adfe5..fde9ed8c 100644 --- a/ARM.CMSIS-NN.pdsc +++ b/ARM.CMSIS-NN.pdsc @@ -45,6 +45,10 @@ + + + + diff --git a/Include/arm_nnfunctions.h b/Include/arm_nnfunctions.h index eb7a6881..9189325b 100644 --- a/Include/arm_nnfunctions.h +++ b/Include/arm_nnfunctions.h @@ -21,8 +21,8 @@ * Title: arm_nnfunctions.h * Description: Public header file for CMSIS NN Library * - * $Date: 27 October 2023 - * $Revision: V.12.2.0 + * $Date: 7 November 2023 + * $Revision: V.12.3.0 * * Target : Arm(R) M-Profile Architecture * -------------------------------------------------------------------- */ @@ -743,6 +743,49 @@ arm_cmsis_nn_status arm_depthwise_conv_wrapper_s8(const cmsis_nn_context *ctx, const cmsis_nn_dims *output_dims, int8_t *output_data); +/** + * @brief Wrapper function to pick the right optimized s4 depthwise convolution function + * + * @param[in, out] ctx Function context (e.g. temporary buffer). Check the function + * definition file to see if an additional buffer is required. + * Optional function {API}_get_buffer_size() provides the buffer + * size if required. + * The caller is expected to clear the buffer ,if applicable, for security reasons. + * @param[in] dw_conv_params Depthwise convolution parameters (e.g. strides, dilations, pads,...) + * dw_conv_params->dilation is not used. + * Range of dw_conv_params->input_offset : [-127, 128] + * Range of dw_conv_params->output_offset : [-128, 127] + * @param[in] quant_params Per-channel quantization info. + * It contains the multiplier and shift values to be applied to each + * output channel + * @param[in] input_dims Input (activation) tensor dimensions. Format: [H, W, C_IN] + * Batch argument N is not used and assumed to be 1. + * @param[in] input_data Input (activation) data pointer. Data type: int8 + * @param[in] filter_dims Filter tensor dimensions. Format: [1, H, W, C_OUT] + * @param[in] filter_data Filter data pointer. 
Data type: int8_t packed 4-bit weights, e.g. four sequential + * weights [0x1, 0x2, 0x3, 0x4] packed as [0x21, 0x43]. + * @param[in] bias_dims Bias tensor dimensions. Format: [C_OUT] + * @param[in] bias_data Bias data pointer. Data type: int32 + * @param[in] output_dims Output tensor dimensions. Format: [1, H, W, C_OUT] + * @param[in, out] output_data Output data pointer. Data type: int8 + * @return The function returns + * ARM_CMSIS_NN_SUCCESS - Successful completion. + * + * @details + * - Supported framework: TensorFlow Lite + */ +arm_cmsis_nn_status arm_depthwise_conv_wrapper_s4(const cmsis_nn_context *ctx, + const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_per_channel_quant_params *quant_params, + const cmsis_nn_dims *input_dims, + const int8_t *input_data, + const cmsis_nn_dims *filter_dims, + const int8_t *filter_data, + const cmsis_nn_dims *bias_dims, + const int32_t *bias_data, + const cmsis_nn_dims *output_dims, + int8_t *output_data); + /** * @brief Get size of additional buffer required by arm_depthwise_conv_wrapper_s8() * @@ -787,6 +830,50 @@ int32_t arm_depthwise_conv_wrapper_s8_get_buffer_size_mve(const cmsis_nn_dw_conv const cmsis_nn_dims *filter_dims, const cmsis_nn_dims *output_dims); +/** + * @brief Get size of additional buffer required by arm_depthwise_conv_wrapper_s4() + * + * @param[in] dw_conv_params Depthwise convolution parameters (e.g. strides, dilations, pads,...) + * Range of dw_conv_params->input_offset : [-127, 128] + * Range of dw_conv_params->output_offset : [-128, 127] + * @param[in] input_dims Input (activation) tensor dimensions. Format: [H, W, C_IN] + * Batch argument N is not used and assumed to be 1. + * @param[in] filter_dims Filter tensor dimensions. Format: [1, H, W, C_OUT] + * @param[in] output_dims Output tensor dimensions. Format: [1, H, W, C_OUT] + * @return Size of additional memory required for optimizations in bytes. + * + */ +int32_t arm_depthwise_conv_wrapper_s4_get_buffer_size(const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims, + const cmsis_nn_dims *output_dims); + +/** + * @brief Get size of additional buffer required by arm_depthwise_conv_wrapper_s4() for processors with DSP extension. + * Refer to arm_depthwise_conv_wrapper_s4_get_buffer_size() for function argument details. + * + * @note Intended for compilation on Host. If compiling for an Arm target, use + * arm_depthwise_conv_wrapper_s4_get_buffer_size(). + * + */ +int32_t arm_depthwise_conv_wrapper_s4_get_buffer_size_dsp(const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims, + const cmsis_nn_dims *output_dims); + +/** + * @brief Get size of additional buffer required by arm_depthwise_conv_wrapper_s4() for Arm(R) Helium Architecture case. + * Refer to arm_depthwise_conv_wrapper_s4_get_buffer_size() for function argument details. + * + * @note Intended for compilation on Host. If compiling for an Arm target, use + * arm_depthwise_conv_wrapper_s4_get_buffer_size(). + * + */ +int32_t arm_depthwise_conv_wrapper_s4_get_buffer_size_mve(const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims, + const cmsis_nn_dims *output_dims); + /** * @brief Basic s8 depthwise convolution function that doesn't have any constraints on the input dimensions.
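The packed format referenced above stores two signed 4-bit weights per byte (e.g. [0x1, 0x2, 0x3, 0x4] becomes [0x21, 0x43]). For reference, a minimal sketch of unpacking one such byte using the same sign-extension idiom the kernels in this patch use; unpack_s4_pair is an illustrative helper, not a library API:

/* Illustrative helper (not part of CMSIS-NN): split one packed byte into two signed 4-bit weights. */
static inline void unpack_s4_pair(const int8_t packed, int8_t *first, int8_t *second)
{
    *first = (int8_t)(packed << 4) >> 4; /* low nibble = earlier weight, sign-extended */
    *second = packed >> 4;               /* high nibble = later weight, arithmetic shift sign-extends */
}
/* Example: unpack_s4_pair(0x21, &w0, &w1) yields w0 = 0x1 and w1 = 0x2. */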
* @@ -828,6 +915,48 @@ arm_cmsis_nn_status arm_depthwise_conv_s8(const cmsis_nn_context *ctx, const cmsis_nn_dims *output_dims, int8_t *output_data); +/** + * @brief Basic s4 depthwise convolution function that doesn't have any constraints on the input dimensions. + * + * @param[in, out] ctx Function context (e.g. temporary buffer). Check the function + * definition file to see if an additional buffer is required. + * Optional function {API}_get_buffer_size() provides the buffer + * size if an additional buffer is required. + * The caller is expected to clear the buffer, if applicable, for security reasons. + * @param[in] dw_conv_params Depthwise convolution parameters (e.g. strides, dilations, pads,...) + * dw_conv_params->dilation is not used. + * Range of dw_conv_params->input_offset : [-127, 128] + * Range of dw_conv_params->output_offset : [-128, 127] + * @param[in] quant_params Per-channel quantization info. + * It contains the multiplier and shift values to be applied to each + * output channel + * @param[in] input_dims Input (activation) tensor dimensions. Format: [N, H, W, C_IN] + * Batch argument N is not used. + * @param[in] input Input (activation) data pointer. Data type: int8 + * @param[in] filter_dims Filter tensor dimensions. Format: [1, H, W, C_OUT] + * @param[in] kernel Filter data pointer. Data type: int8_t packed 4-bit weights, e.g. four sequential + * weights [0x1, 0x2, 0x3, 0x4] packed as [0x21, 0x43]. + * @param[in] bias_dims Bias tensor dimensions. Format: [C_OUT] + * @param[in] bias Bias data pointer. Data type: int32 + * @param[in] output_dims Output tensor dimensions. Format: [N, H, W, C_OUT] + * @param[in, out] output Output data pointer. Data type: int8 + * @return The function returns ARM_CMSIS_NN_SUCCESS + * + * @details + * - Supported framework: TensorFlow Lite + */ +arm_cmsis_nn_status arm_depthwise_conv_s4(const cmsis_nn_context *ctx, + const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_per_channel_quant_params *quant_params, + const cmsis_nn_dims *input_dims, + const int8_t *input, + const cmsis_nn_dims *filter_dims, + const int8_t *kernel, + const cmsis_nn_dims *bias_dims, + const int32_t *bias, + const cmsis_nn_dims *output_dims, + int8_t *output); + /** * @brief Basic s16 depthwise convolution function that doesn't have any constraints on the input dimensions. * @@ -1064,6 +1193,40 @@ arm_cmsis_nn_status arm_depthwise_conv_s8_opt(const cmsis_nn_context *ctx, const cmsis_nn_dims *output_dims, int8_t *output_data); +/** + * @brief Optimized s4 depthwise convolution function with constraint that in_channel equals out_channel. + * Refer to arm_depthwise_conv_s4() for function argument details. + * + * @return The function returns one of the following + * ARM_CMSIS_NN_ARG_ERROR - input channel != output channel or + * ch_mult != 1 + * ARM_CMSIS_NN_SUCCESS - Successful operation + * + * @note If the number of channels is not a multiple of 4, up to 3 elements outside the boundary will be read + * for the following if MVE optimizations (Arm Helium Technology) are used. + * - Output shift + * - Output multiplier + * - Output bias + * - kernel + * @details + * - Supported framework: TensorFlow Lite + * - The following constraints on the arguments apply + * -# Number of input channels equals number of output channels or ch_mult equals 1 + * - Recommended when number of channels is 4 or greater.
+ * + */ +arm_cmsis_nn_status arm_depthwise_conv_s4_opt(const cmsis_nn_context *ctx, + const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_per_channel_quant_params *quant_params, + const cmsis_nn_dims *input_dims, + const int8_t *input_data, + const cmsis_nn_dims *filter_dims, + const int8_t *filter_data, + const cmsis_nn_dims *bias_dims, + const int32_t *bias_data, + const cmsis_nn_dims *output_dims, + int8_t *output_data); + /** * @brief Get the required buffer size for optimized s8 depthwise convolution * function with constraint that in_channel equals out_channel. @@ -1075,6 +1238,17 @@ arm_cmsis_nn_status arm_depthwise_conv_s8_opt(const cmsis_nn_context *ctx, */ int32_t arm_depthwise_conv_s8_opt_get_buffer_size(const cmsis_nn_dims *input_dims, const cmsis_nn_dims *filter_dims); +/** + * @brief Get the required buffer size for optimized s4 depthwise convolution + * function with constraint that in_channel equals out_channel. + * @param[in] input_dims Input (activation) tensor dimensions. Format: [1, H, W, C_IN] + * Batch argument N is not used. + * @param[in] filter_dims Filter tensor dimensions. Format: [1, H, W, C_OUT] + * @return The function returns required buffer size in bytes + * + */ +int32_t arm_depthwise_conv_s4_opt_get_buffer_size(const cmsis_nn_dims *input_dims, const cmsis_nn_dims *filter_dims); + /** * @defgroup FC Fully-connected Layer Functions * @@ -1110,7 +1284,7 @@ int32_t arm_depthwise_conv_s8_opt_get_buffer_size(const cmsis_nn_dims *input_dim * C : output depth and equals C_OUT in output_dims * H & W : Not used * @param[in] filter_data Filter data pointer. Data type: int8_t packed 4-bit weights, e.g four sequential - * weights [0x1, 0x2, 0x3, 0x4] packed as [0x21, 0x43]. + * weights [0x1, 0x2, 0x3, 0x4] packed as [0x21, 0x43]. * @param[in] bias_dims Bias tensor dimensions. Format: [C_OUT] * N, H, W : Not used * @param[in] bias_data Bias data pointer. Data type: int32 diff --git a/Include/arm_nnsupportfunctions.h b/Include/arm_nnsupportfunctions.h index 4a349d5c..63fdd4ea 100644 --- a/Include/arm_nnsupportfunctions.h +++ b/Include/arm_nnsupportfunctions.h @@ -21,8 +21,8 @@ * Title: arm_nnsupportfunctions.h * Description: Public header file of support functions for CMSIS NN Library * - * $Date: 3 November 2023 - * $Revision: V.17.4.0 + * $Date: 7 November 2023 + * $Revision: V.17.5.0 * * Target : Arm(R) M-Profile Architecture * -------------------------------------------------------------------- */ @@ -160,8 +160,23 @@ void arm_q7_to_q15_with_offset(const int8_t *src, int16_t *dst, int32_t block_si * */ void arm_s8_to_s16_unordered_with_offset(const int8_t *src, int16_t *dst, int32_t block_size, int16_t offset); + #endif +/** + * @brief Get the required buffer size for optimized s8 depthwise convolution + * function with constraint that in_channel equals out_channel. + * This is for processors with DSP extension. + * Refer to arm_depthwise_conv_s8_opt_get_buffer_size() for function argument details. + * + * @note Intended for compilation on Host. If compiling for an Arm target, use + * arm_depthwise_conv_s8_opt_get_buffer_size(). Note also this is a support function, + * so not recommended to call directly even on Host. + * + */ +int32_t arm_depthwise_conv_s8_opt_get_buffer_size_dsp(const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims); + /** * @brief Depthwise conv on an im2col buffer where the input channel equals output channel. 
* @param[in] row pointer to row @@ -777,14 +792,34 @@ __STATIC_FORCEINLINE void arm_memset_s8(int8_t *dst, const int8_t val, uint32_t /** * @brief read and expand one s4 word into two s8 words. */ -__STATIC_FORCEINLINE const int8_t *read_and_pad_s4(const int8_t *source, int32_t *out1, int32_t *out2) - +__STATIC_FORCEINLINE void read_and_pad_s4(const int8_t *source, int32_t *out1, int32_t *out2) { int16_t in = arm_nn_read_s8x2(source); int32_t inA = (in & 0x00FF) | ((in & 0xFF00) << 8); + *out1 = SXTB16_RORn(__sxtb16(inA << 4), 4); *out2 = SXTB16_RORn(__sxtb16(inA), 4); - return source; +} + +/** + * @brief read and expand one s4 word into two s8 words. + * @details The s4 elements are not evenly aligned on the byte boundary, so 3 bytes need to be read instead of 2. + * In other words, the first nibble to read starts in the middle of a byte. + * byte index, s4 element + * 0, s4_x + * 0, s4_0 + * 1, s4_1 + * 1, s4_2 + * 2, s4_3 + * 2, s4_x + */ +__STATIC_FORCEINLINE void read_and_pad_s4_uneven(const int8_t *source, int32_t *out1, int32_t *out2) +{ + int32_t inA1 = (source[0] & 0xFF) | ((source[1] & 0xFF) << 16); + int32_t inA2 = (source[1] & 0xFF) | ((source[2] & 0xFF) << 16); + + *out1 = SXTB16_RORn(__sxtb16(inA2 << 4), 4); + *out2 = SXTB16_RORn(__sxtb16(inA1), 4); } /** diff --git a/README.md b/README.md index 4cf7d50a..75525a91 100644 --- a/README.md +++ b/README.md @@ -23,20 +23,21 @@ processors here are Cortex-M4 or a Cortex-M33 configured with optional DSP exten Processors with Arm Helium Technology use the Arm M-profile Vector Extension(MVE) instructions for optimization. Examples are Cortex-M55 or Cortex-M85 configured with MVE.
-| Operator | C <br> int8 | C <br> int16 | DSP <br> int8 | DSP <br> int16 | MVE <br> int8 | MVE <br> int16 |
-| --------------- | ----------- | ---------- | ----------- | ------------ | ----------- | ------------ |
-| Conv2D | Yes | Yes | Yes | Yes | Yes | Yes |
-| DepthwiseConv2D | Yes | Yes | Yes | Yes | Yes | Yes |
-| TransposeConv2D | Yes | No | No | No | No | No |
-| Fully Connected | Yes | Yes | Yes | Yes | Yes | Yes |
-| Add | Yes | Yes | Yes | Yes | Yes | Yes |
-| Mul | Yes | Yes | Yes | Yes | Yes | Yes |
-| MaxPooling | Yes | Yes | Yes | Yes | Yes | Yes |
-| AvgPooling | Yes | Yes | Yes | Yes | Yes | Yes |
-| Softmax | Yes | Yes | Yes | Yes | Yes | No |
-| LSTM | Yes | NA | Yes | NA | Yes | NA |
-| SVDF | Yes | No | Yes | No | Yes | No |
-
+| Operator | C <br> int8 | C <br> int16 | C <br> int4* | DSP <br> int8 | DSP <br> int16 | DSP <br> int4* | MVE <br> int8 | MVE <br> int16 |
+| --------------- | ----------- | ---------- | ----------- | ------------| -------------| -------------| ------------| -------------|
+| Conv2D | Yes | Yes | No | Yes | Yes | No | Yes | Yes |
+| DepthwiseConv2D | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes |
+| TransposeConv2D | Yes | No | No | No | No | No | No | No |
+| Fully Connected | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes |
+| Add | Yes | Yes | N/A | Yes | Yes | N/A | Yes | Yes |
+| Mul | Yes | Yes | N/A | Yes | Yes | N/A | Yes | Yes |
+| MaxPooling | Yes | Yes | N/A | Yes | Yes | N/A | Yes | Yes |
+| AvgPooling | Yes | Yes | N/A | Yes | Yes | N/A | Yes | Yes |
+| Softmax | Yes | Yes | N/A | Yes | Yes | N/A | Yes | No |
+| LSTM | Yes | NA | No | Yes | NA | No | Yes | NA |
+| SVDF | Yes | No | No | Yes | No | No | Yes | No |
+
+* int4 weights + int8 activations
## Contribution Guideline First, a thank you for the contribution. Here are some guidelines and good to know information to get started. @@ -62,7 +63,7 @@ the unit tests. ### Version & Date Each File has a version number and a date field that must be updated when making any change to that file. The versioning -follows Semantic Versioning 2.0.0 format. +follows Semantic Versioning 2.0.0 format. For details check: https://semver.org/ ## Building CMSIS-NN as a library It is recommended to use toolchain files from [Arm Ethos-U Core Platform](https://review.mlplatform.org/admin/repos/ml/ethos-u/ethos-u-core-platform) project. These are supporting TARGET_CPU, which is a required argument. Note that if not specifying TARGET_CPU, these toolchains will set some default. The format must be TARGET_CPU=cortex-mXX, see examples below. diff --git a/Source/ConvolutionFunctions/CMakeLists.txt b/Source/ConvolutionFunctions/CMakeLists.txt index 9da3389c..be028d1e 100644 --- a/Source/ConvolutionFunctions/CMakeLists.txt +++ b/Source/ConvolutionFunctions/CMakeLists.txt @@ -1,5 +1,5 @@ # -# Copyright (c) 2019-2022 Arm Limited. +# SPDX-FileCopyrightText: Copyright 2019-2023 Arm Limited and/or its affiliates # # SPDX-License-Identifier: Apache-2.0 # @@ -16,6 +16,7 @@ # limitations under the License. # +file(GLOB SRC_S4 "./*_s4*.c") file(GLOB SRC "./*_s8*.c") file(GLOB SRC_S16 "./*_s16*.c") -target_sources(cmsis-nn PRIVATE ${SRC} ${SRC_S16}) +target_sources(cmsis-nn PRIVATE ${SRC} ${SRC_S16} ${SRC_S4}) diff --git a/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s4.c b/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s4.c new file mode 100644 index 00000000..452a863f --- /dev/null +++ b/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s4.c @@ -0,0 +1,82 @@ +/* + * SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +/* ---------------------------------------------------------------------- + * Project: CMSIS NN Library + * Title: arm_depthwise_conv_get_buffer_sizes_s4.c + * Description: Collection of get buffer size functions for the various s4 depthwise convolution layer functions. + * + * $Date: 30 October 2023 + * $Revision: V.1.0.0 + * + * Target : Arm(R) M-Profile Architecture + * + * -------------------------------------------------------------------- */ + +#include "arm_nnfunctions.h" +#include "arm_nnsupportfunctions.h" + +/** + * @ingroup NNConv + */ + +/** + * @addtogroup GetBufferSizeNNConv + * @{ + */ + +int32_t arm_depthwise_conv_s4_opt_get_buffer_size(const cmsis_nn_dims *input_dims, const cmsis_nn_dims *filter_dims) +{ + return arm_depthwise_conv_s8_opt_get_buffer_size_dsp(input_dims, filter_dims); +} + +int32_t arm_depthwise_conv_wrapper_s4_get_buffer_size(const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims, + const cmsis_nn_dims *output_dims) +{ + int32_t size = 0; + + if (input_dims->c == output_dims->c && input_dims->n == 1 && dw_conv_params->dilation.w == 1 && + dw_conv_params->dilation.h == 1) + { + size = arm_depthwise_conv_s4_opt_get_buffer_size(input_dims, filter_dims); + } + + return size; +} + +int32_t arm_depthwise_conv_wrapper_s4_get_buffer_size_dsp(const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims, + const cmsis_nn_dims *output_dims) +{ + return arm_depthwise_conv_wrapper_s4_get_buffer_size(dw_conv_params, input_dims, filter_dims, output_dims); +} + +int32_t arm_depthwise_conv_wrapper_s4_get_buffer_size_mve(const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_dims *input_dims, + const cmsis_nn_dims *filter_dims, + const cmsis_nn_dims *output_dims) +{ + return arm_depthwise_conv_wrapper_s4_get_buffer_size(dw_conv_params, input_dims, filter_dims, output_dims); +} + +/** + * @} end of GetBufferSizeNNConv group + */ diff --git a/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s8.c b/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s8.c index 6afa82f4..94a8f520 100644 --- a/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s8.c +++ b/Source/ConvolutionFunctions/arm_depthwise_conv_get_buffer_sizes_s8.c @@ -21,8 +21,8 @@ * Title: arm_depthwise_conv_get_buffer_sizes_s8.c * Description: Collection of get buffer size functions for the various s8 convolution layer functions.
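The buffer-size functions above size the scratch memory that the s4 wrapper hands to arm_depthwise_conv_s4_opt(). As a usage illustration, here is a minimal sketch of the query-allocate-call sequence; every dimension, offset and buffer value below is a made-up placeholder (only the function and struct names come from this patch):

#include <string.h>
#include "arm_nnfunctions.h"

void depthwise_conv_s4_example(void)
{
    /* Placeholder tensors: 16x16 input with 8 channels, 3x3 depthwise kernel, ch_mult = 1. */
    static int8_t input_data[16 * 16 * 8];
    static int8_t packed_weights[(3 * 3 * 8) / 2]; /* two s4 weights per byte */
    static int32_t bias_data[8];
    static int8_t output_data[16 * 16 * 8];
    static int32_t output_mult[8];  /* per-channel requantization multipliers */
    static int32_t output_shift[8]; /* per-channel requantization shifts */

    const cmsis_nn_dims input_dims = {1, 16, 16, 8};  /* [N, H, W, C_IN] */
    const cmsis_nn_dims filter_dims = {1, 3, 3, 8};   /* [1, H, W, C_OUT] */
    const cmsis_nn_dims bias_dims = {1, 1, 1, 8};
    const cmsis_nn_dims output_dims = {1, 16, 16, 8}; /* [N, H, W, C_OUT], 'same' padding assumed */

    cmsis_nn_dw_conv_params dw_conv_params;
    dw_conv_params.ch_mult = 1;
    dw_conv_params.stride.w = 1;
    dw_conv_params.stride.h = 1;
    dw_conv_params.padding.w = 1;
    dw_conv_params.padding.h = 1;
    dw_conv_params.dilation.w = 1;
    dw_conv_params.dilation.h = 1;
    dw_conv_params.input_offset = 128; /* placeholder zero-point offsets */
    dw_conv_params.output_offset = 0;
    dw_conv_params.activation.min = -128;
    dw_conv_params.activation.max = 127;

    cmsis_nn_per_channel_quant_params quant_params;
    quant_params.multiplier = output_mult;
    quant_params.shift = output_shift;

    /* With ch_mult == 1, batch == 1 and no dilation the optimized path applies, so the query returns
       input_dims.c * filter_dims.w * filter_dims.h * sizeof(int16_t) = 8 * 3 * 3 * 2 = 144 bytes. */
    static int16_t scratch_buffer[8 * 3 * 3];
    cmsis_nn_context ctx;
    ctx.size = arm_depthwise_conv_wrapper_s4_get_buffer_size(&dw_conv_params, &input_dims, &filter_dims, &output_dims);
    ctx.buf = scratch_buffer;
    memset(scratch_buffer, 0, sizeof(scratch_buffer)); /* caller clears the buffer, as the header notes */

    arm_cmsis_nn_status status = arm_depthwise_conv_wrapper_s4(&ctx, &dw_conv_params, &quant_params,
                                                               &input_dims, input_data, &filter_dims,
                                                               packed_weights, &bias_dims, bias_data,
                                                               &output_dims, output_data);
    (void)status; /* ARM_CMSIS_NN_SUCCESS on completion */
}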
* - * $Date: 20 January 2023 - * $Revision: V.1.0.0 + * $Date: 30 October 2023 + * $Revision: V.1.1.0 * * Target : Arm(R) M-Profile Architecture * @@ -47,8 +47,7 @@ __STATIC_INLINE int32_t arm_depthwise_conv_s8_opt_get_buffer_size_mve(const cmsi return (4 * CH_IN_BLOCK_MVE * filter_dims->w * filter_dims->h) * (int32_t)sizeof(int8_t); } -__STATIC_INLINE int32_t arm_depthwise_conv_s8_opt_get_buffer_size_dsp(const cmsis_nn_dims *input_dims, - const cmsis_nn_dims *filter_dims) +int32_t arm_depthwise_conv_s8_opt_get_buffer_size_dsp(const cmsis_nn_dims *input_dims, const cmsis_nn_dims *filter_dims) { return (input_dims->c * filter_dims->w * filter_dims->h) * sizeof(int16_t); } diff --git a/Source/ConvolutionFunctions/arm_depthwise_conv_s4.c b/Source/ConvolutionFunctions/arm_depthwise_conv_s4.c new file mode 100644 index 00000000..de9e7476 --- /dev/null +++ b/Source/ConvolutionFunctions/arm_depthwise_conv_s4.c @@ -0,0 +1,276 @@ +/* + * SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* ---------------------------------------------------------------------- + * Project: CMSIS NN Library + * Title: arm_depthwise_conv_s4.c + * Description: s4 version of depthwise convolution. 
+ * + * $Date: 31 October 2023 + * $Revision: V.1.0.0 + * + * Target : Arm(R) M-Profile Architecture + * + * -------------------------------------------------------------------- */ + +#include "arm_nnfunctions.h" +#include "arm_nnsupportfunctions.h" + +/** + * @ingroup Public + */ + +/** + * @addtogroup NNConv + * @{ + */ + +static void depthwise_conv_s4_generic(const int8_t *input, + const int32_t input_batches, + const int32_t input_x, + const int32_t input_y, + const int32_t input_ch, + const int8_t *kernel, + const int32_t output_ch, + const int32_t ch_mult, + const int32_t kernel_x, + const int32_t kernel_y, + const int32_t pad_x, + const int32_t pad_y, + const int32_t stride_x, + const int32_t stride_y, + const int32_t *bias, + int8_t *output, + const int32_t *output_shift, + const int32_t *output_mult, + const int32_t output_x, + const int32_t output_y, + const int32_t output_offset, + const int32_t input_offset, + const int32_t output_activation_min, + const int32_t output_activation_max, + const int32_t dilation_x, + const int32_t dilation_y) + +{ + (void)output_ch; + int i_out = 0; + int i_batch; + + const int32_t kernel_index_offset = input_ch >> 1; + + for (i_batch = 0; i_batch < input_batches; i_batch++) + { + for (int i_out_y = 0; i_out_y < output_y; i_out_y++) + { + const int16_t base_idx_y = (i_out_y * stride_y) - pad_y; + for (int i_out_x = 0; i_out_x < output_x; i_out_x++) + { + const int16_t base_idx_x = (i_out_x * stride_x) - pad_x; + int idx_out_ch_s4 = 0; + int get_low_nibble = 1; + + for (int i_input_ch = 0; i_input_ch < input_ch; i_input_ch++) + { + for (int i_ch_mult = 0; i_ch_mult < ch_mult; i_ch_mult++) + { + const int idx_out_ch = i_ch_mult + i_input_ch * ch_mult; + if (idx_out_ch && (idx_out_ch % 2 == 0)) + { + idx_out_ch_s4++; + } + + int16_t kernel_index_offset_uneven = 0; + int32_t acc_0 = 0; + + int ker_y_start; + int ker_x_start; + int ker_y_end; + int ker_x_end; + + if (dilation_x > 1) + { + const int32_t start_x_max = (-base_idx_x + dilation_x - 1) / dilation_x; + ker_x_start = MAX(0, start_x_max); + const int32_t end_min_x = (input_x - base_idx_x + dilation_x - 1) / dilation_x; + ker_x_end = MIN(kernel_x, end_min_x); + } + else + { + ker_x_start = MAX(0, -base_idx_x); + ker_x_end = MIN(kernel_x, input_x - base_idx_x); + } + + if (dilation_y > 1) + { + const int32_t start_y_max = (-base_idx_y + dilation_y - 1) / dilation_y; + ker_y_start = MAX(0, start_y_max); + const int32_t end_min_y = (input_y - base_idx_y + dilation_y - 1) / dilation_y; + ker_y_end = MIN(kernel_y, end_min_y); + } + else + { + ker_y_start = MAX(0, -base_idx_y); + ker_y_end = MIN(kernel_y, input_y - base_idx_y); + } + + if (bias) + { + acc_0 = bias[idx_out_ch]; + } + + if (input_ch % 2) + { + for (int i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + dilation_y * i_ker_y; + for (int i_ker_x = ker_x_start; i_ker_x < ker_x_end; i_ker_x++) + { + int8_t ker_val; + const int32_t idx_x = base_idx_x + dilation_x * i_ker_x; + int32_t idx_0 = (idx_y * input_x + idx_x) * input_ch + i_input_ch; + int32_t ker_idx_0 = + (i_ker_y * kernel_x + i_ker_x) * (kernel_index_offset * ch_mult) + + idx_out_ch_s4 + kernel_index_offset_uneven; + + if (get_low_nibble) + { + get_low_nibble = 0; + ker_val = ((int8_t)(kernel[ker_idx_0] << 4) >> 4); + } + else + { + ker_val = (kernel[ker_idx_0] >> 4); + get_low_nibble = 1; + kernel_index_offset_uneven++; + } + + acc_0 += (input[idx_0] + input_offset) * ker_val; + } + } + if ((kernel_x * kernel_y) % 2) + { + 
kernel_index_offset_uneven++; + get_low_nibble = !get_low_nibble; + } + } + else + { + for (int i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + dilation_y * i_ker_y; + for (int i_ker_x = ker_x_start; i_ker_x < ker_x_end; i_ker_x++) + { + int8_t ker_val; + const int32_t idx_x = base_idx_x + dilation_x * i_ker_x; + int32_t idx_0 = (idx_y * input_x + idx_x) * input_ch + i_input_ch; + int32_t ker_idx_0 = + (i_ker_y * kernel_x + i_ker_x) * (kernel_index_offset * ch_mult) + + idx_out_ch_s4; + + if (get_low_nibble) + { + ker_val = ((int8_t)(kernel[ker_idx_0] << 4) >> 4); + } + else + { + ker_val = (kernel[ker_idx_0] >> 4); + } + + acc_0 += (input[idx_0] + input_offset) * ker_val; + } + } + } + get_low_nibble = !get_low_nibble; + + /* Requantize and clamp output to provided range */ + acc_0 = arm_nn_requantize(acc_0, output_mult[idx_out_ch], output_shift[idx_out_ch]); + acc_0 += output_offset; + acc_0 = MAX(acc_0, output_activation_min); + acc_0 = MIN(acc_0, output_activation_max); + + output[i_out++] = acc_0; + } + } + } + } + + /* Advance to the next batch */ + input += (input_x * input_y * input_ch); + } +} + +/* + * Basic s4 depthwise convolution function. + * + * Refer header file for details. + * Optimization using DSP extension is not available for the generic case where channel multiplier is > 1. + * + */ +arm_cmsis_nn_status arm_depthwise_conv_s4(const cmsis_nn_context *ctx, + const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_per_channel_quant_params *quant_params, + const cmsis_nn_dims *input_dims, + const int8_t *input, + const cmsis_nn_dims *filter_dims, + const int8_t *kernel, + const cmsis_nn_dims *bias_dims, + const int32_t *bias, + const cmsis_nn_dims *output_dims, + int8_t *output) +{ + (void)bias_dims; + (void)ctx; + + const int32_t dilation_x = dw_conv_params->dilation.w; + const int32_t dilation_y = dw_conv_params->dilation.h; + + depthwise_conv_s4_generic(input, + input_dims->n, + input_dims->w, + input_dims->h, + input_dims->c, + kernel, + output_dims->c, + dw_conv_params->ch_mult, + filter_dims->w, + filter_dims->h, + dw_conv_params->padding.w, + dw_conv_params->padding.h, + dw_conv_params->stride.w, + dw_conv_params->stride.h, + bias, + output, + quant_params->shift, + quant_params->multiplier, + output_dims->w, + output_dims->h, + dw_conv_params->output_offset, + dw_conv_params->input_offset, + dw_conv_params->activation.min, + dw_conv_params->activation.max, + dilation_x, + dilation_y); + + /* Return to application */ + return ARM_CMSIS_NN_SUCCESS; +} + +/** + * @} end of NNConv group + */ diff --git a/Source/ConvolutionFunctions/arm_depthwise_conv_s4_opt.c b/Source/ConvolutionFunctions/arm_depthwise_conv_s4_opt.c new file mode 100644 index 00000000..253e29d0 --- /dev/null +++ b/Source/ConvolutionFunctions/arm_depthwise_conv_s4_opt.c @@ -0,0 +1,534 @@ +/* + * SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* ---------------------------------------------------------------------- + * Project: CMSIS NN Library + * Title: arm_depthwise_conv_s4_opt.c + * Description: Optimized s4 depthwise separable convolution function for + * channel multiplier of 1. + * + * $Date: 31 October 2023 + * $Revision: V.1.0.0 + * + * Target : Arm(R) M-Profile Architecture + * + * -------------------------------------------------------------------- */ + +#include "arm_nnfunctions.h" +#include "arm_nnsupportfunctions.h" + +/** + * @ingroup Public + */ + +/** + * @addtogroup NNConv + * @{ + */ + +/* + * Optimized s4 depthwise convolution function with constraint that in_channel equals out_channel + * + * Refer prototype header file for details. + * + */ + +arm_cmsis_nn_status arm_depthwise_conv_s4_opt(const cmsis_nn_context *ctx, + const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_per_channel_quant_params *quant_params, + const cmsis_nn_dims *input_dims, + const int8_t *input, + const cmsis_nn_dims *filter_dims, + const int8_t *kernel, + const cmsis_nn_dims *bias_dims, + const int32_t *bias, + const cmsis_nn_dims *output_dims, + int8_t *output) +{ + (void)bias_dims; + + const int32_t input_ch = input_dims->c; + const int32_t output_ch = output_dims->c; + + /* Check depth multiplier is 1 */ + if (input_ch != output_ch) + { + return ARM_CMSIS_NN_ARG_ERROR; + } + + if (ctx->buf == NULL) + { + return ARM_CMSIS_NN_ARG_ERROR; + } + + const int32_t input_x = input_dims->w; + const int32_t input_y = input_dims->h; + const int32_t kernel_x = filter_dims->w; + const int32_t kernel_y = filter_dims->h; + const int32_t pad_x = dw_conv_params->padding.w; + const int32_t pad_y = dw_conv_params->padding.h; + const int32_t stride_x = dw_conv_params->stride.w; + const int32_t stride_y = dw_conv_params->stride.h; + const int32_t *output_shift = quant_params->shift; + const int32_t *output_mult = quant_params->multiplier; + const int32_t output_x = output_dims->w; + const int32_t output_y = output_dims->h; + const int32_t output_offset = dw_conv_params->output_offset; + const int32_t input_offset = dw_conv_params->input_offset; + const int32_t output_activation_min = dw_conv_params->activation.min; + const int32_t output_activation_max = dw_conv_params->activation.max; + int16_t *buffer_a = (int16_t *)ctx->buf; + + int16_t *const col_buffer_start = buffer_a; + int16_t *col_buffer = col_buffer_start; + const int32_t *const bias_start_pos = bias; + const int32_t *const out_mult_start_pos = output_mult; + const int32_t *const out_shift_start_pos = output_shift; + const uint16_t num_cols = kernel_x * kernel_y; + uint16_t row_count; + uint16_t row_shift = 0; + uint16_t col_shift = 0; + + for (int i_out_y = 0; i_out_y < output_y; i_out_y++) + { + const int16_t base_idx_y = (i_out_y * stride_y) - pad_y; + for (int i_out_x = 0; i_out_x < output_x; i_out_x++) + { + const int16_t base_idx_x = (i_out_x * stride_x) - pad_x; + + /* Out of bounds is only considered for the y axis as it provides a contiguous zero'ing opportunity than + along the x axis */ + const int ker_y_start = MAX(0, -base_idx_y); + /* Condition for kernel end dimension: (base_idx_y + ker_y_end) < input_y */ + const int ker_y_end = MIN(kernel_y, input_y - base_idx_y); + + int32_t index = 0; + if (ker_y_start != 0) + { + memset(&col_buffer[index], 0, (kernel_x * input_ch) * ker_y_start * sizeof(int16_t)); + index += (kernel_x * input_ch) * ker_y_start; + } 
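/* The loop below builds the im2col buffer for one output position: each kernel row in the valid y-range copies kernel_x groups of input_ch values, widened to int16 with the input offset added by arm_q7_to_q15_with_offset(), while x-positions outside the input are zero-filled. */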
+ + for (int i_ker_y = ker_y_start; i_ker_y < ker_y_end; i_ker_y++) + { + const int32_t idx_y = base_idx_y + i_ker_y; + + for (int i_ker_x = 0; i_ker_x < kernel_x; i_ker_x++) + { + const int32_t idx_x = base_idx_x + i_ker_x; + if (idx_x < 0 || idx_x >= input_x) + { + memset(&col_buffer[index], 0, input_ch * sizeof(int16_t)); + } + else + { + arm_q7_to_q15_with_offset((int8_t *)input + (idx_y * input_x + idx_x) * input_ch, + &col_buffer[index], + input_ch, + (int16_t)input_offset); + } + index += input_ch; + } + } + + const int diff = kernel_y - ker_y_end; + if (diff != 0) + { + memset(&col_buffer[index], 0, (kernel_x * input_ch) * diff * sizeof(int16_t)); + } + + row_count = output_ch / 4; + row_shift = 0; + col_shift = 0; + bias = bias_start_pos; + output_mult = out_mult_start_pos; + output_shift = out_shift_start_pos; + + if (output_ch % 2) /* Uneven number of channels */ + { + int get_low_nibble = 1; + + while (row_count) + { + int32_t sum = 0; + int32_t sum_2 = 0; + int32_t sum_3 = 0; + int32_t sum_4 = 0; + if (bias) + { + sum = *bias++; + sum_2 = *bias++; + sum_3 = *bias++; + sum_4 = *bias++; + } + + uint16_t col_count = num_cols / 2; + int16_t *col_pos = col_buffer_start + col_shift; + const int8_t *row_pos = kernel + row_shift; + + row_shift += 2; + col_shift += 4; + + while (col_count) + { +#ifdef ARM_MATH_DSP + /* General idea is to read 4 + 4 (input, kernel) pair and re-arrange them in the right order to + use in a SMLAD instruction . One run of this loop produces 4 partial outputs with 8 MACs. */ + /* Note: variable names can be improved here to align with rows and columns. */ + int32_t ip_a1, ip_a2, ip_b1, ip_b2, op_a, op_b, op_c; + + /* Read 4 weights */ + read_and_pad_s4(row_pos, &ip_a2, &ip_b1); + read_and_pad_s4_uneven(row_pos + (input_ch >> 1), &ip_a1, &ip_b2); + + op_a = arm_nn_read_s16x2(col_pos); + op_b = arm_nn_read_s16x2(col_pos + input_ch); + + op_c = PKHBT(op_b, op_a, 16); + op_a = PKHTB(op_b, op_a, 16); + op_b = PKHBT(ip_b2, ip_a2, 16); + sum = SMLAD(op_c, op_b, sum); + + op_b = PKHBT(ip_b1, ip_a1, 16); + + sum_2 = SMLAD(op_a, op_b, sum_2); + + op_a = arm_nn_read_s16x2(col_pos + 2); + op_b = arm_nn_read_s16x2(col_pos + input_ch + 2); + + op_c = PKHBT(op_b, op_a, 16); + op_a = PKHTB(op_b, op_a, 16); + op_b = PKHTB(ip_a2, ip_b2, 16); + sum_3 = SMLAD(op_c, op_b, sum_3); + + op_b = PKHTB(ip_a1, ip_b1, 16); + sum_4 = SMLAD(op_a, op_b, sum_4); + +#else + int8_t ker0, ker1, ker2, ker3, ker00, ker11; + + ker00 = row_pos[0]; + ker11 = row_pos[1]; + ker0 = (int8_t)(ker00 << 4) >> 4; + ker1 = ker00 >> 4; + ker2 = (int8_t)(ker11 << 4) >> 4; + ker3 = ker11 >> 4; + + sum += ker0 * col_pos[0]; + sum_2 += ker1 * col_pos[1]; + sum_3 += ker2 * col_pos[2]; + sum_4 += ker3 * col_pos[3]; + + ker11 = row_pos[1 + (input_ch >> 1)]; + ker0 = row_pos[0 + (input_ch >> 1)] >> 4; + ker1 = (int8_t)(ker11 << 4) >> 4; + ker2 = ker11 >> 4; + ker3 = (int8_t)(row_pos[2 + (input_ch >> 1)] << 4) >> 4; + + sum += ker0 * col_pos[0 + input_ch]; + sum_2 += ker1 * col_pos[1 + input_ch]; + sum_3 += ker2 * col_pos[2 + input_ch]; + sum_4 += ker3 * col_pos[3 + input_ch]; + +#endif + row_pos += (input_ch); + col_pos += input_ch << 1; + + col_count--; + } + + col_count = num_cols & 0x1; + + while (col_count) + { + int8_t ker0, ker1, ker2, ker3, ker00, ker11; + + ker00 = row_pos[0]; + ker11 = row_pos[1]; + + ker0 = (int8_t)(ker00 << 4) >> 4; + ker1 = ker00 >> 4; + + ker2 = (int8_t)(ker11 << 4) >> 4; + ker3 = ker11 >> 4; + + sum += ker0 * col_pos[0]; + sum_2 += ker1 * col_pos[1]; + sum_3 += ker2 * col_pos[2]; + 
sum_4 += ker3 * col_pos[3]; + + row_pos += input_ch >> 1; + col_pos += input_ch; + + col_count--; + } + + sum = arm_nn_requantize(sum, *output_mult++, *output_shift++); + sum += output_offset; + sum = MAX(sum, output_activation_min); + sum = MIN(sum, output_activation_max); + *output++ = (int8_t)sum; + + sum_2 = arm_nn_requantize(sum_2, *output_mult++, *output_shift++); + sum_2 += output_offset; + sum_2 = MAX(sum_2, output_activation_min); + sum_2 = MIN(sum_2, output_activation_max); + *output++ = (int8_t)sum_2; + sum_3 = arm_nn_requantize(sum_3, *output_mult++, *output_shift++); + sum_3 += output_offset; + sum_3 = MAX(sum_3, output_activation_min); + sum_3 = MIN(sum_3, output_activation_max); + *output++ = (int8_t)sum_3; + + sum_4 = arm_nn_requantize(sum_4, *output_mult++, *output_shift++); + sum_4 += output_offset; + sum_4 = MAX(sum_4, output_activation_min); + sum_4 = MIN(sum_4, output_activation_max); + *output++ = (int8_t)sum_4; + + row_count--; + } + + row_count = output_ch & 0x3; + + while (row_count) + { + const int16_t *col_pos = col_buffer_start + col_shift; + const int8_t *row_pos = kernel + row_shift; + int32_t sum = 0; + int col_index = 0; + + if (bias) + { + sum = *bias++; + } + + col_shift += 1; + + for (int i = 0; i < num_cols; i++) + { + int8_t rhs = row_pos[i * (input_ch >> 1) + col_index]; + int8_t rhs0; + int16_t lhs0 = col_pos[i * input_ch]; + + if (get_low_nibble) + { + rhs0 = (int8_t)(rhs << 4) >> 4; + get_low_nibble = 0; + } + else + { + rhs0 = rhs >> 4; + get_low_nibble = 1; + col_index++; + } + + sum += rhs0 * lhs0; + } + + if (num_cols % 2 == 0) + { + get_low_nibble = !get_low_nibble; + } + + sum = arm_nn_requantize(sum, *output_mult++, *output_shift++); + sum += output_offset; + sum = MAX(sum, output_activation_min); + sum = MIN(sum, output_activation_max); + *output++ = (int8_t)sum; + + row_count--; + + /* Last row */ + if (row_count == 1) + { + row_shift += 1; + } + } + } + else /* Even number of channels */ + { + while (row_count) + { + int32_t sum = 0; + int32_t sum_2 = 0; + int32_t sum_3 = 0; + int32_t sum_4 = 0; + if (bias) + { + sum = *bias++; + sum_2 = *bias++; + sum_3 = *bias++; + sum_4 = *bias++; + } + + uint16_t col_count = num_cols / 2; + int16_t *col_pos = col_buffer_start + col_shift; + const int8_t *row_pos = kernel + row_shift; + + row_shift += 2; + col_shift += 4; + +#ifdef ARM_MATH_DSP + while (col_count) + { + /* General idea is to read 4 + 4 (input, kernel) pair and re-arrange them in the right order to + use in a SMLAD instruction . One run of this loop produces 4 partial outputs with 8 MACs. */ + /* Note: variable names can be improved here to align with rows and columns. 
*/ + int32_t ip_a1, ip_a2, ip_b1, ip_b2, op_a, op_b, op_c; + + /* Read 4 weights */ + read_and_pad_s4(row_pos, &ip_a2, &ip_b1); + read_and_pad_s4(row_pos + (input_ch >> 1), &ip_b2, &ip_a1); + + op_a = arm_nn_read_s16x2(col_pos); + op_b = arm_nn_read_s16x2(col_pos + input_ch); + + op_c = PKHBT(op_b, op_a, 16); + op_a = PKHTB(op_b, op_a, 16); + op_b = PKHBT(ip_b2, ip_a2, 16); + sum = SMLAD(op_c, op_b, sum); + + op_b = PKHBT(ip_b1, ip_a1, 16); + + sum_2 = SMLAD(op_a, op_b, sum_2); + + op_a = arm_nn_read_s16x2(col_pos + 2); + op_b = arm_nn_read_s16x2(col_pos + input_ch + 2); + + op_c = PKHBT(op_b, op_a, 16); + op_a = PKHTB(op_b, op_a, 16); + op_b = PKHTB(ip_a2, ip_b2, 16); + sum_3 = SMLAD(op_c, op_b, sum_3); + + op_b = PKHTB(ip_a1, ip_b1, 16); + sum_4 = SMLAD(op_a, op_b, sum_4); + + row_pos += (input_ch); + col_pos += input_ch << 1; + + col_count--; + } + + col_count = num_cols & 0x1; +#else + col_count = num_cols; +#endif + while (col_count) + { + int8_t ker0, ker1, ker2, ker3, ker00, ker11; + + ker00 = row_pos[0]; + ker11 = row_pos[1]; + + ker0 = (int8_t)(ker00 << 4) >> 4; + ker1 = ker00 >> 4; + + ker2 = (int8_t)(ker11 << 4) >> 4; + ker3 = ker11 >> 4; + + sum += ker0 * col_pos[0]; + sum_2 += ker1 * col_pos[1]; + sum_3 += ker2 * col_pos[2]; + sum_4 += ker3 * col_pos[3]; + + row_pos += input_ch >> 1; + col_pos += input_ch; + + col_count--; + } + + sum = arm_nn_requantize(sum, *output_mult++, *output_shift++); + sum += output_offset; + sum = MAX(sum, output_activation_min); + sum = MIN(sum, output_activation_max); + *output++ = (int8_t)sum; + + sum_2 = arm_nn_requantize(sum_2, *output_mult++, *output_shift++); + sum_2 += output_offset; + sum_2 = MAX(sum_2, output_activation_min); + sum_2 = MIN(sum_2, output_activation_max); + *output++ = (int8_t)sum_2; + sum_3 = arm_nn_requantize(sum_3, *output_mult++, *output_shift++); + sum_3 += output_offset; + sum_3 = MAX(sum_3, output_activation_min); + sum_3 = MIN(sum_3, output_activation_max); + *output++ = (int8_t)sum_3; + + sum_4 = arm_nn_requantize(sum_4, *output_mult++, *output_shift++); + sum_4 += output_offset; + sum_4 = MAX(sum_4, output_activation_min); + sum_4 = MIN(sum_4, output_activation_max); + *output++ = (int8_t)sum_4; + + row_count--; + } + + if (output_ch & 0x2) + { + const int16_t *col_pos = col_buffer_start + col_shift; + const int16_t *col_pos_2 = col_buffer_start + col_shift + 1; + const int8_t *row_pos = kernel + row_shift; + int32_t sum = 0; + int32_t sum2 = 0; + + if (bias) + { + sum = *bias++; + sum2 = *bias++; + } + + for (int i = 0; i < num_cols; i++) + { + int8_t rhs = row_pos[i * (input_ch >> 1)]; + + int8_t rhs_low = (int8_t)(rhs << 4) >> 4; + int8_t rhs_high = rhs >> 4; + + int16_t lhs0 = col_pos[i * input_ch]; + int16_t lhs1 = col_pos_2[i * input_ch]; + + sum += rhs_low * lhs0; + sum2 += rhs_high * lhs1; + } + + sum = arm_nn_requantize(sum, *output_mult++, *output_shift++); + sum += output_offset; + sum = MAX(sum, output_activation_min); + sum = MIN(sum, output_activation_max); + *output++ = (int8_t)sum; + sum2 = arm_nn_requantize(sum2, *output_mult++, *output_shift++); + sum2 += output_offset; + sum2 = MAX(sum2, output_activation_min); + sum2 = MIN(sum2, output_activation_max); + *output++ = (int8_t)sum2; + } + } + + /* Clear counter and pointers */ + col_buffer = col_buffer_start; + } + } + + /* Return to application */ + return ARM_CMSIS_NN_SUCCESS; +} + +/** + * @} end of NNConv group + */ diff --git a/Source/ConvolutionFunctions/arm_depthwise_conv_wrapper_s4.c 
b/Source/ConvolutionFunctions/arm_depthwise_conv_wrapper_s4.c new file mode 100644 index 00000000..532c4e01 --- /dev/null +++ b/Source/ConvolutionFunctions/arm_depthwise_conv_wrapper_s4.c @@ -0,0 +1,98 @@ +/* + * SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* ---------------------------------------------------------------------- + * Project: CMSIS NN Library + * Title: arm_depthwise_conv_wrapper_s4.c + * Description: Wrapper API to select appropriate depthwise conv s4 API based + * on dimensions. + * + * $Date: 30 October 2023 + * $Revision: V.1.0.0 + * + * Target : Arm(R) M-Profile Architecture + * + * -------------------------------------------------------------------- */ + +#include "arm_nnfunctions.h" + +/** + * @ingroup Public + */ + +/** + * @addtogroup NNConv + * @{ + */ + +/* + * s4 Depthwise conv wrapper function + * + * Refer header file for details. + * + */ +arm_cmsis_nn_status arm_depthwise_conv_wrapper_s4(const cmsis_nn_context *ctx, + const cmsis_nn_dw_conv_params *dw_conv_params, + const cmsis_nn_per_channel_quant_params *quant_params, + const cmsis_nn_dims *input_dims, + const int8_t *input, + const cmsis_nn_dims *filter_dims, + const int8_t *filter, + const cmsis_nn_dims *bias_dims, + const int32_t *bias, + const cmsis_nn_dims *output_dims, + int8_t *output) +{ + arm_cmsis_nn_status status = ARM_CMSIS_NN_SUCCESS; + if (1 == dw_conv_params->ch_mult && input_dims->n == 1 && dw_conv_params->dilation.w == 1 && + dw_conv_params->dilation.h == 1) + { + status = arm_depthwise_conv_s4_opt(ctx, + dw_conv_params, + quant_params, + input_dims, + input, + filter_dims, + filter, + bias_dims, + bias, + output_dims, + output); + } + else + { + status = arm_depthwise_conv_s4(ctx, + dw_conv_params, + quant_params, + input_dims, + input, + filter_dims, + filter, + bias_dims, + bias, + output_dims, + output); + } + + /* Return to application */ + return status; +} + +/** + * @} end of NNConv group + */ diff --git a/Tests/UnitTest/CMakeLists.txt b/Tests/UnitTest/CMakeLists.txt index c8220e53..66a22f45 100644 --- a/Tests/UnitTest/CMakeLists.txt +++ b/Tests/UnitTest/CMakeLists.txt @@ -80,6 +80,8 @@ add_subdirectory(TestCases/test_arm_convolve_1_x_n_s8) add_subdirectory(TestCases/test_arm_depthwise_conv_3x3_s8) add_subdirectory(TestCases/test_arm_depthwise_conv_fast_s16) add_subdirectory(TestCases/test_arm_depthwise_conv_s16) +add_subdirectory(TestCases/test_arm_depthwise_conv_s4) +add_subdirectory(TestCases/test_arm_depthwise_conv_s4_opt) add_subdirectory(TestCases/test_arm_depthwise_conv_s8) add_subdirectory(TestCases/test_arm_depthwise_conv_s8_opt) add_subdirectory(TestCases/test_arm_ds_cnn_l_s8) diff --git a/Tests/UnitTest/TestCases/Common/dw_s4_weights_template.json b/Tests/UnitTest/TestCases/Common/dw_s4_weights_template.json new file mode 100644 index 00000000..d815fd8c --- /dev/null +++ b/Tests/UnitTest/TestCases/Common/dw_s4_weights_template.json @@ -0,0 +1,131 
+{
+  "version": 3,
+  "operator_codes": [
+    {
+      "deprecated_builtin_code": 4,
+      "version": 1,
+      "builtin_code": "DEPTHWISE_CONV_2D"
+    }
+  ],
+  "subgraphs": [
+    {
+      "tensors": [
+        {
+          "shape": [
+            batches,
+            input_y,
+            input_x,
+            input_ch
+          ],
+          "type": "INT8",
+          "buffer": 1,
+          "quantization": {
+            "scale": [
+              input_scale
+            ],
+            "zero_point": [
+              input_zp
+            ],
+            "quantized_dimension": 0
+          },
+          "is_variable": false
+        },
+        {
+          "shape": [
+            1,
+            weight_y,
+            weight_x,
+            output_ch
+          ],
+          "type": "INT4",
+          "buffer": 2,
+          "quantization": {
+            "scale": w_scale,
+            "zero_point": w_zp,
+            "quantized_dimension": 3
+          },
+          "is_variable": false
+        },
+        {
+          "shape": [
+            output_ch
+          ],
+          "type": "INT32",
+          "buffer": 3,
+          "quantization": {
+            "scale": bias_scale,
+            "zero_point": bias_zp,
+            "quantized_dimension": 0
+          },
+          "is_variable": false
+        },
+        {
+          "shape": [
+            batches,
+            output_y,
+            output_x,
+            output_ch
+          ],
+          "type": "INT8",
+          "buffer": 4,
+          "quantization": {
+            "scale": [
+              output_scale
+            ],
+            "zero_point": [
+              output_zp
+            ],
+            "quantized_dimension": 0
+          },
+          "is_variable": false
+        }
+      ],
+      "inputs": [
+        0
+      ],
+      "outputs": [
+        3
+      ],
+      "operators": [
+        {
+          "opcode_index": 0,
+          "inputs": [
+            0,
+            1,
+            2
+          ],
+          "outputs": [
+            3
+          ],
+          "builtin_options_type": "DepthwiseConv2DOptions",
+          "builtin_options": {
+            "padding": "type_pad",
+            "stride_w": stride_x,
+            "stride_h": stride_y,
+            "depth_multiplier": ch_mult,
+            "fused_activation_function": "NONE",
+            "dilation_w_factor": dilation_x,
+            "dilation_h_factor": dilation_y
+          },
+          "custom_options_format": "FLEXBUFFERS"
+        }
+      ]
+    }
+  ],
+  "buffers": [
+    {
+    },
+    {
+    },
+    {
+      "data": [
+      ]
+    },
+    {
+      "data": [
+      ]
+    },
+    {
+    }
+  ]
+}
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/biases_data.h
index 82780b3e..7c657eda 100644
--- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/biases_data.h
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/biases_data.h
@@ -1,6 +1,6 @@
-// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0).
-// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f.
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
 #pragma once
 #include <stdint.h>
 
-const int32_t depthwise_dilation_biases[9] = {25579, -14160, 31652, 576, -9958, 15088, 547, -4855, 25742};
+const int32_t depthwise_dilation_biases[1] = {24749};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/config_data.h
index 2421b81e..a68575fb 100644
--- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/config_data.h
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/config_data.h
@@ -1,25 +1,25 @@
-// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0).
-// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f.
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
#pragma once -#define DEPTHWISE_DILATION_OUT_CH 9 -#define DEPTHWISE_DILATION_IN_CH 3 -#define DEPTHWISE_DILATION_INPUT_W 6 -#define DEPTHWISE_DILATION_INPUT_H 5 -#define DEPTHWISE_DILATION_DST_SIZE 81 -#define DEPTHWISE_DILATION_INPUT_SIZE 90 +#define DEPTHWISE_DILATION_OUT_CH 1 +#define DEPTHWISE_DILATION_IN_CH 1 +#define DEPTHWISE_DILATION_INPUT_W 7 +#define DEPTHWISE_DILATION_INPUT_H 7 +#define DEPTHWISE_DILATION_DST_SIZE 49 +#define DEPTHWISE_DILATION_INPUT_SIZE 49 #define DEPTHWISE_DILATION_OUT_ACTIVATION_MIN -70 #define DEPTHWISE_DILATION_OUT_ACTIVATION_MAX 127 #define DEPTHWISE_DILATION_INPUT_BATCHES 1 #define DEPTHWISE_DILATION_FILTER_X 3 -#define DEPTHWISE_DILATION_FILTER_Y 4 -#define DEPTHWISE_DILATION_STRIDE_X 2 -#define DEPTHWISE_DILATION_STRIDE_Y 2 -#define DEPTHWISE_DILATION_PAD_X 1 -#define DEPTHWISE_DILATION_PAD_Y 4 -#define DEPTHWISE_DILATION_OUTPUT_W 3 -#define DEPTHWISE_DILATION_OUTPUT_H 3 -#define DEPTHWISE_DILATION_CH_MULT 3 +#define DEPTHWISE_DILATION_FILTER_Y 3 +#define DEPTHWISE_DILATION_STRIDE_X 1 +#define DEPTHWISE_DILATION_STRIDE_Y 1 +#define DEPTHWISE_DILATION_PAD_X 2 +#define DEPTHWISE_DILATION_PAD_Y 2 +#define DEPTHWISE_DILATION_OUTPUT_W 7 +#define DEPTHWISE_DILATION_OUTPUT_H 7 +#define DEPTHWISE_DILATION_CH_MULT 1 #define DEPTHWISE_DILATION_INPUT_OFFSET 128 -#define DEPTHWISE_DILATION_OUTPUT_OFFSET -23 +#define DEPTHWISE_DILATION_OUTPUT_OFFSET -107 #define DEPTHWISE_DILATION_DILATION_X 2 -#define DEPTHWISE_DILATION_DILATION_Y 3 +#define DEPTHWISE_DILATION_DILATION_Y 2 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/input_data.h index 9aee0e97..56c48a0d 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/input_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/input_data.h @@ -1,11 +1,9 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include -const int8_t depthwise_dilation_input[90] = { - 26, -12, -106, 44, -120, -86, -44, -65, 57, 81, 3, 14, -36, -57, 119, -107, -7, -86, - -113, 64, 6, 9, 105, -67, 47, -15, 118, 70, -5, -37, 94, -16, -13, 76, 93, -28, - -51, -61, -16, 26, 107, -104, 33, 19, 25, -32, -9, -38, 49, -52, -45, 77, 36, 115, - 32, 104, -47, -61, 109, -23, 59, 21, -37, -95, -61, -119, -115, 50, -64, -57, -103, -102, - 48, -84, 124, -119, 103, -67, 75, 75, -67, -28, 114, 28, 67, 42, 43, -74, -39, -74}; +const int8_t depthwise_dilation_input[49] = {14, 82, -18, 99, -16, 25, 88, 38, -115, 36, 37, -125, -90, + 7, 62, -114, -111, -50, 110, 92, 26, 125, -15, -38, 49, -52, + 16, -73, -105, -101, -80, -114, -63, 22, -125, -89, 35, 22, -86, + 49, 74, -106, 87, -1, -109, 55, 17, 16, -94}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_mult_data.h index 8f3d8bcf..320167df 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_mult_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_mult_data.h @@ -1,7 +1,6 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). 
-// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include -const int32_t depthwise_dilation_output_mult[9] = - {1289240074, 1261132178, 1336459652, 1386272717, 1262565343, 1381060732, 1370789449, 1332290030, 1419854710}; +const int32_t depthwise_dilation_output_mult[1] = {1611716446}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_ref_data.h index 2943037a..d2d17d7e 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_ref_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_ref_data.h @@ -1,10 +1,8 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include -const int8_t depthwise_dilation_output_ref[81] = { - 14, -46, 46, -26, -10, -18, -32, -35, 17, -18, -39, 44, 20, -34, 26, -54, -47, 24, 20, -32, 33, - -20, -41, 0, -64, -54, -35, -8, -2, 19, -16, 44, -31, -40, -49, 19, -6, -10, 40, 8, 25, 42, - -58, -48, -20, -32, -36, 59, 0, -14, 1, -53, -18, -20, 10, -59, 26, -35, -18, -14, -9, -36, 3, - -9, -64, 54, -7, 3, 2, -50, 5, -18, -22, -42, 55, 1, -4, -7, -35, -14, 4}; +const int8_t depthwise_dilation_output_ref[49] = { + -2, -69, 75, 20, 35, 46, 75, 30, 20, 85, 94, -9, 55, 3, -27, -48, -65, -45, -35, -12, -62, -2, 1, 32, 30, + 73, 28, -3, -3, 78, 100, 87, 6, 25, -6, -70, -45, -70, -70, -54, -61, -50, -41, -70, -70, -65, -57, -70, -40}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_shift_data.h index 4890166c..cd0a753c 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_shift_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/output_shift_data.h @@ -1,6 +1,6 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include -const int32_t depthwise_dilation_output_shift[9] = {-9, -9, -9, -9, -9, -9, -9, -9, -9}; +const int32_t depthwise_dilation_output_shift[1] = {-8}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/test_data.h index 8102687f..8314dc23 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/test_data.h @@ -1,5 +1,5 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). 
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #include "biases_data.h" #include "config_data.h" #include "input_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/weights_data.h index 78d78f06..38d9df8b 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_dilation/weights_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_dilation/weights_data.h @@ -1,12 +1,6 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include -const int8_t depthwise_dilation_weights[108] = { - -69, -13, 100, 63, 44, 91, -52, 57, -11, -6, -109, 80, 70, 90, -127, -86, 113, -70, - 49, -41, -40, -38, -123, 96, 52, -28, -25, -23, -116, 63, 36, 113, 58, -92, 112, -99, - -127, 91, 115, -25, 105, -113, 66, -59, 9, 64, 127, -127, 8, 73, 104, -41, -22, -8, - -80, 74, -55, 103, -93, 114, -114, 71, -58, 93, -4, 95, -65, 32, -61, -95, -112, -122, - -86, -53, 111, 104, 115, 26, -58, -28, 94, 127, -2, 47, 6, -70, 21, -127, 127, -13, - -46, -106, 14, -127, -34, -78, -30, -47, -50, -98, -76, 113, 99, 127, 112, 54, 5, -127}; +const int8_t depthwise_dilation_weights[9] = {6, -58, 46, -14, -6, -97, 99, 108, 127}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/biases_data.h index 4bf62900..3af320d8 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/biases_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/biases_data.h @@ -1,24 +1,24 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
#pragma once #include const int32_t depthwise_eq_in_out_ch_biases[250] = { - 29110, -29347, -58523, -17564, -482, 3265, 8206, 44714, -34935, -15235, 32336, 25996, -29344, 5337, - -59777, 31129, 70239, -74176, -26668, 7666, 26186, -14262, 27468, -21206, -19548, -773, 15156, 22555, - 33352, -14022, -26954, 14103, 2807, 35140, 6693, 532, -17287, 1887, -5325, -23777, -66943, 9764, - -10627, -4835, 1878, -30778, -4085, 15106, 6571, 37532, 26433, -23428, 12326, -18500, 18432, -7769, - 7393, -33279, 30647, 8374, -26161, -365, 19616, -9524, 3011, -9230, -1325, 26590, -4415, -11514, - 25454, 31686, 24345, -38598, -17809, 14351, -3783, 16222, 29298, -4169, 12966, -17571, -962, 6676, - -26176, 28512, 6704, 28084, 46039, 6195, -9864, -26119, 37998, -37054, -38156, -2228, 198, 14730, - -1123, -18177, 16415, -1222, -26717, -3404, 15603, -14659, 33612, -35599, 22352, -8915, 21641, 20242, - 1927, -6323, 34385, -39977, 33906, -17523, -35068, -3758, -25109, -3517, 13293, -43054, -37187, 11141, - 17834, -11230, 11237, 9896, -35418, 6684, 21025, 13733, 8581, -25303, -19380, -7362, 122, -16862, - -35842, 8873, -15849, -13046, 10259, 42509, -24238, 17079, -7512, 228, 11169, 28836, 100288, 15166, - -37010, -36920, 27733, 21578, 20259, 1116, 33062, 104895, 25133, 7214, 32132, 20059, -20743, -9238, - -21095, 29336, -24141, -32559, -31227, -20928, 16868, 25304, 2375, -4337, -14019, -30012, -52864, -8956, - -19233, -57240, 110830, 31916, -3598, 23319, 2811, 24893, -23346, 32739, -21653, -39166, 33028, 59598, - -24319, 2570, -18910, 38224, 32877, 38979, -29309, 18922, 3092, 9886, 16643, -50423, 30173, 16077, - -25701, -27383, -3538, -5862, 6065, 15122, 10712, 4485, -3041, 30686, 16303, -23398, -30906, -6871, - -28519, 28291, 27281, -45825, -31799, 32639, 27508, 14892, 17410, 8283, -23449, 50960, 15615, -28514, - 1075, -3531, -40939, -15221, 55754, 32817, -37523, -1893, 8779, 21704, 22585, -15482}; + -10090, 24257, 15705, -44124, -32760, -8533, 5591, -42556, 29147, -14723, -21820, 8229, 31129, -24164, + -19477, -19349, -7030, 35236, -67311, 41291, 514, 34065, 5492, -31088, -34339, 22052, 15427, -32022, + 29252, 31810, 50871, 23101, -20296, -50784, 3863, 671, -15590, 30363, 25229, 25807, 8042, -12601, + 31778, -24335, 22522, -15681, 52742, 21744, 8745, -27294, 10099, 3329, -23897, 23612, 230, -24643, + -7868, 7226, -18834, 4056, -4287, -20594, -23628, -24895, 28242, 27742, 6891, 2985, 65293, 25824, + 42777, 18541, 1656, -4558, 2742, 6228, -38328, -35233, -18036, -57, -47097, -59983, -16244, -6711, + 18601, 38066, -4920, -1558, -48211, -5834, -45613, 49651, 31949, 81193, 27870, 26516, -12366, 3940, + -7915, 15064, -46683, -3175, -31446, -9618, -30311, -35020, 1402, -1820, 50421, -17035, -36230, -29218, + -26978, -28371, 45906, -8830, -53039, -7454, 26776, -48671, 4701, -35727, -43474, -26332, -26742, 1797, + -40887, 15969, 7237, -29985, 51641, -10670, -31854, -11485, -69545, -31690, 22642, -27074, 7955, 28871, + 7125, -29561, 23723, -30479, -19500, -21721, 25806, -17070, -21435, -3632, -16815, 11003, 15094, -1624, + -300, 69040, -16797, -10171, 25332, 37105, -30523, 40572, -926, -3145, 12021, -9348, 6608, -8554, + -10439, 15632, 10352, -15389, -23039, -14233, 16883, -12340, -20991, -41473, 24110, 28339, 14135, -14805, + 35351, -24704, 8044, -20030, 41859, 5208, -7515, -18777, 52091, 47741, -388, -9531, 15660, -27612, + -53825, -27528, 15638, 18806, -7331, -25661, -21496, -2838, 13841, -3026, 10522, -12127, -36591, 9074, + 26989, -11080, 7342, 44162, 33476, 22254, -2267, 20749, 83039, -7461, 52674, -7535, 25423, -50074, 
+ 6190, -15646, 16243, -18563, -15069, 10635, -21598, 15332, -4472, -38225, -34910, -2245, -17412, 35974, + 11734, -12774, 52198, 22705, -648, 32133, 21241, -35679, 33933, 27385, 23909, -11178}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/config_data.h index e3dc35c0..afb549f4 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/config_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/config_data.h @@ -1,5 +1,5 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #define DEPTHWISE_EQ_IN_OUT_CH_OUT_CH 250 #define DEPTHWISE_EQ_IN_OUT_CH_IN_CH 250 @@ -20,6 +20,6 @@ #define DEPTHWISE_EQ_IN_OUT_CH_OUTPUT_H 5 #define DEPTHWISE_EQ_IN_OUT_CH_CH_MULT 1 #define DEPTHWISE_EQ_IN_OUT_CH_INPUT_OFFSET 128 -#define DEPTHWISE_EQ_IN_OUT_CH_OUTPUT_OFFSET 15 +#define DEPTHWISE_EQ_IN_OUT_CH_OUTPUT_OFFSET 8 #define DEPTHWISE_EQ_IN_OUT_CH_DILATION_X 1 #define DEPTHWISE_EQ_IN_OUT_CH_DILATION_Y 1 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/input_data.h index 21f6fbd7..7aa375c8 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/input_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/input_data.h @@ -1,467 +1,467 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
#pragma once #include const int8_t depthwise_eq_in_out_ch_input[8750] = { - 33, 102, 13, -2, 91, 58, 13, 2, -5, -28, -67, -82, 32, -53, 21, -11, 76, -89, 119, - 118, 9, -111, -17, -37, -8, 23, 123, 72, 112, -94, -13, -33, -59, 39, -77, 63, -86, -27, - 114, 121, 100, -18, 2, -8, 90, -70, 80, -32, -89, -96, 120, -84, 5, -100, 63, 55, 113, - -77, 65, 66, 95, -100, -52, -118, 0, -45, -33, -121, -45, 125, 90, 65, 13, 21, 24, -106, - 13, 30, -110, -52, 126, -85, 15, -95, -14, -53, -110, -13, 56, 45, 105, 69, -11, -54, -59, - 16, -65, -85, 120, -120, 3, -127, -69, -67, 61, 30, 26, 83, 30, 72, 55, 13, 71, -113, - 7, 102, -90, 110, -14, 5, 123, 37, -28, 99, -37, -35, -124, 4, 40, 42, -91, -90, -78, - -34, 5, -16, 16, 22, -120, -107, -68, 123, -34, -75, 87, 88, 27, -50, -30, -77, 11, 97, - -21, -72, -99, -68, 117, 27, 25, -51, 8, 46, -47, -84, 18, 20, -15, 84, -24, -12, -17, - -106, -30, 69, -57, 67, 104, 93, -64, 2, -34, 125, -55, 34, 34, -74, 67, 110, -19, 32, - -112, -7, -57, 12, -52, 112, -32, 13, 78, 48, 96, -14, -116, 61, -75, -85, -123, -54, 10, - 61, 50, -65, -37, -90, 21, 30, -71, -30, 41, -77, 93, -73, -50, -115, 75, 17, -102, 97, - 29, 1, -93, 107, -91, 49, 29, -66, 104, 122, 59, 102, -112, 11, -122, -110, 123, -45, 73, - -120, 47, 14, 93, -119, -39, -50, 46, -43, 106, 28, 43, -70, -62, -97, 1, -123, 15, -43, - -23, -111, 56, -80, -3, 49, 27, 53, -114, 4, 63, -100, -114, -23, 23, 82, -12, 106, -46, - -19, -1, -114, -72, 80, 47, 4, 93, 64, 76, 7, 38, 114, 9, -8, -26, 125, 27, 43, - 2, 19, -82, -104, 0, 107, -122, 108, -5, -74, -80, -60, -89, 10, 108, -95, 27, -2, 62, - -103, 96, 71, 13, -109, 62, -95, 58, 51, -60, -96, 78, 36, 60, -107, -51, -9, 0, -72, - 64, 11, -22, -5, 111, 5, -9, -75, -25, -102, 4, -39, -43, -36, -83, 114, 103, 38, 5, - -117, -107, -34, -59, -82, 28, 106, 109, 14, 12, 82, 97, 119, 98, 119, 41, 35, -74, 79, - 19, 64, 3, -95, 1, 47, 12, 62, -64, -4, 5, -100, -96, -108, 9, 97, 43, -64, 49, - -16, -86, -80, -4, -124, -16, -72, -79, 75, 43, 14, 69, 91, 93, 10, 106, 9, 85, -77, - -87, -51, 99, 118, 97, -21, -61, -18, -62, 61, 87, 99, -98, 25, -77, 4, 27, 95, -32, - 45, -60, -50, 63, -104, 124, -10, 36, -31, -1, -108, -128, 121, -100, -3, -91, -31, -6, -78, - -66, -7, -113, -15, -25, -110, 118, -35, 34, 102, 104, -92, -16, -43, 75, -73, -21, 9, 119, - -85, 21, -57, 95, 82, 122, 30, -107, -120, 40, -50, 42, 115, 100, -56, 104, 93, -126, 11, - 4, -33, 105, -48, 31, -73, -27, 20, 95, 94, -104, -4, 96, 73, -53, 34, -125, -9, -25, - -15, -26, -101, 17, 2, 119, -53, 40, -114, 28, 90, 0, 117, 18, 74, 54, -113, 74, -82, - -40, 99, 83, 43, 121, 51, 123, -17, -11, -68, -29, -24, -110, 92, -85, 45, -71, -3, 20, - 78, 51, 90, 115, -66, 46, 32, -90, -95, 98, -90, -104, -77, -12, 20, -89, 65, 39, 76, - -82, 10, 63, -128, -89, 95, 91, 8, 95, 12, 97, 29, 86, 122, 57, 70, -125, -48, -48, - 60, 27, -108, 115, 73, -101, -66, -104, -109, 77, -83, 17, -35, -76, -37, 68, 9, 6, -62, - -36, -79, -88, 40, 74, -28, -31, 93, -65, -95, -15, 47, 106, 19, -115, 41, 81, 76, 9, - -32, 118, -30, -59, -14, -61, -94, -59, -11, 63, -3, 13, 28, -26, -6, -39, 37, -36, 77, - -8, -122, 102, 93, 66, 56, 45, 99, -30, 105, -27, 91, -127, 6, -117, -117, -115, -42, -17, - -126, -59, -121, -117, -117, 17, -68, -107, -42, -111, -29, 39, 59, -72, -88, -68, 41, -12, -104, - 26, -71, 36, 75, -24, -124, 61, 43, -104, 57, -40, 62, 53, -110, 104, -125, 115, -67, 117, - -123, -57, -77, -91, -64, 54, -23, -123, -128, -122, 40, 121, 63, 108, -53, 60, 25, -31, 82, - 111, 74, 57, 39, -101, 0, -52, -7, -3, 109, 41, 49, 125, 74, -62, 
32, 51, -84, 54, - 4, -58, 33, 81, 24, 66, -106, -18, 20, -67, -102, 53, -71, 89, 62, 90, 55, -21, -8, - -90, -38, -66, -107, 53, 23, -65, -102, -53, 107, -109, 85, 26, 56, 19, -70, -56, -36, 43, - 106, 67, 105, 36, 89, 54, -107, 89, -48, -108, -53, 1, -119, -60, 82, 106, -117, 36, 23, - -4, -108, -14, 61, 2, 35, -112, -3, 14, 65, -126, 20, -103, -37, 102, -39, -77, -84, 32, - 70, -34, -76, 39, 102, 83, 92, 46, -40, 53, 19, -86, 49, 8, -40, -44, -105, -104, -107, - 52, -108, 2, 0, -64, -16, -20, -40, 87, -121, -31, -41, 55, 70, -82, -128, -100, -6, -51, - 12, 113, 107, -65, -79, 66, 16, -62, -37, 30, 83, -86, -89, -12, 69, -40, -61, -35, -53, - -11, -102, -29, 94, -84, 43, -83, 122, 69, 122, 122, -38, 101, -45, -115, 59, -43, 19, -90, - 23, -39, -111, -24, -120, 12, 63, 101, -97, -127, 90, -91, -27, 126, -41, 47, 124, -72, -10, - 107, -14, 98, 108, 14, 124, 101, 116, -90, -44, -123, 53, 22, -9, -104, -70, -9, 35, 121, - 79, 44, 7, -117, -55, 58, -1, 15, -13, 115, -16, 37, 116, -114, 121, 122, 110, -66, -47, - 125, 92, -48, -41, -18, -72, -30, 111, -32, 125, -41, 111, -56, 69, 67, 17, -79, -90, 92, - 100, 59, 63, -108, -96, -112, 82, 73, 50, -72, -95, -58, 0, 30, 115, -1, 29, 117, 101, - 56, 25, -42, -37, 54, 19, 2, 120, 111, -50, -93, -97, 121, -94, 70, -17, 107, 16, -125, - 115, 94, -91, -7, 27, 107, 28, 55, 102, 45, -72, 14, -124, 23, 28, -89, -4, 3, -102, - -74, -37, 42, -90, 32, 63, 64, -31, 65, -13, 89, 34, 73, 41, -94, 17, 9, 14, -65, - 10, 118, -6, -82, -44, -59, 21, 8, 12, 125, -108, -120, -116, -31, 83, 29, 44, 41, 87, - 114, 34, -3, 124, -46, 87, -107, -39, -70, 124, -127, -61, -113, -56, 72, -104, 119, 110, 73, - 56, 89, -23, 62, 66, 124, 14, 120, 25, 114, -26, 58, -121, -59, 30, -61, -48, -114, 114, - 70, 105, -62, 115, -14, 43, 90, -109, -96, 54, -18, 57, 79, 68, 14, 61, -57, 73, -122, - 60, -96, 111, 94, -15, 68, -96, 64, -61, -24, 24, 10, 73, -49, 73, 113, -12, -113, -88, - -8, 86, 108, 65, -34, 38, 82, 20, -100, 19, 98, -93, 46, 13, 43, 104, -66, -77, 36, - 57, -100, -68, 20, -12, 63, -110, 116, -83, 118, 0, -90, 3, 57, -118, -56, -105, -112, 33, - 78, -97, -73, 22, 25, -73, -60, 63, 97, 119, -120, 98, -102, 54, -74, 22, 33, -34, 76, - 110, 34, 3, 63, -30, 75, 92, -10, -99, 16, 61, 121, -35, 48, -120, -112, -69, 77, -126, - 0, 51, 67, -106, 17, -126, 99, 108, 46, -96, -97, 77, 5, 69, 126, 112, 68, 27, -79, - -27, -10, 10, 82, -10, 67, -16, -121, -114, 5, 118, 3, 15, -36, 86, -89, -50, 21, 5, - 45, 18, -120, -56, 21, -27, -60, 61, 30, -33, 36, -101, 19, 36, 7, -81, 28, -80, 119, - -84, 59, -48, -101, -57, 43, -6, -83, 57, -81, 108, -118, 8, -33, 78, 17, 35, 72, -119, - -11, 3, -56, -70, -53, -8, 49, 125, -27, -52, 54, -48, -97, -115, 56, 58, 21, -40, 55, - -64, 6, 5, 105, -48, 14, 97, -26, -19, -108, 94, 119, 84, -22, -45, 55, 47, -120, 88, - -60, 102, -123, -17, 44, 123, -97, -108, 31, -107, -29, 98, 109, 52, -5, -36, 111, -102, 47, - -14, -88, -57, -128, -66, 61, 86, 83, -22, -59, 6, 10, 84, 80, 106, 51, -75, -26, -82, - -128, -39, -106, 17, 13, -6, 30, 0, 47, -115, 117, 71, -4, -109, 121, 35, 32, -97, 2, - 10, 122, 65, -55, 33, 120, -33, -79, -90, 25, -65, 83, 50, -38, -46, -1, 17, 41, 4, - -80, -12, -10, -114, -91, 11, 89, 66, 115, -95, 69, -121, -20, 125, -119, 119, -47, -19, -22, - -46, -49, -90, 62, 26, 11, 14, -74, 30, -41, -100, 58, 74, -116, -58, 73, -86, 117, 124, - 94, 124, -46, -102, 107, 34, 11, -89, -97, -15, 71, 106, 69, -16, -118, -71, -102, 29, 78, - -3, 21, -20, 29, 60, 53, -79, 104, 89, -27, 4, 89, -55, 104, -128, 89, -42, 69, 45, - -19, 19, -84, -84, 39, 
74, -46, -109, -11, -52, 78, -88, -6, 74, 47, 53, 24, 32, -107, - -21, 36, 2, 104, -69, -44, -66, 111, 90, 74, -96, 91, 69, -126, 5, -119, -7, -121, 19, - -12, 34, -4, -73, -33, 98, -88, 82, 90, 58, -59, 78, -119, -88, 100, -29, 43, -34, 126, - -54, -64, -118, 107, 110, -107, -47, -83, 35, -74, 107, -83, 8, -13, -94, -88, 99, -39, -94, - -106, -67, -24, 87, -45, 117, -53, 56, -85, -52, 16, -100, 64, 77, -103, -77, 108, 102, 114, - -110, 93, -121, 122, 125, -73, 21, 11, -52, 12, -100, -111, -117, -30, -123, -9, -102, 11, -80, - -76, 74, -114, 31, 44, -88, 126, -75, -13, 47, -84, 32, 2, 27, 6, -90, -108, 125, 95, - -3, 13, -74, 43, 121, -80, -66, -127, -84, 87, -85, 108, 30, 96, 8, -58, -86, 45, 122, - 98, 0, -49, 37, -91, -59, -7, -9, -116, 78, 26, 85, 102, 59, 67, -33, 47, 101, -24, - -4, -50, -43, 99, 51, 51, 5, -121, 7, 112, 107, 21, -90, -33, 72, -124, -78, 93, 48, - -44, -128, -42, 111, -103, -37, -74, 4, -123, -41, -87, -15, -75, -72, -107, 62, 69, -124, -70, - -113, 83, 103, 116, -9, 112, -58, -33, -76, 109, -109, -113, 111, -5, 47, -98, 19, -115, 65, - 73, 48, 83, 124, 107, 20, 122, -128, 104, -39, -28, 32, -44, 27, -76, -53, -86, -50, 10, - -36, -95, -122, 69, 95, 10, 20, 91, -81, 51, 39, -88, -119, 1, 119, -61, -1, 80, 25, - -97, -26, 83, -59, -71, -127, -37, 43, -5, 34, 34, 92, -39, -60, 1, 19, -74, -57, 56, - 28, -112, -55, -35, 74, -53, 68, -1, -116, 101, -15, 115, 40, -128, -49, -72, -19, 41, 61, - -122, 102, -67, -127, -32, -74, -125, 52, -109, 69, 56, 101, 87, -97, -117, -62, -114, 104, -96, - -114, 122, 10, -98, 54, 41, 39, 86, 28, 118, 44, 82, -84, -59, -124, 89, 29, -112, -81, - -29, -36, 74, -79, 115, 34, -85, 39, 76, 108, -31, 126, -17, -46, -89, 53, -115, -79, 113, - -68, 116, 45, 57, 0, 15, 43, 117, -72, 11, 75, -78, 26, -90, 53, -101, 49, 106, 50, - 73, 96, 78, -103, 6, -38, -103, 40, 97, -52, -55, -8, 15, 4, 98, -52, 100, 125, -20, - -18, -43, -57, -26, -80, 84, -24, -11, 77, 80, 103, -40, 125, -78, -102, -113, -41, -121, 27, - 53, -59, -27, -12, -97, 27, -75, -31, -25, -125, -55, 20, 5, -67, 38, 54, -89, 72, 82, - 105, 54, 112, 1, -16, -84, 95, 48, 9, 35, 68, -20, 7, -68, 71, 44, 7, 83, 117, - 56, 64, -17, -103, 111, 68, -42, -53, -96, -80, 20, 102, -34, -82, -118, -61, 15, 66, 62, - 34, 12, 16, -126, -112, -113, 55, 3, 57, -83, 50, 52, -9, -110, 121, -85, -48, 68, 68, - 36, 10, -98, 87, -80, -125, -75, -128, 106, -108, 66, -92, -22, 115, -2, 99, 2, 10, 73, - 79, -111, -29, -76, 107, 55, 7, 82, 22, 125, 109, -43, 35, -97, -52, 25, 14, 71, -5, - -47, 30, 119, -105, -59, 48, 85, -6, 111, 79, 97, 78, 118, -98, 70, 104, 56, -25, -94, - 27, -99, 9, -126, -12, -94, 95, -119, -107, -5, -98, 2, -12, 20, 124, 43, -74, -25, -49, - -2, 84, 14, 84, 43, 2, 91, -19, -66, -70, -89, 31, 59, 37, -49, 98, 99, -31, 114, - 83, 110, 15, 119, -93, -70, -2, 112, -22, -75, 47, 93, 89, 35, -5, -106, -51, -112, 35, - 51, 41, 99, 7, 71, 27, 55, 32, 52, -59, 44, 62, -113, -12, 102, -92, 35, 47, 30, - 41, 39, 124, 28, -27, -13, 85, 12, 120, -31, 83, -108, -71, 105, -32, 11, -106, -93, 6, - 31, 100, 118, 89, -110, 73, 107, 120, 5, 33, -54, 18, -45, -87, 64, 8, -95, 41, 114, - -49, -56, -109, 12, 100, 118, 63, -111, 33, 72, -115, 125, 81, 36, 112, -42, -45, 85, 56, - 55, -98, -102, 35, -103, 31, 63, -22, 18, -50, -81, -89, -75, 94, 20, 48, 38, -53, -104, - -103, -96, -89, -58, 77, -127, -2, 108, 28, 1, 59, 35, 120, 110, 103, 118, 11, 105, -68, - -101, 92, -38, -49, -1, 44, -18, -127, -90, -8, 102, 35, 55, 91, 73, -107, 114, 15, 83, - 5, -28, -87, 49, -67, 96, -15, 26, -118, -126, -56, 45, -47, 
52, 53, 12, -12, 124, 91, - 15, 124, -67, -104, 58, 80, -105, 4, 79, -81, 8, 95, 113, -67, 89, 36, -49, -23, -52, - -77, 69, -82, 51, 106, 53, -110, 66, 33, 68, -86, 90, 77, 20, -50, -90, -50, 1, 125, - 19, -111, 5, -28, 3, -122, 116, -118, -55, -2, -59, -66, 2, 101, 119, -109, 110, -80, -69, - -49, 99, 5, -38, -35, -75, 0, 13, -32, -112, -5, -76, -6, -96, 11, 99, 4, 14, -90, - -13, 85, -108, 61, -72, -116, -102, -125, 40, -45, 67, 126, 7, 60, -107, 21, -110, -36, 125, - 3, 10, -25, 87, -107, -22, -122, -81, 31, 93, 4, 55, 108, -60, -83, -80, -34, 68, -36, - -122, 83, 40, -24, 109, -128, 64, -8, -72, -98, -1, 115, 77, -82, -104, -2, -79, 101, 92, - 36, 118, -77, -62, 124, 92, -54, -107, -91, -83, -92, 29, -49, -30, 94, -121, 4, 24, -6, - -7, 22, -121, -9, 4, 27, 27, 52, -14, -81, -31, 41, -59, -35, -90, 25, -87, -40, 33, - -90, -69, -79, 122, 86, 90, -108, -121, 71, 16, 15, 76, -48, 7, 17, -128, 124, 21, 65, - -52, 90, 71, 44, -39, -10, 96, -116, -110, 74, 118, 114, -98, -107, -57, 23, -25, 2, 118, - 27, -36, 13, -95, 98, -9, 18, -76, 24, 37, -52, -93, -67, -64, -1, 78, 119, -51, -82, - 56, -85, -85, -48, -27, -88, 17, -25, 115, 16, 50, 90, -111, -69, 35, 31, 109, 84, 49, - 25, 125, 125, -63, -114, -70, 8, -80, -81, -12, 20, 94, -8, 54, -3, -39, -92, 103, -84, - 100, 100, -54, -42, 39, -97, -80, -19, -127, 112, 118, -111, 112, 14, -29, 10, 19, 21, 96, - -95, -120, -66, 40, -93, -52, 109, -118, -6, 21, -90, -42, -69, 106, -56, -97, 61, -68, -2, - -28, -118, 43, -29, -17, 62, 38, -92, -19, -67, -47, -20, -85, -59, 87, -87, 90, 42, -107, - -82, -55, 121, 53, -49, 88, 102, 51, 1, 32, 87, -68, -85, -13, -13, 87, -112, 124, -74, - -84, 33, -113, -40, 58, 120, 101, 94, -64, 0, -33, 71, -88, -52, 121, -72, -21, -89, 92, - -33, 64, -99, 54, -111, 45, -37, -104, 121, -80, 107, 21, -38, 119, 84, 30, 6, 3, -31, - 85, 117, -35, 29, 51, -105, 79, -82, -48, -104, 42, 6, -83, 37, 56, 26, -42, 80, 79, - 1, 93, -23, -120, 55, -23, -35, -59, 16, 102, -8, 119, 7, 99, -54, -119, -39, -114, -2, - -91, -104, -19, 114, 19, 14, -111, 58, -44, -122, -84, -123, -105, 19, -118, -11, 78, -57, 87, - -108, -74, 70, 68, 89, 85, 51, 10, 68, -17, -66, 106, -122, -38, -79, 78, 7, -128, -20, - 91, 26, -36, 87, 18, -117, 48, 121, 63, 96, 73, 100, 118, -74, 68, -30, 37, -17, -45, - -41, -114, -27, 10, -40, -109, -103, -123, -113, 13, 125, -27, 81, -121, -53, -42, -67, -34, 9, - -119, 114, 46, -40, 122, -111, -7, -78, -12, 64, 26, 20, -111, 91, 37, 74, -32, -55, -76, - 70, -117, -85, 123, 39, 95, 114, -74, -10, 30, 3, -89, -16, -90, -128, 81, 71, -56, -11, - 45, -70, 71, -103, -17, -78, 102, 59, -7, -33, -14, 119, -56, 86, 117, -60, -32, -104, 73, - 102, 39, 48, -21, 77, -120, -34, -29, 53, -89, -102, -102, 107, -70, -28, 26, -74, -113, -25, - 55, -88, -59, 1, 10, 92, 126, -30, -4, 18, -48, -70, 57, -3, 13, -127, 118, -49, 0, - 84, -44, 8, 45, 3, 47, 51, 83, 30, 13, 75, -91, 121, 25, -88, 49, 99, 14, 122, - 77, 26, -31, 35, 94, 48, 52, -117, 96, -106, -63, -29, -72, -42, 109, -33, -40, 105, -110, - 63, 79, -74, 104, -26, -71, 25, 120, 1, 51, 125, -81, 19, 43, 64, 15, 104, 93, -53, - 76, 64, -25, -98, -69, 6, -40, -128, -52, 16, 13, -106, -69, 76, 122, 32, -74, 68, 21, - -103, -29, -50, 83, -52, 98, 119, 58, 59, 22, 23, 17, 74, 124, -58, -34, 43, 46, -8, - -71, 6, 80, -38, 116, 40, 104, -43, 46, 73, -99, -60, -123, -8, 4, -32, -55, -117, -24, - -115, -27, -81, -43, 47, -93, -79, 72, -70, -93, 44, 79, -10, -106, 11, -14, 12, -108, -1, - -38, -79, 118, 24, -101, 35, 20, -117, 16, 102, -114, -21, 101, -56, 47, -76, 0, -37, -90, - 
52, -88, -127, -122, -72, -76, 58, -29, 84, 67, -75, 55, 29, 114, -46, 40, -31, 47, 63, - -90, 61, 98, -40, 125, -96, -20, 121, 85, 50, 49, 51, 17, 59, 49, -110, -5, 43, -84, - -110, -23, -104, -7, -77, -4, 20, 82, -47, -93, -37, 46, 81, -34, -97, 52, -115, -26, -40, - -18, -39, -42, 46, -116, -80, 66, 100, -103, -11, -53, 80, -112, -111, 68, -19, -34, 126, -71, - 66, 5, -118, -122, 117, 90, 4, -111, -47, -67, 29, -119, -103, 113, -106, -101, -123, 124, 22, - -41, 34, 51, -39, -53, -76, 98, 92, 108, -106, -20, -21, -87, 110, 45, 92, -61, 25, 78, - 13, 12, 41, -81, -74, 84, 105, -54, -19, -32, -100, -37, -69, 86, -95, -65, 121, 28, -38, - -82, 63, 43, -121, 125, 79, -40, 11, 61, -60, 53, 28, 67, 29, 24, -108, -61, 76, 62, - -61, 42, -33, -41, -25, 96, 84, 31, -41, 79, -22, -41, -100, 38, -13, 42, 90, 114, -101, - 93, 30, 70, 10, -66, 46, -47, 75, 102, 79, -42, -90, 78, -73, 55, -22, 66, 71, 87, - 32, -94, -102, -75, 105, 91, -82, -15, 109, 56, 81, 68, 20, 97, 6, 84, 2, 0, 24, - 106, -21, 113, 120, -125, -36, 112, 122, 28, -92, 16, 18, 79, 37, -12, 8, -7, 52, -57, - 96, -18, -68, 113, -87, 43, -64, -123, -47, -111, 120, 19, 112, -53, -126, 63, -117, -67, 106, - -120, -46, -123, 6, -83, -106, 72, -121, 101, -26, -18, -23, 102, 25, 70, 83, 4, -61, 111, - 107, -71, -95, -23, 6, 56, 100, -113, -1, 60, -90, -20, 1, 99, -97, -102, 75, -21, 119, - -65, -8, -98, 12, 118, 123, 75, -7, 76, 89, 65, 80, -27, 5, -20, 91, -49, 98, 108, - -76, -3, 67, -33, 96, -113, -21, 66, 26, -30, 42, -10, 109, -2, 60, -88, -97, -7, 40, - -61, -94, 2, -126, -79, 111, 84, 42, -48, -45, -103, -19, -110, -125, 71, 46, -66, 61, -79, - 54, 7, -38, 47, -57, 106, 55, 48, 54, 28, 116, 103, 63, -43, 106, -11, -73, 2, -77, - -104, -124, -74, -14, -115, 75, 23, 56, -2, -50, 87, -82, -44, -12, 14, -119, 106, -20, 11, - 113, -44, 61, 20, 121, 49, -64, -127, -91, 32, 105, -107, -19, -15, -117, -30, -19, -8, 28, - -120, -53, -122, -124, -63, -28, -33, 114, -27, 66, -38, 28, 18, -67, 85, 81, 6, -35, 58, - -16, -36, -45, 77, -122, -21, -101, 101, -8, -34, 54, -26, -128, 62, -14, -88, 115, -5, -22, - 41, 100, -20, 125, -39, 55, 44, 33, -16, -103, 126, 28, 110, 84, 77, 27, -68, 9, -117, - 40, -29, -83, -46, 103, 75, 58, 90, -7, -98, -123, -18, -51, 119, 82, 74, 87, -34, 37, - 30, -60, 119, -78, 113, -113, -37, -92, 36, -50, -38, -34, 24, -75, -114, 55, 87, 85, 19, - -34, 51, 13, -113, -29, 99, -23, 56, -67, 75, -61, 9, 120, 79, -120, -61, -23, 77, -87, - -58, 111, 13, 114, -127, -69, -46, -77, -70, 124, -42, 70, -83, -75, -27, -55, -59, -109, 113, - -51, 23, -124, -3, -117, 112, 2, -101, -104, 4, -80, -49, -38, 102, -53, -92, -92, 123, 79, - 116, -103, -108, -28, 88, -3, -68, 51, 108, 98, -70, 118, 6, 50, -17, 69, 31, -84, -9, - 26, 63, 29, 109, -66, 25, 108, 124, -8, -122, 9, 27, 50, -64, -105, 54, 30, 14, 72, - 32, 116, 1, -60, 57, 90, 18, 92, 29, -72, 36, -35, 3, -65, 107, 62, 89, 116, -108, - 103, 89, -15, 124, -60, 64, -42, 0, -36, -74, 65, -123, -16, -88, 22, -63, -3, -17, -118, - -128, 79, -70, 29, 83, -12, -120, 75, 41, 14, 42, 58, 65, -62, -107, -71, -26, -65, 13, - -106, -29, -111, -12, 42, 3, 109, -40, 117, -2, 82, 51, -105, -72, -26, -119, 65, 122, -32, - -109, -56, -101, 10, 91, 112, 96, 124, -75, 87, -33, -92, -58, -90, 27, -30, -16, -8, 34, - -6, -122, -38, -68, 15, -91, -78, 77, -116, 117, 94, 47, 16, -28, -33, 126, -116, -38, -106, - -98, 71, 58, 90, -10, 46, 20, 118, 27, 21, -42, -124, -120, 110, 6, -38, -75, 40, -74, - 109, -50, 84, 25, 62, 49, -41, 70, -18, -94, 21, -90, -97, -68, 24, 44, 23, 24, -119, - -68, 121, -82, 
93, 80, -12, 104, 102, -107, -99, -126, 15, 95, 40, 102, 54, -12, 103, -1, - 74, 35, -16, 9, 111, -60, -6, 37, -109, 40, 14, -83, -128, -88, 63, 17, 70, -75, 120, - -87, 15, -40, 48, 23, 50, 104, -79, -42, 116, -54, -2, 112, 59, 30, -117, -86, 73, 33, - -126, 46, -42, -11, 47, -71, 59, -44, 56, 51, -69, -82, -49, 113, 80, 7, 4, 1, -33, - -88, -64, -66, 92, -78, -21, -128, -28, 5, -70, 111, 57, 111, -70, 51, -110, -64, -40, 49, - 73, -35, 37, -83, -96, 43, 25, -52, -14, 31, -117, -39, -64, 118, 86, -2, 107, 2, 67, - 15, 42, -93, -74, 77, -5, 55, 33, 123, 84, 100, 65, -121, -72, 57, -58, -86, -26, 104, - -30, 14, -108, 20, -114, -50, -40, -38, -37, 107, -72, -56, -46, -19, 21, 29, 34, 121, -32, - -119, 122, -119, -41, -123, -119, 62, -44, 115, 43, -127, -114, 111, -64, -77, 120, 61, 52, -10, - 28, 91, -23, 90, -118, 32, 118, -81, 118, -74, -28, 82, -52, 60, -47, -108, -28, 17, -20, - -27, -27, 36, -49, -23, -35, 38, -118, -48, -50, -104, -7, -104, 11, 60, -35, -83, 26, -112, - 18, -18, -98, -24, 125, 123, -33, -50, -80, 96, -40, 70, 19, 44, 82, 4, 42, 64, -1, - 120, 96, -123, 44, -98, -77, 29, 6, -105, -10, 30, -92, -113, -2, 76, 90, 26, 19, 79, - -41, 122, -59, -115, 89, 126, -90, -50, 53, 42, 10, 80, -75, -114, 51, 118, 91, 96, -96, - -73, 119, 95, -19, 58, -94, 78, 49, -26, -37, -25, 37, 69, -92, -21, -36, -99, -116, 56, - -106, 18, -124, -70, 97, -84, -90, 63, 6, 32, -86, -107, -94, -114, 48, 113, -87, -39, 98, - -22, 1, 19, -20, 55, -99, 118, -8, -125, 33, -49, 89, 5, 28, 42, 13, -77, -89, -4, - 86, 85, -113, 64, 124, 59, -106, 103, -47, -73, 45, -91, -125, -2, 31, -15, -98, -82, 20, - 107, -78, -113, -53, -89, 23, 83, 92, 81, -10, -30, 75, 108, -82, 63, -76, -9, -109, 93, - -2, 73, 54, 81, -21, -113, 33, 99, 91, 84, 119, 120, 15, 77, 74, 91, 29, 59, -56, - -40, -40, -38, 2, 109, 83, -50, -48, 30, -22, -34, 20, -49, 14, 49, 109, 49, -128, -126, - 24, -124, 102, 88, 87, -58, 38, -74, 3, 62, -112, -83, 76, -24, -108, -3, 40, 35, 18, - 84, -31, 23, 5, 1, -25, -98, -104, -101, 81, 25, -66, 114, 1, -47, 98, 102, -22, 48, - 46, 87, 66, 74, -74, -74, 62, 94, -115, -105, 6, -76, 58, -106, -35, -119, 125, -113, -73, - 17, 24, -104, -17, 13, -37, -98, 85, -84, -55, 11, -100, 115, 8, 82, -36, 47, 67, -11, - -77, 125, -7, -126, -117, 60, -97, 1, -31, 7, 92, 126, -2, 6, -54, -48, 49, -67, -63, - -56, 16, 105, 79, 29, -16, -88, 30, 50, 124, 122, -78, 39, 84, -121, 105, 70, -39, -89, - -106, -8, 6, -40, -101, -81, -114, 21, 35, -8, 10, -121, 118, 110, -16, -85, -124, -18, 68, - 54, 108, -97, 38, 85, -112, -108, 49, 24, -73, 126, -41, -66, -40, 104, -53, -128, -94, 89, - -121, -8, -114, 52, 27, -60, 18, 113, -112, -98, 11, 112, -67, 101, 48, 51, 116, 95, -63, - -100, -27, 87, 60, 61, -117, 14, -118, -126, 44, -28, 82, -29, 31, 54, 89, 57, -102, 122, - 82, 106, 60, -90, 59, -41, 20, 46, -5, 105, 6, -112, 56, 67, 16, 121, 44, 32, -85, - 44, -73, 125, -116, -101, -119, 13, 79, -55, 52, 107, -15, 78, -75, 51, -83, 63, 107, -106, - -49, -45, 53, 32, -28, -33, -35, 68, -84, 46, -21, -93, -48, 45, 91, 83, -94, 9, 45, - -78, 90, -8, -56, -97, 31, 47, 121, 69, 51, -123, 29, 80, 1, -119, -74, -15, -72, -55, - -105, 41, -50, -15, -8, 7, 79, 105, 107, 117, 46, 87, 5, -63, -119, -37, 78, -37, 93, - -55, 67, 34, 107, 98, 124, 1, -45, -30, 97, -53, -96, 3, 100, 62, 126, 16, -66, 94, - -5, -20, 38, 97, 39, -33, -33, -110, -78, 98, 38, 71, -3, -19, -82, 122, 3, -115, 53, - -5, 81, -127, -126, 111, -80, -80, 65, 100, 61, 22, 57, 99, 53, -83, -95, 30, 12, -69, - 30, 114, -52, -126, 7, 65, 2, -72, 9, -128, -28, -68, 120, 
50, 66, 3, 18, 110, 16, - -115, 98, -64, -49, -74, -110, -41, -112, 18, -4, -110, 100, -54, 6, -113, -90, 39, 112, 124, - -3, 12, -111, -86, 69, -18, -40, 74, -121, -119, -69, -69, 17, -13, -48, 104, 62, -36, 57, - -128, 114, -61, 61, 28, 114, 62, 55, -106, 118, -37, 34, -92, 38, -15, -69, -81, -113, -58, - 122, 35, 49, -89, 46, -30, -62, -3, 87, 64, -29, 118, -120, -18, 117, -97, 41, 21, 36, - 112, 36, -43, -53, 111, -29, 44, 18, 78, -113, 95, 28, 32, -60, 118, -71, 1, -40, -35, - -22, -127, -124, -69, 16, -91, -68, 7, -7, -39, 104, -75, -28, 48, 49, 69, 37, 52, -121, - -68, 80, -101, 31, 66, -33, 52, -36, -57, -51, 114, -73, 110, -96, 80, -49, -125, -104, 32, - -116, 21, -91, 70, -57, 43, -49, -23, 67, 81, 52, 4, -77, 50, -92, -109, -90, 33, -73, - -11, 84, -65, -42, -36, 2, -89, -35, 43, 16, 3, 7, -85, 85, -48, 94, 124, -45, 61, - 105, -116, 75, -50, 103, -76, 69, 45, -102, 84, -7, -127, 107, -84, 40, 16, 38, -124, 45, - -98, -21, 58, -6, 84, 81, 46, 88, -64, 35, -63, 47, -23, 34, 84, 6, 38, 13, 85, - 26, -101, -75, -22, 112, -71, -36, -81, -109, -77, -86, 65, 49, -79, 49, 11, 126, 55, 93, - -112, 27, 110, 17, 120, -39, -51, -88, 126, -3, -51, 117, 93, -19, 69, -17, -102, -98, -57, - -61, -71, 53, -65, 99, -48, 25, 32, 24, 84, 13, 110, -69, -22, -20, -68, -66, -14, 89, - 57, -105, 114, -12, -51, 52, 116, -102, -53, 109, 85, 114, -90, 45, 123, 121, 108, 74, 84, - 107, 4, 109, 32, 29, -38, 38, 82, 11, -37, 14, 33, -51, -19, 51, -47, 102, 77, 29, - -43, 49, -40, 63, -65, 29, -62, 65, -88, -124, -57, -76, 85, 33, 24, -39, -40, 115, 87, - -102, 33, -40, -7, 1, -5, -74, -83, 121, 49, -72, -56, -7, 76, 27, 108, -71, -8, -39, - -70, 103, -16, -113, 96, -98, 39, 45, -114, 119, 82, 63, -24, 8, -81, 94, 116, -25, -116, - 39, -47, 87, 124, -93, -3, -25, 53, -16, 5, -102, -53, 70, 104, 48, 27, -65, -111, -40, - 123, 116, -95, -12, 114, 15, 49, -63, -58, -68, -22, 48, 122, 21, 124, 58, 85, 101, -33, - -82, -79, -123, -33, -55, 67, -92, -122, 48, 70, -11, 96, -72, 98, 29, -125, -32, -57, -1, - -122, 55, -51, 85, 96, 72, -22, -44, -39, -36, -19, 112, -72, -81, -110, -102, -121, -71, 107, - -104, -16, 15, -108, -62, 15, 117, -37, -68, 101, 69, -122, 84, -58, -40, -54, 94, 99, 34, - -105, -121, 106, -127, 53, 21, 26, 3, 68, -39, 108, 111, 79, 1, 92, 36, 6, -75, -112, - -111, -49, -114, 50, -108, -121, -95, 60, -26, -45, 88, -41, -37, 111, 26, -76, 36, 81, 14, - 27, -73, 94, 76, 21, -88, 97, 22, -92, -64, -19, -106, 115, 116, -18, 105, 16, -63, 105, - 108, -33, -5, -122, -125, -13, -54, 105, -59, -66, 90, -46, 27, -38, -116, -6, -24, 37, -73, - -71, -113, -56, -32, 91, -94, 54, -22, 66, -67, 16, 95, -27, 8, 81, -53, -44, 84, 38, - -67, 88, -46, 29, -108, 5, 13, -124, -36, -95, 92, -13, -63, -11, -5, -3, 54, 121, 41, - 63, 95, -89, -61, 60, -60, -66, -127, 102, 27, 26, -27, -103, -95, -31, -53, -62, 2, 44, - 56, -6, 116, -39, -28, 75, 102, 75, 111, 48, -36, 77, -62, -50, -112, 106, -124, 15, 21, - 42, 101, 4, -62, 0, -13, -108, -113, -63, -71, -4, -112, -5, 74, -79, -49, 77, -128, 26, - -51, 91, 20, -80, 13, -36, -121, 39, 78, 2, -120, -105, 3, 3, -24, 40, 90, 111, -125, - -43, -1, -118, 10, -16, -124, 5, -93, -107, -3, -87, -36, -70, 125, -27, -102, 40, -12, 18, - 98, 98, 124, -113, 28, -11, -91, -38, 28, 95, -109, -53, -46, -99, 90, -108, -53, -44, -122, - 56, 29, -50, -78, -93, -126, 14, 15, -1, -86, 110, -103, -94, 66, 110, -78, 9, 59, -29, - -119, 7, 7, 29, -90, -116, -98, -92, 18, 42, -117, -37, -94, -99, 86, -46, 86, -119, 79, - 95, -98, -8, 121, 59, -57, 126, -22, 46, -3, 41, -108, 95, 48, 10, 
-128, -18, 126, -84, - -71, -34, 30, 11, -22, 44, 30, 112, 63, -23, 116, 126, -87, -9, 88, -64, 115, -125, 29, - 3, 18, 69, 71, 58, -26, -99, 121, -100, 73, -19, -103, 50, 32, 65, 79, -15, 32, 63, - -127, -117, -19, -21, -43, 81, -33, -77, 43, 71, -19, 96, -88, -118, 36, 9, 66, 19, -5, - 44, -22, -14, 74, -107, -89, -51, -121, 122, -26, 42, 124, -53, 2, -90, 76, -84, 110, -91, - -86, 60, 66, -94, 18, -31, 95, -100, 65, -34, 71, 125, -86, -53, -62, -49, -115, -28, -2, - 11, 69, -99, -88, 114, 43, -60, -117, -31, 2, 56, 16, -83, -59, -116, -55, -87, 7, 53, - 59, -43, -60, 49, 114, -6, 29, -127, 75, -26, 77, 38, -34, 28, 96, -38, -113, -67, -68, - -105, -33, 108, 36, -52, -17, -64, -120, -111, 116, 100, 8, -42, -93, 90, 89, -126, -84, -19, - -42, -107, -119, -102, -79, -15, -10, -123, 97, -61, -42, 105, 117, -107, -48, 75, 7, 62, 83, - 91, 4, -27, 71, 32, 106, -27, -24, 66, 30, 44, -41, 6, -19, -5, 69, 83, 77, -41, - -29, 60, 0, -81, 121, -89, 119, -104, 28, 107, -60, 72, -87, 45, 77, -59, 30, -128, -60, - 41, -2, 21, 32, 126, -9, -46, 99, 24, 32, -127, -28, 24, -12, 32, 14, 25, -104, -79, - 103, 32, 22, 50, 80, -28, 0, -7, -100, 120, 47, 59, -119, 17, 18, -69, 40, 86, 109, - -56, 105, 1, 81, 38, -79, -79, 63, 39, -89, 44, 2, -28, -117, -42, -76, -36, 76, 18, - -125, 101, 118, -86, 67, -74, -91, -95, 112, 35, 104, 83, -23, 17, 35, 60, -20, 66, -74, - -69, 119, -110, -95, -38, -43, 60, 64, -2, -58, 118, 27, -107, -95, 39, 57, 101, 27, -82, - 113, 61, -78, -68, 39, 36, 17, -26, 55, -7, 21, -75, 27, -3, -127, 76, -101, -29, -4, - 47, 126, 72, -23, -82, 102, -30, 62, -59, 85, -108, -90, -104, -112, 117, -85, 42, 53, -48, - 90, 23, 93, -33, -33, 109, -66, -43, -24, -105, 3, 46, 17, 118, 118, 67, 94, -92, 112, - 33, 62, -60, 37, -109, 36, 86, 115, -72, 49, -105, 45, -4, 117, 68, 55, 22, 75, 51, - 18, -92, -64, 48, 106, -38, 53, -75, -44, -114, 70, -51, 88, -25, -28, 64, -77, -83, -113, - 82, -8, -117, 58, 25, -74, 98, -49, -115, 27, 54, 33, -51, 18, -68, -14, 85, 87, 86, - -42, -36, 90, 29, -76, 38, -119, 50, -64, 112, 13, -106, -128, 8, -124, 15, -109, -80, -99, - -88, -74, -12, -60, 84, 44, 67, -34, 86, -110, -84, -22, 52, 88, -3, 82, -52, 123, -80, - -47, 19, 29, -64, 58, -13, -103, 60, -5, -62, -32, 104, -28, -72, 19, -119, 113, 1, -49, - 34, 23, -83, -19, -28, 87, 102, -100, 35, -60, -10, 15, -21, -96, 20, -28, 86, -125, -21, - 118, -99, 99, -115, 22, 84, 67, -25, 29, -101, -68, -4, -96, 13, 77, 20, 68, -44, 122, - 89, -7, 84, 69, 33, -79, -21, -66, 20, 113, -128, 18, 50, 73, 46, -116, -85, 35, -76, - 35, 87, 114, -5, 124, -79, -5, -30, -67, 112, 29, -106, 0, -67, 0, 63, -58, 32, 111, - 125, -50, 54, -70, 27, 121, -7, 19, -29, 20, 48, 3, -110, -47, -12, 16, -104, -26, 81, - 116, 97, -86, -100, -37, -14, 107, 69, 108, -60, 47, 20, -54, 46, -30, -100, -4, -11, 109, - 2, -91, 110, 60, 97, 8, -21, -12, 109, -46, 80, 6, -41, 93, 22, -19, -54, 79, -38, - -31, 106, -46, -62, 90, 103, 43, -49, -58, -89, -69, -81, -97, 34, -62, 98, -60, -109, 100, - -71, 89, -92, -112, 119, 27, 62, -114, -35, 123, -39, 1, -58, 11, 6, -41, 120, -116, -103, - -14, 66, 0, 33, 32, -40, -66, -102, 67, -65, 65, -42, -49, 66, -20, -110, 5, 23, -52, - -112, 71, -68, 4, 121, 116, 3, -104, 3, -26, -5, 36, -13, -19, 77, 10, -96, 55, 119, - -94, 44, 12, 16, 4, 20, -26, 17, 20, -101, 84, 122, 94, -121, 34, -60, 72, -3, -44, - 4, -9, 91, 113, 107, 46, 16, 10, -100, -76, -34, -96, -49, 95, -64, -103, -51, 117, 62, - -72, 114, -17, -39, -62, 9, 85, 101, -39, -88, 122, -64, -83, 26, 57, 88, -72, -81, 40, - 93, 92, 92, -98, -88, 23, 32, 
-125, 108, 83, -26, -91, 62, -37, -23, -84, 123, 1, -71, - 83, 123, 55, -114, -4, -53, 95, 84, 104, 15, 124, -27, -101, -116, -110, -5, 93, 117, -34, - -24, 51, -39, 11, 73, 41, -114, 125, 64, 52, -31, -31, -88, 11, 24, 12, -109, 102, 116, - 20, -41, -20, 35, -85, 102, 72, 13, 13, 80, 123, 86, 99, -30, -92, -120, 125, 107, -93, - 118, -71, -13, -81, 34, 105, -115, 9, -65, -98, 44, -82, -123, 119, 14, 10, -96, -6, -5, - 73, 3, 106, -75, -97, -74, -6, -77, 5, 25, -25, -84, -58, 102, 112, 59, -84, 55, 84, - -97, 46, 105, -73, -109, 101, -25, -82, -34, 105, -115, 4, 125, -59, -109, -10, 44, -120, 71, - 104, -101, -108, -36, -85, 8, 100, 70, -75, 118, 58, -105, 73, 65, -126, 96, 94, -19, 16, - -92, -93, 54, -39, 69, 119, 100, -27, -91, -81, -92, 116, 119, -94, -29, 78, -99, 20, -68, - 77, -38, -36, 58, -30, -89, 76, 53, 61, -111, 125, -1, 80, -113, 108, 82, -1, -111, -106, - -4, -108, -32, 24, 90, 79, -27, -63, 118, -18, -80, -2, -102, -84, 124, -126, 115, 112, -55, - 77, 7, -32, -100, 62, 47, 121, 18, -58, 19, -49, -95, -51, -115, -12, 30, -111, -36, 34, - -51, -125, -13, 102, -115, 2, 6, -9, 102, -120, 123, -21, -52, -21, 69, 60, -12, 19, -64, - -116, -52, -51, 60, -101, 56, -102, 62, -95, -86, 91, -85, -53, 5, -84, 69, -127, -30, -20, - 14, 116, -116, 121, 77, 50, 82, -84, -92, -104, -26, 18, -30, 92, -15, 20, 85, -114, -42, - -88, 48, -121, -33, -32, 9, -98, -24, 33, 37, 19, 61, -100, -32, -73, -74, 68, 10, -92, - -69, 123, -22, -74, 12, -121, -33, 13, -83, -36, 7, -110, 110, 30, 11, -126, 99, 117, -109, - -4, -35, -77, -60, 16, 51, -94, -38, -28, -85, -54, -53, -78, 16, 24, 122, 2, -45, -38, - -22, -13, 54, -76, -90, 88, -11, 108, 23, -33, -99, -30, 41, -5, -104, 47, 0, 15, -122, - 39, -45, -91, 19, -115, 97, 113, 62, -2, -2, -92, -102, -25, 125, 91, 118, -103, 3, 67, - -36, -58, 51, 62, -91, 74, -42, 61, -34, -41, 28, -44, 61, -59, -125, 89, 79, -24, -45, - -52, -1, 62, 10, -128, 75, 1, 64, -84, 4, -67, 18, 90, 62, 48, -43, 100, -16, -111, - -93, 117, 117, -100, 91, -58, 66, 0, -8, 77, 79, -47, -67, -3, 62, 50, 117, 69, 4, - 82, 93, -88, 99, 104, -59, 22, 0, -5, -9, 20, -29, -5, -57, -95, 121, 20, 98, -46, - 80, -59, 29, -93, 80, -95, -12, 40, -26, -38, 58, 73, 55, -92, -2, -111, 121, 111, 122, - -57, -116, 51, 26, 5, 67, 90, 112, 92, 8, -37, 47, 44, 35, -27, -75, 112, 94, -112, - 52, -10, -2, 104, 7, -6, -46, 31, -21, 100, -19, 98, -12, -23, -100, -24, 40, 4, 24, - 1, 66, 11, 38, -11, 6, -67, -12, 40, -53, 117, 27, 36, 27, -89, -5, -31, -3, -21, - -100, -112, 109, 43, -113, -27, 58, 83, -5, -44, -56, -33, 59, 36, -35, -21, -48, 92, 45, - 17, 119, -76, 81, -49, 86, -14, -59, -93, -6, 29, 116, 57, -35, -5, -88, 88, -35, -52, - -121, -39, -124, 23, 82, -108, 49, 105, 25, -128, -26, 80, 90, 90, 46, 100, -110, -17, 114, - -112, -75, -40, 46, -51, -33, 13, -110, -37, -38, -108, -13, -122, -103, 99, -83, 119, 51, -62, - -34, 103, -34, 59, 125, 61, -104, 107, 49, 18, -20, -128, -52, 98, 100, -91, -69, -5, -13, - 115, -127, -126, -52, -93, 31, -4, 69, 68, -113, 28, 73, 79, 2, -30, 14, -65, 104, 113, - 69, 124, -104, -43, 37, 108, -42, -18, 63, 2, 44, 16, -23, -18, 59, 47, -19, -48, -123, - -90, 120, -110, -49, 63, -54, -124, -107, -34, -111, -106, 87, -110, -29, -125, -68, 109, -46, 110, - -44, -77, 25, 92, 19, -37, -50, 100, 67, -125, 51, -3, 46, 117, 96, -95, 85, -35, 54, - -11, 109, 122, 86, 102, -108, 120, -67, 94, 63, 47, -94, -8, -34, -35, -63, 117, -87, 51, - -102, -13, 15, -3, -74, -73, -48, -100, -42, -20, 69, -72, -113, -49, -123, 39, 57, -86, -69, - 36, -93, -31, 116, -22, -52, -30, 
37, 62, 51, -32, 119, -126, 66, 96, 68, 26, -123, -28, - -11, 70, 98, 47, 74, 121, 34, -43, 72, 24, 105, 85, 1, -100, 56, 56, -44, -38, -73, - 91, -69, 109, -104, 57, -105, 105, 41, 67, 100, -110, 12, 43, -76, -49, 54, -16, 100, -75, - 88, 43, -108, 27, 34, -21, 120, -101, -104, 100, -32, 14, 8, 46, -32, -116, -36, 106, -47, - -63, 6, -98, 109, -19, 100, -117, 91, -47, -43, 18, -45, 14, -106, -29, -24, 103, -25, -48, - 125, 67, -36, -92, 59, -112, -67, 44, -21, -61, 67, -84, -113, 40, -64, -28, -75, -44, -6, - 48, 86, 93, -51, 71, -122, 34, 94, 66, 37, -104, -1, 14, 92, -85, -40, 28, 49, -121, - 78, -80, -125, 51, 86, 23, -74, -9, -128, 54, -15, 94, 110, 34, -34, 113, 126, 16, -103, - 12, -85, 41, 70, 70, 28, 49, 117, -100, 45, 111, 79, 39, 47, 6, 90, -36, 107, -26, - 120, 122, -35, 56, -127, 0, -17, -73, -119, 40, -101, 115, -11, -91, -90, 101, 19, -56, -80, - 60, -4, -31, -31, -33, 51, 85, 82, -88, -62, 59, 80, 43, 113, 3, -97, 13, 122, 72, - 66, 6, -47, 20, -10, 116, -56, 13, -85, 110, -107, 24, 3, -99, -14, 92, -122, 39, -19, - 14, 86, -73, -120, -42, 55, -34, 10, 11, 13, -30, -80, 58, -96, 119, -108, -109, -17, -123, - -88, 4, -41, -126, -35, -13, -21, -71, 19, 97, -68, 98, -46, -43, 50, -98, -50, 62, 26, - -44, 121, -123, -83, 22, 13, -24, 113, 46, -121, -3, 94, 78, -113, -128, 74, -118, -110, -104, - -18, -66, -28, 121, -53, 37, -11, -81, -107, -103, 90, 30, 76, 72, 39, 98, -98, -5, -70, - -63, -2, -114, -79, -102, -121, 22, 117, -101, -47, -35, 68, 15, -37, 98, -110, 79, -52, 94, - 68, -37, 72, 21, -7, 28, -77, -121, -51, -104, 71, 88, -34, -43, -20, -9, 93, 113, 125, - 31, -1, 67, -79, 77, 67, 109, 96, -47, 50, 124, -43, -125, -91, -113, -95, 54, 44, -70, - -67, 75, -3, -55, 40, 95, -125, 125, -107, 102, -28, -73, 67, -18, -10, 123, 69, 86, -115, - -84, -102, -107, -117, 105, -59, -101, -125, 91, 72, 30, 113, 3, -102, 87, -34, -20, -51, 11, - 49, -51, 3, 126, 14, -100, -69, 60, 79, -37, -94, 106, 9, -15, -103, 87, 63, -28, 122, - 0, 126, 105, 54, -33, 108, -59, 7, 56, 84, 80, 34, 66, 82, -20, -128, -13, -8, 92, - -2, -119, -47, -102, 10, -8, -64, 43, 61, -40, -47, 94, 54, -13, -31, 45, 1, 19, 23, - -59, 52, 52, 19, 54, -107, -22, 24, -53, -93, -49, 81, -59, 82, -125, 98, 123, 24, -43, - -29, 17, 103, 13, -63, 43, 98, 11, 118, 114, 20, 20, -7, 46, 34, 89, 87, -87, -97, - 12, 86, 34, -79, 26, -59, 110, 18, -111, 104, 94, -118, 97, -16, 67, 1, -113, -84, -86, - 28, 94, -15, 63, 105, -52, 85, -48, 33, -67, -41, -74, 49, 109, -97, 78, 13, 81, -63, - -37, 94, -86, -33, -110, 117, -36, 54, 25, -72, -53, 88, 5, 30, 52, 21, 79, 69, -71, - -57, 32, -92, -118, 122, -86, 55, -58, -46, 2, 36, -14, 17, -80, 99, 121, 48, -41, 29, - -3, -30, 108, 60, -1, 25, 26, 38, 113, -86, 41, 110, -118, -26, -38, -42, -73, -19, -109, - -51, 18, 67, 58, 97, -114, -87, -92, -62, 48, -79, 54, -62, 24, -4, -53, 0, -115, -87, - -86, 4, -110, 17, 76, 78, -1, 72, 70, -34, 42, 84, 34, -103, 47, 126, 95, -86, 63, - -94, -117, 11, 21, 13, -36, 54, 69, 66, 78, 96, -71, -50, -53, 3, 19, 18, -49, 103, - 75, 107, 116, 106, -10, -7, 104, 20, 21, 1, -1, 5, -92, -78, -52, 124, 36, -124, 74, - -90, 106, 26, -10, -126, 29, -17, -50, 7, -116, 81, 22, 73, -126, 33, -17, 33, 72, -9, - 94, -6, -20, 125, 78, -78, 33, 105, -34, -102, 23, -120, 55, 89, 112, -118, -88, -107, 19, - 28, 34, -17, -21, -64, 62, 16, 4, 126, -23, -78, -31, 49, -124, 32, 56, 115, 18, 68, - -109, 68, 35, 102, -95, 116, -26, 70, -6, -121, -101, -73, -90, -77, -13, -54, 18, -108, -61, - -126, 89, -63, -21, -19, 60, -73, 85, 77, 120, -69, 85, 106, 26, -65, 120, 
8, 114, 57, - 17, 6, 73, -112, 51, 35, -34, -117, -48, -35, 15, -35, 85, 44, 96, 65, 38, 55, -121, - 123, -62, -86, 120, -28, 87, 27, 19, 2, 106, 42, -1, -22, -10, 0, 103, 92, -86, -94, - -114, 116, -105, 69, 82, 56, -6, 92, -41, -71, -59, 25, 97, -61, -26, 37, -52, -11, 111, - 118, 51, 86, -100, -66, 66, 70, -18, -32, 126, -28, -31, -3, -15, 63, 119, 31, 58, -46, - -63, -12, -92, -14, 60, 83, 47, -44, 111, -62, -107, 106, 95, 19, 65, 17, -116, -122, -52, - -59, -101, 78, -82, -119, 98, -80, -126, -74, -38, 28, -92, 4, -53, -71, 116, 108, 14, 52, - 60, -106, -68, -124, 21, 92, -13, 13, -50, 11, -46, 21, -62, 17, 9, 61, 6, -40, 8, - -79, 59, 33, -16, 18, -69, 96, -84, 122, 99, -18, -11, -89, 54, 103, -17, -75, -68, 124, - -32, 13, 52, 101, -112, -16, -15, 45, 7, 88, -109, 110, 125, -47, -25, 84, -119, 97, 49, - 96, -62, -47, 120, 32, 100, -114, -40, -33, -86, 90, -81, -90, -128, 19, -18, 125, -95, 50, - 26, 58, 40, -91, -99, -67, 71, 56, 30, 66, 27, -103, -48, -59, -10, 83, -38, 6, -11, - -90, -29, 113, 112, 91, -40, -86, -67, -100, -125, 13, -15, -100, 39, 7, 120, -23, -56, 56, - 1, -31, -125, -8, 6, -72, 16, -62, -118, 121, 43, 91, 72, -75, 53, 35, 95, -30, -10, - 26, -3, -94, -87, 66, -124, 49, -104, 66, 75, 84, -19, -37, -108, -83, 81, -59, -52, 25, - 111, -75, -50, -90, 12, 91, -19, -82, -117, -78, -115, -88, -65, -48, -95, 125, 26, 51, 112, - 51, 59, -87, -98, -72, 125, 104, -115, 57, 104, 26, 79, 98, -84, -92, 112, 49, 118, -33, - 86, 61, -111, 13, 88, -44, -105, -13, -83, -81, -109, -97, 89, 14, -95, -34, 20, 70, 32, - 22, 45, 81, -61, -16, 106, 41, 3, -83, -53, 115, -49, -104, -64, -111, 44, 89, 35, -53, - 111, -106, -117, 85, -25, 7, -15, 24, -49, 60, 121, -103, -28, -114, -102, -45, -44, -57, -78, - 11, 7, -102, 19, 64, 22, 98, 91, -20, 97, 71, 67, 79, -83, -82, 27, 23, -18, -94, - 17, -29, -17, 2, -11, -109, -124, 93, 21, 54, -107, -119, -67, 7, 124, 77, -18, -126, 124, - 103, -63, 46, -99, 51, -76, 61, -7, -101, 51, 100, -30, 68, 119, -25, 119, -126, -96, 64, - -1, 19, -80, 48, -35, 36, -111, -122, 112, 90, -74, -17, -102, -122, -47, 122, 100, 105, -118, - 12, 25, 64, -89, -125, 71, -22, -82, 116, -69, -64, 111, 114, -107, 8, 42, 87, -101, -122, - 10, 24, 26, -119, -62, 72, -17, 10, 19, 28, 107, 36, 45, 108, -6, 7, 34, 13, 123, - -62, 22, 73, 73, -128, 59, 60, -21, 43, -73, -11, -27, 102, -84, 7, -5, -85, 120, -49, - -4, -52, 41, -122, 37, -21, -62, -112, 78, -103, -39, -37, 79, -4, 74, 84, 12, -123, 61, - 57, -37, 96, 81, 56, 19, 30, -53, -82, -102, -85, 97, -87, 77, -39, -112, -20, -109, 107, - -111, -74, -43, -47, -100, -58, -20, 35, -2, 27, 109, 54, 44, -105, 36, 72, 66, -66, -108, - 126, -36, 65, -50, 15, 94, -80, 119, -30, -35, 22, -24, 57, -87, -107, -121, 47, 28, 44, - -18, -128, -39, -98, -74, -71, 90, -111, -96, 29, 49, 39, -9, 19, 83, -86, 1, 87, -48, - 52, 104, -105, -121, 29, 97, 75, -49, -3, -101, -19, -21, -7, -49, -29, -48, 59, -93, -15, - 32, -125, -76, 61, 87, 86, 91, 32, 114, 59, -70, 16, -64, -36, 98, -96, -56, -28, 101, - -17, 65, -44, -114, -119, -1, -87, 4, -91, -94, -79, -78, -110, -9, -110, 27, -32, 76, 6, - -73, 41, 15, -5, -47, -99, -15, -50, -45, 11, -14, 34, 57, -24, 98, 44, 33, 1, 75, - -18, -55, 44, 126, -13, 34, 73, 93, 25, 47, -89, 103, 4, 59, 57, 46, -2, -53, -18, - 100, 12, 1, -104, -126, 42, -108, 5, -12, 14, 12, -17, 108, -14, -118, -95, -11, -81, 19, - -105, -65, -62, 99, -64, -44, -92, -118, 83, 50, -3, -94, -16, 85, 108, 123, 71, -73, -101, - 114, -70, -22, 30, -75, -10, 5, -44, 78, -98, 56, -98, -41, 32, -125, -127, 51, -40, -30, - -62, 
104, 42, -4, 102, -78, -108, -42, 72, 86, 94, -75, -54, 123, 57, -82, 121, -112, -6, - 30, -58, -82, 90, -69, 44, 56, 57, 32, -76, 81, 109, -17, 114, -11, -14, -56, -46, 45, - 91, -56, -104, 18, 109, -98, 1, 89, -51, -59, 27, 74, 103, 109, -25, 37, -58, -72, -10, - -41, 113, -1, 25, 44, 53, -99, 81, -98, -92, 3, 63, 57, 9, -8, -105, 96, 17, -124, - 89, 20, -15, 84, -11, -2, -30, 113, -92, -65, 98, -25, 22, -59, 67, 29, 57, 80, -51, - 33, 35, -25, 99, 67, 99, 87, -48, 110, -89, 79, -48, -43, -78, -91, 117, 96, 15, -64, - 23, 31, -32, 78, -42, -28, -60, 29, -24, -8, -88, 124, -8, -11, -24, 26, -10, 13, -117, - 94, -37, 36, 111, 85, 73, 15, 120, -18, -123, -40, 4, 12, 61, 63, 26, 118, 97, -59, - -124, 53, 92, -125, -12, 85, 91, 62, 21, -51, 122, 104, -49, 116, 58, 48, 29, -25, 104, - -4, -89, 85, -35, -30, 1, -17, -49, -59, -104, -19, -37, 22, 71, 54, -96, 101, 51, -10, - 116, 27, 98, -35, 51, -29, -66, 87, 4, 75, -8, 0, 107, -113, -36, 83, -43, 20, -21, - 58, -47, -79, -64, -65, -74, -11, 54, 54, 35, -46, -124, 49, -128, -122, -90, 77, -11, 53, - -123, 29, -52, 57, 95, 47, 35, 110, 113, -1, 41, -53, 15, 66, 45, -101, -80, 46, 17, - 112, 11, 33, 90, 6, -93, 23, -1, -57, -119, 109, -92, 55, 14, 46, 67, 125, -124, -99, - -14, -24, -36, 65, 61, -16, -43, 46, -27, 7, -97, -76, 19, -53, 67, 72, 111, 112, -12, - -69, 52, -103, -114, 48, 0, -88, -51, 75, -25, -60, -95, 68, -95, -108, -45, 90, 4, -45, - -40, 67, -11, 72, 52, 75, -40, -46, 116, -124, -104, -125, 87, 77, -56, -8, -17, -72, -113, - 45, 42, 39, 121, 38, -34, -41, -93, 118, -50, 78, -46, 17, -26, 46, -43, -61, 3, -85, - 59, 77, 75, 89, 62, 56, 117, 61, -53, 23, 66, 19, -67, -31, -73, -125, -126, 117, -127, - -104, 1, -86, -34, 75, 45, -1, 125, -58, 41, 83, 80, -5, 59, 64, -86, -41, -80, 94, - 116, 64, -77, -13, 80, 22, 43, -18, 125, -26, -36, 79, -78, 55, 50, 113, -7, -8, 1, - 1, -48, 70, 62, -68, -43, -84, -31, 107, 115, -34, 19, 124, 39, -18, 65, 110, 21, 19, - -107, -43, -95, -80, -36, 91, -127, 107, -111, 73, -90, 54, 55, -6, -8, 68, -11, -90, -62, - -94, -84, -96, -36, -91, 86, 62, 2, -85, -5, -5, 93, 5, -16, 65, 115, -51, 83, -94, - 14, 49, 96, -116, -15, 37, 95, 16, 19, -80, -102, -90, 37, -18, -101, 26, -39, -48, -110, - -84, 34, 50, -15, 32, -65, 12, -103, 77, 74, 114, 25, 102, -118, -46, -69, 43, -10, 34, - -78, -111, -105, -77, -109, -43, 26, 39, -82, -14, 57, 74, 9, -89, -99, 51, 19, 61, 84, - 87, -55, -28, -67, 102, -69, -103, 18, 35, 5, 37, 115, 51, 52, 48, -115, -17, -22, 124, - 39, -60, 8, -112, 117, 114, 47, -88, -27, -7}; + -72, -105, -92, 33, 98, -117, 59, 2, 107, 1, -37, -78, 66, -97, 6, -124, 45, 93, -1, + -69, 107, 36, -109, -6, 14, -73, -28, 119, -27, -44, 19, -3, -53, -31, -78, -63, -85, 10, + -104, 8, -90, -82, -91, 54, 27, -21, 91, 65, 10, -81, -87, 103, -75, 58, 40, -125, 123, + 42, -122, 119, -10, -126, -25, -93, 74, -117, -92, 94, -23, 106, 95, -24, -14, 81, 104, 94, + 7, 8, 62, -25, -11, -34, 61, 58, 103, 112, -119, -90, -117, -125, -70, 61, -33, 122, -47, + 47, 3, -30, -66, -74, -42, -43, -34, 71, -63, -64, -98, 124, -47, 96, 57, 39, 16, -69, + -58, 35, -85, -41, -82, -51, -92, 89, 92, -93, -77, -40, 67, -63, -108, 0, -97, -109, -37, + 91, 110, 20, 125, 52, 57, 30, -43, -76, -4, 114, 32, -49, 109, -37, -29, 106, 79, 119, + 45, -81, -12, 59, 43, -60, -45, -79, 26, -101, -8, -107, -77, 32, -30, -77, -66, 68, -55, + 98, 64, -74, 3, 102, -42, -69, 58, -46, -31, -15, -94, -7, 41, 107, 38, 87, 106, -88, + -14, -93, 63, 85, -103, 13, -99, -52, 104, -42, 28, 31, 116, 33, -67, 6, -57, 97, -122, + 77, 96, -11, 24, 96, 
69, -32, -50, -44, -102, -21, 101, 0, 74, -60, 1, 95, 34, 97, + -9, 78, 114, -27, 75, -95, -83, -72, 21, -12, 119, -82, -101, -119, -9, 104, -49, -41, -112, + 34, -46, 63, -69, -116, 105, -47, 88, 78, -15, -28, -81, -43, 98, -7, 34, 113, -101, 65, + 43, 111, -34, 0, -44, 29, -83, -120, 22, -103, 48, 20, 57, -12, -128, -7, 71, 80, -108, + -43, -93, 98, 118, 50, 97, 63, 84, -124, -75, 114, -65, -32, -72, -119, 0, -117, 42, -73, + -127, 119, 61, 111, 104, -38, 100, -1, -108, 15, 45, -70, -15, 30, 110, -119, 35, -42, 40, + -45, -104, 112, -84, -2, -116, 47, 1, -13, -27, 9, -8, 66, 58, -51, -31, 5, 91, -58, + 37, 126, -111, -52, -87, 121, 62, -60, 80, -8, -127, -19, -38, 49, 109, -50, 88, -95, -69, + -44, 67, 61, -38, 95, 98, -79, -6, 105, 100, -91, -57, -35, -115, -43, 103, -118, -9, 89, + -12, 62, 47, -76, 126, 102, 123, -10, 89, -96, -83, -115, 92, 68, 72, 18, 75, 63, 60, + -24, -127, 53, 35, 99, -9, 51, 1, 71, 48, -123, -52, 5, -12, -6, 55, 81, 34, -33, + 14, 121, -23, 94, -106, 76, -2, -62, 45, 39, 78, 75, -91, 125, -124, -17, -101, -26, -40, + 46, -88, -99, 44, -106, 52, -67, -59, -6, 126, -17, -39, -113, -63, -102, -60, -85, 93, 98, + -80, -77, -74, 38, -78, -55, 120, 20, -99, 41, 82, 78, -80, -115, -49, -44, -52, -112, 27, + 77, 25, -65, 20, -63, -17, 0, -54, 33, 62, 106, 86, 25, -107, -29, 41, 33, 93, 7, + -109, -83, 120, -103, -94, -37, 35, 89, -95, 66, 80, 83, 12, 13, -101, -26, -44, -21, 31, + -12, 47, 125, 106, -96, 124, 52, -92, 0, -19, 20, 55, 68, -20, -101, -34, -81, 15, 52, + -123, 118, -2, 95, 109, 2, 93, -94, 45, -18, -54, 110, 71, -120, 63, 126, 30, -40, -78, + 27, 70, 70, -44, -87, 97, 80, 57, -24, -15, 33, -74, 77, 74, 35, -38, 11, 79, 100, + 71, -42, -110, 4, 116, -17, -80, 5, -50, 88, -97, -85, -101, 93, -42, -14, 5, 118, -114, + 111, 52, 83, 87, -69, -40, 66, 85, -72, 119, -64, 62, -77, -16, -31, 75, -13, 14, -8, + 5, 60, -13, 121, 11, 73, -30, 28, 94, -75, 28, -86, -69, -59, -83, -11, 69, 79, -7, + 53, 93, -79, 97, 21, -63, 107, -103, 115, -61, 21, 36, 19, -34, 82, 124, -123, -62, -66, + 57, 79, 92, -67, -29, -50, 24, -66, 76, 7, 84, 39, -61, -62, -74, -107, 47, 22, 0, + -75, -103, -93, -116, 87, 38, -36, 44, -39, -39, -15, -106, 25, -56, -67, 85, -60, -94, 82, + 61, 30, 73, 50, 47, 87, 62, 14, -51, -89, 61, -27, -69, 60, -20, 3, -12, -67, 80, + 19, -89, 31, -64, -118, -6, 53, -46, -51, -56, -39, -26, -28, -53, 112, -128, -47, -68, 107, + -26, 0, 25, 89, -9, 111, -26, 114, -35, -71, 99, -92, 116, -32, 105, -126, 4, -76, -114, + 1, -65, 62, -107, -101, -1, 45, 7, -65, -65, 105, -114, 96, -61, -100, 38, -93, 46, 106, + 49, 28, -110, 50, 102, 54, 86, -8, -119, -47, 32, 125, -40, 39, 49, -68, 7, 71, -66, + 17, -103, -3, 2, -50, -63, -4, 111, -77, -78, 6, -41, 88, -19, -87, 61, 5, 93, 120, + -18, -120, -59, 20, 17, -68, -100, -73, -122, -26, -70, -36, -118, 108, 80, -94, -26, -23, 3, + -44, 10, 7, 64, -55, -45, 13, 97, -106, 47, 4, -46, -30, -24, -120, 45, 34, -117, 111, + 112, 3, -37, -40, -35, 110, -59, -4, 114, -76, -65, 7, 114, -27, 33, -123, -69, 63, -23, + 47, -51, 123, 98, -39, -90, 106, -91, -11, 75, 34, -39, -5, -52, 75, -75, -12, -59, 111, + -85, -67, 124, 7, 8, 64, 91, -99, 110, 107, 85, 113, 87, 3, -93, -62, -91, 116, -89, + 87, -113, -108, 65, 96, 21, -59, 91, -46, -35, 7, -87, -60, -3, -123, -34, 98, 20, -76, + -109, 47, 21, -13, -83, -78, 2, -12, -72, -21, 17, 37, 39, -105, 49, 105, -61, -116, -38, + -46, -110, 17, -127, -62, -58, -117, -65, -63, -4, 60, -69, -59, -128, 48, 19, 125, 30, -64, + -103, -58, -14, -43, -113, 13, 0, 24, -32, -10, 19, 15, -84, -23, 
-124, 31, -46, -124, -5, + 88, -41, -91, 43, 14, 47, 100, -123, 122, 112, 69, 43, -33, -84, -69, -7, -35, 46, 83, + 104, 74, 107, 84, 90, 69, 61, -33, 4, 80, 92, 83, -92, -23, 108, 102, -16, 26, -91, + 91, -17, -51, -80, -48, 87, -49, -88, -76, -55, 107, 80, -22, 85, 57, 6, -103, -24, 88, + -96, -61, -29, 90, -3, -65, 72, -32, -80, -10, -6, -4, -78, 38, -73, 64, -81, -74, 124, + -79, -98, 85, 54, 95, -101, -28, -54, 34, -7, 22, -94, -102, 27, 74, 58, -63, 97, -8, + 82, 121, 25, 92, -11, -111, -105, 55, 54, -32, -63, 28, -99, -117, -113, -76, -59, -113, 101, + 15, -33, 35, -105, 98, -41, -25, 107, -44, 23, 99, 20, 82, -18, -59, -33, -82, -58, -54, + 3, -8, 65, -38, -43, -123, -48, -101, 56, -83, -111, -107, 57, 33, 47, -54, 80, 105, -115, + 17, -48, -17, -58, 116, -67, -98, -98, 69, 63, 15, -97, -116, 85, 39, 33, -25, -62, 78, + -40, 112, -127, -4, -38, 111, 24, 38, 101, -77, -7, -90, -5, 92, 77, 115, 56, -85, -57, + -42, -52, 18, 89, -55, -64, 78, 112, 69, 39, -123, -39, -46, -7, 117, -46, -121, -128, 74, + 71, 83, -114, -77, -74, -72, -4, -1, 117, 106, 107, 119, -86, -5, -87, 98, 54, -113, 45, + -74, -67, 26, -11, -93, 102, -111, -119, 57, -83, 86, 41, 20, 86, -63, -45, -57, -123, -105, + 71, 88, 78, -1, 1, 105, -54, -44, 54, 23, -72, 33, -37, -21, -5, -102, -7, -4, 3, + 97, -115, -53, 64, -25, 28, 3, -118, -122, 109, 6, -111, 19, 114, -43, -95, 84, -42, -33, + 49, -11, -90, 77, -67, -45, 105, -45, -32, 78, -51, -28, 89, -87, 14, 109, 100, -86, -44, + 60, 71, 38, -50, 105, -75, 28, 121, 99, 10, -118, 92, -116, 18, -78, 17, -59, -113, 95, + 2, 100, -117, -39, -85, -9, 4, 61, 98, 36, 101, -104, 4, 89, 69, 36, 84, -116, -102, + -46, -115, -33, 106, -81, 84, 89, -24, -40, -119, -52, 102, -116, -59, -24, 72, -27, -125, 120, + -25, 79, -116, 60, 26, -33, -62, 64, -102, -42, 17, -43, 72, -107, -108, 7, -35, -122, -110, + 14, -72, 61, 29, 56, 100, 40, -93, 65, 32, -83, -3, 24, -78, -121, 73, 39, 11, 6, + -70, 108, -38, -84, 121, -42, 111, -95, 56, -111, 31, -110, 56, -78, 33, 65, -56, 50, 115, + 37, 95, -62, 73, 71, -106, -125, 50, 105, 41, 110, 67, 114, 48, 118, -66, 49, -113, 7, + -35, 39, 26, -57, -33, -35, -106, -80, -74, -39, 99, 52, -97, 33, -72, 54, -120, 119, 19, + 125, -75, -118, -125, 102, 8, -39, 22, -109, 71, 18, 22, -10, -11, 30, -19, 37, -112, 72, + -42, 118, 51, 123, -112, 37, 52, -93, -70, -60, 8, -104, -107, -23, 125, 62, -56, -42, 20, + 56, -28, 107, 47, 121, -61, 49, -117, -123, -20, -55, 102, -58, -49, 8, -61, 61, -101, 46, + -71, -26, -110, 123, -5, -92, 56, 118, 29, 13, -93, 109, -10, -62, -28, 125, -4, 26, -17, + 90, 10, -87, 14, 48, -86, -97, 16, 61, -105, 96, 36, 55, 99, 61, 124, 7, 98, 37, + 112, 42, 77, -20, -91, 112, 90, -2, -39, 17, 57, 77, -119, -73, 99, 7, 115, 13, 72, + 26, -53, -101, -47, 125, 94, 17, 91, -4, -26, -54, 66, -96, 90, 35, 5, 84, 126, -125, + 104, -70, -83, 15, 104, -44, 73, 15, -117, 55, -117, -53, 50, -9, 115, 63, 98, -79, -104, + -7, 82, -61, 14, 117, -55, -20, -114, 111, 69, 105, 39, -83, 121, -47, -87, 83, -121, 83, + 85, -77, 74, 122, -82, -106, 34, -61, 28, -93, 35, 1, -5, -12, 22, -60, 33, 24, -12, + -77, 46, 7, -109, -11, 97, -21, -21, -53, 37, 86, 3, -47, -64, -47, 0, 55, -32, 110, + -109, 11, -27, -23, -116, 58, 10, 18, 72, -22, 46, 98, -15, -106, 43, -46, -48, -113, -117, + -70, -24, 110, 59, -69, -27, 71, -99, 67, 4, 4, 19, 103, -87, 23, -101, 77, -24, -11, + 84, 92, -72, 21, -8, 11, 4, -90, -85, 79, 45, -13, -40, -108, -78, 61, 66, -74, -109, + 27, 122, -118, 54, -47, -71, -102, -62, -91, 33, 91, 100, -122, 101, -108, 58, -104, -51, 51, 
+ -17, 21, -46, 101, -121, 84, 83, -108, 17, 52, 103, 63, -90, -15, -104, 83, -55, -72, 83, + 22, 38, 88, -2, 8, -38, 57, 28, -20, -120, 80, 81, 56, -24, 119, -29, -46, -51, -60, + -62, 64, 98, 67, -76, -19, 124, -54, 93, -7, 46, 2, -58, 8, -60, -39, -26, -90, 90, + 25, 73, 25, -102, -15, -36, -5, 52, 121, 108, -83, -28, -118, -99, 32, -7, 77, -14, -108, + 84, 36, -91, 79, 1, 75, -29, -64, -61, 25, 35, -101, 101, 42, 46, 78, -85, 107, -122, + -97, 45, -108, 1, -107, -75, -50, 113, -15, 121, -117, 99, 13, -37, -111, -58, 45, 46, 57, + 125, -119, 19, -101, -78, -82, -29, -99, -111, 44, 61, -85, -51, 117, -59, -87, 4, 65, 63, + -46, -85, 0, 36, -83, -83, -93, 113, -31, -123, -69, -106, 67, 65, -75, 7, 75, 0, 16, + 13, 88, 69, 9, -34, -76, 124, -106, -36, 41, 9, -57, 9, 68, -11, 35, -8, 125, 72, + 115, 2, -101, -36, 77, -6, 112, -128, -15, -84, -29, -49, -10, -39, -47, -40, 43, -101, -109, + 6, 108, 12, -93, -10, -104, -8, -22, 124, 85, 125, 91, 76, 0, -86, -88, 97, 119, -117, + 69, 42, -35, -86, -100, 1, 72, 12, -20, 59, 119, 103, 83, -75, 27, 76, 67, -128, 42, + 27, -46, 23, 47, 96, -3, -41, 32, 62, -26, 109, 113, -60, -10, -109, -114, -76, -57, -29, + -101, -11, 72, -90, -70, -120, 41, 102, -89, 44, -100, -5, 33, 123, -39, 124, 109, -14, -127, + 18, -23, 52, 68, 42, -87, -48, 56, -99, -101, 123, -118, 15, 35, -84, -77, 67, -4, -13, + 121, 101, 94, 50, -58, -13, 112, -1, 3, 76, -104, 107, -108, 117, -128, 4, 29, -9, 28, + -7, -116, -17, 3, 59, -126, -96, 58, 98, -119, -30, -86, -120, 24, -90, -121, -111, -112, 37, + 89, 16, 27, -34, -106, 101, -120, -123, -96, 53, 13, 48, -37, 38, 48, -87, -30, 119, -43, + -47, -122, -96, 54, 81, -87, 55, -34, 43, -25, -83, -12, 77, 78, -101, -33, 44, 18, 22, + 27, -109, 42, -60, 87, 74, 58, 78, -62, -35, 119, 1, 120, -116, 19, -83, -93, -24, 105, + -70, -54, 87, -117, 102, 126, 103, -80, -82, -16, 49, -11, 109, -73, 13, -94, 58, -102, 96, + 11, 85, -98, -30, 23, -23, 108, -43, 0, -79, -9, 40, -5, -8, 73, -60, 52, -91, -63, + -42, 63, -101, -10, 115, 124, -50, 71, 68, 31, -39, 31, 52, 90, -106, 52, -84, 8, -18, + -13, 76, -30, 21, -29, -106, -35, -98, -98, -22, -92, 28, 38, -116, 117, 119, -6, -114, -44, + 1, 86, -6, -113, 107, -61, 46, 0, -117, -109, -69, 72, -43, 80, -27, 30, 113, -1, -89, + 118, -31, 21, 49, 7, -56, 43, 98, -56, -55, -16, -119, 11, -81, -85, -26, -107, -110, -39, + 65, -18, -83, -20, -35, 64, 118, 110, 8, 12, -91, 35, 10, 26, 18, 100, 94, 27, -116, + 79, -113, -103, -128, -35, -6, -94, -78, -71, -10, -25, -61, 58, -65, 43, 121, -89, -38, 3, + -96, -117, -113, 110, 81, -121, -108, -41, 91, -87, 109, -93, 67, -101, 61, 75, 52, 38, 84, + 46, 13, 106, 125, 65, 40, 1, -48, -20, -125, -92, -110, -53, -113, 65, 62, -60, -70, 121, + -106, -123, -46, -62, -115, -6, -94, -108, 96, -6, -11, -107, 73, 110, 51, -74, 0, 126, -5, + -27, -87, 114, 41, 44, -41, -87, 122, -45, -41, -16, 83, -93, -52, -42, -83, -53, -53, 53, + -14, -28, -87, -44, 25, -54, -11, -56, -104, 49, -16, -26, 60, 122, -34, 84, 4, -36, -5, + -7, 114, -7, 82, 47, -74, -123, 73, -41, -110, -101, 107, 24, -60, 33, 16, 9, 101, -109, + 15, -50, 107, 25, 47, 84, 110, 106, 72, 67, 6, 63, -18, 62, 24, 89, 21, -99, -65, + -48, -42, -80, -98, -22, -46, 47, 28, 120, 114, -93, 28, 47, 34, 15, -34, 79, 15, -74, + -29, -1, 103, -37, 117, -68, 44, 109, 0, -30, -87, -117, -107, -85, -84, -41, 31, -101, -128, + -24, -80, -31, -52, 115, 11, 62, -73, 64, 34, -120, -8, -19, -113, 68, -34, 25, -74, 98, + 63, 101, -35, 28, -36, -96, -57, 44, -107, -64, 111, 65, 63, -68, -82, 58, 120, -77, -72, + 29, 
-127, -108, 46, 77, 101, -119, 27, -58, 118, 38, -42, -45, -76, -27, 37, 81, 99, -9, + -16, -56, -59, 109, -112, -123, -67, 88, -89, 70, -63, -32, -68, -96, 76, 29, 12, 86, -41, + 65, -112, -63, -67, 101, 45, 75, -107, 8, -37, -112, 0, 111, -56, 7, -93, -27, 115, 107, + 82, 11, 34, 52, 27, 80, 93, -10, 106, 70, 83, 120, -100, -23, -51, 72, 81, -38, -54, + 4, -73, -91, 47, -9, 92, 49, 77, 46, -114, -53, 59, 118, -32, -104, 121, 17, 89, 108, + 45, -73, 21, 70, 64, 107, -1, -71, 44, 82, 14, 34, 104, -114, -32, -48, 59, -49, 26, + 117, -89, -7, 110, -20, -33, 104, -47, 111, -13, 112, -34, 114, 11, -128, 17, -10, 29, 116, + 4, -74, 90, -66, 0, -12, 55, -60, -85, -2, 113, -21, 7, -42, 123, 44, 20, -112, 49, + 47, 3, 61, 34, -30, 99, -53, -46, -36, 39, -112, -36, 119, 7, 36, -12, -33, -104, -96, + -46, 15, 85, -105, 91, 18, -78, -95, 35, -50, 15, 95, 62, -59, -30, -66, 110, -6, -33, + 90, 43, 6, 58, 107, 96, 126, -70, 51, -62, -98, 126, -75, -58, 102, 24, 11, 57, 42, + 28, -85, 49, 65, -30, 32, -39, 116, 13, -8, 73, 11, 76, -69, -99, 29, 42, -59, -10, + -17, 109, 20, -42, 4, 24, -7, -88, 91, -110, 78, 8, -103, -63, 108, 123, -98, -121, -88, + -55, -43, -17, -10, -66, -44, -24, -119, 105, -72, 89, -120, -122, 61, -46, -16, -128, 68, 29, + -72, 82, -14, 121, 103, -126, -73, 82, 31, 88, 58, -65, -94, -78, -28, -76, -3, 74, -66, + -39, 112, 30, 79, 101, -62, -34, 122, -44, 79, -14, 40, -71, -68, -127, -128, 86, 2, 22, + -109, -44, 76, -61, -82, -32, -54, -121, -54, -43, -80, -7, 71, 93, 23, -42, -35, -21, -118, + -80, 52, 32, 24, 63, 55, 72, -124, 68, 61, -119, -21, -37, 34, -120, -116, -97, -73, -103, + -5, 106, 107, 119, -86, -102, 78, 97, -28, -125, 9, -89, -45, -32, 68, -112, 22, 91, 68, + -76, 17, -82, 119, 31, 93, 18, 2, -22, -14, 60, -63, 36, 44, 50, 112, -71, -32, 105, + 11, 6, -50, -89, -108, 32, -19, -30, 116, -71, -61, -105, -37, 24, 24, 101, 114, -8, 2, + 70, 122, -49, 64, -120, 123, 58, 23, -37, 34, -68, 27, 121, 13, -120, -101, 113, 115, 78, + -31, -125, -83, -58, -98, -99, -101, 13, 108, 74, 65, -81, 123, -27, 29, -61, -48, 89, 85, + 65, 51, 22, -110, 22, 27, 67, 65, -23, -45, 10, -4, 49, -44, 97, -4, 25, -15, 77, + 10, -56, -122, -111, 94, -46, 10, -73, 49, -57, -44, 30, 95, -115, -2, 30, 80, 104, 67, + 81, 90, 96, -83, 123, 42, -31, -59, -101, -111, -101, 55, -68, 25, -58, 5, -23, -28, 74, + -15, 76, -72, 31, -31, 27, 100, -24, -69, 81, 114, -112, -23, -100, 119, -83, -43, -55, 25, + -46, -39, -76, -117, -87, -18, 83, 63, 65, 90, -22, 67, -20, 45, -82, -32, 42, -118, 85, + -54, -23, 74, 52, 79, -20, 106, 72, -128, 0, -107, -33, 64, -58, 24, -122, 98, 80, 113, + -125, 48, -43, -113, 59, -49, 3, -24, -22, 5, -90, -101, -13, -49, -82, 93, 116, -5, 5, + 34, -13, -68, 72, 48, 111, 14, 23, 34, -80, -32, 52, 12, -89, -41, -30, -47, -13, -12, + 4, -15, 56, -82, -128, -81, 10, 19, 61, -39, 99, 63, -56, 100, 44, -99, 34, -121, 37, + 111, 49, -30, 86, -101, 40, -24, -84, 110, 84, -90, 50, -110, 63, -9, -24, -35, 30, 76, + -74, -125, -24, -84, 81, 56, -109, 38, -47, -17, -101, 90, -83, 124, -108, -30, -58, -123, -59, + -95, -114, 115, 114, 17, -70, 65, 95, 118, -25, 109, -103, -24, -58, -118, -100, -86, 75, 21, + 61, 16, 97, 4, 48, 21, -46, 104, -66, -82, 44, 68, -24, -24, 12, 46, 120, 93, -122, + -113, 126, -51, 6, -68, 72, 102, 2, 68, -18, -106, 94, 43, -2, 45, -73, -43, -97, 87, + 23, 64, 61, 1, 34, -67, 95, -79, -84, 27, 101, 3, 85, 45, 2, -91, -118, 27, -89, + 63, 41, -14, -26, 87, -2, 0, -128, 74, 81, -87, -88, -37, -76, 4, 65, -124, -60, 14, + 82, 124, 64, 40, 85, -18, 42, -15, 10, 87, 
67, 27, 1, 111, -52, 100, -65, -125, 54, + 26, 84, -112, 4, -108, 103, -79, -38, 109, 40, 126, -79, -53, 102, 39, -124, -24, -102, -7, + 58, 25, 31, 53, -119, -88, 59, -85, 72, 117, -54, 24, -8, -33, -108, 87, 66, 64, -34, + -73, -2, 38, -93, -19, 98, 98, 37, -76, -110, -111, 126, -56, -33, 42, 16, 120, 119, -69, + -85, -59, 9, -95, 2, -65, -59, -86, -2, -83, 117, 89, 98, 0, -54, -49, 54, -76, 5, + 73, 95, 3, -98, -86, 22, 9, 45, -121, 71, 66, 44, 12, 32, 105, -54, 4, 96, 19, + -106, 76, -24, -106, -21, 45, 91, 65, 121, 1, -61, -79, 57, -13, 58, -71, 46, 56, 79, + -36, 68, 119, 45, 113, -15, -69, -120, -8, 38, 71, 125, -112, 27, -63, -48, -115, 102, -65, + -23, -90, 104, -24, 7, 71, 89, 93, 3, 71, 75, -85, -101, -8, 25, 93, 83, 86, -117, + 118, -126, 51, 15, 25, 22, -66, 25, -57, 108, -6, -100, 78, 93, 91, 33, 27, -93, 39, + 118, -95, 100, -18, 74, -87, 18, 28, -43, -79, 59, -29, -77, 50, -56, -23, 95, 122, 58, + -34, 68, -70, 50, 7, 38, 77, 22, 126, 78, -124, -98, -68, 99, 39, -117, 68, -66, 37, + -66, -111, 24, -85, 95, -31, 13, 71, 109, 47, 110, -36, 98, 65, 116, -20, -54, -36, -104, + -77, 94, 0, -66, -29, -127, -29, 66, 18, -82, -82, 70, 12, -88, -25, -54, 86, -3, 115, + 58, -29, 97, -5, -82, 91, -17, 88, -126, 18, -128, -105, 120, 105, -20, -36, -92, -78, -44, + 57, 107, 71, 28, 98, -127, -45, 36, 5, -38, 99, 96, 117, 13, -73, 100, 56, -33, 12, + -42, -112, 69, -67, 37, -103, -120, -36, 44, 53, -1, 11, 81, -51, 12, 109, 61, 58, 68, + 73, -124, -81, -109, 50, -16, -87, -71, 15, 55, 28, 78, 12, 64, -82, -87, 18, -59, -62, + 88, 54, -93, -8, 50, 83, -22, 93, -66, 78, -73, 97, 54, -42, 108, -18, 20, -82, -38, + 36, 115, -14, -13, -9, 93, -41, 54, -28, 52, -102, 105, 78, 63, -109, -30, 99, -25, -101, + 22, 119, -54, -45, 4, 4, -122, -31, 87, 7, 68, -49, 72, -102, -9, 0, -115, -80, 67, + 85, 120, 126, -4, -108, -120, 119, 35, -67, 97, 26, -72, -114, 103, 72, 35, -126, -30, 66, + 119, -112, 58, 111, -78, -106, -111, -119, 80, -2, 48, 87, -32, -19, 110, 13, 106, 40, 91, + 28, -118, -64, -12, 2, -93, -113, 98, -103, 40, -37, -20, 40, -78, 53, -83, 104, -54, -60, + -51, 123, 1, 34, -47, -112, -120, 103, 11, 50, 36, -32, -117, -96, -99, 90, -99, 60, -55, + 13, -5, 86, -104, 80, 113, 79, -92, 22, -102, -81, -126, 115, 26, 38, 119, -70, 83, -119, + 6, 88, 8, 83, -14, -98, 123, 87, -91, 100, 117, -7, -2, -85, 17, -70, 35, 4, -62, + 110, -104, -111, -13, 95, -3, -111, -39, -7, 5, 11, -122, -93, 51, 99, -8, -39, -84, 99, + 34, 47, 91, 20, 30, 35, 96, 81, 33, 45, -100, 109, 114, -30, 118, -44, -4, 1, 104, + -18, -58, 100, 81, -112, -51, -105, -109, 117, -121, -50, 74, 91, -104, 91, -20, -104, 4, -127, + 55, 44, 26, 22, 103, -112, 56, -96, 73, 10, 72, 36, 1, 50, -54, -90, -106, -81, -106, + -55, -118, 65, -117, 6, 101, 107, -39, -14, -110, -1, 44, -87, 7, -46, 15, 70, -17, 109, + 47, -57, -99, 37, 2, 23, 61, 115, -101, -70, -99, 33, -75, -72, 27, -14, 99, 46, 60, + -125, 99, -75, -74, 25, -33, 93, -89, -126, -36, 10, -118, 13, -22, 0, 59, 57, -62, 96, + -121, -41, -74, -62, -127, 31, 75, -122, 75, -47, 123, -33, -61, 7, -19, 109, -48, 63, -124, + 23, -94, 5, -45, -13, -37, 56, 116, 105, -72, 40, 106, 76, 103, 7, -78, -66, 115, 51, + 42, 85, -1, 25, -55, 28, -113, 101, -15, 87, -48, 9, 3, -40, 41, -98, 36, -1, 10, + -56, -60, -5, 83, -97, -30, 100, -69, -99, 35, -113, 115, 15, 126, -106, 38, 90, 79, -42, + 44, 90, -5, -114, -45, -23, -122, -105, -47, -33, -128, 119, -60, 15, 90, -56, -38, -4, -61, + -42, -51, 15, -16, 35, -105, -39, 27, -123, -37, -85, -107, 110, 112, 105, -39, -124, 100, 44, + -47, 
126, 67, -21, 38, 11, -35, -81, 14, -47, -40, 92, -72, -49, -32, -74, 38, -50, -81, + 0, 122, 42, -96, -61, -113, 27, 78, 89, -34, -74, -99, 82, 66, -86, 104, -8, -60, 26, + -19, 87, 50, -63, -61, -36, 5, 114, -27, -69, 84, 87, 14, -108, 109, -61, 20, 7, 34, + 103, 45, 5, 72, 60, -79, 74, 108, 114, 60, -86, 88, 39, -89, 46, 126, 77, 50, 44, + 83, -110, -74, 86, -76, 48, -105, 111, -10, 21, 64, 14, -71, 112, -106, 79, 34, -78, 51, + -8, -120, -115, -41, -45, 31, -122, -101, 3, 33, -29, -90, -92, 112, -28, 83, -109, 68, -108, + -74, -8, 38, 79, 39, -114, 4, 50, 88, 35, 12, 25, 125, 10, -17, -51, -120, -106, 8, + 73, -102, -83, 92, -68, 11, 111, 64, 86, 125, -12, -15, 71, -109, 49, -1, 109, 50, -71, + -87, -53, -11, -119, -17, -73, -101, 3, -32, 11, 118, -5, 1, 22, 30, 125, 16, 80, 89, + 102, 89, -102, -103, 42, -24, -13, 53, -35, -66, 5, -66, 80, 18, -88, -37, 81, 108, 25, + -47, 51, -34, -53, -100, -126, 117, -93, -76, 16, 115, 85, -27, -124, -96, -57, -82, -87, 19, + 94, 93, 32, 70, 22, 15, 8, 16, 4, 33, -55, -97, -48, -53, -124, 71, -12, 49, 56, + 53, -87, 76, 87, 39, -25, 10, -111, -73, -72, -82, 101, -32, 15, -96, 111, 47, 59, 78, + 15, -7, -6, 8, 1, 119, -52, -3, -120, -49, -15, 88, 72, 82, -108, -45, 44, -112, 28, + -35, 109, 96, 70, -21, -24, 12, -117, 25, -48, 23, -32, 40, -104, 35, -77, -43, 121, -101, + -117, 118, -37, -53, 119, -125, -29, 88, 91, -84, 64, 100, 25, 20, 26, -25, 87, 96, -113, + -46, -73, -87, -85, 92, 0, -20, -33, -61, 120, 52, -34, 99, -28, 106, -93, 40, 124, 4, + -1, 69, 7, 95, 45, -57, -4, 6, -57, 59, 37, -14, -80, -108, -117, 90, 42, 31, -4, + 32, 100, -35, 30, -105, 22, -30, -79, -75, -126, -11, 10, 24, -33, 104, -80, 36, -80, -71, + -20, 112, 59, -67, 52, 57, -30, 61, 18, 54, -17, 39, 118, 41, 105, 115, -98, -55, -23, + 84, 55, -24, 74, -17, -3, -102, 67, 21, -86, -56, 12, 101, -108, 4, -96, 63, 28, 122, + -33, -7, -92, -14, 82, -15, 38, -72, 102, -85, -69, 67, 34, -113, 58, 4, -104, -122, -107, + 73, -79, -15, 40, -69, -80, 119, -1, -59, 114, 89, -82, -80, -64, -54, 59, 57, -53, 98, + -38, -18, -16, 126, 38, 39, 105, 121, -77, -114, 33, 119, -19, 72, -6, -50, -25, -104, -121, + 28, -40, -56, 59, 74, -15, -62, -113, -1, -54, 43, 108, -48, 58, -81, 29, 21, 2, 110, + -108, -122, -100, 12, 95, 0, -96, -59, -7, 16, 36, -20, 55, -51, -84, -79, 125, 100, -107, + 20, 66, -17, -73, 71, 21, 65, -62, 115, -3, -52, 7, -117, -39, 24, 75, 120, 77, -85, + -91, 48, -19, -91, -58, -70, 99, -84, -109, 20, 53, 63, 81, -112, -32, -60, 38, -90, -128, + -87, 77, -78, 34, -48, 6, 123, 63, -38, -4, 122, -97, 18, 2, 50, 77, -128, 108, 109, + 126, 28, -4, -126, 39, 77, 33, 32, 44, 121, -6, 85, 105, 118, -64, 86, 22, -105, -106, + 25, -63, 60, 56, 66, -119, 14, 105, -90, 119, 7, 124, -101, -78, 63, -123, -62, 51, -126, + -112, -1, 72, 85, -42, -82, -66, 117, -80, 114, 81, 34, 123, -10, -88, -22, -14, -86, 0, + -50, 119, -41, -9, -8, 24, 125, 39, -45, 125, 49, 0, 16, -107, 108, -25, 59, 125, -98, + -115, 103, -102, -20, 52, -111, 35, -71, -12, -46, 99, 89, -97, 60, -25, 95, 27, 15, -22, + -12, 24, 30, -28, -88, -122, 55, 33, 34, -25, 67, 75, -96, 109, -34, 24, 101, -22, 74, + -96, -69, 55, 13, 112, 120, -82, -71, 95, -75, -8, -82, -24, -76, -67, -50, -45, 70, 45, + 48, -49, -74, -73, -62, -101, -123, -122, 28, -9, -57, 107, -20, 76, 40, 9, 49, -7, -115, + -112, -54, -78, 21, -111, 103, 76, 117, -34, 110, 91, -74, -32, 105, 67, -45, -82, 117, -47, + 34, 13, -110, -22, 20, 18, 117, 35, -126, 32, -32, 70, -98, 44, -1, -21, -31, -5, 38, + 75, -57, -55, 9, -120, -102, 17, -25, -89, 84, 91, 
-95, -24, 63, 39, 58, -86, -116, 101, + -83, 94, -89, -87, 83, -4, 34, -56, 61, -95, 2, 12, 126, -87, 46, -91, 115, 106, -83, + 84, 119, -23, -70, -12, -64, -75, 68, 3, 102, -3, -76, 36, -88, 108, -68, -107, 33, -87, + -41, -81, 17, 126, 107, -114, 49, -75, -124, -92, 9, 110, -17, -110, -128, 41, -47, 67, 67, + 124, 117, 85, 99, -20, -120, 77, 36, 113, -102, -13, -89, -107, 28, 51, 116, 69, -120, -36, + -67, -3, -92, 89, -128, 66, 55, -26, -16, 81, -62, 121, -1, 122, -66, 32, 115, -88, 14, + 119, -77, -101, 53, 39, 11, -44, -118, 37, 99, 61, 97, 100, 40, 75, 77, 89, -70, -78, + -53, 62, 94, -123, 91, -119, -61, -9, -17, -41, 56, 78, -45, 46, -77, 74, -107, 4, 106, + 68, -79, 96, -102, 88, 38, -71, -31, -10, 55, -70, -66, -9, 115, 39, -58, 77, -35, -95, + -60, -37, -74, -102, 76, -99, 61, -88, -76, 43, -42, 106, 116, 17, 122, 56, -75, -87, 9, + 6, 124, -11, 1, -9, -51, 111, 93, -64, 49, -58, -114, 27, 6, 70, -114, 23, 12, 58, + 56, 57, 31, 59, 98, -116, 103, -96, -53, -104, -40, -104, -80, 63, 11, -102, 105, 11, 18, + -1, -72, 125, -102, -78, -70, -3, -12, 17, -66, 2, 51, -111, 59, -80, -101, -69, -36, 57, + 59, 33, 18, 109, -92, 90, 12, -91, -114, -71, -28, 124, -128, 98, -18, 52, 93, -49, 41, + -124, -80, -48, -105, -85, 12, 42, -47, 65, 58, -62, 30, 39, 69, 125, 17, -85, 95, 0, + -35, -48, -35, -26, 112, 126, -94, -116, -107, 110, -86, -57, 77, -32, -88, 102, 67, -123, 69, + 33, -35, 95, -83, 38, 77, 13, -25, 120, 11, 86, 119, -11, -43, -122, 51, 105, -10, 9, + -48, -79, 33, 75, -69, 36, -34, -76, 118, 64, 36, -82, -64, 82, 113, -84, -30, 33, -91, + 83, -113, 102, -115, 81, 49, -26, -100, 102, -74, 46, 11, 59, -34, -87, -29, -94, -58, -122, + 84, -103, 37, 22, 9, 126, 27, -2, 46, -79, -93, -59, -65, 102, 10, 25, 126, 1, -92, + 68, -108, 123, 74, -106, -12, 104, 88, -40, -35, 54, 65, -109, 25, 67, -62, 100, -42, 97, + 13, -66, -111, -48, 76, -60, -121, -86, -58, -45, 66, -93, 64, 20, 77, 98, 13, -124, 45, + -15, 47, -113, 42, -93, 12, -78, -64, -128, 35, 55, -26, -126, 111, -109, 5, -32, 66, 46, + -101, 70, -34, -47, -4, -16, 26, 99, 108, 60, -78, -79, -89, -125, 106, 120, -99, -118, 112, + 50, -65, 34, -72, -54, -38, -30, 98, 51, -97, -52, 124, -109, -2, 120, -83, -56, -21, -14, + -10, 108, -39, 104, -103, -28, 36, -94, 59, 55, 90, -78, -72, -76, 27, 59, 73, 89, -12, + 46, -89, 6, 108, 107, -126, 1, -79, -83, -25, -34, -18, 42, 17, -65, 73, 1, -41, -94, + -71, -107, 3, -60, -111, -100, -27, -111, -77, -92, -117, 71, -54, 119, 66, 66, -10, 71, 88, + -39, 111, -66, -32, -35, 92, -116, 73, 28, 55, 61, -97, -90, 96, 62, 100, 112, -87, -60, + -73, -26, -106, 65, 72, 63, 30, -118, -58, 33, 93, -92, 71, 125, -121, -20, 8, 69, -39, + 122, -81, -101, 120, -30, -115, -106, -127, 126, 7, -85, -3, 110, 5, 40, -5, -89, -16, -68, + -12, -121, -22, -29, 106, -3, 11, 57, 64, -109, -118, -97, 51, 107, -39, 123, 100, 38, 22, + 113, -4, -89, 70, 86, 125, -122, 35, 65, -54, 41, 92, 78, 59, 122, -27, -21, 106, 43, + -92, -51, -109, 5, 44, 110, -47, -13, 117, -50, -12, 124, -122, 50, -83, -95, 123, 11, 3, + -29, -112, -30, 24, 80, -63, 56, 112, -75, 54, -94, -25, 6, 18, -124, 100, -109, 75, -33, + -76, -119, 21, 56, 9, -87, -25, -126, 46, -38, 112, 104, 46, -95, -104, -18, -8, -128, -84, + -37, -26, 44, -109, -14, 40, 66, -110, -25, -55, 70, 115, -95, 103, 85, -47, -19, 29, -73, + -114, 99, 28, -3, 2, 57, -99, 5, 121, -82, -34, -123, -97, -21, -104, 27, 18, 100, -36, + -53, 0, 123, -49, -99, 39, 5, -126, 46, -77, -59, 7, -114, -106, 120, 6, 66, -116, -35, + -120, -38, -115, -123, -110, -23, -8, 19, 56, 28, 
79, -33, -90, -37, 45, 58, -19, -107, -39, + 50, -111, -14, -127, 42, 125, 40, -50, -34, 83, 87, 58, 58, -53, -20, -72, -103, 104, 64, + 118, 11, -124, 36, 115, 96, -123, -18, -122, 120, -70, -65, -45, -37, -116, -73, -124, 9, 38, + 116, -35, -109, -89, 2, -44, -81, -63, -90, 14, 106, 26, -19, 111, -11, 99, -20, 66, -89, + -114, 100, 114, -15, 83, 2, 113, -42, -37, 10, 33, -73, 34, 60, -43, 91, -13, -110, 49, + 102, 31, -88, 96, 124, -77, -73, 61, -5, 54, -2, 105, -105, 36, 75, -82, 72, 111, -123, + -72, 92, -53, -16, 85, -50, -13, 6, 0, 38, 76, 126, 36, -47, 108, -75, -79, 2, -112, + -118, 69, -117, 92, 13, -126, 76, -126, -2, 60, -18, -113, -39, 25, 61, -103, -80, 102, -100, + -69, -86, 36, 24, -100, 110, -110, 103, -25, 60, -35, -61, -59, 91, -78, 15, -102, 82, 66, + 49, 81, 50, -91, 119, 117, 64, -5, -9, 70, 11, -17, -82, 89, -92, -48, -94, -10, -126, + 108, 69, -89, 22, 125, 100, 124, 95, -28, 50, -71, -79, 110, -28, 1, 61, 120, -25, -51, + 66, 93, 19, 43, 101, 21, 101, 106, -44, -5, -24, 26, -92, 92, 123, -46, -117, -20, 122, + 116, 77, 37, -53, -91, -8, 87, 75, -68, -69, 91, 48, 29, -125, -26, -52, -43, -86, 119, + -38, 19, -5, -104, 121, 56, 82, 34, -115, 94, 7, -20, -24, -48, -64, -48, -18, -92, 18, + 114, -70, 72, 54, -36, -31, 70, 31, -101, -32, 29, -4, -84, -26, 48, 28, -127, 34, -10, + -80, -73, 125, -119, -22, 109, -96, 109, -110, 115, 8, -54, 98, 84, -70, 99, -31, -9, 111, + 85, -57, 72, 85, -103, -106, 33, -90, 115, 89, 23, -125, -44, 118, 83, -83, -60, -72, -89, + -14, 13, -109, 118, 123, -40, -2, 1, -14, -36, -44, -29, 34, -39, -89, 79, -110, 32, 49, + -56, -52, -24, -68, -66, 117, -93, -77, -49, -102, 21, 23, -71, -75, 68, 75, -1, -79, -57, + -123, 16, -12, -77, 55, -32, -110, -117, -85, -117, -74, 41, 88, 125, 47, 17, 79, 107, 61, + 118, -47, -113, 2, -116, -56, -67, 107, 84, 30, -66, 16, 17, -57, 15, -128, 104, 121, -91, + -87, -6, -28, -103, 44, -77, -31, -22, -40, -30, 109, 17, -67, -116, -99, 2, 39, -82, -122, + 122, 113, -51, 56, 82, -22, -124, -4, -127, 28, -46, -77, -97, -100, -84, 42, -28, 4, -15, + -7, -64, 82, -101, -62, 45, -90, 117, -127, -19, -49, -107, 112, -48, -8, -116, 14, 64, -120, + 27, 0, 105, 114, 90, -112, -62, 59, -7, -54, 45, -127, 6, -99, 80, 65, 63, -79, 122, + 66, -40, 98, -19, -105, 65, -34, 13, 1, 10, -18, 8, -11, 44, -104, 124, -117, 83, -102, + -125, -64, 68, -43, 106, 87, 22, 84, 122, -39, 51, 34, 40, -66, -22, -9, -117, 86, -57, + 88, 105, 7, -108, -72, 107, -6, -22, 79, 43, 95, -102, 61, -30, -101, -98, -65, -30, 80, + 121, 66, -89, 126, 16, 17, -62, 81, -51, 37, 70, 89, 83, -110, -56, -121, 71, -117, -105, + 70, 80, -57, -50, -98, 3, -15, 14, -119, 102, -99, -2, -79, 40, 60, 24, 4, 65, 14, + -116, 72, -81, 28, 4, -93, -25, 78, -33, -91, -65, 121, 53, 0, 11, -23, -117, -90, -67, + -112, 117, -57, 91, 81, -83, -23, -117, 99, 116, 27, -120, 60, -104, -47, 24, 71, 125, 71, + 90, -64, -128, 57, 122, -29, 115, -55, 40, -20, -72, -48, -3, 95, 90, -125, -67, -7, 117, + 7, -56, 29, 115, -97, -47, -96, -7, -35, -55, -9, -9, 83, -85, -74, 104, 45, -43, -104, + -97, -15, -8, 42, 85, 2, -46, 62, 72, 82, 24, 87, 68, -57, -57, -60, 110, 53, -60, + -74, 9, 47, -46, -8, 59, -39, 55, -26, -24, -68, -126, -47, 55, -15, -25, 84, -124, 57, + 107, -127, -68, 85, -95, -108, -35, 35, 24, 68, -108, 2, -118, -73, 66, -91, -118, -124, 44, + -22, -2, -57, 2, -112, -51, 37, 97, -14, 27, 123, -31, -106, -8, -80, -66, -55, -62, 126, + 106, -11, -48, 72, -20, -79, -35, -39, 76, 75, 97, -119, 63, 13, -71, -94, -6, -117, -51, + -35, -68, 93, -91, -78, 105, 37, -82, 
-91, -14, -5, -39, 84, -65, 91, 44, 119, 100, 88, + 117, -82, -20, 91, 56, -3, -99, 16, 1, 104, 47, -110, 117, -95, -109, 110, 99, 117, 69, + 52, 47, -37, -71, 11, -2, -125, 11, 3, 94, -104, -40, -75, 74, -53, -62, -66, 19, -114, + -59, -44, -70, 78, 79, 69, 96, -71, -21, -30, 92, 48, 56, 90, 20, 109, -22, -92, 105, + -78, -15, -51, -72, 37, -64, -1, 120, -126, 93, -87, 26, -9, -35, -11, 115, -20, -118, -4, + 73, -108, 3, 87, 99, -108, 95, -110, 84, -11, 63, -36, -121, 71, 51, -27, -14, -127, 75, + 79, 75, -98, 62, -4, -22, 87, 40, -6, 42, -80, -20, -107, -20, 61, -5, 29, -125, 24, + 60, -123, 12, 66, 73, -102, 88, -90, -64, -27, 37, 6, 121, -26, 31, 23, -28, -81, -78, + 90, -24, 9, 52, 104, -8, 34, 11, 70, 119, -101, -35, -126, 100, -122, -59, -101, -116, 66, + 32, 100, 56, -21, 41, 120, 89, -15, 0, 57, 33, 66, -44, -100, -51, -97, 54, 95, -44, + -17, 76, -33, 76, 29, -83, 14, -1, 67, 77, 66, 93, -4, -79, 91, -29, -54, 61, -26, + -24, -118, 43, -82, -75, -15, 82, 14, -33, 71, -67, 17, 101, -66, -51, -37, -26, 25, -24, + -31, -24, 120, 87, -41, 20, 25, -128, -59, -4, -30, 0, 84, -30, -40, 31, 98, -81, 62, + -118, -45, -59, -98, -53, 91, -122, -69, -39, 21, 35, -25, -2, -9, -34, -103, -124, -20, -101, + 108, 5, -111, 37, -15, -115, 119, 124, -107, -66, 26, 107, 29, -86, -32, -126, -72, -126, -74, + 37, 5, 115, 20, -1, -51, 98, 85, 58, 76, -6, 58, -2, 12, 22, 57, 63, -27, 107, + 43, 90, 24, 99, -2, 32, 10, -59, -43, 49, -75, -127, 12, 100, 35, -59, -25, 56, 32, + 76, 72, -120, 52, 44, -95, 110, 0, 75, 53, 66, 23, -2, -98, -94, -66, 26, -59, -72, + -69, -57, 117, -108, -94, 116, 3, 60, 105, -35, 53, 69, 75, 122, 65, 72, -46, 85, -31, + -79, -19, -34, 107, -100, -4, 120, 108, 70, -123, 102, -24, 50, 38, 40, 89, -11, -84, -110, + -17, 96, -36, 101, 33, -48, 86, 115, -90, -117, -29, 15, -6, 76, -121, -25, -105, 7, -111, + 32, -86, -77, 25, 10, 23, -66, -105, -103, -87, 1, -62, -50, 47, -7, 122, -69, -57, -99, + 43, -120, 74, -92, -111, -59, -44, -45, -54, -74, -13, 23, -106, 97, -2, -63, -127, 95, -12, + -111, -91, -104, 110, 13, -7, -92, -45, 70, -58, -56, -23, 52, -29, -108, -7, -68, -107, -35, + 64, -15, -22, -60, -68, -67, -12, 91, -60, -113, 25, -9, 66, 116, 56, 97, -128, 55, 72, + -123, 101, -120, -56, 34, 48, 46, 7, 25, -55, -90, -60, 88, -4, -96, -74, 100, -87, -14, + 71, 116, -7, 75, -2, -36, 98, -99, -45, 83, 75, -10, 32, -25, -48, -15, 55, -94, 75, + 83, -17, -40, 59, -97, -30, 52, -22, 75, 59, 0, 0, -41, 59, -95, -2, -33, 65, -55, + -82, -85, -49, -85, 105, 38, 99, -30, -90, 41, -95, -119, -100, 44, 62, 88, 59, 32, 88, + -108, -84, 56, -107, -43, -75, 117, -112, -119, -51, 120, -93, 87, -92, 11, 103, -41, 48, -23, + 114, 113, 108, 76, -109, 96, 2, -105, 79, 29, -79, -82, 26, 96, 16, -37, -53, 99, 111, + -79, 35, -39, -85, 42, 61, -60, -72, 35, -23, -18, 47, 50, 111, -77, 10, -104, 115, -55, + 12, 53, 126, -83, -77, 114, 84, -127, 118, 38, 50, -126, 84, -3, -94, 103, -115, 14, -56, + 15, -88, 51, 114, 91, 93, 5, 2, 94, -36, 126, -121, 32, 29, 49, -37, -112, -56, -117, + -98, 25, 10, -127, 87, -40, -123, -122, -51, 105, 45, -71, -24, 106, -84, 114, 51, 21, -116, + -39, 61, 74, -80, -114, 21, 37, -94, -93, -109, -106, 118, -44, 6, -19, -106, 62, -98, 96, + -66, 63, 4, 67, -67, 5, -114, 106, 1, -86, 91, -105, 84, -45, -74, 15, 4, -71, -88, + -111, 113, 49, -110, 33, -2, 111, 93, 65, 38, -25, 69, -40, 15, 69, -10, 56, 16, -33, + -45, 104, 78, 87, 16, 109, 51, -43, -117, 80, 58, 19, -68, 6, -2, 125, -17, -51, 88, + 17, -111, -56, 105, -102, -87, 49, 46, 3, -105, -76, 1, 20, 79, 80, 79, 
-19, 125, -80, + -17, 85, 48, -128, 64, 11, 55, -76, -105, -84, -53, -126, 92, 95, 93, -32, -40, 1, -27, + -20, -22, 92, 25, -18, -109, -11, -36, -8, 24, 81, 112, -112, 65, -13, 69, 73, -112, 98, + -124, 55, 76, -65, 95, 78, 55, -98, -89, -32, -120, -40, -24, 124, 9, -6, 13, -46, 70, + 40, 13, 88, -25, 19, -88, -89, -27, 75, 67, 88, -16, -95, -2, -123, 122, 27, -108, 9, + 28, -24, 113, -78, 8, -30, -66, -16, 29, 37, 79, 21, -61, -65, 79, -38, -13, -88, 111, + -109, -107, -20, -114, -60, 106, 35, 118, -125, 125, 54, 124, -123, -73, -116, 18, -8, -5, -45, + 123, -113, -121, -9, -38, -120, 116, -68, -20, 63, -96, 42, -78, 64, -81, 56, -19, -50, -26, + -51, 52, -105, -28, -128, -24, 38, 19, 28, 112, 114, 121, -63, -73, -79, -33, 106, 124, 22, + -6, -24, -107, 7, -23, 106, -75, 105, -94, 55, 62, -6, -120, 126, 42, -65, -119, -19, 25, + 125, -22, -32, -10, -66, -106, -14, 50, -34, 19, -68, 10, -29, 91, 101, -17, -85, -115, -51, + 13, 2, -127, 25, 29, 36, 90, -97, -89, 103, -101, 22, -115, 119, 8, -111, 47, 24, -18, + -68, -65, -83, -32, 120, -107, 108, 32, 92, -4, -17, -39, 26, 51, 23, -67, 44, 37, 11, + -86, -37, 62, -31, 98, 15, 41, 24, 46, -110, 110, 50, 76, 81, 40, -33, -95, -2, 39, + -24, -5, -44, -62, 14, 91, 96, -28, 90, -81, 45, -17, 41, 24, -65, 78, -99, -89, -13, + -108, 3, -13, 15, -118, 61, -58, -112, -26, 124, 37, 61, 28, 66, -46, 58, 125, -112, 55, + 56, -52, -20, -25, 17, 84, -80, -25, -99, -95, -67, 71, 66, 51, -4, -104, -82, -88, -31, + 91, 95, 87, 52, -86, -115, -107, 0, -49, 124, 31, 124, -89, 47, 110, -90, 82, 81, 63, + 18, 110, 54, 75, 126, 1, 12, 26, -100, 16, 118, -72, -10, -97, 34, -70, 48, -85, -84, + 103, 78, 99, 21, 31, 5, 72, 98, 98, 84, -91, 20, 88, -100, -98, -43, -116, 0, -111, + -72, 75, 46, -50, 18, -38, 52, -98, 78, 29, -127, 116, -125, 59, -26, 39, -29, 38, -35, + -69, 52, 67, -45, 8, -83, -84, 124, -43, 12, 104, 91, 115, 91, -128, -43, -112, -115, -15, + 96, -30, 9, 2, 63, -39, -3, 20, -116, 29, 80, -51, 90, 25, 10, 126, -106, -18, 15, + -33, 104, 2, -113, 26, -6, -80, -86, 78, 113, -113, -51, -55, -60, -109, -109, -49, -92, -30, + -28, -56, -101, 125, 2, -101, -81, 4, -69, 60, 101, 83, -8, -90, 54, 93, 113, 39, 116, + 38, -59, -24, -60, -16, -123, 123, -51, 111, 0, -119, -64, -79, 23, -19, -122, -58, 5, 119, + 118, -20, -103, -1, 88, 67, -90, 53, -47, -57, -85, 103, 106, 53, 88, 42, -24, 24, 98, + -64, -124, -87, 7, 57, -38, -112, 1, -99, 88, 21, 1, -95, -40, 44, 36, 13, -6, 125, + -66, -62, -97, -16, -52, 70, 18, -83, 100, -5, -101, 54, -60, 61, 49, 109, -2, -89, -12, + 40, -83, -71, -56, 125, -90, 119, -89, -108, -80, -56, 7, -58, 26, 23, -64, -91, -34, -44, + 32, 71, 116, 101, -36, -69, -83, -121, 113, 37, -6, 36, -46, -93, -70, 95, -79, -48, 11, + 26, 1, -23, 100, 15, 21, 73, 101, 124, 70, -119, -71, 88, 38, 48, -57, 88, -121, -56, + -57, -25, 108, 71, 54, -10, -39, 10, 8, -104, -50, -7, 103, -85, 122, -120, 36, 116, 114, + -21, 112, -120, 9, -10, -126, -79, -45, -37, -86, 121, -63, 90, 67, 56, 20, 25, -44, 123, + 93, -104, -104, -80, -74, 65, -120, -89, 94, -83, 30, 51, 43, 53, 58, 24, -90, 95, -104, + -39, -48, 74, 16, 20, 119, -114, -21, -102, -5, 20, -88, 125, 28, 54, -93, -68, -69, 32, + 16, 52, -39, 7, -112, -69, 87, 98, 109, -31, 32, 85, -117, 110, 85, 40, -32, 95, -52, + -19, -111, 0, -74, -102, 96, 19, 106, -46, 113, 55, -85, -109, -6, -6, -29, 120, 38, 61, + 102, 12, 15, -30, 116, 7, -57, -68, -84, -110, -50, 93, 69, -53, 97, 113, 78, 114, -49, + -79, -116, -124, -12, -2, 68, 1, 97, -60, -128, 42, -40, 11, 74, 50, -108, 75, 87, -75, + 18, 108, 
12, -124, 9, -27, -82, 9, 0, -113, 12, -61, -125, 17, -49, 54, -115, 31, 112, + 79, 52, 123, 15, 51, -56, -14, 26, -45, -57, 17, 53, 101, 113, 2, 90, -107, -27, -64, + 24, -124, -68, -84, -50, 27, 54, -28, 37, -72, 11, 113, -52, -78, -94, 122, -98, 89, -70, + 4, 120, -84, 38, 105, 105, -26, 80, -49, 32, -2, 109, -43, 86, -29, -29, -127, -83, 80, + 27, -112, -43, 122, -53, -30, 111, -44, 107, 32, -57, 34, 73, 104, 20, -69, 43, 24, 12, + 38, 5, 55, 96, 109, 30, -70, -77, 38, -102, 117, -99, 111, -108, -37, 87, 76, 80, -73, + 2, -12, 75, -106, 116, 24, -113, -19, -34, 44, 36, -54, 70, 68, -61, 35, -38, 92, -51, + 30, -99, 106, -9, 106, -107, 112, 121, 20, -63, -36, -60, -28, 104, 84, 109, -102, 105, -80, + -21, 105, -6, -57, 68, -10, 83, 83, 59, 50, -102, -77, -92, -69, -97, 5, -53, 76, -27, + 114, -57, -50, 120, 119, -17, -114, -11, -96, -89, -116, 21, 80, -80, 103, -12, -15, 9, -2, + -105, 44, -87, -86, 68, 19, 9, 49, 61, 95, -25, 60, 112, -26, -123, -1, 35, 58, 78, + 2, 28, -53, -58, -51, 12, -113, 65, 35, -109, -128, 43, -97, 83, 113, -83, 48, 38, 14, + -29, -98, 50, -22, -1, 0, 7, -19, 123, 50, -3, -9, 113, -21, -91, 64, -14, -8, -81, + -53, -44, -12, 29, 37, -7, -121, -71, 24, -85, 104, 31, -53, 47, 64, 77, 22, 76, 70, + 125, 28, -126, 45, -66, 33, 66, 54, -68, -59, -41, 21, 22, -127, -119, 68, -103, -16, -26, + 8, 63, -119, 99, -65, -83, -70, -43, 68, -37, 33, -119, -42, -96, -117, -126, 54, -122, -41, + -28, -15, -118, 15, -88, 114, 71, -8, -27, -58, -61, 50, -106, 122, 118, -109, 83, -6, -60, + -113, 44, 120, 96, -79, -72, 122, 39, 36, -5, 121, -92, -104, 26, -90, -22, 104, -72, -58, + -5, 113, -121, -21, -63, 40, -123, -24, 2, -5, 96, -6, 14, 59, -106, -102, -26, 25, -30, + 81, -43, -117, -69, -121, 112, -3, -75, 31, 27, 14, -16, 84, 122, -9, -76, 74, -110, -98, + 121, -121, 44, -120, -4, -77, 119, -99, 23, 37, 40, 114, -48, 67, -71, -47, 45, 5, -3, + 107, 15, -82, -95, 80, -54, -38, 24, 38, 68, 19, 73, -83, 71, 29, 92, 22, 120, -98, + 43, -59, -53, -33, -45, -125, -108, 34, 54, -46, -80, -110, -34, 38, -25, -58, -52, -49, -104, + -32, -29, -27, -77, -35, 104, 38, 118, 13, -23, 102, 5, -98, 42, 90, -3, -9, -85, 120, + 110, -73, -6, 124, 11, -62, 108, 60, -86, 19, 114, -64, 2, -78, 58, 104, 50, -15, 25, + -72, -46, 42, -33, -48, -122, -25, -8, 122, 13, -14, 69, 100, 14, -104, -1, 72, -84, -4, + 66, -128, 120, -83, 103, 4, -25, -72, 100, -106, 98, -113, -56, 5, -114, 80, 30, -110, 33, + -50, 101, 84, 111, 57, -81, 79, 13, -113, 4, 17, 84, 31, 106, 45, -12, 50, 30, 10, + -35, 48, -23, -3, 106, 110, 116, 122, -21, 8, 1, 31, -73, 116, 77, 17, 37, 20, 83, + 13, 4, -16, -71, -62, -11, -104, 121, -62, 100, -117, 112, 96, 31, -109, -119, 14, -103, 39, + 115, -52, 86, -116, 109, -63, -58, 25, 86, -100, -50, 46, -15, -7, 76, -31, 74, -98, 83, + -105, -61, -122, -102, -94, 94, 36, -125, -36, 111, 80, 76, -104, 74, -88, 98, -119, -94, 40, + -89, 28, 24, 79, -17, 61, -11, 72, -127, -117, 76, 72, -105, 26, 49, 95, 14, 120, 49, + -119, -103, 25, -62, 93, 74, -47, -119, 119, -72, 62, -81, -61, 2, 9, -73, 5, 110, 52, + 70, -120, -1, -21, -66, 21, -24, -9, 102, -118, -60, -61, 95, -110, -72, -113, -23, -63, 119, + -100, -118, 101, 37, 97, 26, 4, -84, 52, -8, -61, 8, 48, -78, -6, -2, 84, -16, -69, + 73, 3, 40, -117, 60, -64, 104, -12, 46, 39, -52, -42, 57, 22, -95, -16, -116, 57, 24, + -88, -36, -82, 94, 72, -68, 31, -78, -48, 54, 31, -35, 44, -16, -41, 108, 85, 25, -119, + 23, 48, -53, 68, -85, -58, 59, -72, -5, 92, 105, 2, -104, 113, 71, -16, -120, -24, 4, + -11, 65, -102, -62, -112, 61, 5, 55, -53, 
-79, -2, -30, -38, -121, 95, -49, -7, 53, -24, + 99, -11, 48, 52, -7, 12, -116, 65, 40, 84, -128, -117, 94, -63, -5, 31, 107, 9, -120, + -27, -89, -80, -113, 125, -85, 107, 34, -11, -116, 65, 82, -95, 75, -125, -73, -25, 35, -54, + 30, -36, -71, 75, -114, -80, 84, -20, -7, 71, 31, -101, 32, -63, 38, -1, 65, 66, 32, + 57, -94, 95, 46, -36, 39, -122, 90, 61, -127, -7, -3, 43, -47, 42, 108, -106, 26, 126, + -123, -84, 77, 35, -91, 67, -33, 39, 52, -70, -98, -124, -7, -21, -56, -65, -22, -100, -24, + -127, 48, -115, -68, 120, -51, -116, -73, 7, -104, -75, 16, -82, 77, 62, 126, 57, -67, -60, + 20, 123, 35, -62, 111, 53, 8, -82, -110, -112, -19, 83, 55, -84, 61, 6, 91, -31, 49, + -87, -98, -117, -21, -36, 109, -21, -97, 47, -118, -73, 110, 96, -88, -80, 10, -29, -110, 1, + -39, 103, 83, 17, 100, 80, -31, 108, 29, -13, 107, 108, 65, -45, 111, -66, -83, -59, 54, + 39, -102, 5, -72, 95, -60, 63, 73, -41, -12, 95, -4, 31, 93, 28, -18, 5, -4, 64, + -52, -62, -110, 9, 89, 90, 117, -75, 107, 93, -10, -50, 104, -117, -92, -60, 20, 111, 60, + 95, -13, 2, -56, 94, 77, 100, -47, 95, -21, -26, -65, -126, -63, 78, -87, -74, 93, -31, + -95, 4, -33, 100, -19, 95, -58, -118, 50, 23, 29, -101, -61, -71, -69, -58, -73, 99, -52, + 52, -64, 126, -98, -3, -40, 4, 124, -10, 5, 48, 49, -15, -50, -1, 7, 85, -100, 19, + 60, -72, -127, 39, -41, 93, -117, -5, 61, 104, -57, 62, -42, -120, -112, 58, -66, -23, 78, + 46, 104, 84, -76, -97, -105, -107, 93, 119, -125, -14, 12, 17, 37, 28, 36, 26, 57, -101, + -9, 42, -126, -40, -32, -7, -90, 104, -121, -57}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_mult_data.h index 22373d8a..c53725f5 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_mult_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_mult_data.h @@ -1,34 +1,34 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
#pragma once #include const int32_t depthwise_eq_in_out_ch_output_mult[250] = { - 1196229277, 1570084629, 1350637376, 2097324910, 1218223248, 1099478129, 2087688820, 1688302732, 1413407501, - 2122208013, 1171216285, 1131846592, 1145124342, 1909798501, 1790458271, 1736918560, 1895555244, 1451187744, - 1846687753, 1217366839, 1183971365, 2055369545, 2132097940, 1773619852, 1131483513, 1801398462, 1464563580, - 1811018615, 1107455152, 1201205825, 1329472431, 1372375192, 1760669830, 1441877788, 1812356763, 1191829512, - 1172810443, 1089646590, 1214611362, 1121414799, 1266526995, 1183194497, 1215143811, 1992060886, 1147000662, - 1121906173, 1957638619, 1834090523, 1923785892, 2065676448, 2074742130, 1991993949, 1145713657, 1125416205, - 2074563053, 1960311584, 1222978341, 1177127117, 2052437962, 2086766054, 1634372270, 1182223412, 1224596695, - 1169413493, 1107464498, 1182125253, 1144308934, 1224518023, 1110865142, 1159631939, 1128491513, 2083224074, - 1185973300, 1962234919, 1131410636, 1148706961, 1093962467, 1074098798, 1477634581, 1218235418, 1151514378, - 1175236743, 2052234835, 1983956364, 1153859543, 1859995204, 1748018124, 2077968262, 1495560499, 2087799801, - 1660768976, 1133282320, 1802374400, 2125049044, 1680837439, 1837419233, 1867936151, 1337791236, 2107197016, - 1943500404, 1210246949, 1163633273, 1177130884, 1723776277, 2034554495, 1132181636, 2100386019, 1715084975, - 1135832278, 2037429428, 1153409389, 1503971595, 1604816947, 1336586235, 1822125133, 1915954912, 1281817039, - 1177886672, 1530199367, 1148680302, 1659725667, 1148093667, 1177043592, 1823703499, 1892506305, 1206163321, - 1153944228, 1199924326, 1441998041, 2136293636, 1465041408, 1844659811, 1122903989, 1443218834, 1688940077, - 1108421891, 1214973065, 2081353767, 1474724585, 1215791225, 2037141688, 1223140321, 1704079876, 1130421368, - 2101261698, 1314606028, 1638510736, 1862843333, 1186144263, 1180191124, 1187949301, 1107573161, 1477267734, - 2032968884, 1698445047, 1971214106, 1194823974, 2077636478, 1592586419, 1115486580, 1118389911, 1231948399, - 1189086568, 1878483707, 1213302118, 2047567541, 1859362640, 1764170807, 1109301772, 1127191904, 1122643270, - 1116365809, 2125568888, 1196590472, 1137828708, 1202533687, 1108615601, 1986487053, 1274175277, 1224274473, - 1591087594, 1897092029, 1181753409, 1266835453, 1085761163, 2115652012, 2042278696, 1974841566, 1719595939, - 1378184606, 1838912842, 1725185129, 1918346222, 1847855373, 1184145081, 2128547848, 1916999815, 1149120750, - 1912498849, 2050880024, 1934307369, 1767051680, 2012119207, 1120424517, 1157614719, 1201128964, 2040655995, - 1181082089, 1637558559, 1104708369, 1646441542, 1442372422, 1831703849, 1197269181, 1316723942, 1209994416, - 2053238736, 1104995529, 1119775364, 1465880141, 2071617996, 1124134344, 1562828403, 1105315433, 1106751596, - 1760850790, 1913419876, 1675152915, 1181186622, 1883409473, 1621760407, 1551884011, 1160169749, 1150566982, - 1101807719, 1432431640, 1107489852, 1216949283, 1091078189, 1186227209, 1770790412, 1164084007, 1825876904, - 1153533627, 2074783712, 1730018604, 2061414540, 1211126177, 1870103181, 1893524839}; + 1130530885, 1148337893, 1087101421, 1719965109, 1103769978, 1093510810, 1700242542, 1418919773, 1169254541, + 1123734652, 1554231298, 2065493597, 1121529049, 1148578291, 1202662600, 1774627582, 1082170102, 1874550316, + 1967127499, 1638609609, 1161632196, 1747827001, 1743086006, 2128194963, 1968564471, 1838740420, 1195789154, + 1447328988, 1737384846, 1205467023, 1272788010, 1368279310, 1357138684, 1358448014, 
1140370372, 2108283355, + 1159009263, 1152371301, 1655859251, 1678443653, 1095356635, 2049533625, 1840026386, 1752390635, 1080829076, + 1203776192, 1281561842, 1997110303, 2086590169, 1680171238, 2091786602, 2138093499, 1089663809, 1129785688, + 2040526447, 1785859523, 1716349815, 1180885222, 1122730538, 1857170325, 1084036869, 1225229906, 1184607216, + 2079166552, 1173133809, 2053515177, 1166441143, 1079160682, 2012777667, 2128307505, 1562545704, 1149594442, + 1724778472, 1369261841, 1610295699, 1107410486, 1952694390, 1173782989, 1190471348, 1198274473, 1551788147, + 1359894245, 1097539089, 1810999189, 1155973417, 1309281276, 1172317597, 1697154342, 1560887068, 1928764617, + 1304300240, 1313158550, 1151552524, 1795035371, 1100230330, 1133038356, 1216275153, 1193629138, 2064821196, + 1651570133, 1366369663, 1743012498, 1920815046, 1111676242, 1130521910, 1107722469, 1076571654, 2008799248, + 1199232216, 1917330101, 1726054609, 1417182928, 1922151442, 2125953960, 1451797176, 1871199709, 2124125372, + 1197445796, 1133900296, 1591733735, 1568947044, 2097710138, 1589056241, 2073901598, 1504157105, 1307185010, + 1788111353, 1551765781, 1202285799, 1942898138, 1191315197, 1201087444, 2143214999, 1900969538, 2102186447, + 2082457469, 2020846903, 1109216851, 1084987490, 1187130713, 1187890013, 1954595916, 1148253986, 1132547589, + 2128017603, 1166653619, 1182499411, 1745922055, 1319440368, 2055235498, 1941109296, 1199518414, 1993383038, + 2122929581, 1134090976, 2115934477, 2084314264, 1562845577, 2073719252, 1513973443, 1948622947, 1209823810, + 1136845753, 2122920464, 1667084782, 1809245107, 1544941769, 1158635027, 2041185742, 1139182844, 1742426711, + 1078036191, 1074141752, 1236613992, 2027327449, 1343921879, 1165948738, 1864732268, 1084231894, 1727815529, + 1274513744, 2043619775, 1100848311, 1715373409, 1599602390, 2063115550, 1262502559, 1883737133, 2006888605, + 1920628426, 1404089634, 1601883709, 2056025711, 1152923609, 1167386065, 1921105090, 2065643748, 1943379503, + 1105167204, 1103135614, 1120185388, 1197398357, 1138403672, 1985156528, 1134733959, 2050877572, 1367719308, + 1927331777, 1998172753, 1188301217, 2046928927, 1091629370, 1088273777, 1393736515, 2047597909, 1363526776, + 1621547726, 1960413322, 1623717500, 1197927161, 2122676861, 1123248230, 2020316247, 1959775111, 1939881168, + 2023033202, 1676237694, 2115254811, 1980413254, 2124227941, 1163768635, 1919228209, 1800268557, 2074207882, + 1111300439, 1179341264, 1996429924, 1835578854, 1661036737, 1161150119, 1180978104, 1820606969, 2110111230, + 1814546103, 1172641261, 2071292626, 1938351883, 1862893423, 1122555030, 2076490198}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_ref_data.h index bc6d8376..4191dce3 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_ref_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_ref_data.h @@ -1,387 +1,387 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
#pragma once #include const int8_t depthwise_eq_in_out_ch_output_ref[8750] = { - 108, -14, 11, 40, 25, 37, 46, 13, -14, -25, 53, 35, -11, 45, -8, 32, 40, -12, 15, 27, 25, 12, 28, - -3, -8, -18, 49, 20, 86, -2, 5, 15, 26, 79, 5, 38, -17, 5, 4, -6, 10, 1, 24, 23, 0, 6, - 2, 45, 21, 46, 16, -19, 45, -6, 53, 6, -24, -61, 83, -33, -21, 42, 10, 1, -4, 6, -14, 40, -48, - -15, 10, 55, 22, -36, 19, 35, 8, 33, 26, 28, -4, 7, 57, 34, 0, 61, 16, 40, 44, -5, 33, 5, - 71, -37, -27, 22, 24, 27, -41, 4, -29, 33, -37, 3, -18, 34, 12, -6, 31, 11, 63, -4, 10, 11, 31, - -12, 40, -21, 18, 24, -42, -18, 14, -85, -33, 23, 17, 18, 46, 5, 20, 41, 51, 38, 3, -26, -47, 22, - 20, -8, -23, 34, -8, -9, 13, 48, -23, 25, -24, 15, 32, 9, 42, 29, -14, -40, 54, 12, 4, 14, 8, - 52, 71, 45, 45, 63, -16, -24, 11, 66, -39, -31, -17, 6, 16, 46, -1, -20, 0, 61, 1, -32, -42, -44, - 35, 62, -33, 53, -3, 50, 11, 27, -38, -46, 35, 50, 21, -12, 16, 6, 88, 51, -16, 48, -22, 33, 52, - -13, 25, 26, 10, -9, 7, 28, 8, 45, -22, 0, 10, 43, 15, 36, -17, 20, 51, 24, 35, -27, 35, 79, - -2, 40, 30, 0, -49, 60, 51, -83, -56, -30, -26, 1, 46, 62, -19, 21, 18, 22, 20, 26, 106, -7, 12, - 48, 17, 47, 23, 10, -21, 3, 34, 16, -22, 29, -3, 25, 34, -5, 7, 24, 37, 34, 46, -2, 8, -21, - 29, 41, 59, -20, -5, 34, 16, 80, 5, 28, -50, 5, -23, 13, 5, 8, -21, 24, 13, 2, -19, 15, 34, - 53, 32, -32, 64, 2, 41, -18, -21, -71, 62, -5, 14, 7, 31, 10, 13, 1, 1, 6, -68, -23, 32, 49, - 33, -38, 32, 10, -10, 36, 8, 19, -2, -11, 49, 7, -20, 39, 27, 52, 35, -6, 13, -6, 62, -19, -29, - 23, 38, 33, -48, -12, -8, 30, -17, 4, -29, 15, 44, -24, 16, 17, 66, 23, -11, 15, 40, -38, 40, -14, - 17, 3, -26, -17, 23, -92, -42, 24, 58, 27, 53, 21, 11, 31, 46, 44, -9, -8, -39, -6, 20, -28, -25, - 24, -7, -24, 22, 59, -43, 25, -44, -22, 53, 7, 44, 28, -32, -28, 58, 26, 10, 19, 34, 51, 72, 53, - 60, 46, -17, -18, 22, 69, -15, -64, -45, -7, 12, 52, -3, -2, 19, 47, 2, -30, -41, -39, 34, 33, -10, - 36, -6, 45, 28, 29, -24, -52, 56, 49, 34, 22, 20, 35, 99, 41, 25, 53, -9, 34, 33, -1, 48, 32, - -16, -8, 42, 38, 24, 29, -46, 7, 21, 42, 27, 20, 3, 23, 35, 15, 39, -28, 16, 60, 9, 15, 33, - 53, -60, 77, 49, -62, -64, 6, 1, 22, 40, 102, -30, 23, 3, 52, 21, 24, 108, -14, 15, 32, -29, 24, - 38, 14, -10, -4, 48, 35, -24, 35, -19, 21, 31, 1, 22, 63, 39, -8, 51, 5, 6, -15, 50, 19, 35, - 14, -5, 9, 33, 90, -17, 11, -46, 39, 7, 1, 5, 31, -16, 34, -34, -40, 23, 7, 24, 51, 10, -27, - 55, -2, 63, 3, -18, -76, 52, -7, -40, 22, 43, 4, -6, 29, -4, 34, -50, 13, 24, 48, 16, -34, 2, - 35, -10, 18, 18, 56, 17, -31, 55, 30, -14, 50, 28, 50, 34, 0, 7, 5, 48, -53, 1, 21, 29, 25, - -57, 8, 4, 21, -30, 1, -31, 27, 22, -6, 18, 5, 66, 21, 18, 20, 28, -12, 47, 7, 8, 28, -21, - -19, 34, -86, -33, -7, 23, 28, 31, 7, -5, 27, 51, 40, 23, -16, -46, 14, 34, -24, -36, 23, -8, -19, - 31, 63, -19, 27, -52, -15, 41, -1, 49, 67, -35, -20, 18, 29, 11, 12, 38, 46, 67, 42, 41, 69, -14, - -4, 35, 18, -4, -22, -21, -9, -15, 49, 8, -7, 6, 27, -10, -31, -56, -39, 30, 37, -22, 47, -4, 39, - 24, 19, -9, -57, 45, 62, 37, -12, 21, 20, 92, 49, 24, 18, -7, 35, 38, -2, 32, 52, -27, -31, 16, - 33, 13, 28, -17, 10, 48, 43, 29, 9, -10, -6, 1, 6, 27, -36, -1, 57, 26, 29, 11, 38, -56, 84, - 25, -66, -63, -18, 6, 14, 38, 70, -32, 31, 12, 28, 15, 26, 106, -21, 12, 18, -10, 20, 31, 13, -8, - -25, 65, 27, -44, 35, -4, 29, 36, -10, 24, 43, 56, 30, 58, 3, 4, -3, 32, 28, 39, -2, 0, 28, - 27, 68, -2, 61, -33, 26, -24, -17, 5, 16, 20, 40, 11, -4, 17, 26, 33, 37, 22, -31, 66, -8, 27, - -4, 4, -56, 64, -18, 18, 38, 20, 26, 43, -15, -16, 10, -56, -10, 23, 47, 44, 
-77, 25, 44, 9, 15, - 21, 23, 11, 16, 55, 6, 2, 57, 16, 57, 35, -16, 12, -1, 46, -46, -12, 11, 18, 28, -35, 1, -5, - -2, -54, -10, -24, 20, 0, -11, 25, 7, 26, 18, 13, 8, 20, -17, 28, 11, -6, 25, -23, -11, 46, -78, - -43, 5, 37, 23, 55, 24, 1, 8, 41, 52, -2, -35, -56, 5, 26, -5, -23, 3, -20, -24, 22, 51, -35, - 41, -13, 5, 26, 26, 40, 51, 0, -24, 44, 25, 16, 23, 31, 46, 97, 67, 50, 40, -22, -39, 22, 52, - 16, -28, -6, 4, 13, 34, 35, 12, 3, 9, -1, -35, -75, -45, 30, 41, -34, 54, 2, 46, 21, 18, -18, - -23, 49, 55, 35, -35, 23, 18, 76, 25, 16, 44, -32, 36, 43, -7, 50, 18, -3, -27, 40, 15, 21, 25, - -14, 9, 52, 21, 25, 2, -17, 17, 10, 12, 31, -41, 20, 80, 2, 19, 16, 16, -31, 73, 49, -37, -50, - 0, -22, -1, 56, 69, -19, 27, 23, 45, 33, 28, 98, -19, 8, 44, 27, 40, 6, 17, -16, -4, 39, 5, - 6, 57, -10, 18, 40, -6, 3, 5, 35, 38, 15, 10, 10, 1, 27, 40, 60, 11, -13, 22, 23, 78, 11, - 44, -20, 12, -34, -4, 4, 16, -20, 41, -13, -4, -19, 43, 25, 41, 29, -20, 53, -4, 65, -11, -13, -91, - 64, -5, -14, 4, 3, 10, 39, 16, -14, 5, -41, -14, 44, 26, 28, -71, 1, 28, 27, 32, 23, 31, 23, - -30, 47, 19, 8, 46, 39, 41, 39, 3, 28, 5, 43, -31, -19, 11, 23, 35, -35, 7, -2, 24, -32, 9, - -13, 46, 29, -12, 25, -3, 61, 38, 13, 14, 26, -12, 37, -6, 6, -19, -31, -12, 39, -78, -43, 9, 34, - -7, 55, -1, 16, 23, 61, 55, 5, -11, -62, 15, 16, -17, -29, 35, -37, -12, 37, 47, -35, 11, -26, -16, - 18, 14, 45, 36, -45, -29, 66, 30, 13, 28, 34, 51, 83, 76, 58, 57, -28, -9, 23, 45, -16, -44, -34, - -11, -10, 38, 13, 16, 10, 17, 0, -35, -67, -31, 30, 42, -23, 36, -4, 39, 9, 26, -52, -37, 26, 52, - 22, -14, 19, 9, 81, 42, 24, 62, -25, 6, 48, -1, 26, 28, 1, -38, 4, 33, 8, 42, -25, -4, 49, - 43, 36, 20, -8, -3, 26, 30, 31, -19, 14, 54, 20, 32, 9, 41, -20, 56, 73, -51, -51, -11, -5, 14, - 40, 52, -11, 22, -3, 53, 19, 14, 78, -4, 7, 32, -11, 28, 61, 20, -18, -20, 39, 27, -22, 37, -10, - 28, 39, 5, 15, 37, 18, -3, 50, -12, -2, -14, 34, 55, 60, 15, 2, 22, 28, 76, 1, 33, -58, 42, - -29, 2, 6, 56, 11, 23, 27, -20, 6, 38, 38, 50, 28, -21, 47, -5, 39, 23, -10, -69, 70, -7, -9, - 62, 54, 24, -1, 3, -7, 37, -20, 19, 28, 51, 53, -40, 10, 45, 4, 31, 21, 29, 14, -10, 29, 13, - 4, 63, 21, 45, 38, -2, 18, -7, 57, -20, -9, 17, 50, 19, -7, 1, 3, 28, -57, 7, -19, 36, 13, - -1, 28, 6, 71, 23, 21, 17, 35, -38, 47, -4, 13, 41, -32, -31, 26, -71, -43, 12, 44, 14, 53, 20, - 6, 26, 61, 55, 1, -8, -37, 14, 42, -25, -22, 36, -23, -25, 51, 47, -35, 45, -55, -4, 49, 3, 47, - 52, -29, -17, 42, 8, 6, 11, 40, 52, 74, 86, 46, 42, -25, -4, 19, 77, -24, -63, -25, -12, 9, 63, - 11, 17, 14, 18, 5, -38, -62, -49, 30, 37, -27, 46, 15, 44, 19, 30, -26, -43, 42, 42, 32, 5, 44, - 41, 84, 47, -6, 49, -46, -2, 26, -4, 29, 49, -10, -43, 36, 48, 22, 28, -37, -7, 44, 41, 17, 32, - 2, 18, 30, 5, 31, -25, 16, 70, 36, 31, 15, 47, -32, 65, 55, -43, -52, 9, 0, 11, 50, 78, -6, - 19, 3, 42, 1, 35, 54, -4, 5, 6, -4, 44, 17, 29, -19, -18, 29, 26, -17, 38, -11, 12, 41, -17, - 0, 42, 31, 36, 74, 26, -16, 2, 17, 20, 58, -16, -7, 36, 29, 40, 2, 35, -8, 23, 1, 10, -1, - 52, -29, 31, -9, -3, 10, 21, 26, 61, 23, -41, 39, 1, 38, -32, 17, -26, 50, 19, -11, -17, 15, 52, - 7, 39, 6, 38, -10, 3, 26, 39, 47, -46, 29, 12, 1, 33, 19, 20, 36, -48, 33, 32, -13, 62, 54, - 61, 47, -7, 9, -2, 55, -18, -20, 18, 24, 23, 12, 30, 3, 38, -26, 14, -5, 17, 40, -29, 34, 25, - 55, 21, 10, 30, 29, -17, 28, 25, -13, -25, -5, 0, 45, -42, -39, 11, 40, 25, 28, 34, -10, 33, 39, - 35, 2, 5, -19, -20, 23, 22, -12, 28, 1, -1, 30, 48, -34, -3, -35, 17, 13, 2, 51, 30, -2, -15, - 57, 40, 13, 25, 46, 47, 48, 47, 53, 44, -13, -26, 3, 
62, -26, -36, -28, 25, 12, 51, 32, -2, 8, - -7, 4, -16, -32, -41, 36, 54, 5, 42, 6, 44, 3, 28, -7, -29, 80, 40, 22, 19, 11, 54, 60, 38, - 9, 29, -22, 21, 39, -17, 31, 13, 1, -15, 59, 11, 28, 14, -6, 21, 37, 32, 30, -3, -8, 7, 7, - 37, 48, -26, -24, 51, 36, 22, 11, 6, -26, 59, 12, -14, -19, -33, -11, 0, 46, 88, -1, 8, 32, 68, - 20, 17, 79, -5, 8, 28, -13, 46, 29, 10, -18, -5, 46, 28, -21, 49, -18, 36, 29, -1, -7, 42, 42, - 30, 19, -16, 19, -18, 49, 27, 43, 21, 7, 21, 31, 63, 9, 21, -11, 37, -22, -32, 1, 44, -9, 45, - 16, -7, 2, 16, 42, 31, 29, -13, 52, -14, 34, 16, -15, -62, 69, -22, -23, 20, 59, 31, 22, 30, -20, - 18, -53, -9, 16, 41, 30, -63, 12, 43, 11, 42, 0, 52, 36, -16, 70, 9, -2, 43, 43, 59, 37, -15, - 19, 0, 62, -29, -23, 25, 33, 27, -47, 6, 6, 17, -20, 5, 12, 28, 21, -10, 32, 12, 25, 35, 22, - 22, 30, -39, 57, 29, -1, 15, -30, -18, 25, -63, -32, 12, 44, -3, 43, 15, 10, 20, 23, 49, 7, -36, - -22, -9, 35, -11, -19, 57, -18, -14, 27, 58, -36, 25, 12, 10, 25, 16, 41, 38, -1, -27, 34, 16, -3, - 5, 22, 44, 63, 45, 51, 59, -7, -24, -2, 60, 7, -35, -21, -6, -17, 70, 16, 6, 13, 48, -8, -16, - -44, -36, 36, 47, -15, 56, -25, 54, 6, 26, -52, -50, 45, 44, 24, -33, 19, 17, 91, 48, 0, 48, -10, - -3, 28, -15, 34, 38, -4, -16, 35, 33, 20, 33, -17, -13, 31, 32, 26, 21, -10, 11, 8, 5, 10, -37, - 21, 60, 0, 7, 21, 36, -58, 70, 55, -53, -62, -38, -28, 11, 48, 94, -16, 7, 0, 41, 24, -1, 91, - -19, 16, 35, 4, 9, 43, 18, -3, -20, 55, 12, -7, 60, -15, 20, 32, -8, 15, 52, 10, 39, 47, -7, - 1, -10, 43, 54, 45, 17, -5, 32, 25, 59, 25, 38, -42, 13, -9, 8, 1, 37, 8, 11, -38, 0, 16, - 27, 35, 48, 38, 3, 45, 21, 52, -11, -13, -73, 67, -4, -4, 34, 28, 28, 19, -2, -6, 37, -47, 7, - 12, 43, 35, -29, 34, 47, 2, 44, 5, 35, 21, -19, 63, 18, 14, 61, 4, 62, 34, -1, 37, 2, 64, - -47, -32, 23, 36, 26, -54, -5, -1, 23, -48, 5, -6, 17, 13, -4, 16, 21, 45, 24, 9, 13, 30, -13, - 30, -15, 21, 13, -20, -17, 9, -78, -39, 14, 57, 4, 27, 7, 22, 6, 20, 36, -20, -18, -20, 0, 31, - -44, -22, 23, -27, -18, 42, 57, -32, 28, -42, 4, 28, -16, 46, 23, -33, -20, 63, 27, 10, 22, 28, 50, - 80, 43, 41, 40, -29, -2, 42, 72, -27, -40, -33, -20, -12, 38, 0, 27, 6, 46, -7, -24, -26, -33, 33, - 45, -5, 71, 8, 34, -7, 14, -48, -44, 64, 43, 27, -27, 32, 41, 108, 42, 41, 12, 12, -15, 27, -4, - 28, 33, -5, -24, 33, 14, 18, 36, -28, -8, 27, 42, 25, 19, 2, -1, 6, 19, 30, -32, 39, 69, 2, - -2, 37, 25, -52, 80, 31, -43, -55, 0, -15, 2, 49, 89, -3, 25, 19, 60, 30, 23, 103, -12, 9, 37, - 13, 31, 21, 11, -11, 15, 51, 37, -9, 50, 1, 27, 46, -1, 26, 28, 0, 0, 56, 10, 4, 2, 30, - 36, 76, -6, -10, 25, 22, 64, 5, 51, -24, 1, -7, 6, 6, 50, -21, 24, 0, -11, -17, 53, 23, 57, - 5, -54, 69, -17, 39, -2, -10, -82, 68, 8, -1, 4, 41, 3, 20, 9, -22, 21, -27, -14, 31, 37, 53, - -51, 4, 57, 14, 47, 17, 15, 14, -27, 44, 19, 26, 40, 61, 51, 40, -7, 41, 5, 72, -13, -25, 15, - 43, 22, -59, 17, -7, 35, -50, 3, -17, 15, 42, -28, 22, 3, 38, 34, 20, 4, 30, -44, 52, 7, 11, - 22, -32, 3, 9, -85, -48, -6, 43, -12, 44, 33, 5, 32, 38, 34, 16, -23, -31, 21, 27, -23, -38, 20, - -29, -12, 40, 58, -14, 30, -4, -22, 38, -5, 41, 38, -8, -37, 58, 13, 7, 20, 30, 55, 71, 66, 43, - 55, -26, -32, 15, 47, 2, -66, -31, -12, 12, 50, -20, 1, -2, 25, 3, -26, -35, -40, 36, 56, -3, 60, - -10, 43, 11, 21, -31, -27, 47, 40, 12, -12, 29, 48, 99, 39, 8, 69, -30, -19, 39, 2, 42, 44, 16, - -21, 20, 16, 17, 44, -25, -15, 18, 44, 27, 31, 3, 0, -8, 26, 26, -52, 10, 68, 22, 24, 25, 49, - -39, 77, 43, -71, -40, -24, -14, 25, 42, 48, -11, 26, 1, 35, 22, 13, 109, -24, 14, 36, 16, 45, 23, - 19, -29, -18, 48, 24, 0, 
34, -18, 35, 39, -5, 9, 4, 21, 12, 44, -13, 5, -12, 47, 55, 74, -8, - -13, 16, 41, 77, 10, 47, -32, 15, -49, -1, 9, 67, 6, 16, -1, -13, 11, 36, 36, 42, 25, -21, 34, - 17, 48, 11, 6, -80, 72, -5, -35, 40, 3, 37, 3, 6, -2, 13, -62, 14, 9, 49, 55, -50, 18, 21, - 23, 44, 12, 34, 15, -1, 62, 12, -8, 30, 4, 39, 43, -10, 33, 9, 73, -41, -20, 35, 21, 7, -34, - -5, 14, 15, -33, -3, -28, 21, 25, -16, 20, 15, 79, 29, 2, 23, 47, -28, 40, -1, 3, 8, -30, -27, - -1, -69, -25, 2, 48, -3, 51, -6, 13, 41, 54, 45, 21, 7, -32, 19, 18, -41, -23, 41, -24, -18, 48, - 66, -29, 22, -45, -10, 17, 11, 39, 57, -45, -40, 36, 19, 13, 21, 14, 55, 73, 97, 41, 58, -10, -34, - 10, 16, -20, -39, -18, 8, -9, 55, 9, 9, 12, 30, -4, -34, -48, -44, 38, 41, -21, 51, -22, 48, 25, - 30, -16, -35, 41, 39, 16, 0, 23, 41, 66, 39, 21, 17, -34, -3, 42, -1, 20, 26, 7, -31, 48, 25, - 6, 41, -36, 5, 7, 46, 22, 18, -9, 26, 16, 17, 7, -38, 5, 57, 13, 19, 25, 53, -26, 75, 33, - -50, -35, 2, 7, -24, 46, 70, -33, 22, 2, 57, 40, 7, 102, 0, 17, 33, -17, 16, 38, 28, -25, 7, - 46, 42, -21, 51, 0, 28, 43, -3, 8, 22, 21, 43, 29, 19, 1, -18, 47, 55, 48, -5, -2, 29, 27, - 73, 9, 37, -52, 40, 1, -14, 3, 60, -29, 34, 12, -19, 23, 1, 20, 41, 22, -24, 60, -12, 31, -24, - -23, -81, 78, -17, 8, 12, 40, 11, 12, -12, -18, 28, -57, -20, 24, 56, 37, -33, 28, 31, -15, 54, 10, - 41, 5, -26, 38, 28, -25, 41, 21, 38, 37, -10, 19, 14, 68, -36, -25, 9, 42, 25, -40, 32, 5, 32, - -54, -8, -25, 28, 21, -8, 8, 5, 75, 25, 6, 1, 4, -31, 30, -20, 20, 3, -49, -6, 10, -56, -30, - -8, 45, 16, 48, 21, 15, 16, 41, 39, 2, 16, -36, -2, 37, -17, -39, 16, -13, -10, 46, 51, -18, 16, - -38, 10, 39, -12, 45, 43, -7, -38, 30, 8, 17, 13, 25, 49, 82, 68, 56, 41, -26, -19, 2, 72, -2, - -20, -14, 4, 7, 41, 25, 12, 9, 16, 0, -32, -38, -38, 37, 55, -12, 60, -5, 48, 19, 19, -38, -43, - 15, 50, 11, 10, 12, 33, 84, 60, 18, 48, -35, 3, 20, -14, 43, 39, -4, -44, 12, 31, 27, 42, -40, - -3, 2, 28, 31, 23, -18, 21, 23, 17, 16, -34, 3, 70, 15, 14, 25, 38, -22, 71, 42, -15, -42, -30, - -5, 20, 46, 57, -27, 28, 31, 29, 20, 24, 77, -4, 10, 37, 23, 42, 27, 16, -16, -34, 28, 8, -8, - 52, -10, 11, 28, -6, 23, 51, 49, 18, 25, -13, 17, -9, 33, 47, 41, 15, 8, 20, 31, 86, 19, 58, - -59, 2, -12, -11, 4, 20, -14, 31, -8, 0, -6, 3, 21, 48, 21, -31, 46, -10, 56, -4, 2, -45, 77, - 3, -11, 40, 39, 26, 24, 27, -1, 28, -18, 5, 48, 41, 54, -36, 4, 64, -19, 34, 18, 1, 14, 16, - 50, 11, 0, 46, 41, 67, 28, 0, 13, -3, 68, -57, -17, 28, 35, 26, -14, 12, 1, 6, -44, 10, -9, - 6, 33, -12, 20, 5, 48, 27, 2, 14, 38, -42, 43, 13, 13, 31, -30, -19, 11, -74, -26, 4, 28, 4, - 51, 22, 5, 52, 35, 38, -1, -3, -43, 6, 27, -46, -32, 15, -21, -19, 38, 46, -39, 38, -21, -20, 37, - 2, 49, 24, -38, -29, 60, 21, -3, 21, 28, 45, 63, 66, 47, 51, -23, 17, 16, 44, -34, -31, -38, 15, - 20, 47, 8, -22, 16, -5, -8, -39, -63, -45, 34, 38, -22, 54, -21, 51, 11, 21, -4, -34, 48, 54, 33, - 0, 34, 56, 72, 37, 36, 55, -61, -2, 32, -6, 44, 22, 3, -23, 31, 2, 11, 38, -29, 12, -9, 43, - 23, 3, 6, -5, 24, 13, 25, -40, 22, 57, 11, 11, 32, 22, -36, 70, 82, -25, -50, 2, -22, 27, 50, - 92, -24, 7, 13, 34, 8, -1, 49, -6, 3, 10, -11, 16, 29, 26, -19, -21, 34, 46, -2, 26, -17, 23, - 40, -10, 6, 39, 64, 22, 83, 19, -22, 8, 34, 20, 52, -11, -11, 32, 30, 55, 9, -2, 0, 14, -8, - 17, 1, 13, 1, 25, 8, 0, 6, 14, 33, 70, 14, -45, 63, 19, 49, -31, 18, -31, 59, 17, -5, -8, - 9, 40, -2, 27, 9, 42, -3, 12, 19, 41, 58, -61, 6, 19, 7, 27, 24, 43, 34, -42, 47, 28, -12, - 37, 50, 21, 48, 31, 9, -6, 59, -2, -24, 12, 28, 25, 12, 23, 16, 17, -28, 14, -2, 29, 37, -24, - 34, 8, 33, 24, 
16, 28, 38, -7, 32, -25, -13, -16, -16, 6, 10, -55, -11, 10, 22, 5, 41, 33, -7, - 19, 36, 37, 10, -10, -25, -6, 24, 2, -16, 32, -7, -6, 25, 43, -30, 0, -28, 26, 14, -2, 50, 18, - -23, -20, 55, 18, 15, 23, 44, 45, 46, 44, 47, 35, -6, -17, 2, 74, -16, -39, -29, 16, 5, 49, 9, - -8, 10, -15, -6, -25, -46, -37, 38, 57, -3, 30, 8, 35, 8, 27, -7, -8, 59, 46, 17, 5, 8, 49, - 51, 23, 23, 26, -8, 17, 49, -7, 28, 23, 8, -10, 64, 19, 27, 38, 3, 10, 10, 35, 29, -9, 3, - 10, -7, 26, 34, -31, -26, 46, 30, 29, 8, 32, -32, 70, 5, -18, -12, -34, 2, 12, 42, 81, -19, 13, - 11, 79, 5, -11, 77, -21, 9, 19, 17, 12, 28, 11, -3, -12, 39, 9, -34, 25, 0, 31, 38, 2, 20, - 26, 8, 17, 49, -3, 17, -17, 46, 46, 55, 9, 5, 26, 45, 74, 3, 62, -22, 28, -2, -17, 6, 21, - -37, 42, -1, -6, 6, 27, 30, 34, 27, -6, 63, -13, 56, -8, -3, -94, 57, -16, -3, 21, 27, 22, 32, - 10, -2, 49, -52, -22, 25, 37, 42, -50, 19, 42, -9, 21, 14, 18, 32, 11, 56, 7, -18, 44, 29, 43, - 42, -5, 6, 5, 60, -35, -13, 12, 37, 23, -40, -4, -12, 29, -48, 5, -4, 12, 45, -12, 23, 7, 76, - 4, 11, 18, 31, -25, 43, -7, 0, 4, -28, -14, 13, -60, -32, 21, 29, 9, 25, 14, 14, 26, 59, 44, - 19, -13, -31, -8, 26, 1, -32, 16, -16, -24, 50, 66, -51, 21, -21, 24, 56, -6, 48, 64, -19, -11, 43, - 27, 0, 14, 38, 48, 50, 63, 40, 50, -17, -22, 29, 65, 6, -36, -16, -1, -16, 28, 5, 4, 9, 47, - -4, -15, -46, -26, 34, 31, -7, 44, -11, 41, 12, 26, -47, -32, 25, 49, 27, -17, 9, 38, 87, 55, 14, - 31, -8, -3, 37, -14, 38, 30, -4, -27, 50, 28, 15, 22, -21, 17, 17, 39, 38, 42, -9, 24, 28, -15, - 22, -22, -12, 74, 9, 38, 33, 39, -13, 73, 59, -80, -69, -16, -1, -5, 44, 104, -33, 13, 11, 48, 30, - 29, 98, -19, 15, 14, -10, 44, 42, 15, -12, -6, 59, 25, -14, 47, -8, 23, 36, -4, 28, 18, 57, 44, - 40, -13, 12, -28, 42, 47, 62, -1, -4, 20, 25, 58, -9, 36, -26, 21, -14, -4, 7, 30, 6, 8, -4, - -45, -7, 16, 34, 36, 27, -7, 46, 9, 32, -9, -4, -83, 73, 16, -16, 31, 13, -10, 4, 8, -6, 39, - -56, 6, 28, 45, 53, -45, 21, 35, 1, 41, 8, 27, 22, -1, 66, 35, 6, 53, 29, 49, 30, -3, 9, - 3, 58, -13, -3, 13, 36, 18, -54, 24, -5, 34, -40, 4, 0, 30, -5, -4, 28, -20, 42, 0, 10, 14, - 33, -37, 46, 22, 23, 24, -38, -7, 21, -65, -23, 17, 50, 45, 40, 6, -9, 38, 58, 41, -18, -8, -9, - 10, 37, -21, -24, 36, -33, -10, 37, 67, -37, 23, -36, 0, 27, 4, 44, 44, -35, -20, 51, 25, -4, 16, - 15, 47, 96, 56, 43, 43, -16, -10, 21, 55, -2, -41, -23, -17, 6, 21, 27, -9, 18, 27, -4, -25, -54, - -43, 35, 43, -10, 27, -15, 41, 11, 35, -26, -37, 30, 38, 30, -18, 13, 40, 86, 35, 15, 67, -17, 21, - 60, 6, 37, 46, 1, -39, 35, 17, 20, 40, -24, -19, 7, 46, 29, 39, -22, 28, 42, -9, 7, -36, 37, - 49, 11, 26, 1, 69, -33, 82, 64, -32, -46, -6, -15, 37, 58, 56, -25, 27, 16, 67, 24, 6, 95, -20, - 18, 14, 14, 41, 41, 17, -11, -11, 40, 12, -5, 63, -13, 35, 19, -5, 1, 25, 31, 13, 44, 4, 7, - -12, 29, 17, 69, 12, -3, 21, 26, 60, 18, 52, -62, 23, -18, -4, 7, -8, 22, 25, -5, 16, 15, 22, - 44, 34, 13, -54, 60, 9, 57, 10, -13, -74, 58, -11, -9, 18, 19, 3, 28, 9, -34, 16, -28, -6, 33, - 44, 25, -61, 31, 54, 3, 18, 1, 26, 5, -1, 52, 21, -21, 56, 39, 47, 41, -14, 8, 12, 46, -42, - -3, 38, 43, 20, -55, -1, 16, 32, -25, -5, 1, 5, 12, -19, 15, 31, 43, 2, 10, 13, 22, -36, 39, - -17, -2, 8, -18, -6, 47, -77, 2, 5, 55, 43, 38, 8, 12, 35, 57, 41, -1, -10, -16, 23, 32, -10, - -45, 30, -27, -15, 50, 65, -43, 47, -33, -16, 43, -3, 46, 28, -19, -9, 42, 4, 4, 13, 6, 50, 83, - 79, 42, 53, -10, -16, -8, 62, -15, -22, -9, -13, 5, 15, 6, -13, 1, 38, -4, -42, -54, -39, 34, 47, - -8, 33, 21, 40, 13, 29, -47, -29, 59, 46, 33, -41, 28, 31, 90, 46, -6, 8, -61, 6, 29, -16, 46, 
- 33, 3, -32, 39, 33, 16, 32, -28, 13, -12, 37, 19, 17, 1, -6, 15, 14, 26, -16, 22, 69, 15, 30, - 37, 23, -35, 91, 31, -32, -31, -11, -16, 12, 41, 79, -14, 29, 20, 43, 32, -19, 84, -3, 13, 7, 1, - 34, 13, 13, -2, -17, 67, 12, -29, 44, -6, 32, 33, -2, 7, 55, 7, 21, 54, -11, -8, -2, 32, 49, - 46, 8, 2, 13, 30, 61, 21, 36, -27, 25, 11, 20, 7, -33, -32, 13, 12, -8, -1, 28, 41, 33, 26, - -9, 65, -7, 45, -11, 0, -77, 69, -21, -10, 31, 14, 29, 39, -6, 1, 23, -22, 3, 17, 44, 9, -40, - 3, 38, -3, 35, 11, 49, 27, -18, 62, 14, 2, 67, 15, 50, 45, -12, 11, 2, 69, -26, -5, 11, 41, - 28, -35, 18, 24, 9, -35, 5, -12, 38, 17, -11, 27, 0, 22, -2, -13, 14, 29, -14, 44, -6, 15, -8, - -40, -20, 26, -58, -10, 20, 62, 27, 35, 26, 1, 16, 32, 45, 25, -45, -47, 8, 29, -11, -25, 24, -10, - -14, 44, 58, -32, 5, -32, -14, 23, -5, 39, 31, -41, 1, 69, 33, 14, 17, 25, 49, 85, 56, 55, 57, - -10, -9, 27, 26, -17, -41, -42, 30, 10, 34, -16, -6, 6, 55, -6, -27, -55, -33, 32, 55, -21, 36, -5, - 49, 11, 22, -32, -47, 50, 51, 26, -34, -13, 11, 79, 40, 9, 71, -65, -6, 28, 7, 29, 46, -6, -39, - 37, 9, 11, 29, -40, 3, -4, 26, 33, 17, -12, 34, 21, 12, 24, -20, -2, 42, 11, 22, 0, 55, -19, - 96, 28, -46, -37, 19, -24, -13, 42, 81, -13, 15, 18, 51, 23, -2, 99, -18, 11, 37, 23, 37, 55, 18, - -13, -4, 49, 6, -37, 37, -17, 38, 36, -2, 15, 59, 27, 33, 18, -6, 1, -2, 40, 30, 68, 8, -1, - 37, 45, 80, 14, 12, -42, -3, -2, -25, 2, -26, -9, 33, -2, -12, -1, 46, 31, 51, 23, -2, 71, -1, - 49, -7, -12, -53, 79, -8, -14, 29, 22, 23, 9, 19, -23, 31, -26, -21, 19, 59, 11, -48, 32, 59, -2, - 39, 23, 2, 12, 23, 52, 18, 6, 35, 25, 40, 28, 12, -2, 4, 65, -63, -27, 17, 36, 11, -49, 23, - 3, 39, -29, 1, -20, 11, 23, 1, 31, -4, 32, -4, 8, 19, 54, -38, 43, 5, 14, 39, -45, -11, 27, - -52, -25, 21, 54, 21, 56, 2, 11, 51, 28, 28, 6, -47, -44, -9, 21, -10, -7, 7, -14, -20, 38, 48, - -36, 31, -15, -21, 40, -10, 52, 37, -25, -6, 45, 7, 12, 27, 23, 49, 78, 59, 39, 46, 5, -25, 35, - 69, -17, -65, -13, -21, 7, 35, 1, 12, 16, 36, -12, -17, -41, -43, 34, 25, -14, 36, 2, 56, 13, 28, - -44, -30, 41, 47, 32, -17, 19, 30, 68, 48, 22, 37, -53, -6, 37, -9, 25, 10, -13, -29, 17, 25, 17, - 36, -38, 6, -5, 60, 19, 41, -12, -5, -1, 7, 14, -37, 31, 60, -8, 1, 26, 17, -45, 77, 51, -44, - -37, -31, -12, -6, 51, 64, -18, 14, -8, 26, 25, 1, 84, -23, 12, 37, -2, 38, 13, 15, -12, -11, 39, - 32, -20, 44, 0, 9, 30, -7, 30, 40, 32, 36, 52, -3, 16, -10, 44, 44, 66, 5, -8, 27, 21, 71, - 8, 5, -37, 28, -5, 18, 6, -10, -24, 43, -1, -26, 22, 46, 25, 20, 7, -46, 50, -18, 40, 2, -23, - -44, 69, 4, -15, 3, 38, 15, 23, 10, -18, 47, -13, 15, 10, 47, 35, -39, 14, 54, 6, 31, 17, 45, - -3, -10, 60, 23, 12, 28, 37, 49, 39, 22, 13, 9, 46, -26, -6, 30, 26, 21, -11, -5, -14, 47, -27, - 11, -17, 31, 7, -18, 44, 28, 55, 2, -15, 15, 10, -16, 44, 16, 19, 40, -40, -8, 43, -63, -23, 10, - 47, 27, 49, 33, 15, 11, 32, 35, 13, -32, -13, 24, 32, -28, -26, 23, -23, -27, 25, 44, -38, 16, -11, - 2, 43, 19, 47, 42, -27, -17, 44, 10, -10, 21, 19, 50, 35, 92, 31, 46, 1, -12, 30, 38, -23, -52, - -32, 16, 11, 12, -8, -5, 0, 5, -4, -28, -26, -44, 36, 51, 0, 47, 11, 43, 4, 32, -27, -31, 53, - 55, 29, -2, 36, 22, 60, 39, 14, 22, -32, -10, 34, -20, 44, 60, -2, -19, 24, 11, 19, 63, -25, 4, - 6, 33, 32, 35, -4, 29, 25, 24, 21, -40, 13, 67, -3, 17, 20, 52, -52, 90, 26, -9, -47, -17, 2, - 2, 49, 87, -28, 25, 15, 62, 32, 12, 50, -1, 5, -5, -9, 17, 34, 22, -12, -19, 22, 28, -30, 31, - -15, 19, 43, -14, 17, 38, 41, 9, 64, 26, -19, 5, 30, 17, 54, -2, -14, 27, 29, 55, 4, 13, -4, - 15, -17, -6, 4, 16, -9, 31, 5, -16, 2, 27, 33, 48, 16, 
-32, 75, 26, 56, -20, 20, -25, 57, 4, - -4, 11, 11, 29, 15, 36, 6, 38, -8, 12, 39, 38, 55, -64, 15, 11, 2, 28, 22, 38, 33, -38, 44, - 29, -13, 41, 36, 58, 44, 20, 8, 3, 40, -22, -32, 17, 32, 25, 13, 13, 14, 42, -44, 12, 9, 45, - 36, -26, 31, 1, 27, 17, 24, 23, 45, -14, 21, -1, -13, -37, -27, -12, 43, -44, -18, 12, 18, 33, 41, - 22, -9, 28, 37, 37, 1, -14, -17, -13, 18, 20, -10, 28, -3, -3, 35, 46, -19, 6, -27, 10, 1, 2, - 51, 17, -5, -19, 52, 20, 16, 17, 40, 47, 40, 60, 47, 50, -11, 0, -9, 80, -23, -25, -21, 11, 12, - 31, 3, 4, 21, -14, -1, -31, -24, -34, 39, 56, -16, 42, 9, 33, 14, 28, 9, -31, 68, 46, 7, 30, - 11, 36, 46, 39, 6, 51, 12, 14, 24, -11, 30, 8, 7, -10, 54, 31, 25, 35, 5, 18, 30, 23, 26, - 16, 4, -14, 10, 34, 35, -28, -24, 47, 32, 33, 1, 13, -21, 74, 38, -19, -19, -4, -12, 26, 41, 73, - -18, 10, 31, 72, 5, -3, 84, -14, 11, 45, -5, 49, 35, 18, -26, -16, 45, 21, 5, 25, -6, 35, 36, - -2, 29, 24, 27, 20, 64, 5, 6, -17, 34, 38, 58, 12, -2, 30, 33, 69, 8, 45, -24, 23, -11, -23, - 9, 26, -9, 32, -10, -11, 18, 29, 21, 49, 39, -35, 47, 6, 36, -7, 1, -89, 61, -15, -18, 6, 21, - 10, 33, 11, -11, 12, -35, -11, 18, 51, 40, -45, 28, 43, 22, 52, 15, 19, 11, 4, 39, 13, -3, 45, - 6, 38, 32, 16, 12, -2, 49, -63, -17, 16, 22, 16, -36, 10, -1, 22, -39, -5, -15, 18, 10, -11, 36, - -9, 44, 31, 9, 18, 32, -35, 38, 15, 19, 17, -22, -16, 16, -75, -14, 11, 11, 12, 49, 16, 10, 26, - 21, 51, 6, 7, -45, 9, 47, -29, -48, 23, -33, -15, 36, 49, -38, 26, -25, 0, 21, -6, 42, 48, -46, - -53, 47, 27, 11, 23, 27, 51, 59, 54, 48, 58, -25, -15, 37, 71, -33, -52, -15, 12, -37, 52, 1, 15, - 11, 52, 3, -23, -53, -31, 27, 52, -13, 52, 1, 44, 9, 19, -28, -37, 54, 42, 14, -16, 38, 27, 70, - 42, 10, 56, -20, -1, 27, -2, 42, 27, -16, -39, 33, 11, 16, 67, -32, -6, 11, 42, 30, 41, -7, 9, - 38, -1, 32, -32, -5, 52, 10, 10, 10, 58, -41, 81, 54, -45, -27, 4, -18, -20, 50, 71, 3, 9, 2, - 34, 26, -3, 100, -21, 17, 6, -1, 39, 18, 20, -23, 4, 66, 34, -38, 33, -16, 31, 32, 2, 18, 36, - 21, 16, 31, 9, 7, -25, 49, 25, 42, -14, -3, 9, 31, 81, 3, 48, -34, 22, -20, -6, 2, 14, -17, - 13, -13, -1, 19, 39, 35, 51, 2, 0, 48, -14, 47, -7, -20, -87, 66, -4, 2, 34, 16, 9, 32, 25, - -9, 35, -44, 19, 37, 42, 25, -54, 9, 34, 30, 43, 11, 35, 34, -21, 45, 24, -22, 35, 32, 45, 41, - 7, -1, -2, 62, -28, -30, 32, 12, 27, -38, 23, -3, 4, -51, -6, -29, 40, 22, -7, 15, 28, 57, 30, - 8, 14, 26, -14, 39, -12, 24, -3, -14, -18, 20, -53, -40, 14, 43, 20, 35, 4, 21, 27, 36, 45, -7, - 12, -59, 17, 27, 1, -16, 33, -22, -16, 45, 58, -27, 18, -42, 10, 14, 14, 46, 34, -18, -27, 50, 20, - 6, 16, 19, 47, 57, 48, 37, 64, -23, -21, 9, 38, -16, -25, -22, -17, -11, 62, 14, -8, 3, 44, -2, - -9, -54, -30, 33, 61, -15, 53, 10, 35, 4, 23, -29, -29, 42, 53, 20, -12, 18, 19, 66, 43, 9, 19, - -29, 12, 29, -12, 34, 28, -7, -48, 17, 29, 15, 53, -31, 19, 14, 27, 14, 20, -5, -18, 46, -4, 27, - -28, 12, 70, 17, 13, 17, 29, -34, 75, 28, -68, -43, -30, -2, 1, 41, 82, -18, 38, 11, 73, 26, 16, - 95, -20, 16, 29, 5, 16, 54, 7, -18, -28, 39, 21, -44, 56, -1, 33, 32, -1, -9, 21, 52, 8, 32, - 10, -6, -30, 39, 29, 37, 13, -12, 29, 36, 83, 2, 23, -21, 23, -16, -19, 8, 41, 34, 21, -6, -45, - -4, 37, 22, 41, 31, -48, 59, -4, 43, -3, -35, -88, 69, 3, 2, 28, 45, 28, 1, -3, -21, 19, -49, - -38, 29, 46, 37, -37, 26, 29, -9, 24, 14, 42, 37, -20, 45, 23, 8, 51, 32, 34, 29, 1, 2, 7, - 58, -38, -27, 9, 43, 36, -37, -1, 12, 40, -39, 14, -17, 16, 2, -19, 51, -17, 54, 18, 2, 25, 55, - -38, 53, 1, 4, 9, -34, -25, 17, -70, -49, 5, 25, 5, 45, 5, 10, 36, 48, 42, -17, -8, -47, 4, - 39, -18, -41, 19, -7, -29, 
15, 49, -17, 43, -51, -8, 34, 22, 40, 37, -23, -30, 58, 24, 10, 11, 8, - 43, 92, 62, 57, 61, -9, -9, 13, 67, 1, -42, -9, -10, 4, 66, -9, 3, 10, 46, -4, -17, -55, -41, - 35, 31, 2, 51, -2, 53, 11, 32, -14, -43, 55, 41, 24, -30, 22, 29, 78, 37, 21, 63, -72, 13, 51, - -5, 51, 48, 4, -46, 42, 5, 24, 33, -29, -11, 42, 32, 36, 2, 2, 36, 30, 0, 24, -26, 1, 59, - 10, 16, -2, 63, -20, 86, 72, -50, -34, -13, -12, 25, 47, 70, -29, 24, 15, 40, 25, 17, 82, -6, 13, - 2, 11, 17, 26, 19, -14, 10, 49, 14, -24, 60, -11, 36, 43, -9, 11, 41, 46, 33, 63, -8, 6, -27, - 40, 40, 66, 6, 2, 32, 34, 69, 3, 12, -51, 6, -21, 15, 9, 41, -44, 20, -17, 10, -4, 23, 19, - 50, 19, 4, 50, -18, 28, -21, -23, -89, 52, -20, -13, 25, 39, 36, 17, -3, -7, 34, -33, 17, 18, 42, - 48, -44, 29, 29, -1, 25, 18, 16, 16, -10, 72, 7, 12, 49, 23, 65, 40, -7, 11, 16, 41, -39, -15, - 16, 32, 23, -25, 15, 8, 12, -19, -6, -14, 37, 26, -5, 43, 23, 61, 12, 6, 6, 11, -23, 33, -2, - -2, 18, -41, -5, 10, -71, -45, -5, 19, 1, 54, 16, 15, 57, 45, 33, -1, -25, -24, 3, 26, -21, -37, - 31, -9, -28, 40, 54, -11, 19, -25, -9, 62, -15, 47, 62, -3, -24, 44, 11, 23, 19, 15, 47, 48, 46, - 49, 67, -8, -15, 24, 42, -41, -23, -34, -2, -13, 46, -21, -1, 0, 47, -3, -24, -55, -42, 29, 26, -13, - 48, 22, 58, 28, 33, -21, -42, 50, 35, 25, -36, 23, 19, 70, 46, 30, 20, -63, 12, 39, -7, 49, 13, - 2, -35, 23, 52, 18, 46, -21, 7, 19, 60, 23, 30, -11, 11, 34, 24, 28, -39, 4, 72, -6, 23, 16, - 28, -28, 77, 34, -67, -50, -8, -18, -6, 41, 89, -21, 11, 18, 52, -5, 18, 94, -6, 10, 34, -2, 32, - 33, 18, -8, -21, 39, 16, 3, 33, -13, 25, 45, 0, 24, 14, 41, 34, 27, 11, -4, -15, 40, 42, 48, - 15, -12, 20, 17, 66, 17, 56, -9, 39, 11, -9, 5, 46, 14, 27, 14, -23, 4, 26, 21, 35, 30, -21, - 41, -6, 59, -10, -22, -92, 79, 7, -11, 43, 22, 18, 13, 27, -16, 40, -37, -28, 16, 32, 47, -41, 16, - 47, 7, 19, 13, 48, 40, 4, 62, 25, -2, 48, 34, 53, 44, -10, 33, 8, 48, -19, -30, 35, 18, 31, - -40, 15, 18, 8, -23, 0, -8, 16, 17, -7, 14, 4, 50, 20, 6, -1, 32, -38, 48, -12, 11, 24, -35, - -9, 5, -71, -22, -1, 49, 34, 45, 24, -6, 26, 61, 34, 3, -7, -42, -3, 43, -1, -14, 43, -23, -4, - 24, 66, -19, 28, -25, -22, 23, -9, 43, 55, -38, -34, 60, 30, 0, 26, 6, 45, 72, 83, 47, 34, -14, - -12, 6, 62, -12, -22, -31, 2, 12, 50, -11, 8, 4, 50, -3, 4, -40, -52, 31, 49, -21, 47, -4, 52, - 25, 28, -13, -25, 47, 39, 26, -11, 28, 26, 72, 39, 20, 67, -25, 30, 33, 4, 24, 40, 22, -34, 47, - 21, 21, 44, -12, -6, 17, 18, 21, 50, 3, 17, 25, 12, 22, -39, 31, 63, 8, 14, 14, 39, -48, 77, - 66, -48, -54, 3, -7, 3, 48, 63, -20, 33, 6, 37, 30, 17, 93, -21, 6, 26, -11, 21, 45, 21, -3, - -19, 51, 20, -25, 52, -14, 28, 39, 1, 28, 39, 16, 12, 77, -17, 7, -22, 29, 37, 56, -9, -7, 15, - 21, 74, 17, 47, -55, 5, -34, 8, 10, 62, -8, 22, -17, -6, 1, 12, 41, 59, 13, -31, 52, -15, 63, - 5, -38, -77, 73, -20, -17, 15, 24, 23, 10, 2, -6, 48, -23, 20, 36, 36, 23, -54, 12, 29, 33, 17, - -1, 36, 20, -28, 51, 23, -16, 54, 28, 41, 27, -4, 25, -6, 58, -43, -13, 3, 20, 25, -20, -3, 13, - 6, -48, -3, -23, 47, 26, -15, 25, 3, 30, 26, 9, 13, 42, -46, 46, -7, 13, 53, -50, -33, 7, -89, - -31, -1, 47, 25, 58, -2, 4, 52, 38, 44, 8, -19, -25, 41, 22, -37, -31, 43, -22, -35, 22, 58, -22, - 11, -41, 4, 51, 26, 44, 56, -33, -39, 62, 9, 3, 8, -7, 45, 77, 83, 52, 74, -24, -4, 10, 39, - 10, -48, -37, 16, 2, 81, 25, -9, 16, 28, -7, 4, -19, -37, 33, 44, -4, 51, 5, 50, 24, 29, -28, - -60, 55, 34, 15, -19, 28, 8, 67, 53, -2, 30, -27, 17, 46, -10, 32, 35, 20, -29, 27, 27, 12, 37, - -7, 2, 34, 42, 22, 46, -18, -5, 36, 10, 32, -32, 11, 60, 4, 23, 7, 42, -62, 78, 23, -36, 
-52, - 13, 9, 29, 44, 68, -4, 26, 26, 72, 28, 0, 57, 1, 5, 18, 17, 42, 15, 24, -8, -31, 26, 36, - 2, 31, -8, 10, 38, -15, 18, 37, 37, 5, 46, 8, -16, -1, 33, 20, 55, -4, -6, 38, 38, 42, 3, - 35, 11, 21, 13, 4, 5, 40, -19, 32, 1, -5, 12, 21, 32, 75, 11, -37, 68, 30, 45, -12, 9, -47, - 50, -2, -3, 2, 25, 19, 11, 28, 5, 29, 4, 5, 32, 44, 37, -66, 11, 14, 43, 19, 19, 41, 38, - -14, 42, 32, -20, 47, 33, 50, 46, -2, 24, -4, 68, -21, -32, 27, 35, 24, 7, 33, 20, 33, -34, 16, - 8, 31, 16, -29, 37, 15, 58, 26, 29, 24, 34, -14, 24, 7, -12, -42, -25, -20, 24, -52, -32, 4, 53, - 17, 23, 44, -8, 26, 36, 46, 11, -19, -11, -29, 26, 2, -11, 25, -2, 4, 21, 44, -10, 7, -21, 13, - 26, 8, 53, 42, -23, -24, 71, 27, 24, 27, 35, 45, 16, 40, 57, 41, -22, -7, -1, 81, -35, -41, -28, - 12, 25, 71, 32, -8, 16, -5, -2, -17, -8, -33, 37, 57, -22, 51, 13, 39, 15, 27, 8, -36, 72, 36, - -3, 3, 6, 55, 50, 29, 5, 33, 11, 17, 26, -18, 29, 19, 3, -13, 47, 30, 25, 4, 16, 11, 26, - 41, 33, 18, -4, 7, -5, 41, 40, -16, -24, 52, 37, 35, 37, 28, -36, 70, 30, -7, -19, -24, -14, 5, - 47, 58, 2, 5, 13, 63, 14, 10, 53, -5, 4, 36, 4, 12, 31, 20, -12, 4, 50, 44, -18, 21, -14, - 40, 41, -5, 17, 38, 22, 9, 36, -21, 13, 5, 26, 13, 35, 31, -14, 20, 30, 61, -5, 5, -4, 17, - -17, -7, 6, -16, 3, 10, 4, -25, 0, -4, 49, 67, 31, -25, 33, -1, 53, -2, 2, -45, 55, -13, -1, - 25, 24, 7, 22, 26, -11, 42, -6, -9, 26, 36, 1, -61, -4, 3, -18, 0, 24, 29, -15, -11, 30, 3, - -22, 30, 11, 40, 50, 18, 21, 21, 41, -23, 7, 33, 38, 16, -13, 9, 26, -2, -20, -5, 19, 22, 39, - -6, 44, 16, 22, 42, 18, 13, 32, -11, 36, -19, 11, 19, -5, -15, 0, -52, -60, 32, 57, 45, 28, 23, - 12, 14, 66, 32, 14, 16, -7, -1, 25, -36, -32, -2, -18, -8, 11, 36, -14, 38, -41, 4, 20, 21, 52, - 52, -20, 8, 43, 37, 30, 19, 24, 41, 69, 39, 48, 45, -24, -2, 8, 46, -9, -20, -10, -1, -10, 77, - -35, 5, 16, 3, -15, 11, -23, -28, 40, 54, -9, 27, 5, 27, 25, 15, -17, -20, 43, 62, 6, -15, 22, - 43, 49, 46, 21, 44, 5, -17, 35, 1, 38, 57, 37, -27, 9, 44, 21, 13, -11, 17, 28, 49, 23, -4, - -13, -2, -10, 25, 10, -17, -12, 64, 22, 22, 13, 14, -33, 57, 57, -40, 4, -22, -7, -26, 36, 56, -43, - 33, 16, 59, 34, -7, 55, -4, 3, 24, 15, 8, 34, 29, -18, 5, 58, 32, 0, 23, -5, 37, 34, -2, - 9, 43, 12, 2, 53, -15, 17, 4, 37, 21, 39, 25, -14, 23, 26, 68, -2, -9, -16, 38, -5, -34, 3, - -12, 4, 8, 3, -15, 17, 5, 42, 71, 15, -17, 35, -5, 32, 10, 2, -49, 49, 11, -15, 18, 26, 7, - 40, 7, -4, 13, -11, 0, 15, 39, 5, -44, 8, 12, -20, 1, 14, 26, -8, -28, 20, 17, -14, 29, 18, - 27, 47, 22, 34, 12, 37, -38, -6, 11, 26, 12, -3, 11, 24, -13, -16, 7, 9, 19, 32, -15, 55, 13, - 22, 40, 27, 5, 42, -21, 37, -6, 13, 4, -14, -13, 3, -47, -45, 32, 46, 33, 36, 29, 14, 29, 51, - 30, 15, -10, -2, 16, 18, -20, -28, -2, -21, -26, 4, 42, -16, 15, -52, 8, 26, 26, 43, 54, -33, -6, - 21, 23, 14, 16, 38, 43, 59, 52, 44, 41, -26, 5, -18, 49, -12, -21, -26, -11, -2, 84, -18, -10, 13, - 17, -16, 1, -25, -19, 43, 48, -21, 29, 19, 38, 10, 14, -18, -18, 27, 60, 6, 10, 5, 58, 51, 47, - 6, 49, -22, -16, 26, 1, 41, 35, 30, -29, 12, 26, 21, 2, 1, 1, 45, 20, 28, -9, -14, 23, 13, - 22, 21, -14, -16, 59, 34, 43, 10, 33, -38, 55, 43, -41, -9, -16, -13, 20, 44, 65, -31, 9, 26, 56, - 37, -12, 54, -3, 4, 23, 11, 18, 8, 22, -29, -12, 53, 29, 1, 27, -15, 37, 39, -1, 0, 58, 14, - -5, 32, 6, -1, 0, 43, 31, 34, 4, -12, 30, 22, 74, -1, 5, -4, 31, -7, 6, 5, -24, 22, 8, - 3, -2, 20, 2, 44, 70, 37, -32, 33, -9, 60, -1, -13, -46, 53, -7, -3, 9, 15, 9, 31, 2, 11, - 33, -7, -8, 34, 38, 28, -26, 13, 22, -4, 5, 4, 25, 16, -24, 26, 5, -22, 20, 34, 34, 49, 17, - 35, 7, 40, -26, -3, 
21, 36, 11, -2, 10, 26, -6, -12, 15, 14, 12, 31, -13, 31, 28, 25, 51, 15, - 14, 25, -19, 39, -4, 7, 3, -20, -8, -19, -33, -32, 37, 66, 27, 34, -4, 10, 5, 38, 27, 13, -5, - -13, -6, 25, -19, -38, 32, -13, -26, 20, 41, -21, 27, -24, 9, 36, 40, 41, 39, -38, -6, 18, 38, 34, - 16, 34, 44, 79, 48, 48, 53, -33, -7, 1, 34, -20, -27, -41, -5, -2, 80, 12, -3, 17, 15, -13, 10, - -15, -17, 41, 55, -20, 30, 25, 32, 19, 4, -16, -27, 27, 63, 0, -7, 21, 54, 57, 44, 4, 33, -43, - 5, 20, -2, 40, 46, 17, -24, 17, 33, 22, 20, 9, 14, 38, 53, 23, -6, -13, 11, 4, 11, 7, -20, - -20, 57, 21, 29, 16, 19, -36, 63, 45, -37, -14, 2, -18, -30, 39, 58, -27, 12, 16, 43, 20, -13, 52, - -3, 3, 24, 8, 19, 37, 16, -30, 16, 53, 38, 3, 38, -8, 42, 41, -9, 11, 52, 16, 21, 46, -23, - -7, -3, 32, 32, 28, 14, -7, 22, 19, 57, -8, 17, -17, 35, 17, 2, 2, -6, -15, -3, 13, -33, 14, - 7, 26, 58, 15, -22, 32, -13, 27, -4, -13, -54, 48, 6, 8, 44, 11, 19, 30, 18, -8, 29, -21, -4, - 38, 40, 15, -25, 7, 21, 3, 23, 8, 34, -1, -16, 44, -4, -29, 27, 20, 17, 45, 21, 30, 7, 43, - -34, -7, 13, 20, 20, 8, 7, 25, -7, -15, 9, 11, 12, 28, -2, 39, 0, 23, 49, 32, 14, 50, 2, - 42, -3, -5, 23, -14, 4, -9, -44, -36, 36, 62, 30, 32, 25, 5, 4, 51, 27, 14, 15, -9, 21, 20, - -38, -37, 1, -1, -24, 5, 37, -29, 39, -9, 7, -1, 13, 47, 41, -27, -12, 21, 35, 18, 22, 27, 42, - 76, 50, 41, 21, -30, 14, -10, 49, -6, -17, -12, -5, 8, 75, 12, 3, 14, 15, -11, -1, -22, -31, 40, - 62, -29, 29, 11, 36, 14, 11, -24, -14, 39, 63, 3, -30, 21, 48, 54, 47, 13, 42, -10, -3, 24, -6, - 42, 38, 18, -19, 4, 16, 19, 12, -10, 11, 32, 24, 25, -1, -12, -10, 19, 25, 7, -15, -10, 42, 21, - 32, 6, 20, -52, 67, 41, -51, -12, -3, -39, 7, 35, 72, -30, 30, 5, 41, 10, -2, 54, -5, 0, 24, - 8, 14, 27, 19, -26, 2, 49, 36, -18, 34, -6, 31, 30, -5, 18, 49, 4, 9, 32, 4, -3, 2, 32, - 25, 29, 16, -11, 9, 23, 39, -15, 2, -2, 30, -5, -11, -1, -15, 1, 14, -5, -11, 14, 6, 39, 63, - 27, -34, 36, -13, 52, 9, -16, -51, 47, -3, -12, 26, 26, 16, 26, 2, -1, 39, -20, -8, 29, 41, 7, - -37, 11, 13, 0, 20, 13, 28, 4, -29, 35, -4, -25, 29, 27, 38, 48, 26, 11, 18, 41, -40, 6, 20, - 27, 13, -17, 9, 31, -1, -18, -1, 22, 13, 52, 4, 56, 19, 28, 46, 23, 14, 35, -13, 41, 4, 2, - 7, -23, -2, 3, -55, -53, 35, 55, 40, 37, 22, 0, 9, 51, 29, 14, 26, -9, -2, 31, -27, -30, 10, - -16, -18, 14, 38, -34, 19, -40, 3, 25, 15, 44, 48, -16, -2, 26, 18, 23, 16, 13, 43, 63, 67, 44, - 51, -10, 9, -4, 58, -13, -21, -20, 8, 2, 79, -4, 7, 22, 20, -6, 5, -16, -29, 42, 60, -12, 33, - 21, 31, 16, 14, -29, -36, 20, 59, 18, -15, 9, 53, 56, 45, 10, 31, -1, -23, 31, -6, 56, 27, 13, - -22, 27, 19, 19, 28, -1, -8, 52, 46, 29, -1, -11, 6, 3, 26, 18, -14, 4, 54, 33, 25, 12, 17, - -35, 59, 45, -41, -30, -18, -20, -21, 39, 59, -33, 8, 37, 47, 39, -13, 54, -5, 1, 32, 13, 18, 13, - 22, -28, 1, 53, 43, -13, 24, -9, 40, 28, -2, 8, 46, 1, 5, 63, -1, 21, 1, 41, 15, 32, 7, - -3, 25, 28, 50, -21, 4, -5, 43, 9, -19, 1, -36, 10, 2, 1, -12, 7, 18, 42, 69, 23, -34, 47, - -17, 45, 14, -22, -49, 58, -10, 6, 12, 30, 33, 23, 16, 7, 10, -21, -1, 23, 42, 17, -54, 8, 8, - -30, 17, 16, 33, -4, -16, 28, 3, -19, 29, 27, 30, 51, 20, 12, 15, 39, -34, -4, 29, 40, 14, -8, - 4, 31, -13, -18, -8, 13, 19, 36, -15, 37, -3, 36, 36, 35, 8, 38, -7, 43, -13, 4, 35, -22, -15, - 1, -68, -52, 33, 50, 22, 23, 29, 0, 8, 38, 34, 12, 11, -10, 21, 8, -25, -33, 3, -25, 0, 21, - 43, -20, 19, -38, 5, 8, 30, 46, 41, -19, 0, 14, 32, 23, 14, 9, 43, 77, 47, 44, 46, -23, -11, - -4, 31, -20, -17, -14, 1, 10, 73, -21, -3, 6, 17, -8, 12, -9, -32, 41, 58, 4, 27, 17, 31, 21, - 10, -34, -26, 35, 65, 5, -2, 17, 
55, 60, 50, -3, 48, -3, -2, 21, -4, 45, 40, 3, -20, 13, 37, - 18, 0, 18, -4, 42, 38, 29, -3, -8, 17, -5, 8, 24, -12, -6, 57, 26, 35, 23, 29, -32, 65, 37, - -37, -17, -29, -13, 15, 38, 56, -43, 37, 3, 63, 26, -4, 48, -4, 0, 6, 15, 9, 20, 31, -14, -13, - 46, 46, -4, 37, -5, 28, 40, -9, 1, 30, 10, 2, 44, -2, -17, 9, 31, 21, 46, -1, -2, 35, 24, - 52, 3, -3, 6, 28, 10, 23, -1, -3, -7, 16, 14, -30, 4, 21, 35, 55, 18, -25, 39, 3, 55, -13, - 6, -26, 48, 13, -22, 1, 35, 18, 18, 5, 13, 44, 10, 3, 24, 45, 34, -41, 19, 17, -17, 24, 20, - 11, 18, -20, 36, 24, -14, 36, 25, 16, 41, 30, 12, 5, 38, -34, -19, 25, 42, 22, 10, 22, 30, 10, - -11, 19, 20, 8, 33, -21, 35, 30, 18, 30, 24, 14, 36, 6, 40, -12, -7, -20, -8, 0, 3, -46, -33, - 23, 36, 15, 22, 29, -4, -1, 36, 28, 20, 15, -9, -12, 31, -8, -19, 33, -9, 2, 20, 41, -6, 26, - -6, 13, -2, 21, 51, 33, -26, -13, 44, 41, 32, 28, 40, 44, 59, 25, 43, 23, -27, 6, -12, 67, -5, - -31, -33, -3, 30, 48, -4, -1, 9, -7, -5, 2, -7, -21, 43, 49, -2, 28, 14, 26, -2, 19, -14, -4, - 54, 56, -6, 9, 5, 62, 50, 43, -6, 36, 15, 21, 29, -8, 33, 16, 0, -9, 31, 9, 22, 26, 22, - -3, 31, 21, 22, -10, -9, -4, -13, 25, 34, -19, -24, 38, 39, 40, 1, 0, -31, 55, 31, -24, 4, -5, - -2, 6, 45, 51, -38, 2, 36, 45, 23, -10}; + 13, 61, 18, 3, -54, -2, 52, -35, 2, -11, 5, 16, -10, -58, -46, -10, 50, 30, -27, 14, -6, 18, 23, + 1, -10, 43, 15, -5, 21, 24, 29, 35, -33, -49, 15, -12, 19, 17, 33, -12, 55, 27, 31, -12, 10, 12, + 29, -10, -15, 17, -20, -6, -22, 84, -13, 23, 27, -19, -72, 25, -53, -11, 6, -22, 64, 55, 39, 21, 24, + 2, 47, 30, 27, -3, 5, 50, -38, -14, 0, 33, -27, -6, -23, -14, 7, 5, -18, 17, -32, -20, -35, 36, + 98, 50, -1, 31, 4, 9, -19, 19, -40, 20, -20, -28, -44, -19, -39, -14, 21, -16, 5, -42, 0, -43, 36, + 52, -37, -23, 19, -33, -19, 15, -55, -25, -15, -10, -2, 15, 37, 2, 14, 41, 34, 17, -29, -28, 18, -49, + -29, 4, 47, -23, 18, -2, 2, -48, 23, -20, -8, 33, 1, 56, 18, 36, 38, 61, -30, 7, 0, 26, -25, + 8, 40, -10, 18, -3, 38, 1, 1, 40, 6, -24, 8, 14, 37, 7, -21, 9, 9, 11, 10, 42, 55, 1, + 38, -19, 39, 28, -27, -16, 60, 23, 9, 33, 41, 9, -7, -5, 62, 33, -3, 7, -1, 21, -23, -14, 10, + -13, -5, 7, 28, -20, 16, 21, 49, 12, -12, 54, 40, -32, 23, -10, 11, -19, 32, -55, 7, 3, 21, -20, + -49, 12, 18, -7, -27, 20, -39, 39, 38, -13, 28, 6, 53, 14, 11, -93, 24, 56, 20, -2, -5, 19, 75, + 0, -53, -30, 47, -32, -9, -5, -12, 19, -12, -73, -42, -7, 50, 48, -24, 22, 4, 31, 20, 2, -27, 21, + 26, -19, 27, 24, 28, 19, -27, -51, -6, -18, -13, 3, 10, 13, 33, 25, 46, -11, 18, 16, 33, -8, 21, + 3, -2, -29, -43, 26, 8, -12, 33, -27, -40, 1, -61, 1, -15, -15, 46, 39, 1, 19, 27, 14, 47, 35, + -11, 0, -7, 75, -51, -12, -40, 43, -25, -4, -8, -31, 35, 6, -20, 17, -54, -16, -48, 42, 107, 45, 18, + -11, 3, 25, -17, 23, -40, 16, -32, -16, -72, -4, -23, -1, 10, -3, 4, -35, -3, -50, 37, 47, -27, -28, + 34, -14, -1, 14, -44, -19, 4, 5, -14, 1, 59, -26, 17, 59, 26, -34, -51, -2, 33, -43, -34, 6, 33, + 1, 21, -6, -17, -34, 23, -16, -18, 29, 11, 27, 40, 15, 30, 59, -45, 21, 27, 26, -17, 4, 32, -8, + 36, -20, 39, 17, -32, 22, 29, -10, -14, 19, 44, -7, -3, 16, 22, 36, 29, 52, 52, -11, 35, -20, 28, + 20, 1, -33, 67, 18, 4, 14, 53, 5, -12, -5, 62, 47, -17, -41, -35, 10, 10, -11, 18, 5, 0, 6, + 9, 14, 32, 25, 30, 11, 13, 56, 45, -27, 30, -4, 32, -15, 26, -54, 15, -7, 5, -3, -39, 21, 26, + -13, -34, 39, -39, 53, 20, 9, 21, 23, 21, 33, 1, -81, 23, 26, 36, -11, 15, 17, 61, 9, -46, -18, + 56, -28, -12, 8, 5, 14, -9, -70, -54, 20, 57, 49, -19, 18, -13, 33, 8, 12, -13, 21, 19, -8, 28, + 39, 25, 31, -27, -33, 4, -21, -42, 21, -2, -8, 
39, 19, 38, -32, 9, 9, 38, -12, -3, 23, -1, -35, + -39, 74, -6, 12, 24, -17, -29, 8, -30, -5, -20, 4, 45, 59, 15, 26, 22, -4, 46, 18, 20, -7, -9, + 19, -27, -16, -19, 48, -35, -8, 1, -20, 34, 10, -40, 17, -50, 5, -55, 29, 70, 52, 10, 31, -1, 19, + -39, 28, -33, 24, -35, -25, -44, -27, -14, -11, 13, -22, 2, -64, 9, -57, 44, 53, -23, -24, 32, -29, 11, + 5, -37, 2, -31, -7, -27, -11, 53, 5, 12, 37, 29, -14, -22, -7, 17, -48, -23, 2, 37, 6, 37, -30, + -14, -40, 32, -27, -14, 40, -5, 31, 35, 15, 38, 61, -28, 6, 15, 18, -6, 26, 37, -6, 31, -19, 25, + 10, -12, 10, 5, -13, 0, 10, 29, -7, -12, 5, 15, 29, 23, 26, 50, 11, 29, -24, 42, 26, -17, -43, + 55, 13, -10, 26, 53, 0, -14, -24, 65, 33, 0, -20, -6, 18, 6, -21, 4, -6, 0, 6, 19, 10, 22, + 27, 48, 5, 2, 29, 34, -35, 32, -21, 41, -6, 4, -59, 17, -5, 13, -38, -48, 15, 17, -14, -52, 34, + -45, 70, 26, 6, 31, 9, 26, 11, -15, -80, 28, 39, 36, -6, 16, -6, 28, 10, -40, -1, 32, -40, 16, + -20, -3, 7, 15, -64, -55, 13, 38, 39, -27, 3, -20, 25, 9, 6, -14, 22, 24, -18, 12, 23, 13, 35, + -41, -43, -2, -6, -33, 31, 26, -15, 33, 27, 16, -17, 10, -6, 20, -5, 12, 16, -25, -16, -23, 36, 16, + 12, 17, 9, -56, -2, -50, 4, -22, -15, 52, 56, -9, 5, 23, 21, 34, 53, 4, 0, -9, 46, -38, -20, + -21, 15, -32, -9, 9, -29, 9, 5, 4, 18, -42, -21, -41, 31, 70, 49, 0, 10, -5, 46, -34, 28, -42, + -7, -41, -22, -47, -40, -18, 13, 9, -3, 5, -38, -12, -38, 60, 34, -40, -39, 43, -18, 11, -15, -39, -11, + -7, 21, -7, -4, 28, -30, 12, 41, 22, 5, -27, -5, 16, -39, -3, -7, 39, 13, 57, -5, -2, -54, 31, + -5, -8, 42, -14, 26, 19, 38, 40, 67, -21, -11, 14, 50, -24, 19, 21, 2, 27, -23, 19, -29, -16, -12, + 29, -11, 0, 28, 64, -4, -19, 9, -14, 4, 17, 26, 54, 6, 38, -22, 29, 31, -8, -48, 50, 15, 8, + 21, 79, 0, -21, -6, 55, 50, -21, -33, -20, 23, -3, 1, 0, -13, -6, 41, 53, 8, 15, 27, 39, 8, + -17, 39, 31, -48, 28, 2, 13, -7, 34, -53, 2, -1, -10, -2, -56, 29, 18, -10, -50, 25, -50, 49, 30, + -3, 11, 24, 15, 10, -40, -89, 39, 42, 35, 7, -26, 34, 55, 7, -33, -21, 33, -47, 20, -4, 9, 21, + 3, -60, -31, -4, 43, 57, -24, 1, -18, 13, 17, 2, -8, 14, 32, -9, 32, 35, 3, 18, -41, -40, -19, + 3, -12, 30, 29, 10, 25, 21, 45, -31, 8, -24, 17, -10, 8, 27, -18, -20, -22, 56, -8, -1, 22, 27, + -37, 9, -48, 2, -28, -19, 55, 49, 21, 23, 22, 31, 55, 26, 12, -2, 1, 57, -37, -19, -19, 42, -28, + -13, -31, -11, 10, 15, -2, 18, -39, -32, -42, 52, 78, 44, 14, 9, 11, 12, -15, 25, -42, 38, -50, -24, + -56, -45, -9, 28, 23, 1, 11, -38, 3, -40, 53, 27, -37, -22, 37, -17, -12, 0, -49, -23, 2, -10, -24, + 8, 46, -27, 17, 52, 24, -28, -33, -8, 6, -45, -14, -5, 50, -2, 56, -16, 3, -42, 25, -26, -12, 65, + -4, 49, 3, 35, 60, 72, -18, -3, 18, 34, -11, 4, 40, 2, 33, -30, 47, 6, -34, 29, 20, -25, 2, + 39, 55, 10, -42, 14, 31, 32, 15, 28, 74, -3, 34, -9, 44, 32, -32, -63, 49, 10, 2, 23, 49, -12, + -9, -24, 66, 48, -26, -26, 24, 14, -17, 7, 17, 7, -14, -2, 30, -2, 28, 16, 25, 18, 16, 69, 49, + -40, 31, -4, 22, -12, 11, -34, 10, -11, -2, -12, -47, -4, 7, -18, -31, 58, -34, 46, 32, 4, 14, 7, + 12, 20, -21, -91, 38, 39, 9, -29, 12, 9, 17, -3, -58, -2, 31, -30, 22, -6, -22, 0, 7, -70, -28, + -5, 49, 33, -27, 17, -17, 38, 6, 2, -11, 23, 24, -5, 16, 33, 9, 36, -22, -37, -31, -4, -18, 31, + 18, -5, 41, 30, 34, -24, 17, 10, 30, -6, -10, 23, 5, -27, -37, 54, -6, 4, 31, -18, -47, -8, -27, + -8, 2, 3, 65, 44, -4, 21, 22, 13, 72, 32, 12, -9, -15, 36, -46, -18, -20, 19, -30, -5, 23, -18, + 50, 9, -46, 23, -27, -18, -36, 37, 66, 48, 22, 19, -16, 23, -13, 47, -45, 17, -49, -14, -55, -20, -17, + 14, 22, 3, 5, -25, -13, -25, 53, 29, -36, -29, 36, 
-8, -14, 0, -70, -34, -17, 11, -21, 0, 38, -13, + 18, 45, 36, -17, -41, -17, 27, -48, -22, -2, 44, 1, 52, -34, -3, -75, 31, 6, -15, 62, -9, 37, 36, + -5, 49, 70, -22, 0, 13, 14, -22, 18, 14, -13, 18, -17, 42, 5, -19, 25, 14, -10, -27, 28, 73, -5, + -21, 17, -10, 49, 26, 31, 85, -16, 34, -19, 38, 20, -5, -28, 37, 6, -7, 53, 36, 22, -7, 9, 51, + 28, -19, -35, -25, 32, -9, 24, 12, 38, -25, 27, 20, 15, 14, 28, 44, 15, -24, 52, 33, -55, 27, 7, + 17, -8, 54, -40, 15, -3, 17, -9, -60, 5, 13, -9, -41, 27, -37, 56, 34, -9, 18, 18, 6, 18, -11, + -78, 35, 57, 3, -7, 16, 5, 36, -9, -37, -8, 14, -22, 47, -30, 7, 14, 11, -52, -43, 30, 17, 37, + -19, 37, 12, 28, 24, -3, -19, 5, 12, -11, 55, 42, 13, 43, -6, -30, -18, 2, -15, 32, 8, 10, 18, + 4, 25, -37, 8, 10, 35, 8, -11, 7, 16, -7, -37, 69, 4, -19, 2, 40, 1, 16, -6, 15, -9, -26, + 25, 37, 23, 29, 33, 35, 26, 19, 14, 7, 11, 31, -51, -4, -7, 15, -11, -1, -12, -8, 14, 3, 30, + 10, -47, 0, -43, 34, 74, 40, 38, 38, 24, 12, -16, 34, -41, 29, -42, -4, -21, -25, 9, 8, 21, -13, + 6, -33, 3, -64, 48, 15, -16, -5, 25, -14, 17, -14, -31, -25, -24, -18, -1, 22, 28, -12, 34, 32, -6, + 21, -22, -3, 42, -33, 18, 9, 27, -16, 23, -14, -24, -9, 19, -1, -11, 27, 9, 19, -1, 0, 40, 60, + -28, -3, 23, 52, -23, 18, 25, 10, 15, -23, 17, -6, -10, 11, 23, -2, 5, 15, 47, 2, -20, -11, 45, + 18, 4, 21, 58, 15, 37, -23, 34, 23, -20, -19, 45, 18, 27, 12, 3, 3, -4, -10, 30, 26, -1, 23, + 6, 18, 39, 4, 10, 22, -24, -14, 5, -1, 20, 25, 35, 16, 18, 12, 45, -27, 21, 19, 22, -11, 19, + -29, 20, -27, 3, -12, -26, 47, 10, -11, -42, 30, -32, 41, 16, -36, 17, 36, 40, 17, 14, -51, 38, 35, + 19, -3, 30, 20, 14, 1, -60, 19, 50, -25, 7, -14, 0, -12, -8, -61, -44, 12, 25, 27, -13, 25, 17, + 27, 2, 10, -6, 9, 16, -9, 45, 28, 32, 45, -25, -39, -21, -3, -17, 25, 39, -5, 27, 29, 40, -36, + 15, -6, 33, 7, -28, 31, 12, 6, -16, 72, 14, 18, 23, 11, -51, -2, -33, -6, -17, -6, 54, 42, -8, + 18, 23, 34, 67, 28, 11, -4, -4, 28, -54, -3, -4, 3, -19, -12, 15, -18, 39, 23, 1, 8, -23, -2, + -45, 36, 111, 36, 18, 11, -18, 15, -14, 27, -47, 25, -40, -3, -62, -35, -21, 17, 17, -20, 25, -24, 7, + -65, 46, 44, -20, -29, 36, -36, -16, 19, -44, -4, -23, -7, -18, -6, 43, -22, 33, 59, 25, -28, -32, -26, + 26, -60, 3, -3, 42, 8, 48, -20, -12, -27, 27, -11, -8, 42, -7, 34, 21, 18, 35, 52, -7, -5, 12, + 45, -36, 8, 45, -9, 30, -36, 42, -21, -7, -3, 27, -6, -16, 19, 40, -14, -2, -1, 2, 16, 10, 44, + 79, -8, 41, -5, 23, 35, -12, -44, 49, 14, 10, 35, 22, 11, -6, 4, 54, 20, -19, -23, 3, 24, -21, + 1, 20, 5, -15, -1, 43, 16, 7, 24, 56, 10, 11, 40, 40, -55, 7, -11, -7, 1, 36, -11, 17, -12, + 13, 2, -45, 27, 13, -15, -47, 51, -73, 51, 24, -4, 28, 11, 27, 27, 2, -105, 21, 14, 1, -3, -8, + 27, 19, -2, -56, 15, 57, -39, 17, 13, 3, 23, -17, -79, -19, -2, 66, 52, -25, 24, 27, 12, 29, 14, + 11, 32, 29, -16, 10, 22, 33, 41, -25, -42, -7, -17, 18, 50, 14, -1, 33, -15, 17, -12, 9, 0, 31, + -7, 15, 16, 11, -7, -35, 51, 13, 0, 33, 29, -8, 8, -62, 5, -14, -11, 51, 34, 36, 10, 24, -3, + 47, 25, 9, -5, -12, 61, -38, -8, -3, 10, -35, -4, 6, -22, 10, 9, -8, 18, -28, 14, -33, 48, 65, + 43, -10, 41, 16, 38, -48, 26, -36, 20, -56, -18, -48, -38, -12, 12, 14, -21, 22, -36, -10, -43, 36, 42, + -29, -29, 34, -30, -5, 16, -60, -29, -17, -9, -11, 5, 29, -22, 20, 46, 31, -12, -18, -17, 23, -54, -11, + -11, 63, 2, 42, -8, -3, -41, 34, -4, -18, 20, -15, 45, 42, 36, 42, 44, -18, -5, 13, 42, -1, -3, + 54, 1, 18, -33, 40, -1, -12, 27, 9, -29, -18, 18, 21, 19, -18, 4, 9, 41, 22, 33, 97, -16, 33, + -30, 30, 29, -21, -43, 47, 21, 29, -1, 26, 7, -9, 0, 60, 38, -15, 
-33, 7, 45, -17, -13, 5, 2, + -9, 8, 37, -12, 39, 18, 36, 11, -9, 58, 35, -48, 20, -9, 12, 4, 2, -10, 10, -16, 15, -24, -46, + 16, 16, -4, -48, 18, -28, 24, 37, -7, 28, 24, 36, 17, -26, -101, 36, 38, 15, 1, 19, 28, 58, 1, + -61, -2, 54, -48, 22, -2, -3, 11, -12, -87, -33, 16, 67, 31, -24, 11, 26, 35, 14, 22, 5, 21, 26, + -18, 26, 27, 22, 40, -32, -45, 0, -10, 10, 41, 22, -12, 39, 4, 45, -36, 10, 25, 36, -18, 24, 3, + 13, 4, -28, 52, 0, 19, 23, 3, -56, 17, -57, -2, -5, -3, 83, 43, 9, 24, 21, 24, 62, 40, 2, + -2, -6, 46, -30, -10, -37, 17, -20, -1, -22, -9, 16, 13, -32, 9, -49, -7, -50, 36, 77, 42, -1, 13, + -5, 7, -16, 37, -33, 34, -54, -33, -32, -41, -21, 10, 10, -7, 5, -44, 3, -65, 50, 46, -30, -15, 15, + -6, -3, 11, -49, -30, 3, 1, -38, -1, 28, -34, 7, 37, 18, -3, -33, -36, 19, -45, -37, -5, 40, 6, + 49, -28, 5, -75, 18, -28, -14, 48, -6, 36, 25, 0, 34, 55, -26, 6, 21, 28, -14, 10, 23, -16, 11, + -9, 35, -17, -21, -1, 22, -17, -26, 16, 57, -10, -38, -13, 26, 19, 31, 50, 75, 11, 44, -17, 30, 9, + -10, -40, 49, 17, 24, 23, 41, 13, -11, 14, 67, 42, -20, -18, 7, 19, -12, -7, 12, 9, -18, 4, 3, + 11, 34, 31, 42, 14, 15, 38, 40, -44, 14, -7, 17, -2, 5, -29, -10, 9, 14, 0, -37, 9, 5, -12, + -19, 40, -26, 26, 23, -12, 29, 27, 8, 15, -45, -90, 33, 45, 4, -12, 19, 33, 41, 4, -29, -3, 35, + -43, 20, -8, -1, 24, -15, -60, -62, 19, 48, 26, -23, 18, 14, 34, 21, 12, -4, 22, 28, -4, 20, 32, + 13, 53, -43, -38, -14, -7, 11, 23, 6, 9, 44, 17, 46, -15, 7, 18, 51, 12, -4, 15, 10, -10, -48, + 22, -11, -10, 21, -11, -21, -8, -46, -17, -28, -16, 49, 43, -12, 9, 29, 5, 58, 20, 19, 12, -15, 82, + -39, -9, -26, 17, -39, -8, 9, -12, 21, 17, -8, 13, -32, -23, -52, 36, 61, 43, -9, 29, -8, 17, -29, + 31, -44, 26, -49, -40, -36, -17, -26, 11, 17, 0, -5, -30, 6, -36, 35, 17, -39, -19, 29, -31, 7, 8, + -56, -27, -27, -3, 4, -14, 50, 16, 18, 37, 14, 5, -43, -31, 11, -43, -26, -5, 32, -14, 22, -16, -1, + -46, 29, -10, -3, 59, 13, 20, 29, 17, 28, 64, -40, 15, 15, 47, -27, 9, 47, -2, 36, -15, 29, 11, + -27, 14, 18, -13, -2, 18, 33, -3, -24, -6, -10, 44, 31, 42, 63, -5, 27, -9, 30, 28, -2, -56, 51, + 28, 13, 51, 29, -15, -14, -12, 41, 43, -25, -17, 12, 24, -10, -1, 20, 19, -28, 43, 20, -6, 7, 15, + 28, 25, 13, 25, 53, -29, 11, 2, 39, -2, 19, -48, 7, -14, 2, 12, -46, 25, 29, -20, -20, 45, -19, + 23, 30, -16, 10, 34, 28, 31, -8, -92, 33, 52, 5, -31, 0, 19, 81, 5, -56, 6, 50, -36, 17, 6, + -2, 22, 6, -38, -22, 13, 38, 39, -22, 9, -4, 19, 21, 4, -16, 18, 24, -13, 32, 38, 23, 37, -41, + -50, 26, -14, 2, 14, 33, -6, 31, -16, 10, -18, 8, 21, 45, 4, 6, 30, 10, -37, -12, 67, 3, -10, + 20, -12, -40, 1, -58, 9, -19, -6, 50, 51, 5, 16, 22, 29, 62, 13, 4, -7, 4, 53, -60, -5, -15, + 21, -20, -6, 12, -25, 38, 16, -22, 10, -41, -9, -41, 42, 71, 47, -3, 31, 11, 51, -14, 36, -38, 38, + -31, -37, -81, 5, -13, -7, 23, -11, -8, -39, 14, -49, 37, 34, -28, -21, 38, -18, -14, 8, -36, -4, -10, + -12, -33, -16, 42, -32, 22, 53, 32, -8, -39, -22, 24, -50, -22, -3, 56, 6, 37, -7, -13, -35, 31, -22, + -5, 62, 3, 21, 22, 34, 41, 63, -27, 15, 2, 29, 4, 1, 29, -9, 6, -37, 26, -21, -38, -4, 6, + -22, 6, 35, 49, 2, 8, 3, 15, 19, -4, 42, 85, 5, 32, -23, 31, 26, -4, -26, 41, 11, 7, 20, + 16, -8, -10, -21, 30, 43, -14, -38, -21, 33, 11, -21, 6, 0, 4, -14, 50, 29, -2, 22, 33, 9, -24, + 24, 39, -36, 16, -33, 0, -7, 25, -48, 11, 7, 23, -21, -57, 12, 21, -17, -30, 31, -26, 41, 21, -26, + 17, 14, 26, 23, -12, -88, 27, 29, 30, 11, 18, 10, 59, 3, -27, 11, 50, -24, 14, -8, 15, -6, 3, + -44, -30, 3, 47, 37, -25, 6, 6, 12, 21, 4, -7, 21, 23, -21, 9, 28, 15, 24, -31, -42, 
-13, 2, + -25, 24, 37, -9, 27, 21, 39, -14, 22, -7, 38, -10, 12, 17, 22, -11, -26, 77, -2, 0, 15, 26, -24, + 19, -51, -3, -27, -27, 41, 39, 33, 44, 24, 30, 64, 39, 25, -9, 1, 20, -34, 5, -35, 6, -32, -9, + -24, -10, 19, 7, -30, 9, -28, -14, -29, 47, 78, 41, 36, 10, -18, 10, -13, 50, -41, 30, -34, -40, -58, + -19, -6, 13, 21, -24, 7, -42, -15, -7, 45, 22, -23, -18, 12, -16, -8, 4, -50, -10, -22, 8, -5, -12, + 52, -8, 30, 56, 48, 9, -16, -33, 25, -27, -28, -3, 40, -12, 25, -2, -11, -59, 20, 0, -9, 70, -4, + 23, 15, 1, 54, 68, -23, 9, 23, 24, -10, 25, 21, 14, 36, -19, 47, -2, -35, 8, 16, -3, -27, 31, + 42, -1, -10, 10, -16, 15, 34, 32, 96, -25, 56, 6, 30, 22, -19, -24, 51, 8, 15, 50, 11, 22, -8, + -15, 42, 36, -24, 11, 19, 42, 10, -33, 0, -29, 7, 35, 49, 6, 40, 21, 36, 11, 12, 28, 38, -43, + 15, -20, 17, -14, 27, -32, -5, -12, 26, -14, -68, 10, 2, -9, -44, 10, -29, 47, 32, -8, 24, 18, 34, + 2, 1, -72, 29, 31, 23, -19, 8, 11, 51, -6, -51, -13, 22, -26, 14, 2, -3, 17, 14, -54, -33, 22, + -3, 38, -30, 22, 9, 32, 20, -4, -7, 40, 14, -14, 45, 42, 10, 32, -11, -36, -12, 21, -20, 26, 6, + 3, 38, 11, 42, -39, 9, -32, 38, 0, -7, 4, 20, 19, -21, 49, -8, -20, 18, 6, -17, -2, -13, 15, + -14, 2, 13, 35, 18, 27, 36, 34, 26, 31, 4, 7, 6, 55, -68, -1, -4, 39, -12, -8, 2, -14, 19, + 15, 23, 13, -39, -16, -31, 41, 72, 38, 24, 26, 22, 16, -1, 20, -39, 29, -41, -17, -15, -28, -2, 19, + 19, -31, 10, -37, -1, -44, 32, -7, -17, -9, 22, -22, 18, -2, -42, -14, -10, -7, -2, 12, 37, -18, 34, + 31, 10, -5, -37, -6, 49, -26, 3, 13, 23, -17, 17, -11, -17, -1, 19, 1, -13, 23, -13, 26, 18, -2, + 34, 53, -26, 11, 41, 56, -38, 29, 15, -2, 36, -11, 19, 3, -18, 8, 23, 0, -5, 8, 62, 0, -24, + -12, 48, 42, 1, 7, 64, 19, 35, -41, 35, 31, 0, -27, 59, 12, 14, 25, 29, -1, -6, -8, 38, 29, + 5, 5, -1, 29, 25, -20, 13, -24, -40, -1, 28, -10, 14, 41, 50, 28, 5, 19, 35, -23, 20, 11, 18, + -14, 36, -12, 13, 1, -15, -3, -28, 35, -5, -4, -39, 36, -20, 16, 26, -37, 19, 44, 24, 29, 16, -53, + 37, 48, 3, -23, 4, 41, 48, -2, -51, -19, 39, -46, 23, -3, 10, 31, -25, -80, -53, 0, 82, 46, -17, + 35, -18, 33, 9, 10, -17, 25, 20, -16, 31, 26, 23, 32, -26, -34, -14, -16, 15, 18, 7, -13, 48, 28, + 43, -6, 18, 8, 31, 11, -16, 23, 16, -22, -45, 49, -12, 14, 36, -22, -29, -10, -37, 0, -2, -33, 36, + 49, 15, -3, 19, 22, 69, 27, 6, -1, 10, 64, -31, -12, -43, 25, -14, -6, -11, -23, 34, 21, 11, 22, + -49, -16, -36, 46, 75, 50, 18, 22, 0, 29, -39, 13, -40, 28, -40, -24, -44, -15, -10, 20, 26, -15, 14, + -32, 7, -37, 40, 39, -35, -8, 28, -16, -8, -13, -44, -16, -17, 4, -7, -16, 49, -19, 22, 46, 6, -21, + -24, -30, 18, -32, -19, 7, 59, -14, 34, -9, 2, -47, 18, -20, -9, 49, 3, 57, 33, 4, 60, 59, -27, + 7, 45, 41, 0, 11, 27, 4, 22, -6, 24, 4, -23, 6, 20, 1, 5, 34, 50, 4, -29, -13, -5, 26, + 24, 34, 80, -6, 24, 14, 33, 14, -21, -49, 49, 27, 16, 39, 50, 16, -13, -19, 51, 38, -29, -34, 16, + 27, -8, -20, 20, 24, -14, -14, 36, 5, 8, 10, 41, 4, -9, 27, 45, -39, 26, -3, 41, 2, 5, -34, + 12, -21, 6, -19, -43, 30, 13, -21, -52, 25, -36, 51, 34, -12, 23, 12, 43, 30, -10, -77, 34, 23, 19, + -8, 17, 16, 49, -5, -38, -18, 30, -39, 2, -4, -13, 18, -20, -72, -52, 6, 56, 26, -18, 18, -12, 34, + 30, 10, -13, 22, 20, -4, 16, 41, 21, 31, -13, -51, 14, -27, -5, 8, 33, -3, 59, 1, 41, -36, 11, + -20, 29, -4, 1, 47, 19, 1, -2, 60, -1, -2, 21, -15, -62, -11, -52, -3, 3, -1, 69, 45, -1, 16, + 32, 6, 41, 17, -7, -4, -4, 62, -39, -10, -1, 42, -43, -4, -15, 0, 12, 9, 3, 7, -28, -13, -29, + 38, 71, 50, 6, 2, 5, 7, -5, 41, -25, 45, -39, -15, -40, -31, -26, 23, 29, -7, 4, -42, -3, -41, + 61, 
15, -30, -36, 15, -23, -8, -6, -43, -3, -11, -16, -29, -4, 44, 0, 4, 47, -10, -1, -32, -15, 33, + -37, -19, -6, 51, -4, 48, -21, -10, -18, 28, -2, -11, 38, 17, 61, 32, 19, 64, 51, -16, 2, 16, 20, + -15, -6, 49, -18, 32, -13, 32, -9, -23, 15, 9, -15, 9, 16, 57, -9, -18, -14, 21, 20, 26, 21, 82, + -14, 34, -19, 25, 39, -3, -16, 47, 18, 8, 7, 32, 5, -10, 1, 58, 56, -15, -10, 12, 44, -8, 9, + 8, 25, -25, 30, 35, 28, 0, 34, 29, 0, 32, 32, 40, -32, 21, 4, 24, -6, 25, -18, -11, 10, 24, + -19, -51, 35, 14, -17, -41, 30, -61, 47, 21, -20, 22, 15, 3, 26, -43, -57, 40, 61, 28, 4, 6, 10, + 67, 1, -42, 10, 33, -29, 16, -32, 16, 13, -8, -62, -27, 3, 39, 42, -17, 12, -22, 26, 14, 1, 0, + 19, 28, -14, 21, 25, 27, 58, -32, -31, -14, -35, -6, 26, 1, 8, 72, 29, 35, -25, 10, 27, 24, 3, + 2, 6, 1, -1, -38, 34, 4, 14, 25, 23, -74, -6, -70, 4, -7, -27, 74, 30, 3, 7, 30, 1, 47, + 55, 21, -7, -2, 72, -37, -8, 5, 27, -18, -8, 4, -10, 34, 5, -18, 13, -51, -17, -43, 44, 79, 40, + 10, 33, -14, 45, -28, 42, -24, 15, -56, -2, -57, -22, -23, 10, 27, -15, -14, -37, -14, -30, 54, 38, -20, + -32, 50, 0, -6, 19, -49, -9, -27, 0, -21, -14, 44, -15, 15, 23, 8, -12, -24, -1, 12, -29, -17, -4, + 53, -3, 34, -15, 2, -62, 37, -19, -13, 33, -1, 46, 25, 27, 51, 62, 11, -2, 5, 20, -22, -2, 45, + 2, 34, -32, 48, -10, -19, 29, 24, -25, -1, 7, 45, -4, -17, -1, 34, 43, 22, 29, 78, 20, 38, -22, + 30, 23, 0, -34, 42, 13, 9, 13, 60, 29, -13, 3, 79, 36, -19, -31, -2, 34, -11, -10, 11, 17, -30, + 11, 32, -15, 28, 21, 40, 8, -26, 34, 46, -58, 21, -15, 6, -13, 22, -30, -5, -25, 9, 4, -47, 4, + 7, -14, -29, 47, -43, 57, 35, -9, 18, 16, 7, 35, -63, -81, 30, 22, 12, -25, -10, 0, 38, 9, -50, + 20, 22, -36, -12, -8, -7, -6, -9, -48, -18, 23, 66, 44, -19, 14, 5, 33, 7, 6, 10, 18, 28, -21, + 10, 28, 28, 37, -25, -47, -1, 1, -22, 38, 18, -17, 50, 33, 34, -20, 8, 16, 17, -3, 8, 10, 11, + -3, -25, 27, -1, 8, 28, -11, -15, 0, -49, -15, -14, -23, 58, 30, 16, 11, 26, -5, 60, 31, 4, -9, + -8, 58, -66, -7, 6, -4, -43, -4, -10, -20, 28, 5, -27, 19, -45, 4, -50, 39, 97, 44, 18, 22, 5, + 34, -11, 27, -43, 17, -30, 1, -60, -20, -27, 15, 24, -38, -15, -40, 8, -46, 42, 32, -30, -11, 45, -40, + -3, 15, -56, -22, -18, 5, -20, -1, 27, -38, 30, 18, 14, -4, -25, -6, 20, -35, -17, 14, 54, -19, 25, + -11, -6, -49, 15, -22, -4, 43, -16, 36, 35, 22, 33, 66, 4, -7, 26, 30, 0, 12, 44, -1, 14, -28, + 42, 3, -16, 9, 19, -7, -36, 16, 38, 3, 4, 19, 14, 18, 22, 48, 61, -21, 36, -13, 37, 27, -9, + -53, 55, 10, -8, 37, 43, -15, -8, 25, 80, 47, -2, -10, 7, 28, 27, -7, -4, -16, -8, -11, 3, 25, + 52, 16, 32, 12, 11, 39, 38, -17, 23, -10, 17, -8, 14, -34, -1, 5, 29, -9, -50, 0, 30, -11, -46, + 43, -32, 40, 27, -15, 5, 18, 16, 14, -23, -79, 41, 39, 7, 10, 27, 23, 53, 10, -52, 4, 46, -29, + 0, -16, 6, 3, -13, -43, -61, 12, 32, 27, -12, 0, -10, 43, 9, 16, 6, 0, 19, -6, 34, 32, 14, + 52, -37, -47, -5, -15, -10, 34, 20, -20, 47, -3, 50, -18, 6, -17, 20, 10, -21, 0, 15, -14, -41, 81, + 9, 7, 8, 1, -57, 14, -39, 5, -1, -5, 35, 37, -12, 11, 20, -12, 50, 47, 15, 4, -9, 34, -46, + -3, -22, -10, -19, -10, 4, -19, 17, 6, -7, 14, -40, 6, -54, 35, 64, 54, 18, 6, 12, 0, -21, 39, + -38, 35, -38, -11, -52, -15, -16, 19, 14, -36, -12, -46, 5, -60, 56, 40, -26, -17, 17, -11, -21, 6, -52, + -19, -13, -4, -2, -5, 55, 11, 23, 31, 17, -11, -26, -11, 34, -36, -13, -5, 23, -15, 44, -35, -20, -52, + 35, -9, -11, 56, -16, 36, 22, 23, 42, 64, -24, -6, 21, 29, -29, 12, 52, 7, 32, -20, 38, 1, -13, + 1, -1, -18, 8, 18, 56, -19, -22, 4, -3, 39, 11, 39, 68, 8, 44, -17, 30, 22, -23, -40, 58, 2, + 14, 5, 56, -2, -20, 11, 
72, 33, -10, -49, 18, 32, -1, 1, 21, -13, -27, 13, 18, -19, 25, 29, 23, + 17, 5, 32, 33, -31, 18, 0, 19, 1, 33, -20, -2, -16, 14, -22, -45, 28, 31, -14, -43, 26, -42, 36, + 34, -16, 15, 16, 31, 12, -17, -93, 30, 27, -2, -32, -24, 30, 33, 12, -57, 2, 39, -40, -19, 15, -1, + 26, -9, -54, -17, 2, 22, 37, -16, -2, -3, 30, 29, 8, -3, 21, 25, -27, 38, 34, 4, 50, -29, -37, + -10, -35, -1, 20, 7, -27, 37, 10, 25, -35, 11, 31, 28, -1, -8, 25, -1, -5, -32, 55, -21, 17, 31, + -27, -64, -21, -58, 8, -1, -11, 61, 51, -7, 8, 32, -14, 48, 23, 0, 8, 12, 33, -33, -2, 2, 24, + -32, -9, -6, -10, 23, 20, -36, 18, -45, -4, -31, 37, 70, 38, 7, 16, -16, 25, -7, 26, -35, 3, -47, + -8, -50, -14, -36, -11, 15, -21, 11, -43, -19, -29, 44, 23, -40, -18, 24, -34, 4, 6, -40, -25, -1, -18, + -11, 5, 65, -37, 23, 32, 22, -2, -46, -34, 11, -49, -13, -27, 44, -16, 46, -24, 15, -58, 27, 1, -11, + 69, -10, 44, 46, 26, 52, 63, -36, -8, 18, 29, 5, 22, 36, -26, 24, -26, 45, -15, -19, 13, 33, -28, + 8, 34, 32, 14, -9, 0, -2, 36, 24, 35, 83, -19, 43, -12, 24, 33, 0, -41, 65, -2, 20, 26, 44, + 14, -2, 7, 68, 60, -18, 14, -24, 34, 5, -7, 21, 10, -19, 19, 28, 13, 12, 19, 46, 7, -4, 65, + 50, -28, 19, 1, -1, 5, 17, -26, 6, 4, 22, 14, -54, 27, 25, -24, -28, 26, -50, 27, 22, 3, 10, + 8, 4, 20, -35, -70, 43, 61, 38, 2, 23, 12, 39, -5, -34, -29, 21, -30, 32, -8, -2, 10, -1, -63, + -28, 18, 8, 38, -28, 27, 12, 28, 26, -1, -28, 23, 13, -25, 38, 37, 25, 44, -13, -34, 7, 12, -4, + 12, 25, 0, 11, 33, 19, -39, 16, -14, 36, 5, -18, -8, 9, 7, -17, 38, -8, -2, 22, 22, -13, 31, + -21, 10, 1, -10, 40, 38, 33, 35, 39, 22, 16, 16, 23, 9, 10, 71, -56, -10, 3, 39, -2, -8, -7, + -5, 17, 27, 27, 14, -43, 25, -32, 32, 76, 39, 34, 19, 19, 15, -4, 20, -27, 36, -44, -3, -11, -12, + 1, -9, 17, -6, 7, -30, 19, -28, 48, 0, -23, 0, 29, -20, 12, 5, -31, -21, -29, 0, -13, 16, 33, + -13, 24, 25, -8, -3, -29, -4, 46, -49, -11, 8, 21, -16, 38, -28, -29, -12, 26, -2, -13, 25, 13, 33, + -5, -14, 26, 48, -32, 9, 18, 41, -41, 20, 32, 15, 16, -24, 29, 5, -16, 7, 20, 4, -4, 15, 52, + 1, 0, -17, 45, 16, 4, 6, 57, 25, 21, -41, 36, 32, -3, -42, 58, 7, 2, 14, 23, -4, -15, -5, + 53, 24, 4, -3, 8, 28, 25, 2, -1, 12, -34, 8, 19, 6, 18, 24, 35, 8, 6, 25, 37, -50, 23, + -9, 10, -5, 53, -17, 30, -31, 10, -7, -23, 24, 9, -16, -25, 26, -30, 43, 25, -29, 18, 47, 30, 20, + 15, -49, 39, 41, 7, -8, 2, 17, 55, 6, -43, 16, 64, -39, -3, -1, -15, 16, -17, -80, -23, 0, 32, + 46, -24, 10, 2, 23, 14, 5, -7, 19, 17, -28, 3, 35, 20, 34, -36, -36, 10, 3, -10, 30, 22, -24, + 60, 21, 32, -26, 7, 23, 44, -21, -10, 24, 17, 8, -36, 45, -21, 10, 13, 5, -44, 2, -42, -1, -23, + 14, 40, 36, -7, 31, 31, 24, 58, 42, -15, 5, -2, 55, -33, -17, -19, 26, -28, -3, -2, -4, 5, 15, + -2, 10, -41, 5, -35, 39, 83, 57, 12, 22, 6, 26, -15, 19, -45, 33, -34, -25, -56, -34, -29, 8, 28, + -12, 21, -38, -8, -35, 49, 37, -20, -6, 10, -4, 11, -1, -40, -20, 0, 5, -26, 0, 64, -7, 6, 62, + 31, 3, -26, -15, 6, -47, -37, 0, 65, -9, 38, -18, 5, -46, 35, -26, -12, 63, 2, 46, 8, 9, 61, + 53, -44, -2, 5, 24, -6, 4, 34, -4, 31, -36, 30, 7, -2, 33, 26, -8, -24, 36, 52, 0, -26, 6, + -3, 51, 22, 31, 67, -4, 26, -16, 21, 25, -1, -23, 62, 17, 11, 23, 39, 2, -17, 12, 38, 39, -31, + -35, -18, 29, -9, -10, 5, -8, -48, 12, 18, 6, 41, 25, 34, 22, 4, 27, 39, -22, 19, -33, 8, -8, + 16, -29, 7, 2, 22, -3, -43, 14, 20, -4, -38, 15, -22, 49, 25, 2, 11, 22, 31, 29, -28, -64, 18, + 31, 29, -18, 28, 12, 35, -4, -63, 9, 39, -35, 2, 23, 11, 11, -39, -56, -58, 12, 8, 54, -30, 17, + 3, 13, 5, 0, -10, 3, 30, -33, 29, 41, 15, 30, -34, -41, 1, 14, 5, 17, -1, 
-2, 40, -17, 53, + -14, 17, 17, 43, 12, -2, 11, 4, -3, -36, 30, 2, 13, 10, 1, -33, 6, -40, -12, -34, -35, 55, 36, + 30, 13, 37, 34, 45, 44, 30, 1, -4, 57, -54, -12, -38, -8, -26, -6, -14, -5, 19, 21, -21, 19, -54, + 8, -35, 39, 57, 56, 15, 36, 17, 18, -25, 42, -26, 13, -48, -42, -30, -7, -25, 22, 30, -31, 1, -51, + 14, -40, 44, 20, -21, -37, 28, -32, 2, 1, -40, 0, -36, -21, -33, 4, 66, -45, 25, 47, 8, -18, -28, + -46, 33, -40, -26, 3, 56, -4, 50, -8, 0, -21, 15, -3, -7, 63, -11, 22, 42, 29, 65, 64, -45, 19, + 35, 28, -18, 3, 48, 13, 30, -23, 13, 0, -13, 13, 3, -16, -10, 34, 57, 9, -14, 12, 25, 22, 26, + 20, 87, -6, 38, -27, 32, 14, -8, -30, 66, 23, 12, 25, 62, -6, -14, 4, 49, 47, 11, -22, -7, 39, + 3, -5, 6, 7, 0, 20, 31, 11, 19, 17, 44, 27, -22, 41, 45, -28, 27, 3, 21, -4, 26, -33, 9, + -14, 8, 20, -36, 31, 32, -14, -48, 33, -39, 46, 36, -9, 19, 20, 8, 40, -61, -59, 33, 43, 23, -13, + 12, 18, 36, -4, -37, 16, 25, -37, -12, 11, -11, 9, -1, -54, -58, -13, 39, 33, -25, 12, 13, 17, 13, + 7, -8, 10, 26, -8, 41, 31, 27, 46, -24, -33, 2, -10, 19, 19, 22, 6, 63, 40, 24, -24, 18, 15, + 49, 2, 38, 21, 9, -7, -37, 57, 12, 25, 32, -4, -16, 3, -41, 5, -11, 14, 31, 62, -11, 34, 33, + 14, 49, 15, -2, -6, -10, 57, -42, -4, 1, -21, -13, -2, -12, -27, 17, 13, -11, 13, -25, 9, -37, 34, + 77, 39, 16, 18, -17, 35, -10, 30, -33, 17, -54, -34, -52, -7, -25, 10, 24, -4, -12, -42, -17, -49, 31, + 51, -22, -33, 22, -14, -11, 8, -42, -23, -10, -1, -4, -12, 42, 6, 33, 25, 20, 12, -39, -16, 8, -45, + -17, 22, 47, -14, 44, -2, 1, -26, 27, -20, -12, 55, -2, 10, 15, -1, 63, 72, -36, 1, 40, 33, -13, + 5, 38, -4, 25, -8, 38, 8, 1, 9, 21, -3, -15, 17, 71, -3, -11, 18, 17, 31, 21, 30, 73, 14, + 41, -19, 39, 16, -29, -51, 54, 7, 27, 18, 51, 19, -7, -4, 69, 50, -7, -25, 5, 15, -10, -20, 4, + 20, -11, 22, 29, 15, 14, 23, 25, 26, 13, 54, 37, -50, 20, -13, 42, -5, 28, -31, 8, 12, -14, -11, + -48, 27, 18, -8, -59, 23, -52, 52, 23, -4, 17, 15, 51, 22, -65, -53, 33, 39, 16, 4, 2, 45, 65, + 1, -26, 5, 34, -35, -2, 16, 22, 6, -1, -48, -36, 23, 45, 35, -27, 8, -8, 22, 12, 19, -25, 38, + 21, -16, -4, 34, 22, 59, -26, -50, -3, -17, 16, 42, 26, -2, 50, 32, 40, -9, 23, 24, 46, 2, 4, + 28, -9, -5, -14, 52, 1, -6, 8, 34, -53, 14, -45, 5, -14, -21, 47, 58, 19, 18, 31, 8, 59, 30, + 18, 0, 1, 9, -63, 1, -14, -2, -36, -9, 2, -14, 23, 5, -35, 14, -43, 4, -42, 41, 71, 37, 8, + -5, 1, 14, -20, 35, -42, 47, -46, -22, -34, -8, -15, -2, 21, -3, -12, -36, 7, -29, 41, 22, -33, -22, + 42, -40, -6, 13, -41, -24, -4, -2, -20, -12, 31, -8, 28, 41, 48, -21, -23, -46, 23, -35, -21, 6, 31, + -8, 62, -13, -19, -45, 34, -8, -14, 50, -10, 37, 16, 17, 38, 70, -58, 4, 17, 27, -27, 17, 36, -2, + 35, -6, 52, 7, -1, 25, 5, -26, 12, 8, 58, -4, -28, 0, 8, 30, 22, 28, 70, 4, 40, -1, 27, + 33, -5, -52, 43, -11, 8, 42, 5, -5, -16, -26, 66, 35, -12, -29, -3, 38, 14, 15, 25, 28, -16, -3, + 34, -3, 24, 33, 36, 2, -6, 56, 41, -30, 20, -13, 15, -10, 4, -36, 7, -20, 14, -7, -58, 9, 31, + -9, -48, 53, -64, 24, 35, 15, 31, 26, 41, 6, -15, -79, 31, 48, 22, -9, -5, 20, 46, -2, -37, -7, + 41, -39, 22, 27, -13, 24, -15, -44, -44, 21, 8, 33, -27, -5, 6, 19, -6, 11, -13, 27, 26, -6, 20, + 29, 1, 30, -19, -30, -35, -5, -1, 46, 10, -9, 25, -17, 5, -38, 12, 10, 47, -3, 5, 26, -8, -34, + -42, 80, -6, 2, 39, -22, -59, -4, -23, -9, -14, -3, 27, 41, 16, 22, 32, 30, 48, 13, 6, 7, -5, + 43, -42, -7, -3, 27, -26, -2, -3, -13, 26, 6, 10, 17, -46, -16, -62, 40, 82, 57, 18, 25, 2, 39, + -14, 11, -31, 39, -49, -17, -38, -22, -28, 18, 25, -3, 8, -26, -3, -49, 34, 35, -26, -22, 30, -19, -1, + 4, -53, 
-29, -12, -4, 0, -5, 29, -32, 25, 42, 39, -3, -29, -34, 37, -33, -15, 4, 45, -4, 44, -4, + 15, -68, 30, 5, -10, 60, 6, 49, 18, 2, 51, 68, -37, -4, 11, 36, -16, 14, 39, -11, 19, -14, 31, + 0, -16, 35, 26, -15, -13, 17, 61, 7, -20, -6, 1, 20, 11, 41, 74, 1, 31, -23, 28, 8, 0, -28, + 45, -8, -5, 20, 36, 5, -10, -25, 72, 50, -21, -23, 40, 35, 8, -8, 3, 10, -15, 19, 22, 7, 25, + 9, 32, -6, 12, 36, 51, -43, 17, 2, 21, -1, 23, -54, 17, -11, 5, -2, -57, 22, 35, -19, -64, 24, + -35, 35, 31, -4, 30, -4, 9, 18, 3, -96, 20, 43, 11, 3, 10, 24, 40, -1, -52, -3, 36, -39, 2, + -12, -2, 34, 9, -59, -63, -10, 21, 55, -17, 20, 19, 30, -1, 1, -4, 17, 19, 8, 41, 26, 7, 18, + -36, -42, -34, -28, 9, 34, 37, -23, 31, 36, 54, -27, 10, -7, 35, -4, 8, 30, 8, -32, -34, 34, 2, + 22, 18, 16, -30, 14, -39, 9, -43, -28, 74, 43, -6, 21, 28, 34, 37, 42, 19, 1, -12, 45, -36, -15, + -8, 29, -45, -9, 0, -10, -1, 24, -21, 14, -36, 0, -49, 40, 76, 42, 27, 11, -5, 23, 0, 19, -37, + 28, -28, -23, -30, -36, -27, 33, 21, 2, 30, -36, -10, -29, 30, 3, -41, -38, 37, -30, -8, 2, -42, -30, + -18, -3, -29, 10, 29, -1, 12, 48, 37, -16, -28, -42, 12, -46, -33, -1, 22, -1, 37, -6, -7, -52, 21, + -21, -9, 73, -3, 39, 35, 40, 58, 58, -44, 10, 18, 23, -7, 7, 53, -3, 21, -11, 42, 21, -28, 21, + 21, -7, -3, 25, 53, -4, -17, -6, 18, 26, 28, 51, 69, -18, 37, -4, 23, 21, -20, -47, 44, 3, 12, + 35, 43, 10, -15, -17, 69, 60, -14, -8, 6, 27, 16, -7, 15, 13, -18, -10, 38, -5, 16, 26, 28, 13, + -3, 48, 29, -17, 17, -9, 14, -10, 34, -54, 12, -28, 4, 8, -30, 21, 21, -2, -61, 13, -51, 50, 33, + -13, 20, 16, 46, 11, -25, -99, 22, 23, 7, -2, 30, 16, 25, 0, -42, 5, 15, -29, 29, -24, 1, -4, + 10, -52, -27, 19, 1, 38, -23, 29, 20, 32, 16, -7, -19, 18, 12, -8, 42, 29, 29, 35, -12, -24, -12, + 9, -3, 34, 4, 8, 19, 31, 31, -24, 23, -5, 28, 19, -7, -5, 12, -6, -24, 46, -1, -19, 13, -9, + 1, 24, -19, 13, -16, -19, 28, 37, 29, 24, 33, 28, 38, 25, 12, 8, 16, 54, -38, -9, -12, 18, 0, + -4, -7, -1, 26, 22, 12, 13, -41, -22, -41, 32, 82, 31, 37, 10, 17, 10, -28, 41, -37, 31, -37, -12, + -15, -27, 1, 20, 19, -10, 1, -39, 11, -48, 42, 8, -10, -3, 25, -2, 21, -1, -30, -27, -21, -11, -16, + 19, 18, -31, 22, 12, 2, 8, -36, 9, 47, -41, 8, 12, 23, -16, 21, -19, -26, -2, 23, 4, -8, 26, + 1, 29, 16, -17, 29, 45, -29, 17, 37, 40, -30, 19, 47, 7, 22, -19, 30, -17, -17, 7, 21, -5, 4, + 18, 50, 2, -17, -23, 40, 43, 3, 25, 53, 22, 20, -45, 47, 26, 5, -42, 54, 16, -3, 24, 35, 3, + -13, -5, 44, 21, 5, 5, 6, 21, 32, -7, 14, 7, -37, 1, 9, 17, 22, 25, 60, 31, 6, 25, 41, + -36, 22, 22, 4, -19, 38, -29, 23, 10, 2, -19, -16, 30, 2, -2, -42, 30, -49, 33, 20, -19, 26, 44, + 16, 26, -1, -51, 36, 52, 27, -7, 23, 5, 16, 3, -27, -24, 33, -44, 45, -9, 9, 5, 15, -66, -7, + -4, -1, 25, -14, 38, -12, 28, -3, -6, -32, 4, 14, 10, 12, 22, 29, 14, -20, -32, -26, -37, -39, 5, + 41, 1, 14, 27, 29, -15, 18, 13, 47, 14, 31, 6, 30, 9, -21, 42, 16, -18, 1, -11, 10, 2, -19, + -8, -41, -18, 24, 30, 12, -5, 38, -4, 40, 23, 21, 12, 3, 57, -44, 7, 26, 42, -25, -9, -10, -5, + 8, 8, -8, 15, -17, 7, -29, 28, 55, 48, 17, 28, -9, 14, -11, 42, -35, 25, -40, -15, -43, -1, 10, + -18, 26, -22, -10, -29, -10, -34, 50, -1, -18, -9, 29, -10, 7, -24, -32, -8, -14, 18, -14, 17, 35, -22, + 33, 43, 7, -14, -33, -20, 33, -39, -32, -1, 15, 2, 15, -11, -7, -32, 19, -25, -5, 33, -31, 38, 28, + 14, 20, 51, -24, 20, 51, 29, -5, 19, 26, -8, 24, 8, 19, -10, 0, 30, 22, -29, -43, 14, 23, -4, + 11, -22, 7, 27, 10, 16, 71, -22, 22, -16, 39, 11, -24, -32, 66, 36, 29, -4, 8, 1, -16, -35, 24, + 33, 4, -28, -14, 2, 16, 1, 19, -17, -25, 31, 37, 6, 
11, 27, 49, 25, 5, 66, 38, -29, 18, -21, + 30, 5, 29, -32, 18, 1, 4, 8, -38, 16, 20, -19, -14, 13, -5, 30, 16, -28, 17, 27, 20, 34, -2, + -38, 22, 41, 12, 2, 12, 10, 29, 5, -22, -32, 38, -35, 42, -38, 6, 6, -3, -68, -12, 2, 7, 18, + -12, 24, -4, 46, 13, -5, -35, 21, 16, 12, 20, 24, 26, 22, -23, -24, -28, -23, -45, 32, 37, 20, 33, + 11, 23, -17, 10, -6, 49, 5, 4, -8, 37, 13, -34, 31, -13, -9, 13, -27, -29, 21, -12, -3, -39, 3, + 30, 31, 2, 3, 39, 13, 53, 29, 4, 13, 2, 62, -50, 4, 19, 33, -19, -11, 0, -11, 18, 18, -22, + 9, -27, 13, -28, 27, 47, 53, 18, 19, 5, 17, -4, 29, -17, 26, -44, -27, -3, -9, -17, -30, 22, -17, + 0, -44, 11, 0, 54, 14, -19, -5, 32, -23, 10, -24, -34, -12, 7, 2, -11, 11, 53, -14, 33, 24, 19, + 12, -27, -25, 36, -63, -30, -2, 33, -6, 15, -12, -7, -27, 25, 8, 0, 34, -28, 31, 34, 33, 24, 57, + -18, 9, 44, 55, -26, 22, 13, -3, 36, 0, 16, -11, 4, 41, 21, -20, -4, 11, 27, -5, 0, -18, 23, + 36, 26, 5, 62, -13, 24, -12, 38, 8, -24, -37, 56, 29, 17, 3, 10, 0, -15, -33, 24, 46, -7, -15, + -29, 17, 26, -10, 20, -29, -6, 15, 56, -17, 19, 40, 38, 23, 7, 47, 38, -17, 18, -21, 49, 1, 14, + -37, 16, -2, -14, -16, -24, 6, 13, -15, -10, 12, -18, 12, 23, -27, 20, 20, 24, 26, -31, -50, 12, 41, + 35, -1, 3, -13, 8, -5, -38, -28, 28, -31, 48, -25, 13, 9, 7, -72, -18, -15, 16, 31, -14, 30, -12, + 36, -1, -2, -33, 15, 11, -1, 33, 22, 32, 14, -17, -33, -1, -12, -46, 22, 24, 23, 25, 31, 36, -18, + 3, 6, 45, 15, 27, -16, 37, 14, -11, 58, 3, -16, 5, -20, -4, 17, -5, 3, -44, -25, 37, 21, 15, + 9, 37, 17, 25, 36, 12, 18, 0, 40, -47, -3, -4, 65, -31, -8, 9, -3, 6, 13, -11, 16, -29, 7, + -28, 29, 47, 49, 20, 20, -5, 2, -5, 23, -20, 35, -40, -14, -37, -19, 7, -13, 16, -27, 0, -46, -18, + -31, 62, 3, -13, -17, 30, -19, -5, -24, -34, 5, -13, 15, -8, -1, 69, -28, 36, 10, 11, -3, -20, 4, + 35, -50, -18, 10, 25, -16, 1, -8, -14, -17, 31, -8, 2, 36, -23, 52, 33, 26, 27, 57, -37, 9, 39, + 29, -13, 21, 36, -9, 38, 6, 4, -4, -2, 46, 25, -15, -19, 12, 40, 1, 5, -21, 22, 30, 13, 15, + 53, -25, 27, -10, 38, 1, 1, -37, 63, 14, 17, 2, -4, 7, -13, -25, 25, 40, -3, -11, -14, 3, 9, + -12, 19, -37, -51, 19, 41, 12, 2, 37, 33, 23, 5, 36, 40, -33, 17, -16, 24, -7, 28, -34, 24, -4, + -8, -6, -29, 21, 8, -20, -8, 25, -17, 11, 23, -16, 19, 10, 17, 18, -31, -37, 11, 40, 29, -3, 11, + -14, 7, -7, -33, -41, 33, -36, 52, -17, -4, 17, 24, -44, -26, -1, 0, 32, -15, 21, 4, 34, 7, -3, + -35, 1, 13, -1, 10, 24, 29, 23, -8, -24, -4, -17, -36, 0, 16, 25, 19, 10, 31, -18, 4, 26, 48, + 16, 23, -7, 51, 12, -36, 52, 1, -21, 16, -15, -24, 28, -11, 11, -42, 11, 46, 24, -1, -7, 38, -2, + 37, 28, 6, 18, 2, 42, -51, -7, 4, 64, -19, -9, -3, -8, 11, 10, -15, 11, -13, 11, -21, 30, 74, + 43, 34, 35, -1, 20, -6, 18, -34, 34, -50, -13, -18, -23, -2, -12, 23, -33, -8, -36, -3, -21, 54, 3, + -19, -13, 25, -29, -10, -23, -39, 16, -15, -5, -14, -8, 53, -17, 33, 35, 25, 14, -31, -14, 35, -34, -9, + 4, 26, -2, 4, -3, 4, -22, 24, -27, -3, 21, -20, 46, 28, 27, 18, 46, -27, 14, 49, 35, -14, 22, + 42, -13, 28, -1, 28, -20, 1, 36, 23, -2, -43, 4, 10, -1, 5, -25, 8, 29, 30, 13, 57, -9, 17, + -11, 41, 12, -22, -32, 73, 12, 42, 0, 25, 4, -17, -13, 26, 35, -23, -25, -38, 7, 20, -7, 14, -31, + -20, 29, 43, 1, 5, 23, 40, 27, -1, 44, 39, -37, 25, -25, 55, -2, 13, -25, 0, 9, 5, 0, -26, + 20, 17, -17, -14, 19, -20, 25, 16, -15, 24, 30, 9, 26, -4, -38, 8, 38, 52, -2, 21, -4, 29, -2, + -21, -33, 30, -39, 44, -39, 11, 10, 11, -26, -7, 6, 6, 37, -19, 43, -21, 40, 12, -5, -30, 15, 14, + -7, 19, 26, 19, 31, -8, -30, -6, -22, -28, 0, 30, 11, 27, 6, 50, -18, 15, 31, 46, 12, 30, 
-4, + 50, 11, -13, 56, 7, -5, 3, 4, -17, 7, -6, -11, -43, -22, 25, 30, 4, 14, 38, -6, 53, 43, 10, + 12, 4, 55, -41, -4, -5, 50, -33, -7, -10, 0, 6, 19, 3, 9, -27, 20, -35, 30, 55, 51, 22, 14, + -7, 10, -3, 35, -17, 21, -53, -31, -25, -17, -5, -29, 26, -17, 7, -28, -18, -27, 55, 1, -28, -8, 16, + -17, 4, -23, -41, 8, -19, 15, -14, -7, 47, -28, 30, 35, 32, -11, -29, 3, 38, -34, -26, -7, 18, 3, + 5, -16, 9, -46, 21, 8, -2, 27, -22, 53, 25, 17, 19, 48, -23, 18, 54, 50, -28, 20, 35, 5, 31, + 6, 18, -3, 19, 25, 23, -9, -24, 3, 32, -3, -2, -23, 29, 23, 5, 9, 60, -15, 27, -27, 37, 5, + -17, -32, 66, 15, 33, 2, 23, 7, -14, -17, 24, 27, -2, -13, -27, 11, 31, -23, 19, -19, -35, 33, 36, + 16, 17, 37, 67, 34, -8, 43, 39, -22, 27, -21, 39, -3, 28, -33, 26, 6, -16, 6, -45, 13, 22, -16, + -3, 10, -26, 36, 23, -27, 16, 7, 22, 22, 23, -59, 15, 38, 28, -2, 23, 9, 28, 2, -46, -10, 25, + -39, 40, -23, 6, 1, 13, -27, -19, -12, 2, 21, -19, 24, -21, 32, 9, -7, -40, 4, 12, -7, 28, 25, + 22, 23, -17, -24, -1, -34, -29, 12, 22, 10, 15, 22, 24, -14, 6, 36, 54, 11, 11, 4, 41, 16, -28, + 32, 15, -11, 9, -17, 9, 19, -8, 4, -33, 5, 45, 26, 9, 10, 36, 13, 20, 10, 4, 7, 1, 39, + -33, 1, 3, 31, -19, -13, -8, 0, 19, 26, 2, 14, -24, 14, -37, 32, 71, 52, 20, 18, -5, 13, -18, + 37, -20, 16, -58, -24, -24, -9, -12, -28, 27, -20, 4, -39, 1, -4, 63, 6, -10, -12, 21, -17, -7, -25, + -44, 3, -2, 16, -22, 5, 31, -8, 22, 21, 33, 16, -16, -10, 27, -36, -23, 16, 22, -9, 16, -15, -4, + -53, 20, -18, -2, 33, -25, 72, 26, 31, 25, 49, -28, 21, 55, 37, -16, 15, 36, 0, 18, 6, 20, -23, + 0, 53, 20, -8, -26, 8, 29, -2, -8, -25, 28, 32, 20, 34, 61, -21, 21, -12, 48, 5, -19, -39, 70, + 23, 13, 4, 8, 8, -18, -28, 26, 50, 5, -12, -20, 14, 23, -18, 15, -21, -26, 6, 53, 5, 21, 29, + 31, 29, 16, 45, 46, -7, 17, -17, 50, 1, 30, -20, 21, 11, 12, -15, -22, 15, 24, -23, -9, 18, -18, + 28, 20, -17, 18, 15, 16, 30, 14, -59, 31, 44, 13, -5, 7, 9, 24, -3, -41, -22, 17, -25, 56, -10, + -8, 21, 39, -23, -35, 3, -1, 36, -20, 44, 4, 33, 13, -7, -29, 15, 15, -11, 35, 26, 33, 22, -18, + -23, 4, -10, -16, 38, 22, 23, 15, 3, 44, -14, 6, 17, 49, 6, 12, -7, 31, 23, -34, 40, -20, -24, + 1, 9, -9, 13, -4, 11, -34, -10, 18, 33, -1, 19, 40, 28, 36, 26, 4, 10, 5, 28, -29, -4, -10, + 16, -4, -2, -11, 3, 34, 23, 3, 9, -30, 27, -35, 32, 53, 43, 24, 44, 6, 19, 0, 15, -32, 11, + -46, -7, -19, -20, 11, -7, 34, -22, -15, -29, -8, -37, 55, -5, -11, -14, 28, -13, 10, -22, -32, -13, -21, + -5, -20, 16, 17, -34, 37, 27, 7, 4, -40, 5, 38, -29, -13, 38, 18, -18, 19, 8, -24, -23, 15, 3, + -1, 7, -21, 44, 23, 11, 17, 47, -14, 1, 37, 52, -22, 27, 36, 11, 20, -3, 21, 13, -8, 19, 18, + -7, -4, 9, 42, 0, -7, -27, 34, 37, 13, 21, 51, 0, 15, -34, 34, 13, -9, -23, 58, 32, 22, 16, + 3, -4, -15, -13, 26, 15, 2, 0, 1, 10, 22, -15, 17, -25, -27, 4, 30, -24, 21, 42, 49, 22, 7, + 45, 42, -15, 21, -10, 19, -4, 19, -10, 22, -8, -25, 4, -15, 28, 11, -16, -15, 11, -27, 22, 21, -8, + 28, 32, 7, 35, 18, -40, 37, 38, 32, 0}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_shift_data.h index babb78f3..a475582f 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_shift_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/output_shift_data.h @@ -1,17 +1,17 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. 
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include const int32_t depthwise_eq_in_out_ch_output_shift[250] = { - -9, -10, -11, -10, -9, -9, -10, -11, -10, -10, -9, -9, -9, -10, -11, -10, -11, -11, -10, -9, -9, -10, -10, - -10, -9, -10, -10, -10, -9, -9, -10, -10, -10, -10, -10, -9, -9, -9, -9, -9, -11, -9, -9, -10, -9, -9, - -10, -10, -10, -10, -10, -10, -9, -9, -10, -10, -9, -9, -10, -10, -10, -9, -9, -9, -9, -9, -9, -9, -9, - -9, -9, -10, -9, -10, -9, -9, -9, -9, -10, -9, -9, -9, -10, -10, -9, -10, -10, -10, -10, -10, -10, -10, - -10, -10, -10, -10, -10, -10, -10, -10, -9, -9, -9, -10, -10, -9, -10, -10, -9, -10, -9, -10, -10, -10, -10, - -10, -10, -9, -10, -9, -10, -9, -9, -10, -10, -9, -9, -9, -10, -10, -10, -10, -9, -10, -10, -9, -9, -10, - -10, -9, -10, -9, -10, -9, -10, -10, -10, -10, -9, -9, -9, -9, -11, -10, -10, -10, -9, -10, -10, -10, -9, - -11, -9, -10, -10, -10, -10, -10, -9, -9, -9, -9, -10, -9, -9, -9, -9, -10, -10, -9, -11, -10, -9, -10, - -11, -10, -10, -10, -10, -10, -10, -11, -10, -10, -9, -11, -10, -9, -10, -10, -10, -10, -10, -9, -9, -9, -10, - -10, -10, -9, -10, -10, -10, -9, -10, -9, -10, -9, -9, -10, -11, -9, -10, -9, -9, -10, -10, -10, -9, -10, - -10, -10, -9, -9, -9, -10, -9, -9, -9, -9, -10, -9, -11, -9, -10, -10, -10, -9, -10, -10}; + -9, -9, -9, -11, -9, -9, -10, -10, -9, -9, -10, -10, -9, -9, -9, -10, -9, -10, -11, -10, -9, -10, -10, + -11, -10, -10, -10, -10, -10, -10, -10, -10, -10, -10, -9, -10, -9, -9, -10, -10, -9, -10, -10, -10, -10, -9, + -10, -10, -10, -10, -10, -10, -9, -9, -10, -10, -10, -9, -9, -10, -9, -10, -9, -10, -9, -10, -9, -9, -11, + -10, -10, -9, -10, -10, -10, -9, -10, -10, -9, -9, -10, -11, -9, -10, -9, -10, -9, -11, -10, -10, -10, -10, + -9, -11, -9, -9, -10, -9, -10, -10, -10, -10, -10, -9, -9, -9, -9, -10, -10, -10, -10, -10, -10, -10, -10, + -10, -11, -9, -9, -10, -10, -10, -10, -10, -10, -10, -10, -10, -9, -10, -10, -9, -10, -10, -11, -10, -10, -9, + -9, -9, -9, -10, -9, -9, -10, -9, -10, -10, -11, -10, -10, -9, -10, -10, -9, -11, -10, -10, -10, -10, -10, + -10, -9, -10, -10, -10, -10, -9, -10, -9, -10, -9, -9, -10, -10, -10, -9, -10, -9, -10, -10, -10, -9, -10, + -10, -10, -10, -10, -10, -10, -10, -10, -10, -9, -9, -10, -11, -10, -9, -9, -9, -9, -9, -10, -9, -10, -10, + -10, -10, -9, -10, -9, -9, -10, -10, -10, -10, -10, -11, -9, -11, -9, -10, -11, -10, -10, -10, -10, -10, -10, + -9, -10, -10, -11, -9, -9, -10, -10, -11, -9, -10, -10, -10, -10, -9, -10, -10, -10, -9, -10}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/test_data.h index 8102687f..8314dc23 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/test_data.h @@ -1,5 +1,5 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
#include "biases_data.h" #include "config_data.h" #include "input_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/weights_data.h index 1fe55aad..b4ad5bd8 100644 --- a/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/weights_data.h +++ b/Tests/UnitTest/TestCases/TestData/depthwise_eq_in_out_ch/weights_data.h @@ -1,59 +1,59 @@ -// Generated by generate_test_data.py using tensorflow version 2.10.0 (Keras version 2.10.0). -// Interpreter from tensorflow version 2.10.0 and revision upstream/v2.10.0-0-g359c3cdfc5f. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. #pragma once #include const int8_t depthwise_eq_in_out_ch_weights[1000] = { - 20, 15, 101, 127, 26, -49, -91, -122, -127, -63, -28, 44, 124, 102, 88, -72, -69, 1, 117, - 68, -127, 127, 119, 51, -55, -31, 127, -127, -25, -7, -83, 106, 47, 127, -86, -106, 94, 127, - 35, 127, 127, -127, -115, 39, -24, -78, -101, -127, 127, 127, -127, -103, 52, 29, 127, -127, -63, - -29, 64, -72, -122, -87, -32, 127, -8, 127, -127, -2, -127, -14, -127, 8, -69, -121, 127, -99, - -114, -94, -127, 2, -48, -127, 127, 17, -38, -45, 127, -123, -33, 49, 127, 52, -46, -88, -35, - 127, 127, -27, -47, 127, -47, -21, 16, 34, -85, 64, -116, -77, -32, 127, -101, 18, 127, 102, - -97, 127, 37, -83, 20, -117, -34, -98, -127, -127, -127, -25, 88, 127, -14, 84, 69, -116, 18, - 26, -108, 125, -68, -116, 127, -33, -5, 35, -127, 18, -84, 27, -99, -127, -127, -50, -127, -119, - 53, 92, -127, 45, -4, 45, 34, 127, -34, -30, 74, 127, 127, -64, -127, -127, -51, 127, 20, - -87, -127, 86, -101, 127, -127, -126, 127, 68, -32, -83, -70, -127, 8, 20, -127, -58, -71, -56, - 127, -114, -58, 68, 20, 112, 71, -106, 127, 69, 55, -29, 127, 80, -117, -79, -83, 85, -42, - -90, 113, -76, 124, 4, 42, -127, -115, -127, 127, -116, -3, -5, -33, -116, 12, -71, -99, 55, - -34, -36, 35, 127, -127, -127, -127, 59, 35, -62, -118, -58, 116, 101, 127, 95, -76, -60, 77, - 127, -127, -112, 24, 3, 9, 110, -101, 16, 122, -89, -37, 71, 47, -78, -72, -2, -85, 34, - -127, 105, 5, 100, -66, -78, -127, -121, 127, -57, -9, 7, -116, 127, -28, -127, 42, 104, -127, - 29, -127, -3, -109, -104, 87, -127, 127, -80, -93, 71, 127, -48, 0, -10, 29, -44, 69, -80, - -39, 108, -107, -112, 8, -127, 120, 127, -81, 1, 106, -78, -12, -127, -59, -40, 6, -79, -127, - -98, -8, -2, -127, -61, -115, 96, -127, 32, 51, -127, -38, -75, -94, 28, 43, -43, 89, 127, - -19, 2, 127, -74, 14, -127, -127, -38, -33, -127, -34, -127, -18, 52, 50, 127, 81, -98, -4, - 122, -36, -127, 48, -50, 26, 72, 108, 127, -91, -20, -84, -109, -115, 67, 69, 56, 105, -127, - 105, 44, 127, 54, 64, 36, 66, 118, -74, -127, -127, -127, -94, -127, -16, -41, -127, 81, -114, - -47, 52, -17, -127, 91, 8, 81, -127, -97, -119, -102, -127, -46, 120, 113, 64, 127, -48, 86, - 60, -121, -52, 48, 54, -40, -127, 123, -104, -45, -63, 127, -127, 72, -39, 37, -41, 54, -69, - -4, 84, 75, 79, -127, -122, -108, -127, 127, 127, -127, 58, -53, 70, 30, 78, -18, -127, -127, - 54, 15, 127, 127, 122, -127, -82, 127, -11, 31, -127, -1, 78, 127, 65, 34, -25, 127, 123, - -127, -127, -6, 127, 127, -120, -50, 14, 125, -110, 127, 84, -127, -95, -102, -127, -127, -115, 34, - -41, 127, -127, -9, 66, -18, 118, 29, 90, -65, -121, 127, 127, 1, 70, -127, -127, -127, -127, - 37, -127, -127, -90, -87, 127, 58, 105, 112, 74, 127, -25, -71, -82, 47, 79, -62, -38, -12, - 
127, -11, -41, 127, -88, -116, -127, 47, 47, 117, -122, 127, -127, 127, 36, 73, -7, -76, -95, - -127, 127, 127, 11, -99, -3, -126, 44, -90, 74, -43, -79, 76, -127, 93, -63, -57, -110, 117, - 49, -47, 113, -127, 24, 31, 127, 42, 31, 127, 83, -53, 75, 51, 32, 127, 108, 127, 22, - -127, -87, -112, 127, 106, -89, -21, 67, 37, -73, 75, -127, 127, -127, -3, -127, 127, -24, -77, - 1, -46, 127, -115, -86, 20, 44, -65, -110, 127, -49, -91, -127, -127, 92, -88, 71, -127, -127, - 3, 127, -13, -63, 127, -30, 127, -127, -78, -93, -107, 68, 106, 73, -7, -12, -39, 127, 127, - -89, -72, -107, 66, 51, -127, 30, -127, 121, -70, 108, -127, -127, 7, -23, 77, -125, 122, 92, - 114, 6, -66, 80, 96, -123, -12, -26, 120, -124, -123, 105, -20, -40, 63, 94, -127, -89, -124, - -127, 48, -82, 127, -79, 127, 13, 48, 108, -127, 110, -115, 68, 68, 80, -74, 50, -127, 86, - -115, -116, 44, 127, -127, -50, -8, -62, -98, 127, 91, 57, 83, -25, 15, -59, 62, 108, 127, - 127, 90, 94, 20, 100, -127, 1, 86, -43, -52, 18, 110, 3, 122, -127, 74, -105, -127, 17, - 49, 51, 127, 127, 15, -95, 29, -89, 119, 127, -127, 127, 126, 127, 20, -78, -127, 20, 48, - 87, 18, 53, 127, 119, 29, 125, 127, -77, -127, -6, -1, -54, -45, 22, -127, 101, 120, 127, - -19, 127, 35, -109, 120, 121, 103, -97, 18, 10, -30, 10, 90, 46, 52, 78, -72, -116, 111, - -20, -91, 45, 126, -23, -88, 19, 28, -127, -127, 127, 11, -127, 95, 127, -103, 31, -125, -12, - 72, -116, -127, -58, 127, 58, 53, 7, 127, 90, 127, 17, -75, 27, 107, 42, 28, 127, 71, - -68, 13, -127, -49, -32, -27, 38, -127, -29, 29, -73, 119, -110, -94, -89, 74, -40, -15, -117, - -35, -127, 45, -127, 22, 82, -127, -100, 56, -127, -79, 127, -99, 127, 119, -27, 28, 45, -119, - 63, -46, 13, -89, 19, -23, 127, 28, -69, 42, -24, -127, -127, 127, 47, -61, -42, 107, 14, - 11, 52, 76, 46, 65, 72, -127, 76, 7, -47, 49, -123, -127, 46, 43, -31, 8, -51, 127, - 127, -15, -126, 17, 47, -113, 127, -45, 127, -127, 11, -127, 36, -98, 2, 127, -3, 96, 75, - -105, -127, -55, -122, -127, 31, 47, -127, 48, -40, 106, -127, -35, -39, -96, 36, -57, -73, -127, - 127, 84, -43, 127, 10, 37, -48, 127, -44, -5, -127, -83, 26, -92, -127, 57, -74, -46, -106, - -23, -127, 77, -69, -86, 127, -124, 5, -56, 124, 19, -127, -102, -28, -12, -18, 26, 125, -90, - -127, 119, 60, 22, 78, -46, 10, -12, 53, -88, -6, 127}; + 42, -124, 62, 127, -127, -127, 81, -54, 66, -127, -1, 20, -127, -127, -96, 79, -56, -10, 54, + 41, -25, -51, 7, -5, -62, -127, -40, 53, 46, 17, -74, 82, -127, 1, -127, -80, -94, -127, + -124, -42, -27, 127, 78, -62, -127, 127, 66, -127, -75, 118, 94, 48, -116, 117, -127, -79, -12, + -127, 110, 127, -71, 127, -86, 127, -101, -5, -72, 86, 23, -119, -124, -101, -66, 76, -27, 127, + -112, 74, 124, 127, 127, 127, -60, 23, 42, -127, 30, 24, -61, 117, -127, -70, 49, 20, -119, + 34, 54, 50, -2, -24, -92, 86, -127, -79, 95, 107, 10, -87, -127, -127, 122, -127, 127, -95, + 127, -73, 127, -58, -59, 127, -6, 30, -111, 30, -123, -121, 78, -61, 75, -127, 11, 127, 127, + 127, -127, 127, 89, -127, -127, -127, 71, 11, -89, 127, -73, -34, -118, 79, -99, 13, -78, 95, + 17, 20, 80, 127, -108, 127, 89, 127, -91, -113, 127, 61, 127, -30, 118, 60, -61, -23, 39, + 42, 59, 102, 127, -57, 105, -1, 41, 119, -89, 127, 96, 83, 17, -99, 127, 1, -127, -127, + 102, -127, 114, 96, -113, 127, 51, 33, 15, -56, 64, 127, 76, 97, 84, -127, 44, -113, -127, + -104, -16, -89, 86, 48, 127, 63, 73, 114, 127, -127, -126, -59, -58, 110, 127, -118, 70, 105, + -97, -58, -32, 64, 54, -94, 72, 32, -127, -127, 126, -85, -127, 35, 52, 9, -119, -127, -17, + 36, -120, 39, 66, 
-80, -118, 74, 53, -34, 127, -127, -29, 3, 126, -70, -43, -127, 75, -42, + 127, -101, 52, -127, -127, 71, -127, 57, -34, -5, 2, 127, -127, -48, -92, -99, -90, -66, -122, + -127, -63, -57, 127, -112, 107, 50, -85, -56, -6, 82, 99, 5, 127, -60, 77, -26, 49, -20, + 44, 75, 75, -47, -123, -82, -107, -84, -51, -54, 60, -57, 14, -127, -26, -127, 119, 127, 61, + 33, -40, 101, -12, 62, 20, 105, -88, -47, 112, -95, -127, -55, -109, 127, 103, 22, 3, -56, + 120, 103, 41, -102, -127, -52, -86, 118, 63, 88, -112, -127, -127, 71, -127, -116, -42, -54, 127, + -121, -35, 90, 66, 123, -119, -17, -33, -43, -117, -10, -66, 127, 127, 127, 33, -127, 127, 122, + -127, 78, 118, -90, 113, -34, -50, -81, -69, -121, -9, 127, -44, -14, 127, -127, 127, -127, 36, + 127, -72, 127, 54, 109, 20, 80, -127, 52, 55, -56, 127, -24, 11, -127, -12, 77, -51, -121, + 127, 127, 24, -127, -127, 3, -109, 28, 17, 44, -127, -127, 127, 64, 127, -127, 102, 34, -47, + -70, 3, -95, 127, -81, 34, -71, 21, 3, -11, -109, -6, 127, -127, -87, -127, -74, -127, -72, + -8, -70, 101, 127, 116, 127, -116, -89, -115, 17, -127, 92, -81, -54, -127, -63, 127, 127, -31, + -127, -127, 7, 96, -127, -127, -127, 127, -54, 41, 69, 53, -25, -11, -12, -84, -127, 94, -87, + -121, -63, -127, 52, 127, -37, 102, -2, 127, -31, 26, 66, -8, -34, -101, 98, 78, -89, -75, + -59, -39, 127, 124, 3, -127, -127, 70, -11, 99, 127, 89, 115, -1, -85, 105, 127, -127, 127, + -16, -127, -1, 82, 127, 54, -4, -127, 127, 107, -122, -116, 38, -123, -105, -30, -124, 127, -127, + -86, 49, 127, 53, 59, 127, 112, -127, -74, -63, 84, 127, -113, 33, 29, 118, 54, -59, 94, + 22, 73, 127, -59, 34, 114, -127, 15, -127, -9, 23, -9, 120, -127, -79, -84, 76, 71, -127, + -127, -118, 30, 127, -127, 26, -127, 125, -30, -127, 127, -127, 127, -39, 14, -61, -33, -56, 106, + -75, 33, 74, -122, 89, -127, -54, 113, -125, 21, -63, 45, 63, 127, 7, -90, 14, -93, 86, + 37, 38, 117, -124, 54, 23, -95, 36, 23, 26, -5, 24, -25, 12, 15, 43, -43, -11, 76, + 116, 38, -108, 93, 127, -22, -127, -127, 127, 68, -3, -62, -122, 8, -10, -127, 31, -45, -95, + -127, 76, -69, -115, -50, 66, 28, 50, 93, 80, 120, -127, 97, 33, -115, -75, 29, -10, 123, + 127, -101, -37, 127, 47, -110, 20, -109, -127, 127, 85, 113, 127, 43, 124, 30, -58, 81, 50, + 127, -40, 4, -127, 127, 59, -109, -127, 123, -51, -127, -50, -105, -119, -127, -123, -75, 13, 127, + -74, -52, 98, -3, -65, -127, 127, -113, -34, 69, -57, 127, -127, 127, -82, 75, 32, -45, -62, + 81, 127, -127, -38, -116, 11, 102, -25, -127, -127, 127, 106, 56, -88, 80, 103, 26, -127, 74, + -127, 127, -103, 3, -127, -27, 94, 127, 18, -43, 22, -127, 45, 109, 127, -39, 127, -103, -109, + -10, -36, 95, -105, -12, 79, -43, 56, 6, -86, -61, 38, -127, 127, 127, -8, 47, -127, -92, + -45, 124, -127, -127, -127, -57, -58, 127, -6, -54, -86, -69, -127, -87, 19, 80, 127, 127, -127, + 38, -127, -34, 127, -69, -118, -127, -127, -83, 104, -127, -32, -117, -85, 39, -127, 41, 108, 12, + -127, -83, -90, -105, -70, 127, 26, 111, -127, 56, -73, 127, 35, -102, -25, -98, 49, 42, -95, + -127, -46, 127, 15, 91, -34, 31, -59, -53, -70, 127, -83, -127, 127, -77, -127, 122, -127, -76, + -58, -75, -127, -109, -26, -88, -54, 87, 94, 4, -115, -113, -127, 26, -82, -34, 127, -43, 127, + -11, 5, -120, -47, -71, -127, 125, 52, -80, 93, 100, 97, 80, 116, -83, -127, -108, 18, -23, + 35, 34, 81, -118, 127, 127, -81, -120, -127, -15, 74, 127, 125, -127, -7, 127, -126, 93, 102, + 94, 33, 10, 42, 127, -54, 57, -51, 60, -99, -8, 9, 15, 127, -64, -68, 127, 127, 49, + -11, -120, 113, 37, -100, 110, 45, 126, 112, 63, -12, -96, 
127, 7, 2, -127, 119, 35, 124, + -11, 61, -58, -101, -36, -126, -19, -35, 29, 33, 80, -95, -59, 9, -49, -106, -42, -101, 114, + 127, 127, 54, -71, -125, 65, -127, -119, 64, -127, -103, 56}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/biases_data.h new file mode 100644 index 00000000..8c5cbdf0 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/biases_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_1_biases[22] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/config_data.h new file mode 100644 index 00000000..9bb9d6b6 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#define DEPTHWISE_INT4_1_OUT_CH 22 +#define DEPTHWISE_INT4_1_IN_CH 22 +#define DEPTHWISE_INT4_1_INPUT_W 1 +#define DEPTHWISE_INT4_1_INPUT_H 23 +#define DEPTHWISE_INT4_1_DST_SIZE 462 +#define DEPTHWISE_INT4_1_INPUT_SIZE 506 +#define DEPTHWISE_INT4_1_OUT_ACTIVATION_MIN -127 +#define DEPTHWISE_INT4_1_OUT_ACTIVATION_MAX 127 +#define DEPTHWISE_INT4_1_INPUT_BATCHES 1 +#define DEPTHWISE_INT4_1_FILTER_X 1 +#define DEPTHWISE_INT4_1_FILTER_Y 3 +#define DEPTHWISE_INT4_1_STRIDE_X 1 +#define DEPTHWISE_INT4_1_STRIDE_Y 1 +#define DEPTHWISE_INT4_1_PAD_X 0 +#define DEPTHWISE_INT4_1_PAD_Y 0 +#define DEPTHWISE_INT4_1_OUTPUT_W 1 +#define DEPTHWISE_INT4_1_OUTPUT_H 21 +#define DEPTHWISE_INT4_1_CH_MULT 1 +#define DEPTHWISE_INT4_1_INPUT_OFFSET 128 +#define DEPTHWISE_INT4_1_OUTPUT_OFFSET 2 +#define DEPTHWISE_INT4_1_DILATION_X 1 +#define DEPTHWISE_INT4_1_DILATION_Y 1 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/input_data.h new file mode 100644 index 00000000..02701bab --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/input_data.h @@ -0,0 +1,33 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int8_t depthwise_int4_1_input[506] = { + 116, 100, 111, 69, 112, -23, -110, 20, 91, 19, 101, -87, 67, 47, -78, -58, -100, 32, 68, + -4, -77, -64, 61, 86, -49, 80, -105, -118, 58, -49, 24, 50, -81, -68, -102, -17, -100, -104, + 87, -103, -100, 20, -50, -26, 3, 84, -63, 52, -29, -19, 19, 0, 96, -9, -64, -23, -40, + 9, 49, 62, -63, -100, 75, 31, -26, -12, 18, 52, -113, -87, -4, 124, 35, -109, -81, -100, + -82, 23, 19, 6, 122, -95, 8, -67, 3, 93, -86, 107, -114, 50, 38, -71, 55, 48, -55, + -45, -61, 82, -60, 122, -61, -66, -68, 33, 54, 27, -50, 102, -3, 87, 34, -44, -11, 3, + -69, -26, 6, 56, 63, 74, 81, -127, 17, 62, 67, 1, -90, 58, 87, -82, -11, 59, -88, + -109, -94, -71, 121, -56, -57, -73, -13, -70, -16, -85, 42, 13, 15, 31, 68, 74, -8, -71, + -46, -94, 2, -76, -35, -45, 7, -120, -28, -19, -112, 3, -34, 124, -119, 94, -110, 99, -81, + 53, 20, 27, -25, -76, 67, -53, -35, 22, -71, 94, -29, -37, -54, 66, 38, -105, 83, 108, + 122, -112, -71, 82, 62, 98, 28, 42, 39, -106, 120, 106, -69, 126, -81, 19, 83, -56, -22, + 5, 27, 64, -42, -83, -22, -5, 59, 119, -17, -5, 48, 97, -56, -54, 46, 68, -97, -18, + -84, 72, 99, -42, -56, -71, -65, 123, 83, 1, -18, -64, 44, 6, -33, -39, -117, -83, 21, + -94, 118, -52, -83, -54, -75, -22, -63, 21, -73, -68, -19, 42, -113, 41, 126, -98, -83, 10, + -8, -29, 34, -18, -125, 53, -74, 22, -91, -79, -108, 108, -62, -126, 77, -5, 122, -7, 59, + 36, -90, 30, -34, 118, 20, -116, -54, 125, -75, -78, -124, 46, 70, -6, 89, 96, 13, -37, + 28, 7, 72, -68, 74, 24, -40, 12, 112, 59, 101, 8, 78, -66, 91, 44, 34, 65, 95, + -10, 49, 6, 32, 15, 107, -47, 18, -37, -114, -51, -115, -77, 82, -56, -125, 61, 70, -16, + -17, -41, 34, -31, -103, -20, 49, -123, 37, 103, -12, 22, -105, -61, -33, -4, 115, -71, -79, + -92, -19, 99, 116, -113, -117, -29, 118, -63, -28, -86, 107, -94, 19, 27, -63, 106, 55, 28, + 27, 49, -8, 97, 113, 21, -63, 103, 31, 112, -45, -113, -124, -13, 102, 91, -57, -10, -16, + -33, -112, -83, -120, 49, 88, 119, 84, -41, 124, 30, 55, -44, 81, 90, -127, -27, 121, -50, + -55, -69, -86, 21, 113, 86, 84, -58, -39, 30, 16, -113, 81, 119, 15, -41, -48, 64, -128, + -105, -56, -37, -72, -45, 112, 111, 122, -70, 28, -39, 93, -61, 81, 65, -77, -100, 4, -6, + -98, -52, -36, 73, -32, 43, 87, 64, 83, 107, 46, 63, -30, 15, 17, -121, -125, 56, 49, + 51, 70, -106, 19, 45, 84, -25, 91, -73, -57, -24, -68, -57, 77, -84, 108, 5, -6, -26, + -64, -107, 83, 104, 101, 54, 5, 86, -94, 38, 30, -72}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_mult_data.h new file mode 100644 index 00000000..bd4cc87e --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_mult_data.h @@ -0,0 +1,9 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int32_t depthwise_int4_1_output_mult[22] = { + 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, + 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, + 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_ref_data.h new file mode 100644 index 00000000..1b45bc77 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_ref_data.h @@ -0,0 +1,27 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_1_output_ref[462] = { + -11, -17, 91, -127, 65, -6, -127, -23, -63, 23, 5, -11, 80, 106, 25, -127, 78, 31, -43, -119, -9, 9, + 9, -17, 23, -127, 113, 66, -127, 30, -93, 17, -26, -27, -18, 74, 72, 37, 30, 53, -76, -127, 39, 39, + -39, -14, -12, -127, 127, 10, -127, -23, -88, 127, -9, -37, -11, 87, 74, -127, 92, 126, -81, -127, -28, -2, + 63, -14, -66, -70, 115, -61, -127, -42, 20, 8, 1, -127, 44, 79, 36, -30, 40, 127, -49, -127, 3, 43, + -74, -6, 51, -54, 127, -42, -126, 32, -31, 0, -103, -95, -20, 52, 67, -127, 50, 127, -89, -37, 33, 18, + 47, 0, 31, -110, 127, -32, -105, -22, -100, 113, -33, 69, 6, 116, 42, -127, 65, 127, -79, -61, 4, 66, + -22, -3, -30, -87, 93, 36, -122, -1, -32, 57, -17, -127, 75, 97, 29, -25, 0, 127, -72, -127, -21, 26, + -38, -5, -40, -127, 59, 67, -89, -26, 38, -17, -74, -75, -69, 127, 77, -127, 47, 127, -92, -127, 31, -18, + -4, 0, -34, -118, 122, -25, -47, 2, -57, 127, -9, -49, 41, 127, 31, -126, 96, 127, -99, -127, -13, 43, + -31, -18, 113, -127, 127, -80, -127, 4, -88, -40, -121, -66, 46, 110, 24, 31, 79, 127, -71, -68, -38, 3, + 6, -6, 6, -80, 127, -39, -127, -42, -11, 114, -4, -76, 11, 46, 23, -127, 67, 127, -23, -125, 46, 68, + 10, -10, -49, -114, 127, -7, -46, -40, -14, -25, -16, -9, 24, 103, 39, -127, 88, 114, -90, -98, 24, -35, + 30, -12, 18, -110, 127, 13, -127, 21, 24, 62, 42, -52, -60, 127, 80, 18, 66, 126, -98, -108, -1, 54, + -70, -12, 25, -127, 116, 11, -127, 13, -58, 88, -99, -127, 33, 85, 76, -127, 33, 127, -83, -88, 54, 36, + -1, -6, 39, -109, 59, -32, -127, 1, -77, -58, -83, -54, 35, 113, 46, -90, 65, 91, -90, -9, -8, -46, + 5, -11, -14, -127, 127, 26, -127, -46, 24, 127, -5, -116, -36, 50, 19, -127, 93, 40, -65, -45, 13, 111, + -33, -12, -33, -74, 92, -30, -100, -14, 17, 27, -101, -127, 88, 31, 61, -19, 55, 127, -17, -88, 13, -50, + 16, -9, -4, -127, 127, 4, -127, 24, -66, 35, -83, -106, -59, 127, 64, -127, 72, 127, 1, -65, 116, 69, + -8, -3, -14, -127, 127, 1, -127, -11, -51, 42, -32, 24, 40, 109, 52, -85, -3, 127, 2, -48, 27, 28, + 30, -5, -96, -127, 127, -23, -127, -25, -38, 26, -115, -42, 77, 127, 55, -21, 48, 127, -31, -127, -60, -17, + -86, -15, 58, -127, 127, -7, -127, -7, -90, 77, 30, -127, -38, 33, 76, -127, 84, 127, -90, -92, 32, 53}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_shift_data.h new file mode 100644 index 00000000..92943902 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/output_shift_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). 
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_1_output_shift[22] = {-3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, + -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/test_data.h new file mode 100644 index 00000000..9f968de9 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/test_data.h @@ -0,0 +1,9 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#include "biases_data.h" +#include "config_data.h" +#include "input_data.h" +#include "output_mult_data.h" +#include "output_ref_data.h" +#include "output_shift_data.h" +#include "weights_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/weights_data.h new file mode 100644 index 00000000..09fdf528 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_1/weights_data.h @@ -0,0 +1,8 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_1_weights[33] = {3, -106, -64, 0, 75, -79, 101, -112, 14, 14, 66, + -6, 13, 5, 57, -82, -87, 28, 67, 83, -100, -59, + 2, -51, 54, -55, 99, 66, 0, -95, 116, -32, 42}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/biases_data.h new file mode 100644 index 00000000..bb155aed --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/biases_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_2_biases[19] = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/config_data.h new file mode 100644 index 00000000..d8621b6f --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#define DEPTHWISE_INT4_2_OUT_CH 19 +#define DEPTHWISE_INT4_2_IN_CH 19 +#define DEPTHWISE_INT4_2_INPUT_W 6 +#define DEPTHWISE_INT4_2_INPUT_H 6 +#define DEPTHWISE_INT4_2_DST_SIZE 76 +#define DEPTHWISE_INT4_2_INPUT_SIZE 684 +#define DEPTHWISE_INT4_2_OUT_ACTIVATION_MIN -127 +#define DEPTHWISE_INT4_2_OUT_ACTIVATION_MAX 127 +#define DEPTHWISE_INT4_2_INPUT_BATCHES 1 +#define DEPTHWISE_INT4_2_FILTER_X 5 +#define DEPTHWISE_INT4_2_FILTER_Y 5 +#define DEPTHWISE_INT4_2_STRIDE_X 1 +#define DEPTHWISE_INT4_2_STRIDE_Y 1 +#define DEPTHWISE_INT4_2_PAD_X 0 +#define DEPTHWISE_INT4_2_PAD_Y 0 +#define DEPTHWISE_INT4_2_OUTPUT_W 2 +#define DEPTHWISE_INT4_2_OUTPUT_H 2 +#define DEPTHWISE_INT4_2_CH_MULT 1 +#define DEPTHWISE_INT4_2_INPUT_OFFSET 128 +#define DEPTHWISE_INT4_2_OUTPUT_OFFSET 2 +#define DEPTHWISE_INT4_2_DILATION_X 1 +#define DEPTHWISE_INT4_2_DILATION_Y 1 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/input_data.h new file mode 100644 index 00000000..77d6ee3d --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/input_data.h @@ -0,0 +1,42 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_2_input[684] = { + -27, -25, 7, 48, -107, -78, 90, -34, -48, 39, 50, -44, -62, 69, -67, -111, -91, 0, -12, + -108, 106, 23, -23, 9, -38, 110, 122, -126, -46, 65, 28, 98, -88, 97, 7, 1, -112, -4, + 68, -122, 101, -14, 125, -45, 40, -101, -123, 122, -43, -100, -96, 101, 108, 13, 67, 53, -34, + 32, 98, -2, -101, 41, -102, 6, -8, -37, -107, 14, 74, -10, 5, 14, -90, -62, -81, -110, + -113, -76, -32, 77, 37, 119, 80, -99, 47, -55, -96, -37, -99, -37, -47, 13, 97, -113, -124, + -112, -123, 101, 113, -52, -5, 70, -105, -37, 36, 10, -104, 117, 93, 76, -70, 24, 67, 78, + 88, -12, 101, -64, -94, -112, 5, 13, 56, 77, -10, -47, -113, 100, -92, -86, -97, 58, -35, + 56, -5, 54, -57, 104, -108, -121, 53, -17, -104, 8, -102, 73, -40, 85, 68, -85, 108, -48, + -32, -49, 24, -6, -105, 102, 77, 50, -115, -7, -118, -12, -67, 117, -118, -80, -105, -99, -63, + -9, -16, 119, 3, 2, -90, 70, 105, -22, 70, -7, -70, -127, 118, 109, 89, -90, -123, 29, + -29, 79, -106, -67, 126, -61, 41, -6, 18, -80, -27, 69, -105, -22, -112, 85, -10, -78, -97, + 6, -75, -72, -119, -53, -120, 94, -102, -70, 44, 27, -87, 89, -63, 20, 18, 74, -65, 79, + -104, -105, 47, -89, -20, -7, -27, 36, 78, 98, 115, -44, -107, 16, 49, 79, 120, -14, 50, + 78, 81, -58, 41, -125, -110, -69, 87, -110, -31, 35, 66, 35, 24, -35, 61, 2, -43, -35, + 107, -7, 49, 125, -70, 77, 56, -13, 51, 46, -113, 51, -18, 4, 7, -32, -118, -40, -1, + 52, 116, -108, -62, 38, -55, 49, -18, 37, 54, -128, -86, 47, -100, -81, 8, 69, 94, -23, + -100, 68, -97, -42, -7, -68, 96, 34, -49, -6, 101, 7, 91, 122, -103, -84, 26, 104, -82, + 33, 15, -102, -23, -58, -86, 57, 97, 39, -2, -96, -96, 39, 36, 115, 119, 84, 98, 120, + 69, 93, 87, -26, -39, -22, 0, -118, 4, 40, 4, 12, 44, 20, -85, 108, 54, -18, 110, + -87, 20, 114, -81, 111, 13, 66, 83, 105, 54, 20, 10, 102, -99, 0, -98, 116, -119, -23, + -127, -7, 122, 102, 79, 13, 28, 108, 110, -61, -59, -109, -44, -94, 122, -9, 30, 105, -17, + -60, -84, -68, 70, 96, 50, -86, -51, -41, -91, 52, -109, 24, 32, -85, 15, 121, 18, 9, + -44, 10, -36, 66, -23, 81, 62, -3, -25, 96, 73, -66, -27, 86, 17, 18, 32, 63, -124, + -95, -127, 24, -9, 59, -59, -88, 54, 82, -49, 120, 30, -28, -22, 
16, -78, -91, -90, -33, + -105, -121, -28, 78, 98, 42, -57, 113, -16, -29, -29, -32, -44, 57, -56, -64, -53, 81, -68, + -56, -107, -126, 88, 34, -109, 33, -115, -61, 25, 62, -63, 99, 81, 115, 105, -46, 71, -76, + 53, 56, -26, 9, -98, -109, -29, 16, 44, 81, -7, 8, -88, 75, 111, 80, -100, 109, 117, + 121, -125, 17, -53, -55, 55, 109, 40, -55, -93, -28, 71, -46, -108, -16, -44, 34, 120, 98, + -120, -90, 52, 101, 78, 74, 112, -117, 95, 49, -31, 78, 71, -16, -79, -28, -91, -77, 75, + -89, -68, 28, -77, 24, 88, 90, 59, 69, -17, -121, 15, 108, 117, 49, -78, -52, 41, -47, + 9, -67, -44, 66, -124, 68, -25, -85, -81, 40, -91, -102, -55, 100, -79, 23, 81, -81, 56, + 6, 117, -122, 26, 17, -119, 99, -86, -85, -80, 48, -82, 119, 8, 122, -84, -48, -46, -54, + 88, 74, -76, -103, -72, -102, 81, -43, -57, 117, -13, -109, -67, -43, 125, 41, -24, -44, 37, + -56, 68, 99, 58, -36, 87, 5, 8, -115, 26, 110, 59, -15, -83, -49, -94, -119, 55, 71, + 26, 107, -92, 43, 59, 96, 110, 88, -24, -115, -85, 37, -35, -13, 16, -92, 51, 111, 107, + -93, -106, -17, 52, -63, -41, -120, 115, -70, 109, 20, -109, -100, 81, -111, 48, 60, 12, 85}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_mult_data.h new file mode 100644 index 00000000..2abe9d9c --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_mult_data.h @@ -0,0 +1,24 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_2_output_mult[19] = {1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358, + 1533286358}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_ref_data.h new file mode 100644 index 00000000..14e63afa --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_ref_data.h @@ -0,0 +1,10 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_2_output_ref[76] = { + -127, 61, -12, -127, -127, -127, -101, -37, -127, -127, -127, -127, -127, -127, -127, -127, 127, 67, -127, + -127, -127, 16, -127, -127, -127, 53, -125, -104, -127, -127, 34, -127, -127, -69, 60, 127, 127, -127, + -127, -127, 41, -127, -127, 51, -14, 127, -127, -127, -127, -127, -77, -127, 61, -127, -124, 127, -127, + -127, -127, 33, -127, -127, -127, -77, -31, -127, -127, -127, -127, -127, -127, -9, 13, 9, 50, -127}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_shift_data.h new file mode 100644 index 00000000..0119328a --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/output_shift_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int32_t depthwise_int4_2_output_shift[19] = + {-3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3, -3}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/test_data.h new file mode 100644 index 00000000..8314dc23 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/test_data.h @@ -0,0 +1,9 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#include "biases_data.h" +#include "config_data.h" +#include "input_data.h" +#include "output_mult_data.h" +#include "output_ref_data.h" +#include "output_shift_data.h" +#include "weights_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/weights_data.h new file mode 100644 index 00000000..ff8a4a9a --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_2/weights_data.h @@ -0,0 +1,18 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_2_weights[238] = { + -85, -30, -44, 30, -81, 91, -109, 45, -76, -48, 34, 22, 26, -109, -43, 16, -15, 11, -27, -68, + -79, 65, -75, -15, -91, -84, 48, 83, -91, 36, 5, 77, 43, 86, -102, 17, 63, 33, -35, -109, + -98, 14, 86, -103, -54, -65, 102, 4, 12, -63, 106, 33, 41, 5, 51, -26, -32, 31, -81, 12, + 6, -86, -11, 83, 3, -31, -26, 106, -63, -111, 41, 76, -98, 28, -73, 6, 19, 30, 98, -17, + -111, 19, -1, 75, -52, -76, 53, -86, 3, 6, -96, 46, 86, 77, -18, 1, 78, 48, 57, -15, + 32, -74, -68, 25, 94, -21, 11, 15, -91, -51, 6, -61, 5, 74, 62, -2, 77, 86, 1, -83, + 80, 33, 68, 45, 54, -21, -96, 66, -54, 26, -16, -54, 108, -60, 62, 106, 100, -76, -107, 11, + 64, -49, 11, -111, -103, -58, 6, 45, -27, 97, -14, -1, -69, -64, -11, -112, 26, -4, 4, -79, + 16, -101, 105, 74, -108, -106, 82, -32, -47, 69, 0, -20, -26, -7, 0, 27, -96, -32, 35, -12, + 58, 18, 0, -81, 27, 33, 28, -22, 59, -77, -14, -64, 75, -26, 12, 29, -96, -52, -68, -84, + 9, -48, 85, 105, -86, -106, 10, 12, -13, 106, -39, -83, -99, 80, 73, -71, 5, 64, 28, -84, + -36, 106, 27, 0, 68, 2, 9, -90, 15, -15, -120, 28, 57, -86, 26, 10, 64, 6}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/biases_data.h new file mode 100644 index 00000000..ee28608f --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/biases_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_3_biases[1] = {0}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/config_data.h new file mode 100644 index 00000000..e0478ebf --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once
+#define DEPTHWISE_INT4_3_OUT_CH 1
+#define DEPTHWISE_INT4_3_IN_CH 1
+#define DEPTHWISE_INT4_3_INPUT_W 2
+#define DEPTHWISE_INT4_3_INPUT_H 2
+#define DEPTHWISE_INT4_3_DST_SIZE 1
+#define DEPTHWISE_INT4_3_INPUT_SIZE 4
+#define DEPTHWISE_INT4_3_OUT_ACTIVATION_MIN -127
+#define DEPTHWISE_INT4_3_OUT_ACTIVATION_MAX 127
+#define DEPTHWISE_INT4_3_INPUT_BATCHES 1
+#define DEPTHWISE_INT4_3_FILTER_X 2
+#define DEPTHWISE_INT4_3_FILTER_Y 2
+#define DEPTHWISE_INT4_3_STRIDE_X 1
+#define DEPTHWISE_INT4_3_STRIDE_Y 1
+#define DEPTHWISE_INT4_3_PAD_X 0
+#define DEPTHWISE_INT4_3_PAD_Y 0
+#define DEPTHWISE_INT4_3_OUTPUT_W 1
+#define DEPTHWISE_INT4_3_OUTPUT_H 1
+#define DEPTHWISE_INT4_3_CH_MULT 1
+#define DEPTHWISE_INT4_3_INPUT_OFFSET 128
+#define DEPTHWISE_INT4_3_OUTPUT_OFFSET 2
+#define DEPTHWISE_INT4_3_DILATION_X 1
+#define DEPTHWISE_INT4_3_DILATION_Y 1
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/input_data.h
new file mode 100644
index 00000000..d5515c98
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/input_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int8_t depthwise_int4_3_input[4] = {-119, 86, 7, -119};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_mult_data.h
new file mode 100644
index 00000000..ca4e5c09
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_mult_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int32_t depthwise_int4_3_output_mult[1] = {1533286358};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_ref_data.h
new file mode 100644
index 00000000..b02ce138
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_ref_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int8_t depthwise_int4_3_output_ref[1] = {113};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_shift_data.h
new file mode 100644
index 00000000..ca9e3012
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/output_shift_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int32_t depthwise_int4_3_output_shift[1] = {-3};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/test_data.h
new file mode 100644
index 00000000..9f968de9
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/test_data.h
@@ -0,0 +1,9 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#include "biases_data.h"
+#include "config_data.h"
+#include "input_data.h"
+#include "output_mult_data.h"
+#include "output_ref_data.h"
+#include "output_shift_data.h"
+#include "weights_data.h"
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/weights_data.h
new file mode 100644
index 00000000..088c5a1b
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_3/weights_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int8_t depthwise_int4_3_weights[2] = {62, -91};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/biases_data.h
new file mode 100644
index 00000000..b728b5af
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/biases_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int32_t depthwise_int4_4_biases[3] = {0, 0, 0};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/config_data.h
new file mode 100644
index 00000000..fda77e31
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/config_data.h
@@ -0,0 +1,25 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#define DEPTHWISE_INT4_4_OUT_CH 3
+#define DEPTHWISE_INT4_4_IN_CH 3
+#define DEPTHWISE_INT4_4_INPUT_W 4
+#define DEPTHWISE_INT4_4_INPUT_H 4
+#define DEPTHWISE_INT4_4_DST_SIZE 12
+#define DEPTHWISE_INT4_4_INPUT_SIZE 48
+#define DEPTHWISE_INT4_4_OUT_ACTIVATION_MIN -127
+#define DEPTHWISE_INT4_4_OUT_ACTIVATION_MAX 127
+#define DEPTHWISE_INT4_4_INPUT_BATCHES 1
+#define DEPTHWISE_INT4_4_FILTER_X 2
+#define DEPTHWISE_INT4_4_FILTER_Y 2
+#define DEPTHWISE_INT4_4_STRIDE_X 2
+#define DEPTHWISE_INT4_4_STRIDE_Y 2
+#define DEPTHWISE_INT4_4_PAD_X 0
+#define DEPTHWISE_INT4_4_PAD_Y 0
+#define DEPTHWISE_INT4_4_OUTPUT_W 2
+#define DEPTHWISE_INT4_4_OUTPUT_H 2
+#define DEPTHWISE_INT4_4_CH_MULT 1
+#define DEPTHWISE_INT4_4_INPUT_OFFSET 128
+#define DEPTHWISE_INT4_4_OUTPUT_OFFSET 2
+#define DEPTHWISE_INT4_4_DILATION_X 1
+#define DEPTHWISE_INT4_4_DILATION_Y 1
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/input_data.h
new file mode 100644
index 00000000..c6099e80
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/input_data.h
@@ -0,0 +1,9 @@
+// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once +#include + +const int8_t depthwise_int4_4_input[48] = {7, 60, 114, -118, -23, -9, -8, -117, -105, -92, 81, -80, + -76, -118, 120, 48, -119, -76, 5, -74, -67, 17, 8, -13, + -36, -125, 98, -91, 3, 11, 25, 116, -103, 105, 15, 43, + -38, 99, 91, -4, 22, -125, -73, -17, 52, 9, -90, -4}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_mult_data.h new file mode 100644 index 00000000..81998c5e --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_mult_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_4_output_mult[3] = {1533286358, 1533286358, 1533286358}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_ref_data.h new file mode 100644 index 00000000..98055707 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_ref_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_4_output_ref[12] = {67, -51, -67, 75, -37, -15, 50, -39, -71, -50, -74, -109}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_shift_data.h new file mode 100644 index 00000000..e2106af1 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/output_shift_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_4_output_shift[3] = {-3, -3, -3}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/test_data.h new file mode 100644 index 00000000..9f968de9 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/test_data.h @@ -0,0 +1,9 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#include "biases_data.h" +#include "config_data.h" +#include "input_data.h" +#include "output_mult_data.h" +#include "output_ref_data.h" +#include "output_shift_data.h" +#include "weights_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/weights_data.h new file mode 100644 index 00000000..20d47004 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_4/weights_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.11.0 (Keras version 2.11.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int8_t depthwise_int4_4_weights[6] = {-45, -92, -48, 4, 26, 45}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/biases_data.h new file mode 100644 index 00000000..e50304da --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/biases_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_biases[8] = {0, 0, 0, 0, 0, 0, 0, 0}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/config_data.h new file mode 100644 index 00000000..fbcbf96b --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#define DEPTHWISE_INT4_GENERIC_OUT_CH 8 +#define DEPTHWISE_INT4_GENERIC_IN_CH 2 +#define DEPTHWISE_INT4_GENERIC_INPUT_W 16 +#define DEPTHWISE_INT4_GENERIC_INPUT_H 16 +#define DEPTHWISE_INT4_GENERIC_DST_SIZE 200 +#define DEPTHWISE_INT4_GENERIC_INPUT_SIZE 512 +#define DEPTHWISE_INT4_GENERIC_OUT_ACTIVATION_MIN -127 +#define DEPTHWISE_INT4_GENERIC_OUT_ACTIVATION_MAX 127 +#define DEPTHWISE_INT4_GENERIC_INPUT_BATCHES 1 +#define DEPTHWISE_INT4_GENERIC_FILTER_X 8 +#define DEPTHWISE_INT4_GENERIC_FILTER_Y 8 +#define DEPTHWISE_INT4_GENERIC_STRIDE_X 2 +#define DEPTHWISE_INT4_GENERIC_STRIDE_Y 2 +#define DEPTHWISE_INT4_GENERIC_PAD_X 0 +#define DEPTHWISE_INT4_GENERIC_PAD_Y 0 +#define DEPTHWISE_INT4_GENERIC_OUTPUT_W 5 +#define DEPTHWISE_INT4_GENERIC_OUTPUT_H 5 +#define DEPTHWISE_INT4_GENERIC_CH_MULT 4 +#define DEPTHWISE_INT4_GENERIC_INPUT_OFFSET 128 +#define DEPTHWISE_INT4_GENERIC_OUTPUT_OFFSET 2 +#define DEPTHWISE_INT4_GENERIC_DILATION_X 1 +#define DEPTHWISE_INT4_GENERIC_DILATION_Y 1 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/input_data.h new file mode 100644 index 00000000..15ee67d6 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/input_data.h @@ -0,0 +1,33 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int8_t depthwise_int4_generic_input[512] = { + -82, 113, -101, 0, -123, 0, 95, -51, 69, 0, 58, 57, -27, -125, -74, -109, -24, -111, 78, + -18, 86, 101, -71, -75, 68, 103, -124, 100, -109, -58, -21, 24, 2, 55, -118, 124, -68, -98, + 85, 13, 26, -30, -17, 102, -71, 35, -58, -73, -37, -68, 41, 79, 111, -46, -66, 30, 63, + 29, -106, 80, 20, -4, 25, 13, -45, -61, 71, 51, -31, 71, -24, 18, -121, -21, 116, -106, + 59, 1, -21, -49, 5, 71, 0, -15, -85, -1, 104, 122, 126, 65, -102, 19, 110, -43, 125, + 73, 61, -101, -23, 122, 118, 116, -45, 74, 18, -58, 65, -49, 95, -26, 34, 9, -45, -91, + 90, -88, 73, -44, -28, 122, -68, -52, 17, -39, -43, -37, -86, -11, 113, -102, 23, 34, 33, + -32, -65, 120, 46, 92, -44, 69, -105, 92, -71, -82, -98, -70, 20, 69, -59, -126, 1, 14, + -38, -32, -64, -44, -19, 28, -35, 20, -74, 16, 51, -32, 98, 31, -96, -88, 4, -88, 45, + -96, 75, 81, -117, 105, -88, 102, 13, -14, -75, -92, 60, 8, 36, 98, -107, -29, -83, 126, + 125, -17, -85, -70, -99, -81, -68, -104, -105, 8, -82, 96, 96, 109, 54, 73, -79, -119, 15, + 52, 117, -28, 81, 31, 112, 114, 37, 13, -5, 104, -23, -97, -9, 105, 40, 4, -109, 34, + 104, -37, -15, -56, 16, 28, -101, 70, 118, 46, 35, 69, 63, -24, -26, 90, 97, -7, 114, + -108, -11, -113, -80, 21, -119, 126, -76, 15, -19, -86, -124, 44, -12, -105, 27, -12, -100, -29, + 23, 35, 85, 2, 79, 119, -26, -71, -100, 44, 81, 117, 33, 115, -89, -77, -90, -122, 50, + -18, 61, 11, 50, -12, -124, 112, -82, 58, 35, -21, 116, 51, 10, 4, 7, -60, 74, -110, + -34, 73, -103, 17, 81, 22, 123, -112, -109, 34, -35, 37, 96, -84, 125, 43, -81, 68, -56, + 0, -66, -109, 100, -36, -71, 29, -30, -49, 102, 50, -19, 102, -66, 7, -16, 31, -42, -115, + -52, -70, -120, 46, 60, -98, -103, 81, 12, -27, 111, -50, -106, -96, -53, -119, 60, 50, 17, + -7, 28, -102, 20, -123, -30, -52, -125, -73, 120, 107, -95, -109, -110, -40, -23, 93, 23, -57, + 3, 126, -128, -102, 97, 1, 78, 113, 79, 99, 105, -111, 55, 121, 9, 34, 31, -20, -126, + -49, -17, -50, 10, -35, -6, 70, -27, 60, -104, -116, 25, 76, 40, 90, 108, -63, 43, 7, + 113, 80, 81, -63, 19, -14, 121, -71, 99, 87, -83, -93, 94, -50, -13, -92, -2, 57, -31, + -113, 111, 71, -94, 122, 4, -86, 63, -55, 94, 9, -106, -66, 78, 40, -20, 58, 63, 24, + 110, -69, 105, -91, -52, 39, -121, -16, 61, 17, 65, 104, 83, 22, 124, -8, -85, 90, -89, + 122, 63, -121, 20, 121, 49, 64, -19, -57, -46, -43, -29, 12, -85, -122, -43, -122, 83, -27, + -8, 118, 21, -124, -47, 24, 28, -8, 73, -45, 83, 115, -83, -101, 33, 96, 117, -44}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_mult_data.h new file mode 100644 index 00000000..c8972e37 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_mult_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int32_t depthwise_int4_generic_output_mult[8] = + {1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358, 1533286358}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_ref_data.h new file mode 100644 index 00000000..1eec0f8d --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_ref_data.h @@ -0,0 +1,17 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_output_ref[200] = { + -127, -127, -127, -127, 13, 41, -127, -127, -127, -127, -127, -127, -108, -16, -35, -127, -127, -127, -127, + -127, -127, -114, 127, -127, -127, -63, -127, -127, -127, 57, -127, -127, -127, -127, -127, -127, 127, 127, + 116, -127, -127, -127, -127, -127, -109, 127, -110, -127, -127, -127, -127, -127, 127, -127, -127, -127, -127, + -127, -127, -127, -127, 127, 127, -127, -127, -127, -127, -127, 127, -127, -127, -127, -127, -127, -127, -127, + -61, -120, -127, -127, -127, -127, -127, -127, -127, 127, -127, -127, -127, -127, -127, -127, -43, 127, -127, + -127, -127, -127, -127, -127, 127, 127, -127, -127, -127, -54, -127, -127, -127, -127, -127, -127, -127, -127, + -127, -127, -127, -127, -127, -127, -127, -127, 47, 85, 127, -127, -127, -127, -127, -127, -127, -127, -127, + -51, -127, -127, -127, -127, -127, -127, 127, -83, -127, -127, -127, -127, -127, -127, 81, 127, 127, -127, + -127, -127, -127, -127, 121, 127, -53, -127, -127, -127, -127, -127, 90, 28, -6, -127, -127, -127, -127, + -127, -127, -127, -72, -127, -127, -127, -127, -127, 127, 127, -127, -127, -127, -127, -127, -127, -96, 127, + 127, -127, -127, -127, -127, -127, 127, 32, -127, -127}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_shift_data.h new file mode 100644 index 00000000..4e2abddc --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/output_shift_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_output_shift[8] = {-3, -3, -3, -3, -3, -3, -3, -3}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/test_data.h new file mode 100644 index 00000000..8314dc23 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/test_data.h @@ -0,0 +1,9 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#include "biases_data.h" +#include "config_data.h" +#include "input_data.h" +#include "output_mult_data.h" +#include "output_ref_data.h" +#include "output_shift_data.h" +#include "weights_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/weights_data.h new file mode 100644 index 00000000..7af248db --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic/weights_data.h @@ -0,0 +1,19 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_weights[256] = { + -17, -69, -63, 41, 5, -115, 64, -101, -96, -64, 3, -109, 115, 12, 11, 111, 43, -53, -6, 4, + -70, -6, -74, -75, -87, 47, -37, -12, -68, 0, 1, -128, 101, 63, -110, 48, -84, 81, 38, 21, + -31, 61, 50, 13, 111, 13, -100, -107, -82, -100, 51, -39, -83, -101, 4, -100, 61, 54, 44, 47, + -54, -93, 106, 25, -92, 5, 85, 2, 19, 80, -29, 64, -6, -11, -37, -61, 6, 64, 87, -96, + 69, -14, -2, -38, 95, -16, -67, -83, 77, 0, 73, 48, 18, 46, 17, 25, -28, -106, 18, 107, + 90, -28, -84, 15, 6, -4, 32, 48, 0, 78, 38, 79, 69, -30, -13, 19, -49, 38, 82, -100, + -101, -71, -106, 107, -34, -27, 93, -12, -50, -69, -77, -18, -2, -35, 60, 68, -95, 111, -84, 3, + 13, 64, 65, -97, 53, -78, -80, -71, 111, -47, 34, -47, 35, 31, 70, -82, -26, -87, 52, 46, + 80, 68, -80, 107, 45, -20, 107, 105, 50, -18, -23, 26, -38, -78, 1, 69, -51, -81, -90, 85, + -37, 50, 82, -107, -49, 62, 108, 50, -85, -110, -98, -84, 54, -96, 96, -20, 11, 101, -76, -76, + 9, 79, -94, -93, -30, 27, 37, -47, -51, 80, 21, -30, 0, -87, 66, -53, 5, 10, 12, -108, + 92, -84, 77, -75, -68, 12, -6, -75, -111, 67, -50, 69, -19, 50, 0, 32, -65, 46, -68, -14, + 109, 54, -64, 0, 100, 5, 110, -80, -48, 41, 105, -106, 80, 31, 32, 30}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/biases_data.h new file mode 100644 index 00000000..345b1c5e --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/biases_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_2_biases[9] = + {1576623488, -576394688, 67886720, 35252216, -691817216, 1927929216, 2061231872, 1828343296, 1570706560}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/config_data.h new file mode 100644 index 00000000..75bf6158 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#define DEPTHWISE_INT4_GENERIC_2_OUT_CH 9 +#define DEPTHWISE_INT4_GENERIC_2_IN_CH 3 +#define DEPTHWISE_INT4_GENERIC_2_INPUT_W 9 +#define DEPTHWISE_INT4_GENERIC_2_INPUT_H 9 +#define DEPTHWISE_INT4_GENERIC_2_DST_SIZE 405 +#define DEPTHWISE_INT4_GENERIC_2_INPUT_SIZE 243 +#define DEPTHWISE_INT4_GENERIC_2_OUT_ACTIVATION_MIN -127 +#define DEPTHWISE_INT4_GENERIC_2_OUT_ACTIVATION_MAX 127 +#define DEPTHWISE_INT4_GENERIC_2_INPUT_BATCHES 1 +#define DEPTHWISE_INT4_GENERIC_2_FILTER_X 6 +#define DEPTHWISE_INT4_GENERIC_2_FILTER_Y 5 +#define DEPTHWISE_INT4_GENERIC_2_STRIDE_X 2 +#define DEPTHWISE_INT4_GENERIC_2_STRIDE_Y 1 +#define DEPTHWISE_INT4_GENERIC_2_PAD_X 2 +#define DEPTHWISE_INT4_GENERIC_2_PAD_Y 2 +#define DEPTHWISE_INT4_GENERIC_2_OUTPUT_W 5 +#define DEPTHWISE_INT4_GENERIC_2_OUTPUT_H 9 +#define DEPTHWISE_INT4_GENERIC_2_CH_MULT 3 +#define DEPTHWISE_INT4_GENERIC_2_INPUT_OFFSET 128 +#define DEPTHWISE_INT4_GENERIC_2_OUTPUT_OFFSET -2 +#define DEPTHWISE_INT4_GENERIC_2_DILATION_X 1 +#define DEPTHWISE_INT4_GENERIC_2_DILATION_Y 1 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/input_data.h new file mode 100644 index 00000000..de26acf1 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/input_data.h @@ -0,0 +1,18 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_2_input[243] = { + -12, 42, -37, 61, -44, 27, 26, 8, 57, 15, -53, 31, -33, -24, 16, -75, -87, 88, 66, -59, 60, + -35, 35, 94, 119, -107, -4, 92, 117, -53, -93, 87, -98, 93, 124, -9, 0, -114, 53, -40, -97, -32, + 110, -3, -80, -87, 103, -25, -13, -85, 19, 117, 16, 56, 108, 78, 93, 26, 105, -39, 4, 48, -73, + -114, -75, -65, -72, 29, 2, -66, -47, 81, 96, -38, 90, 66, 125, 113, -5, -45, 79, 45, -77, -107, + -32, -36, -25, -31, 1, 31, 117, -35, 100, 1, -87, 55, -73, -123, 37, -50, -86, 40, 74, 42, -11, + -58, -104, 68, 64, 84, 67, -7, 126, 94, -25, 93, -71, -97, 29, -26, 0, 66, 110, 28, -128, 79, + -8, -88, 118, 67, -56, 38, -74, -98, 38, 14, 30, 8, -5, 45, -26, 62, -113, 68, 114, -108, 110, + 85, 19, -53, -56, 120, 56, 28, 112, 52, 26, -25, -123, 20, 23, -120, 105, 102, 101, 106, 86, -15, + 75, -18, -127, -55, -126, 11, -53, 92, -5, 92, -66, 65, 78, -2, 38, -9, -27, -46, 115, 45, -16, + -102, -74, 68, 97, -38, 123, 61, -83, -93, -3, -58, -80, -59, 111, 37, 5, 74, -7, -66, 41, -33, + -94, 53, -44, 38, 65, -82, 54, -105, 126, 109, 97, 116, -84, -123, -72, -90, -122, -6, 64, -126, -17, + -122, -16, -73, 10, 55, -128, 98, -19, -47, -81, -1, -33}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_mult_data.h new file mode 100644 index 00000000..7220b3b5 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_mult_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int32_t depthwise_int4_generic_2_output_mult[9] = + {1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_ref_data.h new file mode 100644 index 00000000..105ae2f3 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_ref_data.h @@ -0,0 +1,20 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_2_output_ref[405] = { + 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, + -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, + -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, + 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, + 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, + -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, + -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, + 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, + 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, + 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, + -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, + -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, + 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, + 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14, 14, -8, -1, -2, -9, 17, 19, 16, 14}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_shift_data.h new file mode 100644 index 00000000..e54d8f36 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/output_shift_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_2_output_shift[9] = {-26, -26, -26, -26, -26, -26, -26, -26, -26}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/test_data.h new file mode 100644 index 00000000..8314dc23 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/test_data.h @@ -0,0 +1,9 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). 
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#include "biases_data.h" +#include "config_data.h" +#include "input_data.h" +#include "output_mult_data.h" +#include "output_ref_data.h" +#include "output_shift_data.h" +#include "weights_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/weights_data.h new file mode 100644 index 00000000..8198dd81 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_2/weights_data.h @@ -0,0 +1,13 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_2_weights[135] = { + 11, 80, 32, 75, -17, 94, 57, -68, 17, -80, 96, -98, -101, 1, -91, -82, -64, 105, 80, -100, + -46, -91, 111, -85, 9, 10, -80, -18, -79, -102, 100, -38, -79, -27, 64, 0, 21, -102, -48, -76, + 21, 53, 14, 105, -4, -55, 17, -78, 68, 36, 14, 38, 74, 62, -93, -77, 74, 105, 109, -13, + -17, -83, 63, 28, 35, 44, 109, -46, -66, -16, 110, 20, 21, -34, 37, -99, -58, -39, -67, -15, + -22, -96, 10, -36, 42, -75, -100, -32, -96, 86, -96, -16, -63, -96, 4, 90, 25, -79, -50, -43, + -59, 21, -42, 86, -26, 11, 0, 9, -62, -28, -78, 97, 0, 98, -99, -106, -112, 1, 45, -23, + -23, -14, -80, -44, 14, 12, 97, -108, 22, 46, -3, -1, 54, 53, 48}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/biases_data.h new file mode 100644 index 00000000..717a55e4 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/biases_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_3_biases[8] = + {-1343856896, 241190736, 1616174080, 1624385408, -1478269696, 510362336, 944345792, -132134768}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/config_data.h new file mode 100644 index 00000000..57fe9a39 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
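+// Editorial note (not part of the generated file): the macros below are self-consistent for this
+// dilated, unpadded case: OUT_CH = IN_CH * CH_MULT (8 = 4 * 2), and
+//     OUTPUT_W = (INPUT_W - DILATION_X * (FILTER_X - 1) - 1) / STRIDE_X + 1 = (9 - 2 * 4 - 1) / 1 + 1 = 1,
+// likewise OUTPUT_H = 1, so DST_SIZE = OUTPUT_W * OUTPUT_H * OUT_CH = 8.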
+#pragma once +#define DEPTHWISE_INT4_GENERIC_3_OUT_CH 8 +#define DEPTHWISE_INT4_GENERIC_3_IN_CH 4 +#define DEPTHWISE_INT4_GENERIC_3_INPUT_W 9 +#define DEPTHWISE_INT4_GENERIC_3_INPUT_H 9 +#define DEPTHWISE_INT4_GENERIC_3_DST_SIZE 8 +#define DEPTHWISE_INT4_GENERIC_3_INPUT_SIZE 324 +#define DEPTHWISE_INT4_GENERIC_3_OUT_ACTIVATION_MIN -127 +#define DEPTHWISE_INT4_GENERIC_3_OUT_ACTIVATION_MAX 125 +#define DEPTHWISE_INT4_GENERIC_3_INPUT_BATCHES 1 +#define DEPTHWISE_INT4_GENERIC_3_FILTER_X 5 +#define DEPTHWISE_INT4_GENERIC_3_FILTER_Y 5 +#define DEPTHWISE_INT4_GENERIC_3_STRIDE_X 1 +#define DEPTHWISE_INT4_GENERIC_3_STRIDE_Y 1 +#define DEPTHWISE_INT4_GENERIC_3_PAD_X 0 +#define DEPTHWISE_INT4_GENERIC_3_PAD_Y 0 +#define DEPTHWISE_INT4_GENERIC_3_OUTPUT_W 1 +#define DEPTHWISE_INT4_GENERIC_3_OUTPUT_H 1 +#define DEPTHWISE_INT4_GENERIC_3_CH_MULT 2 +#define DEPTHWISE_INT4_GENERIC_3_INPUT_OFFSET 128 +#define DEPTHWISE_INT4_GENERIC_3_OUTPUT_OFFSET -2 +#define DEPTHWISE_INT4_GENERIC_3_DILATION_X 2 +#define DEPTHWISE_INT4_GENERIC_3_DILATION_Y 2 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/input_data.h new file mode 100644 index 00000000..75c23573 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/input_data.h @@ -0,0 +1,24 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_3_input[324] = { + 47, -124, -94, 102, 111, -70, -32, -76, -33, 9, 91, -96, 123, 70, 97, 88, -1, 103, + 114, 74, -57, 53, -27, -120, -44, 112, -18, 47, 42, -41, -78, 60, 117, 58, -11, -106, + -101, 29, -97, -106, 120, -99, 74, -73, -57, -18, -93, 44, -90, -5, 81, 110, 52, -38, + -6, -6, 66, -20, 90, -17, -85, 36, -81, -7, 21, 62, 115, -39, -113, 116, 118, 54, + -20, 80, -26, 92, -37, 26, 70, 60, -96, -66, -126, -26, -74, 63, -17, 71, -120, -113, + 32, 18, 14, -10, -67, -48, -125, 83, -20, 85, 73, -125, 16, -118, -88, 107, -71, -3, + -95, 83, 20, -96, 112, 116, 26, 79, 117, -108, 120, -83, 3, 88, 97, 108, -68, 84, + -105, 118, -3, 29, -54, -25, -69, 29, -101, 72, 98, -114, -31, 92, -10, -85, 38, 116, + 57, -14, -16, 1, 55, 21, 45, -41, -83, -74, 109, 80, -85, 91, 49, 67, 58, 96, + 72, 35, 15, 94, 25, 100, -55, -118, 104, 89, -119, 102, 24, 69, -117, 69, 0, 77, + -103, 61, -63, 14, 57, -73, -64, 38, 54, -2, -36, 112, -101, 28, 27, -26, -90, 56, + -55, -82, -41, -53, 4, -116, 17, -16, -23, -102, 82, 18, 98, 40, -50, -37, -128, -106, + -3, 126, 27, -108, 62, 24, 96, 51, 77, 71, -54, -50, -27, 108, -48, -84, -72, -69, + -122, 6, -117, 87, 124, -121, 37, -46, -3, -5, 69, 56, -70, 65, -63, -65, 21, 83, + 55, 5, -51, -45, 8, 67, -113, 88, 52, 48, -112, 125, -5, -117, 14, 96, 117, 96, + -58, 90, 30, 111, -56, 80, -122, 38, -37, 93, 124, -55, 12, 48, 116, 68, -127, -88, + 115, 7, -68, -21, 58, -104, -90, -99, -71, 13, -55, -29, -67, -43, -71, -3, -106, 126, + -91, -99, 94, 18, 84, -113, 109, -29, -48, 60, 24, -21, 107, -49, -20, 110, -13, -96}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_mult_data.h new file mode 100644 index 00000000..57cc4095 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_mult_data.h @@ -0,0 +1,7 @@ +// Generated by test_settings.py using tensorflow version 
2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int32_t depthwise_int4_generic_3_output_mult[8] =
+    {1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031, 1442056031};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_ref_data.h
new file mode 100644
index 00000000..33999bb2
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_ref_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int8_t depthwise_int4_generic_3_output_ref[8] = {-15, 0, 14, 14, -17, 3, 7, -3};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_shift_data.h
new file mode 100644
index 00000000..278c7a1c
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/output_shift_data.h
@@ -0,0 +1,6 @@
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int32_t depthwise_int4_generic_3_output_shift[8] = {-26, -26, -26, -26, -26, -26, -26, -26};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/test_data.h
new file mode 100644
index 00000000..8314dc23
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/test_data.h
@@ -0,0 +1,9 @@
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#include "biases_data.h"
+#include "config_data.h"
+#include "input_data.h"
+#include "output_mult_data.h"
+#include "output_ref_data.h"
+#include "output_shift_data.h"
+#include "weights_data.h"
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/weights_data.h
new file mode 100644
index 00000000..89a290a4
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_3/weights_data.h
@@ -0,0 +1,11 @@
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
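+// Editorial note (not part of the generated file): s4 weights are stored packed, two 4-bit values
+// per int8_t byte (nibble order per the CMSIS-NN int4 convention). A 5x5 filter with 8 output
+// channels therefore holds 5 * 5 * 8 = 200 nibbles, i.e. the 100 bytes below.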
+#pragma once +#include + +const int8_t depthwise_int4_generic_3_weights[100] = { + 68, 12, -4, 65, 91, 95, -64, -35, -46, 20, 3, 35, -60, 94, -80, 83, -93, -64, 52, 65, + 9, -11, 70, -53, 32, 83, 122, 81, 96, -51, 18, -81, 78, -96, 14, -36, 110, 80, 65, -19, + -110, 47, 85, -62, -71, -14, -110, 37, 98, -17, -111, -37, 7, 31, 57, 15, -84, 107, 57, -107, + 109, 15, -11, 31, -94, -81, -80, -49, 80, -31, -108, 94, -86, 102, -5, -80, -80, 107, 4, 80, + 10, 35, 80, -37, 11, -95, -101, 27, 73, 28, -77, -76, -31, 86, -115, 21, -31, 41, 105, -83}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/biases_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/biases_data.h new file mode 100644 index 00000000..65f1a83c --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/biases_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_4_biases[3] = {1208498688, 1284329088, -843635840}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/config_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/config_data.h new file mode 100644 index 00000000..20e92db7 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/config_data.h @@ -0,0 +1,25 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#define DEPTHWISE_INT4_GENERIC_4_OUT_CH 3 +#define DEPTHWISE_INT4_GENERIC_4_IN_CH 1 +#define DEPTHWISE_INT4_GENERIC_4_INPUT_W 12 +#define DEPTHWISE_INT4_GENERIC_4_INPUT_H 10 +#define DEPTHWISE_INT4_GENERIC_4_DST_SIZE 180 +#define DEPTHWISE_INT4_GENERIC_4_INPUT_SIZE 120 +#define DEPTHWISE_INT4_GENERIC_4_OUT_ACTIVATION_MIN -127 +#define DEPTHWISE_INT4_GENERIC_4_OUT_ACTIVATION_MAX 127 +#define DEPTHWISE_INT4_GENERIC_4_INPUT_BATCHES 1 +#define DEPTHWISE_INT4_GENERIC_4_FILTER_X 5 +#define DEPTHWISE_INT4_GENERIC_4_FILTER_Y 5 +#define DEPTHWISE_INT4_GENERIC_4_STRIDE_X 1 +#define DEPTHWISE_INT4_GENERIC_4_STRIDE_Y 2 +#define DEPTHWISE_INT4_GENERIC_4_PAD_X 2 +#define DEPTHWISE_INT4_GENERIC_4_PAD_Y 1 +#define DEPTHWISE_INT4_GENERIC_4_OUTPUT_W 12 +#define DEPTHWISE_INT4_GENERIC_4_OUTPUT_H 5 +#define DEPTHWISE_INT4_GENERIC_4_CH_MULT 3 +#define DEPTHWISE_INT4_GENERIC_4_INPUT_OFFSET 128 +#define DEPTHWISE_INT4_GENERIC_4_OUTPUT_OFFSET -2 +#define DEPTHWISE_INT4_GENERIC_4_DILATION_X 1 +#define DEPTHWISE_INT4_GENERIC_4_DILATION_Y 1 diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/input_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/input_data.h new file mode 100644 index 00000000..1f8ba555 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/input_data.h @@ -0,0 +1,12 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once +#include + +const int8_t depthwise_int4_generic_4_input[120] = { + -78, -47, 90, 98, -105, -85, -67, 38, 74, -57, 4, -10, 102, -66, 42, 3, 45, 97, 58, 44, + -24, -91, -55, 11, -79, 52, -61, 53, -120, -92, 117, 42, -128, 60, 34, 114, 81, 108, -10, -21, + -96, -17, -15, -34, -3, -38, -88, 41, -128, 90, 34, 84, -60, -35, -19, -24, 120, -90, 123, -22, + -51, -21, -51, 46, 7, 107, -95, -2, -98, -21, 116, 121, -54, -90, -66, -45, 0, 117, -85, -125, + 75, -17, 98, -61, 118, -119, -31, -19, -89, 83, -76, -44, 37, 107, -49, -18, 22, -81, -8, -33, + -24, 85, -105, 1, 47, 73, 108, -107, -115, 67, -57, -4, 26, 34, -86, -89, -113, 75, -86, -101}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_mult_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_mult_data.h new file mode 100644 index 00000000..15d6eec4 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_mult_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int32_t depthwise_int4_generic_4_output_mult[3] = {1442056031, 1442056031, 1442056031}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_ref_data.h new file mode 100644 index 00000000..13b739f4 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_ref_data.h @@ -0,0 +1,13 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. +#pragma once +#include + +const int8_t depthwise_int4_generic_4_output_ref[180] = { + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, + 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10, 10, 11, -10}; diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_shift_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_shift_data.h new file mode 100644 index 00000000..e4c11751 --- /dev/null +++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/output_shift_data.h @@ -0,0 +1,6 @@ +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794. 
+#pragma once
+#include <stdint.h>
+
+const int32_t depthwise_int4_generic_4_output_shift[3] = {-26, -26, -26};
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/test_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/test_data.h
new file mode 100644
index 00000000..8314dc23
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/test_data.h
@@ -0,0 +1,9 @@
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#include "biases_data.h"
+#include "config_data.h"
+#include "input_data.h"
+#include "output_mult_data.h"
+#include "output_ref_data.h"
+#include "output_shift_data.h"
+#include "weights_data.h"
diff --git a/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/weights_data.h b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/weights_data.h
new file mode 100644
index 00000000..40093157
--- /dev/null
+++ b/Tests/UnitTest/TestCases/TestData/depthwise_int4_generic_4/weights_data.h
@@ -0,0 +1,8 @@
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_runtime version 2.16.0 and revision 0.6.0-154906-gb2493fdf794.
+#pragma once
+#include <stdint.h>
+
+const int8_t depthwise_int4_generic_4_weights[38] = {75, 21, 94, -113, 75, -68, 25, -110, 100, 109, -51, 99, -53,
+                                                     5,  -76, -17, 12, -28, 22, 86, 111, 30, 57, 41, -87, 82,
+                                                     79, -3, -90, -34, 70, 17, -112, 6, 0, -86, -1, 0};
diff --git a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/biases_data.h b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/biases_data.h
index cb6dc9c8..c3b3be9c 100644
--- a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/biases_data.h
+++ b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/biases_data.h
@@ -1,6 +1,6 @@
-// Generated by test_settings.py using tensorflow version 2.15.0-dev20231004 (Keras version 2.15.0.dev2023092207).
-// Interpreter from tflite_micro version 0.dev20231002060915-gec477d5c and revision None.
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_micro version 0.dev20230817002213-g3bd11ea3 and revision None.
 #pragma once
 #include <stdint.h>
 
-const int32_t fully_connected_int4_biases[5] = {68, -43, -43, 75, -33};
+const int32_t fully_connected_int4_biases[5] = {17, -88, -107, 94, -29};
diff --git a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/config_data.h b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/config_data.h
index a0f836b9..c7998883 100644
--- a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/config_data.h
+++ b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/config_data.h
@@ -1,5 +1,5 @@
-// Generated by test_settings.py using tensorflow version 2.15.0-dev20231004 (Keras version 2.15.0.dev2023092207).
-// Interpreter from tflite_micro version 0.dev20231002060915-gec477d5c and revision None.
+// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0).
+// Interpreter from tflite_micro version 0.dev20230817002213-g3bd11ea3 and revision None.
#pragma once #define FULLY_CONNECTED_INT4_OUT_CH 5 #define FULLY_CONNECTED_INT4_IN_CH 2 diff --git a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/input_data.h b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/input_data.h index 865768e3..6e6cdf47 100644 --- a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/input_data.h +++ b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/input_data.h @@ -1,6 +1,6 @@ -// Generated by test_settings.py using tensorflow version 2.15.0-dev20231004 (Keras version 2.15.0.dev2023092207). -// Interpreter from tflite_micro version 0.dev20231002060915-gec477d5c and revision None. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_micro version 0.dev20230817002213-g3bd11ea3 and revision None. #pragma once #include -const int8_t fully_connected_int4_input[2] = {-62, -87}; +const int8_t fully_connected_int4_input[2] = {-91, 59}; diff --git a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/output_ref_data.h b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/output_ref_data.h index dd3c0bda..c9cc8361 100644 --- a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/output_ref_data.h +++ b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/output_ref_data.h @@ -1,6 +1,6 @@ -// Generated by test_settings.py using tensorflow version 2.15.0-dev20231004 (Keras version 2.15.0.dev2023092207). -// Interpreter from tflite_micro version 0.dev20231002060915-gec477d5c and revision None. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_micro version 0.dev20230817002213-g3bd11ea3 and revision None. #pragma once #include -const int8_t fully_connected_int4_output_ref[5] = {3, 2, -3, -4, -3}; +const int8_t fully_connected_int4_output_ref[5] = {1, -6, -1, 0, -1}; diff --git a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/test_data.h b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/test_data.h index 2adcdca6..cb314c35 100644 --- a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/test_data.h +++ b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/test_data.h @@ -1,5 +1,5 @@ -// Generated by test_settings.py using tensorflow version 2.15.0-dev20231004 (Keras version 2.15.0.dev2023092207). -// Interpreter from tflite_micro version 0.dev20231002060915-gec477d5c and revision None. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_micro version 0.dev20230817002213-g3bd11ea3 and revision None. #include "biases_data.h" #include "config_data.h" #include "input_data.h" diff --git a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/weights_data.h b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/weights_data.h index bdabc64b..76746db6 100644 --- a/Tests/UnitTest/TestCases/TestData/fully_connected_int4/weights_data.h +++ b/Tests/UnitTest/TestCases/TestData/fully_connected_int4/weights_data.h @@ -1,6 +1,6 @@ -// Generated by test_settings.py using tensorflow version 2.15.0-dev20231004 (Keras version 2.15.0.dev2023092207). -// Interpreter from tflite_micro version 0.dev20231002060915-gec477d5c and revision None. +// Generated by test_settings.py using tensorflow version 2.14.0 (Keras version 2.14.0). +// Interpreter from tflite_micro version 0.dev20230817002213-g3bd11ea3 and revision None. 
#pragma once #include -const int8_t fully_connected_int4_weights[5] = {139, 138, 241, 5, 60}; +const int8_t fully_connected_int4_weights[5] = {24, 22, -72, -22, -5}; diff --git a/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/CMakeLists.txt b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/CMakeLists.txt new file mode 100644 index 00000000..96e686ea --- /dev/null +++ b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/CMakeLists.txt @@ -0,0 +1,23 @@ +# +# Copyright (C) 2023 Arm Limited or its affiliates. All rights reserved. +# +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the License); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an AS IS BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +add_cmsis_nn_unit_test_executable(test_arm_depthwise_conv_s4) + +target_sources(test_arm_depthwise_conv_s4 PRIVATE + Unity/unity_test_arm_depthwise_conv_s4.c + Unity/TestRunner/unity_test_arm_depthwise_conv_s4_runner.c) diff --git a/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/Unity/unity_test_arm_depthwise_conv_s4.c b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/Unity/unity_test_arm_depthwise_conv_s4.c new file mode 100644 index 00000000..c695cfa0 --- /dev/null +++ b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/Unity/unity_test_arm_depthwise_conv_s4.c @@ -0,0 +1,50 @@ +/* + * SPDX-FileCopyrightText: Copyright 2010-2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include + +#include "../test_arm_depthwise_conv_s4.c" +#include "unity.h" + +#ifdef USING_FVP_CORSTONE_300 +extern void uart_init(void); +#endif + +/* This function is called from the autogenerated file. + * The name must be exactly like this + */ +void setUp(void) +{ /* This is run before EACH TEST */ +#ifdef USING_FVP_CORSTONE_300 + uart_init(); +#endif +} + +/* This function is called from the autogenerated file. 
+ * The name must be exactly like this + */ +void tearDown(void) {} + +void test_depthwise_int4_generic_arm_depthwise_conv_s4(void) { depthwise_int4_generic_arm_depthwise_conv_s4(); } +void test_depthwise_int4_generic_2_arm_depthwise_conv_s4(void) { depthwise_int4_generic_2_arm_depthwise_conv_s4(); } +void test_depthwise_int4_generic_3_arm_depthwise_conv_s4(void) { depthwise_int4_generic_3_arm_depthwise_conv_s4(); } +void test_depthwise_int4_generic_4_arm_depthwise_conv_s4(void) { depthwise_int4_generic_4_arm_depthwise_conv_s4(); } diff --git a/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/test_arm_depthwise_conv_s4.c b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/test_arm_depthwise_conv_s4.c new file mode 100644 index 00000000..09303214 --- /dev/null +++ b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4/test_arm_depthwise_conv_s4.c @@ -0,0 +1,400 @@ +/* + * SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#include "../TestData/depthwise_int4_generic/test_data.h" +#include "../TestData/depthwise_int4_generic_2/test_data.h" +#include "../TestData/depthwise_int4_generic_3/test_data.h" +#include "../TestData/depthwise_int4_generic_4/test_data.h" +#include "../Utils/utils.h" +#include "../Utils/validate.h" + +void depthwise_int4_generic_arm_depthwise_conv_s4(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_GENERIC_DST_SIZE] = {0}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_generic_biases; + const int8_t *kernel_data = depthwise_int4_generic_weights; + const int8_t *input_data = depthwise_int4_generic_input; + + input_dims.n = DEPTHWISE_INT4_GENERIC_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_GENERIC_INPUT_W; + input_dims.h = DEPTHWISE_INT4_GENERIC_INPUT_H; + input_dims.c = DEPTHWISE_INT4_GENERIC_IN_CH; + filter_dims.w = DEPTHWISE_INT4_GENERIC_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_GENERIC_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_GENERIC_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_GENERIC_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_GENERIC_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_GENERIC_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_GENERIC_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_GENERIC_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_GENERIC_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_GENERIC_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_GENERIC_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_GENERIC_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_GENERIC_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_GENERIC_OUTPUT_OFFSET; + 
dw_conv_params.activation.min = DEPTHWISE_INT4_GENERIC_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_GENERIC_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_generic_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_generic_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_output_ref, DEPTHWISE_INT4_GENERIC_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_output_ref, DEPTHWISE_INT4_GENERIC_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_DST_SIZE); +} + +void depthwise_int4_generic_2_arm_depthwise_conv_s4(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_GENERIC_2_DST_SIZE] = {0}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_generic_2_biases; + const int8_t *kernel_data = depthwise_int4_generic_2_weights; + const int8_t *input_data = depthwise_int4_generic_2_input; + + input_dims.n = DEPTHWISE_INT4_GENERIC_2_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_GENERIC_2_INPUT_W; + input_dims.h = DEPTHWISE_INT4_GENERIC_2_INPUT_H; + input_dims.c = DEPTHWISE_INT4_GENERIC_2_IN_CH; + filter_dims.w = DEPTHWISE_INT4_GENERIC_2_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_GENERIC_2_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_GENERIC_2_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_GENERIC_2_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_GENERIC_2_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_GENERIC_2_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_GENERIC_2_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_GENERIC_2_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_GENERIC_2_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_GENERIC_2_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_GENERIC_2_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_GENERIC_2_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_GENERIC_2_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_GENERIC_2_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_GENERIC_2_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_GENERIC_2_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_generic_2_output_mult; + quant_params.shift = 
(int32_t *)depthwise_int4_generic_2_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_2_output_ref, DEPTHWISE_INT4_GENERIC_2_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_2_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_2_output_ref, DEPTHWISE_INT4_GENERIC_2_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_2_DST_SIZE); +} + +void depthwise_int4_generic_3_arm_depthwise_conv_s4(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_GENERIC_3_DST_SIZE] = {0}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_generic_3_biases; + const int8_t *kernel_data = depthwise_int4_generic_3_weights; + const int8_t *input_data = depthwise_int4_generic_3_input; + + input_dims.n = DEPTHWISE_INT4_GENERIC_3_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_GENERIC_3_INPUT_W; + input_dims.h = DEPTHWISE_INT4_GENERIC_3_INPUT_H; + input_dims.c = DEPTHWISE_INT4_GENERIC_3_IN_CH; + filter_dims.w = DEPTHWISE_INT4_GENERIC_3_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_GENERIC_3_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_GENERIC_3_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_GENERIC_3_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_GENERIC_3_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_GENERIC_3_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_GENERIC_3_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_GENERIC_3_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_GENERIC_3_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_GENERIC_3_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_GENERIC_3_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_GENERIC_3_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_GENERIC_3_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_GENERIC_3_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_GENERIC_3_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_GENERIC_3_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_generic_3_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_generic_3_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = 
arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_3_output_ref, DEPTHWISE_INT4_GENERIC_3_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_3_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_3_output_ref, DEPTHWISE_INT4_GENERIC_3_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_3_DST_SIZE); +} + +void depthwise_int4_generic_4_arm_depthwise_conv_s4(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_GENERIC_4_DST_SIZE] = {0}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_generic_4_biases; + const int8_t *kernel_data = depthwise_int4_generic_4_weights; + const int8_t *input_data = depthwise_int4_generic_4_input; + + input_dims.n = DEPTHWISE_INT4_GENERIC_4_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_GENERIC_4_INPUT_W; + input_dims.h = DEPTHWISE_INT4_GENERIC_4_INPUT_H; + input_dims.c = DEPTHWISE_INT4_GENERIC_4_IN_CH; + filter_dims.w = DEPTHWISE_INT4_GENERIC_4_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_GENERIC_4_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_GENERIC_4_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_GENERIC_4_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_GENERIC_4_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_GENERIC_4_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_GENERIC_4_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_GENERIC_4_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_GENERIC_4_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_GENERIC_4_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_GENERIC_4_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_GENERIC_4_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_GENERIC_4_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_GENERIC_4_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_GENERIC_4_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_GENERIC_4_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_generic_4_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_generic_4_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to 
clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_4_output_ref, DEPTHWISE_INT4_GENERIC_4_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_4_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_generic_4_output_ref, DEPTHWISE_INT4_GENERIC_4_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_GENERIC_4_DST_SIZE); +} diff --git a/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/CMakeLists.txt b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/CMakeLists.txt new file mode 100644 index 00000000..f6477d93 --- /dev/null +++ b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/CMakeLists.txt @@ -0,0 +1,23 @@ +# +# Copyright (C) 2023 Arm Limited or its affiliates. All rights reserved. +# +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the License); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an AS IS BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +add_cmsis_nn_unit_test_executable(test_arm_depthwise_conv_s4_opt) + +target_sources(test_arm_depthwise_conv_s4_opt PRIVATE + Unity/unity_test_arm_depthwise_conv_s4_opt.c + Unity/TestRunner/unity_test_arm_depthwise_conv_s4_opt_runner.c) diff --git a/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/Unity/unity_test_arm_depthwise_conv_s4_opt.c b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/Unity/unity_test_arm_depthwise_conv_s4_opt.c new file mode 100644 index 00000000..fec10560 --- /dev/null +++ b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/Unity/unity_test_arm_depthwise_conv_s4_opt.c @@ -0,0 +1,50 @@ +/* + * SPDX-FileCopyrightText: Copyright 2010-2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include + +#include "../test_arm_depthwise_conv_s4_opt.c" +#include "unity.h" + +#ifdef USING_FVP_CORSTONE_300 +extern void uart_init(void); +#endif + +/* This function is called from the autogenerated file. 
+ * The name must be exactly like this + */ +void setUp(void) +{ /* This is run before EACH TEST */ +#ifdef USING_FVP_CORSTONE_300 + uart_init(); +#endif +} + +/* This function is called from the autogenerated file. + * The name must be exactly like this + */ +void tearDown(void) {} + +void test_depthwise_int4_1_arm_depthwise_conv_s4_opt(void) { depthwise_int4_1_arm_depthwise_conv_s4_opt(); } +void test_depthwise_int4_2_arm_depthwise_conv_s4_opt(void) { depthwise_int4_2_arm_depthwise_conv_s4_opt(); } +void test_depthwise_int4_3_arm_depthwise_conv_s4_opt(void) { depthwise_int4_3_arm_depthwise_conv_s4_opt(); } +void test_depthwise_int4_4_arm_depthwise_conv_s4_opt(void) { depthwise_int4_4_arm_depthwise_conv_s4_opt(); } diff --git a/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/test_arm_depthwise_conv_s4_opt.c b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/test_arm_depthwise_conv_s4_opt.c new file mode 100644 index 00000000..c5a88461 --- /dev/null +++ b/Tests/UnitTest/TestCases/test_arm_depthwise_conv_s4_opt/test_arm_depthwise_conv_s4_opt.c @@ -0,0 +1,490 @@ +/* + * SPDX-FileCopyrightText: Copyright 2023 Arm Limited and/or its affiliates + * + * SPDX-License-Identifier: Apache-2.0 + * + * Licensed under the Apache License, Version 2.0 (the License); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an AS IS BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#include "../TestData/depthwise_int4_1/test_data.h" +#include "../TestData/depthwise_int4_2/test_data.h" +#include "../TestData/depthwise_int4_3/test_data.h" +#include "../TestData/depthwise_int4_4/test_data.h" +#include "../Utils/utils.h" +#include "../Utils/validate.h" + +void depthwise_int4_1_arm_depthwise_conv_s4_opt(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_1_DST_SIZE] = {0}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_1_biases; + const int8_t *kernel_data = depthwise_int4_1_weights; + const int8_t *input_data = depthwise_int4_1_input; + + input_dims.n = DEPTHWISE_INT4_1_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_1_INPUT_W; + input_dims.h = DEPTHWISE_INT4_1_INPUT_H; + input_dims.c = DEPTHWISE_INT4_1_IN_CH; + filter_dims.w = DEPTHWISE_INT4_1_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_1_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_1_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_1_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_1_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_1_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_1_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_1_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_1_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_1_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_1_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_1_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_1_INPUT_OFFSET; + dw_conv_params.output_offset = 
DEPTHWISE_INT4_1_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_1_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_1_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_1_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_1_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = arm_depthwise_conv_s4_opt(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_1_output_ref, DEPTHWISE_INT4_1_DST_SIZE)); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_1_output_ref, DEPTHWISE_INT4_1_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_1_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. 
+ memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_1_output_ref, DEPTHWISE_INT4_1_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_1_DST_SIZE); +} + +void depthwise_int4_2_arm_depthwise_conv_s4_opt(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_2_DST_SIZE] = {}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_2_biases; + const int8_t *kernel_data = depthwise_int4_2_weights; + const int8_t *input_data = depthwise_int4_2_input; + + input_dims.n = DEPTHWISE_INT4_2_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_2_INPUT_W; + input_dims.h = DEPTHWISE_INT4_2_INPUT_H; + input_dims.c = DEPTHWISE_INT4_2_IN_CH; + filter_dims.w = DEPTHWISE_INT4_2_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_2_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_2_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_2_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_2_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_2_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_2_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_2_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_2_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_2_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_2_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_2_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_2_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_2_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_2_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_2_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_2_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_2_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = arm_depthwise_conv_s4_opt(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_2_output_ref, DEPTHWISE_INT4_2_DST_SIZE)); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. 
+ memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_2_output_ref, DEPTHWISE_INT4_2_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_2_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_2_output_ref, DEPTHWISE_INT4_2_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_2_DST_SIZE); +} + +void depthwise_int4_3_arm_depthwise_conv_s4_opt(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_3_DST_SIZE] = {}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_3_biases; + const int8_t *kernel_data = depthwise_int4_3_weights; + const int8_t *input_data = depthwise_int4_3_input; + + input_dims.n = DEPTHWISE_INT4_3_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_3_INPUT_W; + input_dims.h = DEPTHWISE_INT4_3_INPUT_H; + input_dims.c = DEPTHWISE_INT4_3_IN_CH; + filter_dims.w = DEPTHWISE_INT4_3_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_3_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_3_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_3_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_3_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_3_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_3_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_3_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_3_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_3_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_3_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_3_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_3_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_3_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_3_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_3_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_3_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_3_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + + arm_cmsis_nn_status result = arm_depthwise_conv_s4_opt(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. 
+ memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_3_output_ref, DEPTHWISE_INT4_3_DST_SIZE)); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_3_output_ref, DEPTHWISE_INT4_3_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_3_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_3_output_ref, DEPTHWISE_INT4_3_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_3_DST_SIZE); +} + +void depthwise_int4_4_arm_depthwise_conv_s4_opt(void) +{ + const arm_cmsis_nn_status expected = ARM_CMSIS_NN_SUCCESS; + int8_t output[DEPTHWISE_INT4_4_DST_SIZE] = {0}; + + cmsis_nn_context ctx; + cmsis_nn_dw_conv_params dw_conv_params; + cmsis_nn_per_channel_quant_params quant_params; + cmsis_nn_dims input_dims; + cmsis_nn_dims filter_dims; + cmsis_nn_dims bias_dims; + cmsis_nn_dims output_dims; + + const int32_t *bias_data = depthwise_int4_4_biases; + const int8_t *kernel_data = depthwise_int4_4_weights; + const int8_t *input_data = depthwise_int4_4_input; + + input_dims.n = DEPTHWISE_INT4_4_INPUT_BATCHES; + input_dims.w = DEPTHWISE_INT4_4_INPUT_W; + input_dims.h = DEPTHWISE_INT4_4_INPUT_H; + input_dims.c = DEPTHWISE_INT4_4_IN_CH; + filter_dims.w = DEPTHWISE_INT4_4_FILTER_X; + filter_dims.h = DEPTHWISE_INT4_4_FILTER_Y; + output_dims.w = DEPTHWISE_INT4_4_OUTPUT_W; + output_dims.h = DEPTHWISE_INT4_4_OUTPUT_H; + output_dims.c = DEPTHWISE_INT4_4_OUT_CH; + + dw_conv_params.padding.w = DEPTHWISE_INT4_4_PAD_X; + dw_conv_params.padding.h = DEPTHWISE_INT4_4_PAD_Y; + dw_conv_params.stride.w = DEPTHWISE_INT4_4_STRIDE_X; + dw_conv_params.stride.h = DEPTHWISE_INT4_4_STRIDE_Y; + dw_conv_params.dilation.w = DEPTHWISE_INT4_4_DILATION_X; + dw_conv_params.dilation.h = DEPTHWISE_INT4_4_DILATION_Y; + + dw_conv_params.ch_mult = DEPTHWISE_INT4_4_CH_MULT; + + dw_conv_params.input_offset = DEPTHWISE_INT4_4_INPUT_OFFSET; + dw_conv_params.output_offset = DEPTHWISE_INT4_4_OUTPUT_OFFSET; + dw_conv_params.activation.min = DEPTHWISE_INT4_4_OUT_ACTIVATION_MIN; + dw_conv_params.activation.max = DEPTHWISE_INT4_4_OUT_ACTIVATION_MAX; + quant_params.multiplier = (int32_t *)depthwise_int4_4_output_mult; + quant_params.shift = (int32_t *)depthwise_int4_4_output_shift; + + ctx.size = arm_depthwise_conv_s4_opt_get_buffer_size(&input_dims, &filter_dims); + TEST_ASSERT_TRUE(ctx.size > 0); + + ctx.buf = malloc(ctx.size); + arm_cmsis_nn_status result = arm_depthwise_conv_s4_opt(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for 
security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_4_output_ref, DEPTHWISE_INT4_4_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_4_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_wrapper_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_4_output_ref, DEPTHWISE_INT4_4_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_4_DST_SIZE); + + ctx.buf = malloc(ctx.size); + result = arm_depthwise_conv_s4(&ctx, + &dw_conv_params, + &quant_params, + &input_dims, + input_data, + &filter_dims, + kernel_data, + &bias_dims, + bias_data, + &output_dims, + output); + + if (ctx.buf) + { + // The caller is responsible to clear the scratch buffers for security reasons if applicable. + memset(ctx.buf, 0, ctx.size); + free(ctx.buf); + } + TEST_ASSERT_EQUAL(expected, result); + TEST_ASSERT_TRUE(validate(output, depthwise_int4_4_output_ref, DEPTHWISE_INT4_4_DST_SIZE)); + memset(output, 0, DEPTHWISE_INT4_4_DST_SIZE); +} diff --git a/Tests/UnitTest/TestCases/test_arm_fully_connected_s8/test_arm_fully_connected_s8.c b/Tests/UnitTest/TestCases/test_arm_fully_connected_s8/test_arm_fully_connected_s8.c index 00575a14..c5e0713c 100644 --- a/Tests/UnitTest/TestCases/test_arm_fully_connected_s8/test_arm_fully_connected_s8.c +++ b/Tests/UnitTest/TestCases/test_arm_fully_connected_s8/test_arm_fully_connected_s8.c @@ -254,7 +254,7 @@ void fully_connected_null_bias_0_arm_fully_connected_s8(void) arm_cmsis_nn_status ip_check = ARM_CMSIS_NN_SUCCESS; for (int i = 0; i < FULLY_CONNECTED_NULL_BIAS_0_OUT_CH; i++) { - if (bias_data[i] != 0) + if (bias_data && (bias_data[i] != 0)) { ip_check = ARM_CMSIS_NN_ARG_ERROR; break; diff --git a/Tests/UnitTest/conv_settings.py b/Tests/UnitTest/conv_settings.py index ad07dfae..559223a7 100644 --- a/Tests/UnitTest/conv_settings.py +++ b/Tests/UnitTest/conv_settings.py @@ -19,6 +19,8 @@ import tensorflow as tf import numpy as np +import math + class ConvSettings(TestSettings): @@ -50,7 +52,8 @@ def __init__(self, bias_max=TestSettings.INT32_MAX, dilation_x=1, dilation_y=1, - interpreter="tensorflow"): + interpreter="tensorflow", + int4_weights=False): super().__init__(dataset, testtype, regenerate_weights, @@ -78,7 +81,8 @@ def __init__(self, bias_max=bias_max, dilation_x=dilation_x, dilation_y=dilation_y, - interpreter=interpreter) + interpreter=interpreter, + int4_weights=int4_weights) self.scaling_factors = [] @@ -87,6 +91,9 @@ def __init__(self, if self.output_ch % self.input_ch != 0: raise RuntimeError("out channel ({}) is not multiple of in channel ({})".format(out_ch, in_ch)) + if self.int4_weights: + self.json_template = "TestCases/Common/dw_s4_weights_template.json" + def write_c_config_header(self) -> None: super().write_c_config_header() @@ -134,85 +141,179 @@ def generate_data(self, input_data=None, weights=None, biases=None) -> None: bias_datatype = "int32_t" input_data = self.get_randomized_input_data(input_data) + biases = self.get_randomized_bias_data(biases) if self.test_type == 'conv' or self.test_type == 'transpose_conv': out_channel = self.output_ch elif self.test_type == 
'depthwise_conv': out_channel = self.channel_multiplier - if self.test_type == 'transpose_conv': - weight_shape = [self.filter_y, self.filter_x, out_channel, self.input_ch] - else: - weight_shape = [self.filter_y, self.filter_x, self.input_ch, out_channel] + if self.int4_weights: + + w_shape = [self.filter_y * self.filter_x * self.input_ch * out_channel] + + if weights is not None: + weights = tf.reshape(weights, w_shape) + else: + weights = self.get_randomized_data(w_shape, + self.kernel_table_file, + minrange=TestSettings.INT4_MIN, + maxrange=TestSettings.INT4_MAX, + decimals=1, + regenerate=self.regenerate_new_weights) + + input_scale = 0.046774 + input_zp = -128 + + bias_scale = [64751.269531] * self.output_ch + bias_zp = [0] * self.output_ch + if self.generate_bias: + output_scale = 4684910.0 + output_zp = -2 + else: + output_scale = 0.525255 + output_zp = 2 + + weight_scales = [1.002234] * self.output_ch + w_zp = [0] * self.output_ch + + if self.has_padding: + # TODO dilation with padding + output_x = math.ceil(float(self.x_input) / float(self.stride_x)) + output_y = math.ceil(float(self.y_input) / float(self.stride_y)) + else: + dilation_filter_x = (self.filter_x - 1) * (self.dilation_x - 1) + dilation_filter_y = (self.filter_y - 1) * (self.dilation_y - 1) + + output_x = math.ceil(float(self.x_input - self.filter_x - dilation_filter_x + 1) / float(self.stride_x)) + output_y = math.ceil(float(self.y_input - self.filter_y - dilation_filter_y + 1) / float(self.stride_y)) + + self.json_replacements = { + "batches": self.batches, + "input_ch": self.input_ch, + "output_ch": self.output_ch, + "input_x": self.x_input, + "input_y": self.y_input, + "weight_x": self.filter_x, + "weight_y": self.filter_y, + "output_x": output_x, + "output_y": output_y, + "input_scale": input_scale, + "input_zp": input_zp, + "w_scale": weight_scales, + "w_zp": w_zp, + "bias_scale": bias_scale, + "bias_zp": bias_zp, + "output_scale": output_scale, + "output_zp": output_zp, + "stride_x": self.stride_x, + "stride_y": self.stride_y, + "dilation_x": self.dilation_x, + "dilation_y": self.dilation_y, + "type_pad": self.padding, + "ch_mult": self.channel_multiplier + } + + # Pack weights + weights_original_size = weights.numpy().size + weights = weights.numpy().flatten().astype(np.uint8) + if len(weights % 2): + weights = np.append(weights, [0]) + temp = np.zeros(len(weights) // 2) + for x, y in zip(range(0, len(temp)), range(0, len(weights), 2)): + temp[x] = 0xff & ((0xf0 & (weights[y + 1] << 4)) | (weights[y] & 0xf)) + weights = tf.convert_to_tensor(temp) + weights_size = weights.numpy().size * 2 + if weights_original_size % 2: + weights_size -= 1 + + # Generate tflite model + generated_json = self.generate_json_from_template( + None, weights, int8_time_weights=True, bias_data=biases, bias_buffer=3) + self.flatc_generate_tflite(generated_json, self.schema_file) + + filter_index = 1 + bias_index = 2 - if weights is not None: - weights = tf.reshape(weights, weight_shape) else: - weights = self.get_randomized_data(weight_shape, - self.kernel_table_file, - minrange=TestSettings.INT32_MIN, - maxrange=TestSettings.INT32_MAX, - decimals=1, - regenerate=self.regenerate_new_weights) - biases = self.get_randomized_bias_data(biases) + if self.test_type == 'transpose_conv': + weight_shape = [self.filter_y, self.filter_x, out_channel, self.input_ch] + else: + weight_shape = [self.filter_y, self.filter_x, self.input_ch, out_channel] - # Create a one layer Keras model. 
- model = tf.keras.models.Sequential() - input_shape = (self.batches, self.y_input, self.x_input, self.input_ch) - model.add(tf.keras.layers.InputLayer(input_shape=input_shape[1:], batch_size=self.batches)) - if self.test_type == 'conv': - conv_layer = tf.keras.layers.Conv2D(self.output_ch, - kernel_size=(self.filter_y, self.filter_x), - strides=(self.stride_y, self.stride_x), - padding=self.padding, - input_shape=input_shape[1:], - dilation_rate=(self.dilation_y, self.dilation_x)) - model.add(conv_layer) - conv_layer.set_weights([weights, biases]) - elif self.test_type == 'depthwise_conv': - depthwise_layer = tf.keras.layers.DepthwiseConv2D(kernel_size=(self.filter_y, self.filter_x), - strides=(self.stride_y, self.stride_x), - padding=self.padding, - depth_multiplier=self.channel_multiplier, - input_shape=input_shape[1:], - dilation_rate=(self.dilation_y, self.dilation_x)) - model.add(depthwise_layer) - depthwise_layer.set_weights([weights, biases]) - elif self.test_type == 'transpose_conv': - transposed_conv_layer = tf.keras.layers.Conv2DTranspose(self.output_ch, - kernel_size=(self.filter_y, self.filter_x), - strides=(self.stride_y, self.stride_x), - padding=self.padding, - input_shape=input_shape[1:], - dilation_rate=(self.dilation_y, self.dilation_x), - use_bias=self.generate_bias) - model.add(transposed_conv_layer) - if self.generate_bias: - transposed_conv_layer.set_weights([weights, biases]) + if weights is not None: + weights = tf.reshape(weights, weight_shape) else: - transposed_conv_layer.set_weights([weights]) + weights = self.get_randomized_data(weight_shape, + self.kernel_table_file, + minrange=TestSettings.INT32_MIN, + maxrange=TestSettings.INT32_MAX, + decimals=1, + regenerate=self.regenerate_new_weights) + weights_size = weights.numpy().size + + # Create a one layer Keras model. 
+ model = tf.keras.models.Sequential() + input_shape = (self.batches, self.y_input, self.x_input, self.input_ch) + model.add(tf.keras.layers.InputLayer(input_shape=input_shape[1:], batch_size=self.batches)) + if self.test_type == 'conv': + conv_layer = tf.keras.layers.Conv2D(self.output_ch, + kernel_size=(self.filter_y, self.filter_x), + strides=(self.stride_y, self.stride_x), + padding=self.padding, + input_shape=input_shape[1:], + dilation_rate=(self.dilation_y, self.dilation_x)) + model.add(conv_layer) + conv_layer.set_weights([weights, biases]) + elif self.test_type == 'depthwise_conv': + depthwise_layer = tf.keras.layers.DepthwiseConv2D(kernel_size=(self.filter_y, self.filter_x), + strides=(self.stride_y, self.stride_x), + padding=self.padding, + depth_multiplier=self.channel_multiplier, + input_shape=input_shape[1:], + dilation_rate=(self.dilation_y, self.dilation_x)) + model.add(depthwise_layer) + depthwise_layer.set_weights([weights, biases]) + elif self.test_type == 'transpose_conv': + transposed_conv_layer = tf.keras.layers.Conv2DTranspose(self.output_ch, + kernel_size=(self.filter_y, self.filter_x), + strides=(self.stride_y, self.stride_x), + padding=self.padding, + input_shape=input_shape[1:], + dilation_rate=(self.dilation_y, + self.dilation_x), + use_bias=self.generate_bias) + model.add(transposed_conv_layer) + if self.generate_bias: + transposed_conv_layer.set_weights([weights, biases]) + else: + transposed_conv_layer.set_weights([weights]) + + if self.test_type == 'transpose_conv' and self.generate_bias: + filter_index = 3 + bias_index = 2 + else: + filter_index = 2 + bias_index = 1 + + self.convert_model(model, inttype) - interpreter = self.convert_and_interpret(model, inttype, input_data) + interpreter = self.interpret_model(input_data, inttype) all_layers_details = interpreter.get_tensor_details() - if self.test_type == 'transpose_conv': - if self.generate_bias: - filter_layer = all_layers_details[3] - bias_layer = all_layers_details[2] - else: - filter_layer = all_layers_details[2] + filter_layer = all_layers_details[filter_index] + bias_layer = all_layers_details[bias_index] - # TODO: real null bias for all operators and not only transpose conv. - bias_layer = None - biases = [] + # TODO: NULL bias for conv and depthwise conv as well. 
+ if self.test_type == 'transpose_conv' and not self.generate_bias: + bias_layer = None + biases = [] - else: - filter_layer = all_layers_details[2] - bias_layer = all_layers_details[1] - if weights.numpy().size != interpreter.get_tensor(filter_layer['index']).size or \ - (self.generate_bias and biases.numpy().size != interpreter.get_tensor(bias_layer['index']).size): - raise RuntimeError(f"Dimension mismatch for {self.testdataset}") + if weights_size != interpreter.get_tensor(filter_layer['index']).size or \ + (self.generate_bias and biases.numpy().size != interpreter.get_tensor(bias_layer['index']).size): + print(weights_size, interpreter.get_tensor(filter_layer['index']).size) + raise RuntimeError(f"Dimension mismatch for {self.testdataset}") output_details = interpreter.get_output_details() @@ -225,7 +326,8 @@ def generate_data(self, input_data=None, weights=None, biases=None) -> None: self.calculate_padding(self.x_output, self.y_output, self.x_input, self.y_input) self.generate_c_array(self.input_data_file_prefix, input_data, datatype=datatype) - self.generate_c_array(self.weight_data_file_prefix, interpreter.get_tensor(filter_layer['index'])) + self.generate_c_array( + self.weight_data_file_prefix, interpreter.get_tensor(filter_layer['index']), pack=self.int4_weights) self.scaling_factors = filter_layer['quantization_parameters']['scales'] per_channel_multiplier, per_channel_shift = self.generate_quantize_per_channel_multiplier() diff --git a/Tests/UnitTest/fully_connected_settings.py b/Tests/UnitTest/fully_connected_settings.py index a0938c4c..74d33485 100644 --- a/Tests/UnitTest/fully_connected_settings.py +++ b/Tests/UnitTest/fully_connected_settings.py @@ -19,6 +19,7 @@ import tensorflow as tf import numpy as np + class FullyConnectedSettings(TestSettings): def __init__(self, @@ -53,11 +54,9 @@ def __init__(self, w_zp=0, bias_scale=0.00002, bias_zp=0, - state_scale=0.005, - state_zp=0, output_scale=0.1, output_zp=0, - packed_4bit = False + int4_weights=False ): super().__init__(dataset, testtype, @@ -83,33 +82,29 @@ def __init__(self, int16xint8=int16xint8, bias_min=bias_min, bias_max=bias_max, - interpreter=interpreter) + interpreter=interpreter, + int4_weights=int4_weights) - self.packed_4bit = packed_4bit - if self.packed_4bit: + if self.int4_weights: if self.generate_bias: self.json_template = "TestCases/Common/fc_s4_weights_template.json" else: self.json_template = "TestCases/Common/fc_s4_weights_template_null_bias.json" - self.in_activation_max = TestSettings.INT4_MAX - self.in_activation_min = TestSettings.INT4_MIN - - self.json_replacements = { - "batches" : batches, - "input_size" : in_ch * x_in * y_in, - "input_scale" : input_scale, - "input_zp" : input_zp, - "w_scale" : w_scale, - "w_zp" : w_zp, - "bias_size" : out_ch, - "bias_scale" : bias_scale, - "bias_zp" : bias_zp, - "output_size" : out_ch, - "output_scale" : output_scale, - "output_zp" : output_zp - } - + self.json_replacements = { + "batches": batches, + "input_size": in_ch * x_in * y_in, + "input_scale": input_scale, + "input_zp": input_zp, + "w_scale": w_scale, + "w_zp": w_zp, + "bias_size": out_ch, + "bias_scale": bias_scale, + "bias_zp": bias_zp, + "output_size": out_ch, + "output_scale": output_scale, + "output_zp": output_zp + } def write_c_config_header(self) -> None: super().write_c_config_header() @@ -125,18 +120,14 @@ def write_c_config_header(self) -> None: f.write("#define {}_INPUT_OFFSET {}\n".format(prefix, -self.input_zero_point)) f.write("#define {}_OUTPUT_OFFSET {}\n".format(prefix, 
self.output_zero_point)) - def quantize_multiplier(self): - input_product_scale = self.input_scale * self.weights_scale + def quantize_multiplier(self, weights_scale): + input_product_scale = self.input_scale * weights_scale if input_product_scale < 0: raise RuntimeError("negative input product scale") real_multipler = input_product_scale / self.output_scale (self.quantized_multiplier, self.quantized_shift) = self.quantize_scale(real_multipler) def generate_data(self, input_data=None, weights=None, biases=None) -> None: - if self.packed_4bit: - if not self.use_tflite_micro_interpreter: - print("Warning: interpreter tflite_micro must be used for fully_connected int4. Skipping generating headers.") - return if self.is_int16xint8: inttype = tf.int16 @@ -148,18 +139,20 @@ def generate_data(self, input_data=None, weights=None, biases=None) -> None: bias_datatype = "int32_t" # Generate data - fc_input_format = [self.batches, self.input_ch * self.x_input * self.y_input] + fc_input_format = [self.batches, self.input_ch * self.x_input * self.y_input] if input_data is not None: input_data = tf.reshape(input_data, fc_input_format) else: input_data = self.get_randomized_input_data(input_data, fc_input_format) # Generate bias - biases = self.get_randomized_bias_data(biases) + if self.generate_bias: + biases = self.get_randomized_bias_data(biases) + else: + biases = None - # Generate weights - if self.packed_4bit: - # Generate packed and unpacked model from JSON + if self.int4_weights: + # Generate weights, both packed and unpacked model from JSON temp1 = self.model_path temp2 = self.json_template @@ -167,81 +160,95 @@ def generate_data(self, input_data=None, weights=None, biases=None) -> None: if weights is not None: weights = tf.reshape(weights, fc_weights_format) else: - weights = self.get_randomized_data(fc_weights_format, self.kernel_table_file, minrange=TestSettings.INT4_MIN, maxrange=TestSettings.INT4_MAX, regenerate=self.regenerate_new_weights) - - if not self.generate_bias: - biases = None + weights = self.get_randomized_data(fc_weights_format, + self.kernel_table_file, + minrange=TestSettings.INT4_MIN, + maxrange=TestSettings.INT4_MAX, + regenerate=self.regenerate_new_weights) # Unpacked model is used for reference during debugging only and not used by default self.model_path = self.model_path + "_unpacked" self.json_template = self.json_template[:-5] + "_unpacked.json" - generated_json = self.generate_json_from_template(weights, bias_data = biases, bias_buffer=2) + generated_json = self.generate_json_from_template(weights, bias_data=biases, bias_buffer=2) self.flatc_generate_tflite(generated_json, self.schema_file) self.model_path = temp1 self.json_template = temp2 - temp = np.reshape(weights, (len(weights)//2, 2)).astype(np.uint8) - temp = 0xff & ((0xf0 & (temp[:,1] << 4)) | (temp[:,0] & 0xf)) + temp = np.reshape(weights, (len(weights) // 2, 2)).astype(np.uint8) + temp = 0xff & ((0xf0 & (temp[:, 1] << 4)) | (temp[:, 0] & 0xf)) weights = tf.convert_to_tensor(temp) - generated_json = self.generate_json_from_template(weights, bias_data = biases, bias_buffer=2) + weights_size = weights.numpy().size * 2 + generated_json = self.generate_json_from_template(weights, bias_data=biases, bias_buffer=2) self.flatc_generate_tflite(generated_json, self.schema_file) - interpreter = self.Interpreter(model_path=str(self.model_path_tflite), experimental_op_resolver_type=self.OpResolverType.BUILTIN_REF) - interpreter.allocate_tensors() + filter_index = 1 + bias_index = 2 else: - # Generate model in tensorflow 
with one fully_connected layer fc_weights_format = [self.input_ch * self.y_input * self.x_input, self.output_ch] if weights is not None: weights = tf.reshape(weights, fc_weights_format) else: - weights = self.get_randomized_data(fc_weights_format, self.kernel_table_file, minrange=TestSettings.INT32_MIN, maxrange=TestSettings.INT32_MAX, regenerate=self.regenerate_new_weights) - + weights = self.get_randomized_data(fc_weights_format, + self.kernel_table_file, + minrange=TestSettings.INT32_MIN, + maxrange=TestSettings.INT32_MAX, + regenerate=self.regenerate_new_weights) + weights_size = weights.numpy().size + # Generate model in tensorflow with one fully_connected layer model = tf.keras.models.Sequential() model.add( tf.keras.layers.InputLayer(input_shape=(self.y_input * self.x_input * self.input_ch, ), - batch_size=self.batches)) - fully_connected_layer = tf.keras.layers.Dense(self.output_ch, activation=None) + batch_size=self.batches)) + fully_connected_layer = tf.keras.layers.Dense(self.output_ch, activation=None, use_bias=self.generate_bias) model.add(fully_connected_layer) - fully_connected_layer.set_weights([weights, biases]) - interpreter = self.convert_and_interpret(model, inttype, input_data) + if self.generate_bias: + fully_connected_layer.set_weights([weights, biases]) + else: + fully_connected_layer.set_weights([weights]) + self.convert_model(model, inttype) + + bias_index = 1 + if self.generate_bias: + filter_index = 2 + else: + filter_index = 1 + + interpreter = self.interpret_model(input_data, inttype) # Get layer information all_layers_details = interpreter.get_tensor_details() - input_layer = all_layers_details[0] - (self.input_scale, self.input_zero_point) = self.get_scale_and_zp(input_layer) - filter_layer = all_layers_details[1] - (self.weights_scale, self.weights_zero_point) = self.get_scale_and_zp(filter_layer) - if self.generate_bias: - output_layer = all_layers_details[3] - else: - output_layer = all_layers_details[2] - (self.output_scale, self.output_zero_point) = self.get_scale_and_zp(output_layer) + filter_layer = all_layers_details[filter_index] + bias_layer = all_layers_details[bias_index] + + if weights_size != interpreter.get_tensor(filter_layer['index']).size or \ + (self.generate_bias and biases.numpy().size != interpreter.get_tensor(bias_layer['index']).size): + raise RuntimeError(f"Dimension mismatch for {self.testdataset}") + self.x_output = 1 self.y_output = 1 - self.quantize_multiplier() + weights_scale = filter_layer['quantization_parameters']['scales'][0] + self.quantize_multiplier(weights_scale) # Generate reference output - if self.packed_4bit: - interpreter = self.tflite_micro.runtime.Interpreter.from_file(model_path=str(self.model_path_tflite)) - interpreter.set_input(tf.cast(input_data, tf.int8), input_layer["index"]) - interpreter.invoke() - output_data = interpreter.get_output(0) - else: - output_details = interpreter.get_output_details() - interpreter.invoke() - output_data = interpreter.get_tensor(output_details[0]["index"]) + output_details = interpreter.get_output_details() + interpreter.invoke() + output_data = interpreter.get_tensor(output_details[0]["index"]) # Save results self.generate_c_array(self.input_data_file_prefix, input_data, datatype=datatype) - self.generate_c_array(self.weight_data_file_prefix, weights, datatype=datatype) - if self.generate_bias: - self.generate_c_array(self.bias_data_file_prefix, biases, datatype=bias_datatype) - self.generate_c_array(self.output_data_file_prefix, np.clip(output_data, self.out_activation_min, 
self.out_activation_max), datatype=datatype) + self.generate_c_array( + self.weight_data_file_prefix, interpreter.get_tensor(filter_layer['index']), pack=self.int4_weights) + if not self.generate_bias: + bias = [] + else: + bias = interpreter.get_tensor(bias_layer['index']) + self.generate_c_array(self.bias_data_file_prefix, bias, datatype=bias_datatype) + + self.generate_c_array(self.output_data_file_prefix, + np.clip(output_data, self.out_activation_min, self.out_activation_max), + datatype=datatype) self.write_c_config_header() self.write_c_header_wrapper() - - def get_scale_and_zp(self, layer): - return (layer['quantization_parameters']['scales'][0], layer['quantization_parameters']['zero_points'][0]) diff --git a/Tests/UnitTest/generate_test_data.py b/Tests/UnitTest/generate_test_data.py index 7f98ff1e..0c172df7 100755 --- a/Tests/UnitTest/generate_test_data.py +++ b/Tests/UnitTest/generate_test_data.py @@ -900,8 +900,8 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, y_in=5, w_x=3, w_y=4, - stride_x=2, - stride_y=2, + stride_x=1, + stride_y=1, pad=True, out_activation_min=-70, out_activation_max=127, @@ -1189,6 +1189,176 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, int16xint8=True, generate_bias=False, interpreter=interpreter) + dataset = 'depthwise_int4_1' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=22, + out_ch=22, + x_in=1, + y_in=23, + w_x=1, + w_y=3, + stride_x=1, + stride_y=1, + pad=False, + out_activation_min=-127, + out_activation_max=127, + generate_bias=False, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_2' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=19, + out_ch=19, + x_in=6, + y_in=6, + w_x=5, + w_y=5, + stride_x=1, + stride_y=1, + pad=False, + out_activation_min=-127, + out_activation_max=127, + generate_bias=False, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_3' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=1, + out_ch=1, + x_in=2, + y_in=2, + w_x=2, + w_y=2, + stride_x=1, + stride_y=1, + pad=False, + out_activation_min=-127, + out_activation_max=127, + generate_bias=False, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_4' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=3, + out_ch=3, + x_in=4, + y_in=4, + w_x=2, + w_y=2, + stride_x=2, + stride_y=2, + pad=False, + out_activation_min=-127, + out_activation_max=127, + generate_bias=False, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_generic' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=2, + out_ch=8, + x_in=16, + y_in=16, + w_x=8, + w_y=8, + stride_x=2, + stride_y=2, + pad=False, + out_activation_min=-127, + out_activation_max=127, + generate_bias=False, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_generic_2' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=3, + out_ch=9, + x_in=9, + 
y_in=9, + w_x=6, + w_y=5, + stride_x=2, + stride_y=1, + pad=True, + out_activation_min=-127, + out_activation_max=127, + generate_bias=True, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_generic_3' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=4, + out_ch=8, + x_in=9, + y_in=9, + w_x=5, + w_y=5, + stride_x=1, + stride_y=1, + pad=False, + out_activation_min=-127, + out_activation_max=125, + dilation_x=2, + dilation_y=2, + generate_bias=True, + interpreter=interpreter, + int4_weights=True) + dataset = 'depthwise_int4_generic_4' + testdata_sets[dataset] = ConvSettings(dataset, + type_of_test, + regenerate_weights, + regenerate_input, + regenerate_biases, + schema_file, + in_ch=1, + out_ch=3, + x_in=12, + y_in=10, + w_x=5, + w_y=5, + stride_x=1, + stride_y=2, + pad=True, + out_activation_min=-127, + out_activation_max=127, + generate_bias=True, + interpreter=interpreter, + int4_weights=True) type_of_test = 'fully_connected' dataset = 'fully_connected' @@ -1279,7 +1449,7 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, regenerate_input, regenerate_biases, schema_file, - packed_4bit=True, + int4_weights=True, in_ch=2, out_ch=5, x_in=1, @@ -1298,7 +1468,7 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, regenerate_input, regenerate_biases, schema_file, - packed_4bit=True, + int4_weights=True, in_ch=4, out_ch=7, x_in=1, @@ -1318,7 +1488,7 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, regenerate_input, regenerate_biases, schema_file, - packed_4bit=True, + int4_weights=True, in_ch=6, out_ch=9, x_in=1, @@ -1339,7 +1509,7 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, regenerate_input, regenerate_biases, schema_file, - packed_4bit=True, + int4_weights=True, in_ch=5, out_ch=2, x_in=1, @@ -1359,7 +1529,7 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, regenerate_input, regenerate_biases, schema_file, - packed_4bit=True, + int4_weights=True, in_ch=7, out_ch=4, x_in=1, @@ -1381,7 +1551,7 @@ def load_testdata_sets(regenerate_input, regenerate_weights, regenerate_biases, regenerate_input, regenerate_biases, schema_file, - packed_4bit=True, + int4_weights=True, in_ch=9, out_ch=6, x_in=1, diff --git a/Tests/UnitTest/test_settings.py b/Tests/UnitTest/test_settings.py index 9354f4c1..8601f554 100644 --- a/Tests/UnitTest/test_settings.py +++ b/Tests/UnitTest/test_settings.py @@ -83,7 +83,10 @@ def __init__(self, bias_max=np.iinfo(np.dtype('int32')).max, dilation_x=1, dilation_y=1, - interpreter="tensorflow"): + interpreter="tensorflow", + int4_weights=False): + + self.int4_weights = int4_weights if self.INT8_MIN != np.iinfo(np.dtype('int8')).min or self.INT8_MAX != np.iinfo(np.dtype('int8')).max or \ self.INT16_MIN != np.iinfo(np.dtype('int16')).min or self.INT16_MAX != np.iinfo(np.dtype('int16')).max or \ @@ -360,7 +363,7 @@ def get_data_file_name_info(self, name_prefix) -> (str, str): filepath = self.headers_dir + filename return filename, filepath - def generate_c_array(self, name, array, datatype="int8_t", const="const ") -> None: + def generate_c_array(self, name, array, datatype="int8_t", const="const ", pack=False) -> None: w = None if type(array) is list: @@ -375,6 +378,9 @@ def generate_c_array(self, name, array, datatype="int8_t", const="const ") -> No w = w.ravel() size = tf.size(array) + if 
pack: + size = size // 2 + (size % 2) + filename, filepath = self.get_data_file_name_info(name) self.generated_header_files.append(filename) @@ -443,6 +449,11 @@ def convert_and_interpret(self, model, inttype, input_data=None, dataset_shape=N """ Compile and convert a model to Tflite format, run interpreter and allocate tensors. """ + + self.convert_model(model, inttype, dataset_shape) + return self.interpret_model(input_data, inttype) + + def convert_model(self, model, inttype, dataset_shape=None): model.compile(loss=tf.keras.losses.categorical_crossentropy, optimizer=tf.keras.optimizers.Adam(), metrics=['accuracy']) @@ -473,6 +484,7 @@ def convert_and_interpret(self, model, inttype, input_data=None, dataset_shape=N with open(self.model_path_tflite, "wb") as model: model.write(tflite_model) + def interpret_model(self, input_data, inttype): interpreter = self.Interpreter(model_path=str(self.model_path_tflite), experimental_op_resolver_type=self.OpResolverType.BUILTIN_REF) interpreter.allocate_tensors() @@ -489,6 +501,7 @@ def convert_and_interpret(self, model, inttype, input_data=None, dataset_shape=N return interpreter + # TODO: make it a more generic function and remove reference to svdf specific names def generate_json_from_template(self, weights_feature_data=None, weights_time_data=None, @@ -518,6 +531,7 @@ def generate_json_from_template(self, data["buffers"][w_2_buffer_index]["data"] = self.to_bytes(weights_time_data.numpy().ravel(), 1) else: data["buffers"][w_2_buffer_index]["data"] = self.to_bytes(weights_time_data.numpy().ravel(), 2) + if bias_data is not None: bias_buffer_index = bias_buffer data["buffers"][bias_buffer_index]["data"] = self.to_bytes(bias_data.numpy().ravel(), 4)
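
Note on the s4 weight layout exercised above: both conv_settings.py and fully_connected_settings.py pack two signed 4-bit weights per byte, first weight in the low nibble, via the expression 0xff & ((0xf0 & (w[i + 1] << 4)) | (w[i] & 0xf)). The following standalone Python sketch is not part of the patch; the helper names pack_int4 and unpack_int4 are illustrative only, and it simply mirrors that packing convention for reference.

import numpy as np

def pack_int4(values):
    """Pack signed int4 values (range [-8, 7]) two per byte, low nibble first."""
    w = np.asarray(values, dtype=np.int8).astype(np.uint8).ravel()
    if len(w) % 2:
        # Odd number of weights: pad with a zero nibble, as the generator does.
        w = np.append(w, 0).astype(np.uint8)
    return ((w[0::2] & 0x0F) | ((w[1::2] << 4) & 0xF0)).astype(np.uint8)

def unpack_int4(packed, count):
    """Recover `count` signed int4 values from the packed byte stream."""
    low = (packed & 0x0F).astype(np.int8)
    high = ((packed >> 4) & 0x0F).astype(np.int8)
    # Sign-extend the 4-bit values.
    low = np.where(low > 7, low - 16, low)
    high = np.where(high > 7, high - 16, high)
    return np.stack([low, high], axis=1).ravel()[:count]

if __name__ == "__main__":
    original = np.array([-8, 7, 3, -1, 5], dtype=np.int8)
    assert np.array_equal(unpack_int4(pack_int4(original), len(original)), original)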