Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(query): improve build keys state #13004

Merged
merged 18 commits into from
Sep 27, 2023
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
166 changes: 109 additions & 57 deletions src/query/expression/src/kernels/group_by_hash.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@ use common_exception::ErrorCode;
use common_exception::Result;
use common_hashtable::DictionaryKeys;
use common_hashtable::FastHash;
use common_io::prelude::BinaryWrite;
use ethnum::i256;
use ethnum::u256;
use ethnum::U256;
Expand All @@ -37,7 +36,6 @@ use crate::types::nullable::NullableColumn;
use crate::types::number::Number;
use crate::types::number::NumberColumn;
use crate::types::string::StringColumn;
use crate::types::string::StringColumnBuilder;
use crate::types::string::StringIterator;
use crate::types::DataType;
use crate::types::DecimalDataType;
Expand Down Expand Up @@ -217,20 +215,38 @@ impl HashMethod for HashMethodSerializer {
fn build_keys_state(
&self,
group_columns: &[(Column, DataType)],
rows: usize,
num_rows: usize,
) -> Result<KeysState> {
let approx_size = group_columns.len() * rows * 8;
let mut builder = StringColumnBuilder::with_capacity(rows, approx_size);

for row in 0..rows {
let mut data_size = 0;
for (column, _) in group_columns.iter() {
data_size += column.serialize_size();
}
let mut data: Vec<u8> = Vec::with_capacity(data_size);
let mut offsets: Vec<u64> = Vec::with_capacity(num_rows + 1);
offsets.push(0);
let mut data_ptr = data.as_mut_ptr();
let mut offsets_ptr = unsafe { offsets.as_mut_ptr().add(1) };
Dousir9 marked this conversation as resolved.
Show resolved Hide resolved

let mut offset = 0;
for i in 0..num_rows {
let old_ptr = data_ptr;
for (col, _) in group_columns {
serialize_column_binary(col, row, &mut builder.data);
serialize_column_binary(col, i, &mut data_ptr);
}
offset += data_ptr as u64 - old_ptr as u64;
unsafe {
std::ptr::write(offsets_ptr, offset);
offsets_ptr = offsets_ptr.add(1);
}
builder.commit_row();
}

let col = builder.build();
Ok(KeysState::Column(Column::String(col)))
unsafe {
offsets.set_len(num_rows + 1);
data.set_len(data_size);
}
Ok(KeysState::Column(Column::String(StringColumn::new(
data.into(),
offsets.into(),
))))
}

fn build_keys_iter<'a>(&self, key_state: &'a KeysState) -> Result<Self::HashKeyIter<'a>> {
Expand All @@ -257,42 +273,55 @@ impl HashMethod for HashMethodDictionarySerializer {
fn build_keys_state(
&self,
group_columns: &[(Column, DataType)],
rows: usize,
num_rows: usize,
) -> Result<KeysState> {
// fixed type serialize one column to dictionary
let mut dictionary_columns = Vec::with_capacity(group_columns.len());

let mut other_columns = Vec::new();
for (group_column, _) in group_columns {
if let Column::String(v) = group_column {
debug_assert_eq!(v.len(), rows);
dictionary_columns.push(v.clone());
} else if let Column::Variant(v) = group_column {
debug_assert_eq!(v.len(), rows);
dictionary_columns.push(v.clone());
match group_column {
Column::String(v) | Column::Variant(v) | Column::Bitmap(v) => {
debug_assert_eq!(v.len(), num_rows);
dictionary_columns.push(v.clone());
}
_ => other_columns.push(group_column),
}
}

if dictionary_columns.len() != group_columns.len() {
let approx_size = group_columns.len() * rows * 8;
let mut builder = StringColumnBuilder::with_capacity(rows, approx_size);

for row in 0..rows {
for (group_column, _) in group_columns {
if !matches!(group_column, Column::String(_) | Column::Variant(_)) {
serialize_column_binary(group_column, row, &mut builder.data);
}
if !other_columns.is_empty() {
let mut data_size = 0;
for column in other_columns {
data_size += column.serialize_size();
}
let mut data: Vec<u8> = Vec::with_capacity(data_size);
let mut offsets: Vec<u64> = Vec::with_capacity(num_rows + 1);
offsets.push(0);
let mut data_ptr = data.as_mut_ptr();
let mut offsets_ptr = unsafe { offsets.as_mut_ptr().add(1) };

let mut offset = 0;
for i in 0..num_rows {
let old_ptr = data_ptr;
for (col, _) in group_columns {
serialize_column_binary(col, i, &mut data_ptr);
}
offset += data_ptr as u64 - old_ptr as u64;
unsafe {
std::ptr::write(offsets_ptr, offset);
offsets_ptr = offsets_ptr.add(1);
}

builder.commit_row();
}

dictionary_columns.push(builder.build());
unsafe {
offsets.set_len(num_rows + 1);
data.set_len(data_size);
}
dictionary_columns.push(StringColumn::new(data.into(), offsets.into()));
}

let mut keys = Vec::with_capacity(rows * dictionary_columns.len());
let mut points = Vec::with_capacity(rows * dictionary_columns.len());
let mut keys = Vec::with_capacity(num_rows * dictionary_columns.len());
let mut points = Vec::with_capacity(num_rows * dictionary_columns.len());

for row in 0..rows {
for row in 0..num_rows {
let start = points.len();

for dictionary_column in &dictionary_columns {
Expand Down Expand Up @@ -624,50 +653,73 @@ fn build(
}

/// This function must be consistent with the `push_binary` function of `src/query/expression/src/values.rs`.
pub fn serialize_column_binary(column: &Column, row: usize, vec: &mut Vec<u8>) {
pub fn serialize_column_binary(column: &Column, row: usize, row_space: &mut *mut u8) {
match column {
Column::Null { .. } | Column::EmptyArray { .. } | Column::EmptyMap { .. } => {}
Column::Number(v) => with_number_mapped_type!(|NUM_TYPE| match v {
NumberColumn::NUM_TYPE(v) => vec.extend_from_slice(v[row].to_le_bytes().as_ref()),
NumberColumn::NUM_TYPE(v) => {
unsafe {
std::ptr::write(row_space.cast::<NUM_TYPE>(), v[row]);
*row_space = row_space.add(std::mem::size_of::<NUM_TYPE>());
}
}
}),
Column::Boolean(v) => vec.push(v.get_bit(row) as u8),
Column::String(v) => {
BinaryWrite::write_binary(vec, unsafe { v.index_unchecked(row) }).unwrap()
}
Column::Decimal(_) => {
with_decimal_mapped_type!(|DECIMAL_TYPE| match column {
Column::Decimal(DecimalColumn::DECIMAL_TYPE(v, _)) =>
vec.extend_from_slice(v[row].to_le_bytes().as_ref()),
Column::Decimal(DecimalColumn::DECIMAL_TYPE(v, _)) => {
unsafe {
std::ptr::write(row_space.cast::<DECIMAL_TYPE>(), v[row]);
*row_space = row_space.add(std::mem::size_of::<DECIMAL_TYPE>());
}
}
_ => unreachable!(),
})
}
Column::Timestamp(v) => vec.extend_from_slice(v[row].to_le_bytes().as_ref()),
Column::Date(v) => vec.extend_from_slice(v[row].to_le_bytes().as_ref()),
Column::Boolean(v) => unsafe {
std::ptr::write(*row_space, v.get_bit(row) as u8);
*row_space = row_space.add(1);
},
Column::String(v) | Column::Bitmap(v) | Column::Variant(v) => unsafe {
let value = v.index_unchecked(row);
let len = value.len();
std::ptr::write(row_space.cast::<u64>(), len as u64);
*row_space = row_space.add(std::mem::size_of::<u64>());
std::ptr::copy_nonoverlapping(value.as_ptr(), *row_space, len);
*row_space = row_space.add(len);
},
Column::Timestamp(v) => unsafe {
std::ptr::write(row_space.cast::<i64>(), v[row]);
*row_space = row_space.add(std::mem::size_of::<i64>());
},
Column::Date(v) => unsafe {
std::ptr::write(row_space.cast::<i32>(), v[row]);
*row_space = row_space.add(std::mem::size_of::<i32>());
},
Column::Array(array) | Column::Map(array) => {
let data = array.index(row).unwrap();
BinaryWrite::write_uvarint(vec, data.len() as u64).unwrap();
unsafe {
std::ptr::write(row_space.cast::<u64>(), data.len() as u64);
*row_space = row_space.add(std::mem::size_of::<u64>());
}
for i in 0..data.len() {
serialize_column_binary(&data, i, vec);
serialize_column_binary(&data, i, row_space);
}
}
Column::Bitmap(v) => {
BinaryWrite::write_binary(vec, unsafe { v.index_unchecked(row) }).unwrap()
}
Column::Nullable(c) => {
let valid = c.validity.get_bit(row);
vec.push(valid as u8);
unsafe {
std::ptr::write(*row_space, valid as u8);
*row_space = row_space.add(1);
}
if valid {
serialize_column_binary(&c.column, row, vec);
serialize_column_binary(&c.column, row, row_space);
}
}
Column::Tuple(fields) => {
for inner_col in fields.iter() {
serialize_column_binary(inner_col, row, vec);
serialize_column_binary(inner_col, row, row_space);
}
}
Column::Variant(v) => {
BinaryWrite::write_binary(vec, unsafe { v.index_unchecked(row) }).unwrap()
}
}
}

Expand Down
1 change: 1 addition & 0 deletions src/query/expression/src/types/string.rs
Original file line number Diff line number Diff line change
Expand Up @@ -208,6 +208,7 @@ impl StringColumn {
/// Returns the byte slice of the value at `index`, without bounds checking.
///
/// # Safety
///
/// Calling this method with an out-of-bounds index is *[undefined behavior]*
#[inline]
pub unsafe fn index_unchecked(&self, index: usize) -> &[u8] {
    &self.data[(self.offsets[index] as usize)..(self.offsets[index + 1] as usize)]
}
Expand Down
33 changes: 28 additions & 5 deletions src/query/expression/src/values.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1819,6 +1819,29 @@ impl Column {
}
}

pub fn serialize_size(&self) -> usize {
Dousir9 marked this conversation as resolved.
Show resolved Hide resolved
match self {
Column::Null { .. } | Column::EmptyArray { .. } | Column::EmptyMap { .. } => 0,
Column::Number(NumberColumn::UInt8(col)) => col.len(),
Column::Number(NumberColumn::UInt16(col)) => col.len() * 2,
Column::Number(NumberColumn::UInt32(col)) => col.len() * 4,
Column::Number(NumberColumn::UInt64(col)) => col.len() * 8,
Column::Number(NumberColumn::Float32(col)) => col.len() * 4,
Column::Number(NumberColumn::Float64(col)) => col.len() * 8,
Column::Number(NumberColumn::Int8(col)) => col.len(),
Column::Number(NumberColumn::Int16(col)) => col.len() * 2,
Column::Number(NumberColumn::Int32(col)) | Column::Date(col) => col.len() * 4,
Column::Number(NumberColumn::Int64(col)) | Column::Timestamp(col) => col.len() * 8,
Column::Decimal(DecimalColumn::Decimal128(col, _)) => col.len() * 16,
Column::Decimal(DecimalColumn::Decimal256(col, _)) => col.len() * 32,
Column::Boolean(c) => c.len(),
Column::String(col) | Column::Bitmap(col) | Column::Variant(col) => col.memory_size(),
Column::Array(col) | Column::Map(col) => col.values.serialize_size() + col.len() * 8,
Column::Nullable(c) => c.column.serialize_size() + c.len(),
Column::Tuple(fields) => fields.iter().map(|f| f.serialize_size()).sum(),
}
}

/// Returns (is_all_null, Option bitmap)
pub fn validity(&self) -> (bool, Option<&Bitmap>) {
match self {
Expand Down Expand Up @@ -2230,10 +2253,10 @@ impl ColumnBuilder {
ColumnBuilder::String(builder)
| ColumnBuilder::Variant(builder)
| ColumnBuilder::Bitmap(builder) => {
let offset: u64 = reader.read_uvarint()?;
builder.data.resize(offset as usize + builder.data.len(), 0);
let offset = reader.read_scalar::<u64>()? as usize;
builder.data.resize(offset + builder.data.len(), 0);
let last = *builder.offsets.last().unwrap() as usize;
reader.read_exact(&mut builder.data[last..last + offset as usize])?;
reader.read_exact(&mut builder.data[last..last + offset])?;
builder.commit_row();
}
ColumnBuilder::Timestamp(builder) => {
Expand All @@ -2246,7 +2269,7 @@ impl ColumnBuilder {
builder.push(value);
}
ColumnBuilder::Array(builder) => {
let len = reader.read_uvarint()?;
let len = reader.read_scalar::<u64>()?;
for _ in 0..len {
builder.builder.push_binary(reader)?;
}
Expand All @@ -2255,7 +2278,7 @@ impl ColumnBuilder {
ColumnBuilder::Map(builder) => {
const KEY: usize = 0;
const VALUE: usize = 1;
let len = reader.read_uvarint()?;
let len = reader.read_scalar::<u64>()?;
let map_builder = builder.builder.as_tuple_mut().unwrap();
for _ in 0..len {
map_builder[KEY].push_binary(reader)?;
Expand Down