GH-45075: [C++] Remove result_internal.h (apache#45066)
### Rationale for this change

The result_internal.h header contains only the definition of `ASSIGN_OR_RAISE`. Since `ARROW_ASSIGN_OR_RAISE` (defined in result.h) is preferred over `ASSIGN_OR_RAISE`, the header can be removed.

### What changes are included in this PR?

Remove result_internal.h and replace all remaining uses of `ASSIGN_OR_RAISE` with `ARROW_ASSIGN_OR_RAISE`.
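
For reference, a minimal sketch of the replacement pattern (not code from this PR): `ARROW_ASSIGN_OR_RAISE`, declared in arrow/result.h, unwraps an `arrow::Result<T>` into a variable or returns the error `Status` from the enclosing function. The `ReadIntoBuffer` helper and its size parameter are hypothetical.

```cpp
#include "arrow/buffer.h"
#include "arrow/result.h"
#include "arrow/status.h"

// Hypothetical helper illustrating the macro swap; not taken from this PR.
arrow::Status ReadIntoBuffer(int64_t size) {
  // Previously, via result_internal.h:
  //   ASSIGN_OR_RAISE(auto buffer, arrow::AllocateBuffer(size));
  // Now, using the public macro from arrow/result.h:
  ARROW_ASSIGN_OR_RAISE(auto buffer, arrow::AllocateBuffer(size));
  // On failure the macro returns the error Status from ReadIntoBuffer();
  // on success `buffer` holds the allocated std::unique_ptr<arrow::Buffer>.
  return arrow::Status::OK();
}
```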

### Are these changes tested?

Pass CI.

### Are there any user-facing changes?

No.
* GitHub Issue: apache#45075

Authored-by: Gang Wu <[email protected]>
Signed-off-by: Sutou Kouhei <[email protected]>
wgtmac authored Dec 19, 2024
1 parent 6f66f1f commit 6f65782
Showing 5 changed files with 33 additions and 55 deletions.
34 changes: 17 additions & 17 deletions cpp/src/arrow/csv/writer.cc
@@ -22,7 +22,6 @@
#include "arrow/ipc/writer.h"
#include "arrow/record_batch.h"
#include "arrow/result.h"
#include "arrow/result_internal.h"
#include "arrow/stl_allocator.h"
#include "arrow/util/iterator.h"
#include "arrow/util/logging.h"
@@ -129,15 +128,15 @@ class ColumnPopulator {
// threading overhead would not be justified.
ctx.set_use_threads(false);
if (data.type() && is_large_binary_like(data.type()->id())) {
-ASSIGN_OR_RAISE(array_, compute::Cast(data, /*to_type=*/large_utf8(),
-compute::CastOptions(), &ctx));
+ARROW_ASSIGN_OR_RAISE(array_, compute::Cast(data, /*to_type=*/large_utf8(),
+compute::CastOptions(), &ctx));
} else {
auto casted = compute::Cast(data, /*to_type=*/utf8(), compute::CastOptions(), &ctx);
if (casted.ok()) {
array_ = std::move(casted).ValueOrDie();
} else if (casted.status().IsCapacityError()) {
-ASSIGN_OR_RAISE(array_, compute::Cast(data, /*to_type=*/large_utf8(),
-compute::CastOptions(), &ctx));
+ARROW_ASSIGN_OR_RAISE(array_, compute::Cast(data, /*to_type=*/large_utf8(),
+compute::CastOptions(), &ctx));
} else {
return casted.status();
}
@@ -501,8 +500,8 @@ class CSVWriterImpl : public ipc::RecordBatchWriter {
return Status::Invalid("Null string cannot contain quotes.");
}

-ASSIGN_OR_RAISE(std::shared_ptr<Buffer> null_string,
-arrow::AllocateBuffer(options.null_string.length()));
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<Buffer> null_string,
+arrow::AllocateBuffer(options.null_string.length()));
memcpy(null_string->mutable_data(), options.null_string.data(),
options.null_string.length());

@@ -511,7 +510,7 @@ class CSVWriterImpl : public ipc::RecordBatchWriter {
for (int col = 0; col < schema->num_fields(); col++) {
const std::string& end_chars =
col < schema->num_fields() - 1 ? delimiter : options.eol;
-ASSIGN_OR_RAISE(
+ARROW_ASSIGN_OR_RAISE(
populators[col],
MakePopulator(*schema->field(col), end_chars, options.delimiter, null_string,
options.quoting_style, options.io_context.pool()));
@@ -528,7 +527,7 @@ class CSVWriterImpl : public ipc::RecordBatchWriter {
Status WriteRecordBatch(const RecordBatch& batch) override {
RecordBatchIterator iterator = RecordBatchSliceIterator(batch, options_.batch_size);
for (auto maybe_slice : iterator) {
-ASSIGN_OR_RAISE(std::shared_ptr<RecordBatch> slice, maybe_slice);
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<RecordBatch> slice, maybe_slice);
RETURN_NOT_OK(TranslateMinimalBatch(*slice));
RETURN_NOT_OK(sink_->Write(data_buffer_));
stats_.num_record_batches++;
@@ -570,10 +569,11 @@ class CSVWriterImpl : public ipc::RecordBatchWriter {
Status PrepareForContentsWrite() {
// Only called once, as part of initialization
if (data_buffer_ == nullptr) {
-ASSIGN_OR_RAISE(data_buffer_,
-AllocateResizableBuffer(
-options_.batch_size * schema_->num_fields() * kColumnSizeGuess,
-options_.io_context.pool()));
+ARROW_ASSIGN_OR_RAISE(
+data_buffer_,
+AllocateResizableBuffer(
+options_.batch_size * schema_->num_fields() * kColumnSizeGuess,
+options_.io_context.pool()));
}
return Status::OK();
}
@@ -665,24 +665,24 @@ class CSVWriterImpl : public ipc::RecordBatchWriter {

Status WriteCSV(const Table& table, const WriteOptions& options,
arrow::io::OutputStream* output) {
-ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(output, table.schema(), options));
+ARROW_ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(output, table.schema(), options));
RETURN_NOT_OK(writer->WriteTable(table));
return writer->Close();
}

Status WriteCSV(const RecordBatch& batch, const WriteOptions& options,
arrow::io::OutputStream* output) {
-ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(output, batch.schema(), options));
+ARROW_ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(output, batch.schema(), options));
RETURN_NOT_OK(writer->WriteRecordBatch(batch));
return writer->Close();
}

Status WriteCSV(const std::shared_ptr<RecordBatchReader>& reader,
const WriteOptions& options, arrow::io::OutputStream* output) {
-ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(output, reader->schema(), options));
+ARROW_ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(output, reader->schema(), options));
std::shared_ptr<RecordBatch> batch;
while (true) {
-ASSIGN_OR_RAISE(batch, reader->Next());
+ARROW_ASSIGN_OR_RAISE(batch, reader->Next());
if (batch == nullptr) break;
RETURN_NOT_OK(writer->WriteRecordBatch(*batch));
}
12 changes: 6 additions & 6 deletions cpp/src/arrow/csv/writer_test.cc
@@ -27,7 +27,7 @@
#include "arrow/io/memory.h"
#include "arrow/ipc/writer.h"
#include "arrow/record_batch.h"
#include "arrow/result_internal.h"
#include "arrow/result.h"
#include "arrow/testing/gtest_util.h"
#include "arrow/testing/matchers.h"
#include "arrow/type.h"
@@ -287,19 +287,19 @@ class TestWriteCSV : public ::testing::TestWithParam<WriterTestParams> {
template <typename Data>
Result<std::string> ToCsvString(const Data& data, const WriteOptions& options) {
std::shared_ptr<io::BufferOutputStream> out;
-ASSIGN_OR_RAISE(out, io::BufferOutputStream::Create());
+ARROW_ASSIGN_OR_RAISE(out, io::BufferOutputStream::Create());

RETURN_NOT_OK(WriteCSV(data, options, out.get()));
-ASSIGN_OR_RAISE(std::shared_ptr<Buffer> buffer, out->Finish());
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<Buffer> buffer, out->Finish());
return std::string(reinterpret_cast<const char*>(buffer->data()), buffer->size());
}

Result<std::string> ToCsvStringUsingWriter(const Table& data,
const WriteOptions& options) {
std::shared_ptr<io::BufferOutputStream> out;
-ASSIGN_OR_RAISE(out, io::BufferOutputStream::Create());
+ARROW_ASSIGN_OR_RAISE(out, io::BufferOutputStream::Create());
// Write row-by-row
-ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(out, data.schema(), options));
+ARROW_ASSIGN_OR_RAISE(auto writer, MakeCSVWriter(out, data.schema(), options));
TableBatchReader reader(data);
reader.set_chunksize(1);
std::shared_ptr<RecordBatch> batch;
@@ -310,7 +310,7 @@ class TestWriteCSV : public ::testing::TestWithParam<WriterTestParams> {
}
RETURN_NOT_OK(writer->Close());
EXPECT_EQ(data.num_rows(), writer->stats().num_record_batches);
-ASSIGN_OR_RAISE(std::shared_ptr<Buffer> buffer, out->Finish());
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<Buffer> buffer, out->Finish());
return std::string(reinterpret_cast<const char*>(buffer->data()), buffer->size());
}
};
6 changes: 3 additions & 3 deletions cpp/src/arrow/ipc/writer.cc
@@ -41,7 +41,7 @@
#include "arrow/ipc/metadata_internal.h"
#include "arrow/ipc/util.h"
#include "arrow/record_batch.h"
#include "arrow/result_internal.h"
#include "arrow/result.h"
#include "arrow/sparse_tensor.h"
#include "arrow/status.h"
#include "arrow/table.h"
@@ -840,8 +840,8 @@ Status WriteRecordBatch(const RecordBatch& batch, int64_t buffer_start_offset,

Status WriteRecordBatchStream(const std::vector<std::shared_ptr<RecordBatch>>& batches,
const IpcWriteOptions& options, io::OutputStream* dst) {
-ASSIGN_OR_RAISE(std::shared_ptr<RecordBatchWriter> writer,
-MakeStreamWriter(dst, batches[0]->schema(), options));
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<RecordBatchWriter> writer,
+MakeStreamWriter(dst, batches[0]->schema(), options));
for (const auto& batch : batches) {
DCHECK(batch->schema()->Equals(*batches[0]->schema())) << "Schemas unequal";
RETURN_NOT_OK(writer->WriteRecordBatch(*batch));
22 changes: 0 additions & 22 deletions cpp/src/arrow/result_internal.h

This file was deleted.
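
For context, a plausible reconstruction of the kind of shim the deleted header provided — per the rationale above, it only defined `ASSIGN_OR_RAISE` on top of the public macro. This is a sketch, not the verbatim deleted contents:

```cpp
// Hypothetical reconstruction of cpp/src/arrow/result_internal.h (not verbatim):
// the header only exposed ASSIGN_OR_RAISE as an alias of the public macro.
#pragma once

#include "arrow/result.h"

#ifndef ASSIGN_OR_RAISE
#define ASSIGN_OR_RAISE(lhs, rexpr) ARROW_ASSIGN_OR_RAISE(lhs, rexpr)
#endif
```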

14 changes: 7 additions & 7 deletions cpp/src/parquet/arrow/schema.cc
@@ -25,7 +25,7 @@
#include "arrow/extension_type.h"
#include "arrow/io/memory.h"
#include "arrow/ipc/api.h"
#include "arrow/result_internal.h"
#include "arrow/result.h"
#include "arrow/type.h"
#include "arrow/util/base64.h"
#include "arrow/util/checked_cast.h"
@@ -484,8 +484,8 @@ bool IsDictionaryReadSupported(const ArrowType& type) {
::arrow::Result<std::shared_ptr<ArrowType>> GetTypeForNode(
int column_index, const schema::PrimitiveNode& primitive_node,
SchemaTreeContext* ctx) {
-ASSIGN_OR_RAISE(std::shared_ptr<ArrowType> storage_type,
-GetArrowType(primitive_node, ctx->properties));
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<ArrowType> storage_type,
+GetArrowType(primitive_node, ctx->properties));
if (ctx->properties.read_dictionary(column_index) &&
IsDictionaryReadSupported(*storage_type)) {
return ::arrow::dictionary(::arrow::int32(), storage_type);
@@ -723,8 +723,8 @@ Status ListToSchemaField(const GroupNode& group, LevelInfo current_levels,
// yields list<item: TYPE not null> ?nullable
const auto& primitive_node = static_cast<const PrimitiveNode&>(list_node);
int column_index = ctx->schema->GetColumnIndex(primitive_node);
-ASSIGN_OR_RAISE(std::shared_ptr<ArrowType> type,
-GetTypeForNode(column_index, primitive_node, ctx));
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<ArrowType> type,
+GetTypeForNode(column_index, primitive_node, ctx));
auto item_field = ::arrow::field(list_node.name(), type, /*nullable=*/false,
FieldIdMetadata(list_node.field_id()));
RETURN_NOT_OK(
@@ -799,8 +799,8 @@ Status NodeToSchemaField(const Node& node, LevelInfo current_levels,
// repeated $TYPE $FIELD_NAME
const auto& primitive_node = static_cast<const PrimitiveNode&>(node);
int column_index = ctx->schema->GetColumnIndex(primitive_node);
-ASSIGN_OR_RAISE(std::shared_ptr<ArrowType> type,
-GetTypeForNode(column_index, primitive_node, ctx));
+ARROW_ASSIGN_OR_RAISE(std::shared_ptr<ArrowType> type,
+GetTypeForNode(column_index, primitive_node, ctx));
if (node.is_repeated()) {
// One-level list encoding, e.g.
// a: repeated int32;
