diff --git a/cpp/src/arrow/compute/expression.cc b/cpp/src/arrow/compute/expression.cc
index 532869b3453a7..556efa3d7fadc 100644
--- a/cpp/src/arrow/compute/expression.cc
+++ b/cpp/src/arrow/compute/expression.cc
@@ -763,7 +763,7 @@ Result<Datum> ExecuteScalarExpression(const Expression& expr, const ExecBatch& i
   for (size_t i = 0; i < arguments.size(); ++i) {
     ARROW_ASSIGN_OR_RAISE(
         arguments[i], ExecuteScalarExpression(call->arguments[i], input, exec_context));
-    if (arguments[i].is_array()) {
+    if (!arguments[i].is_scalar()) {
       all_scalar = false;
     }
   }
diff --git a/cpp/src/arrow/compute/expression_test.cc b/cpp/src/arrow/compute/expression_test.cc
index 30bd882b2c039..18b661fd96821 100644
--- a/cpp/src/arrow/compute/expression_test.cc
+++ b/cpp/src/arrow/compute/expression_test.cc
@@ -909,6 +909,39 @@ TEST(Expression, ExecuteCallWithNoArguments) {
   EXPECT_EQ(actual.length(), kCount);
 }
 
+TEST(Expression, ExecuteChunkedArray) {
+  auto input_schema = struct_({field("a", struct_({
+                                              field("a", float64()),
+                                              field("b", float64()),
+                                          }))});
+
+  auto chunked_array_input = ChunkedArrayFromJSON(input_schema, {R"([
+    {"a": {"a": 6.125, "b": 3.375}},
+    {"a": {"a": 0.0, "b": 1}}
+  ])",
+                                                                 R"([
+    {"a": {"a": -1, "b": 4.75}}
+  ])"});
+
+  ASSERT_OK_AND_ASSIGN(auto table_input,
+                       Table::FromChunkedStructArray(chunked_array_input));
+
+  auto expr = add(field_ref(FieldRef("a", "a")), field_ref(FieldRef("a", "b")));
+
+  ASSERT_OK_AND_ASSIGN(expr, expr.Bind(input_schema));
+  std::vector<Datum> inputs{table_input->column(0)};
+  ExecBatch batch{inputs, 3};
+
+  ASSERT_OK_AND_ASSIGN(Datum res, ExecuteScalarExpression(expr, batch));
+
+  AssertDatumsEqual(res, ArrayFromJSON(float64(),
+                                       R"([
+    9.5,
+    1,
+    3.75
+  ])"));
+}
+
 TEST(Expression, ExecuteDictionaryTransparent) {
   ExpectExecute(
       equal(field_ref("a"), field_ref("b")),