[SPARK-51258][SQL][FOLLOWUP] Remove unnecessary inheritance from SQLConfHelper

### What changes were proposed in this pull request?
This PR proposes to remove unnecessary inheritance from `SQLConfHelper`.

### Why are the changes needed?

1. Some traits already extend `SQLConfHelper`, so we should avoid duplicated inheritance. For example:
```
trait TreeNodeResolver[UnresolvedNode <: TreeNode[_], ResolvedNode <: TreeNode[_]]
    extends SQLConfHelper
    with QueryErrorsBase {
  def resolve(unresolvedNode: UnresolvedNode): ResolvedNode
}
```
```
trait SQLHelper extends SQLConfHelper {
...
}
```
```
trait PlanTestBase extends PredicateHelper with SQLHelper { self: Suite =>
...
}
trait PlanTest extends SparkFunSuite with PlanTestBase
```
2. `V2SessionCatalog` already mixes in `SQLConfHelper`, so we should use `conf` directly (see the sketch below).
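
For context, here is a minimal sketch of what `SQLConfHelper` provides (simplified, not the exact current contents of the trait): any class or trait that mixes it in, directly or transitively, can read the active session configuration through `conf`, so repeating `with SQLConfHelper` or going through `SQLConf.get` adds nothing.

```
import org.apache.spark.sql.internal.SQLConf

// Simplified sketch of org.apache.spark.sql.catalyst.SQLConfHelper
// (illustrative only; the real trait may carry additional helpers).
trait SQLConfHelper {
  // The active config object within the current scope.
  def conf: SQLConf = SQLConf.get
}

// ConditionalExpressionResolver inherits SQLConfHelper transitively via
// TreeNodeResolver, and V2SessionCatalog mixes it in directly, so inside
// those classes `conf.defaultDatabase` is equivalent to
// `SQLConf.get.defaultDatabase`.
```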

### Does this PR introduce _any_ user-facing change?
No. This PR only updates internal code.

### How was this patch tested?
GA (GitHub Actions).

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes #50046 from beliefer/SPARK-51258_followup.

Authored-by: beliefer <beliefer@163.com>
Signed-off-by: beliefer <beliefer@163.com>
beliefer committed Feb 23, 2025
1 parent 7e9547c commit 9a1f921
Showing 4 changed files with 5 additions and 9 deletions.

```
@@ -17,7 +17,6 @@
 
 package org.apache.spark.sql.catalyst.analysis.resolver
 
-import org.apache.spark.sql.catalyst.SQLConfHelper
 import org.apache.spark.sql.catalyst.analysis.{AnsiTypeCoercion, TypeCoercion}
 import org.apache.spark.sql.catalyst.expressions.{ConditionalExpression, Expression}
 
@@ -28,8 +27,7 @@ class ConditionalExpressionResolver(
     expressionResolver: ExpressionResolver,
     timezoneAwareExpressionResolver: TimezoneAwareExpressionResolver)
     extends TreeNodeResolver[ConditionalExpression, Expression]
-    with ResolvesExpressionChildren
-    with SQLConfHelper {
+    with ResolvesExpressionChildren {
 
   private val typeCoercionTransformations: Seq[Expression => Expression] =
     if (conf.ansiEnabled) {
```

```
@@ -22,7 +22,6 @@ import org.scalatest.Suite
 import org.scalatest.Tag
 
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.catalyst.SQLConfHelper
 import org.apache.spark.sql.catalyst.analysis.SimpleAnalyzer
 import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.CodegenObjectFactoryMode
@@ -62,7 +61,7 @@ trait CodegenInterpretedPlanTest extends PlanTest {
  * Provides helper methods for comparing plans, but without the overhead of
  * mandating a FunSuite.
  */
-trait PlanTestBase extends PredicateHelper with SQLHelper with SQLConfHelper { self: Suite =>
+trait PlanTestBase extends PredicateHelper with SQLHelper { self: Suite =>
 
   protected def normalizeExprIds(plan: LogicalPlan): LogicalPlan =
     NormalizePlan.normalizeExprIds(plan)
```

```
@@ -48,7 +48,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
   extends TableCatalog with FunctionCatalog with SupportsNamespaces with SQLConfHelper {
   import V2SessionCatalog._
 
-  override val defaultNamespace: Array[String] = Array(SQLConf.get.defaultDatabase)
+  override val defaultNamespace: Array[String] = Array(conf.defaultDatabase)
 
   override def name: String = CatalogManager.SESSION_CATALOG_NAME
 
@@ -83,7 +83,7 @@ class V2SessionCatalog(catalog: SessionCatalog)
   }
 
   private def hasCustomSessionCatalog: Boolean = {
-    catalog.conf.getConf(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION) != "builtin"
+    conf.getConf(SQLConf.V2_SESSION_CATALOG_IMPLEMENTATION) != "builtin"
   }
 
   override def loadTable(ident: Identifier): Table = {
```

```
@@ -18,7 +18,6 @@
 package org.apache.spark.sql.analysis.resolver
 
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.SQLConfHelper
 import org.apache.spark.sql.catalyst.analysis.UnresolvedStar
 import org.apache.spark.sql.catalyst.analysis.resolver.{NameScope, NameScopeStack, NameTarget}
 import org.apache.spark.sql.catalyst.expressions.{
@@ -41,7 +40,7 @@ import org.apache.spark.sql.types.{
   StructType
 }
 
-class NameScopeSuite extends PlanTest with SQLConfHelper {
+class NameScopeSuite extends PlanTest {
   private val col1Integer = AttributeReference(name = "col1", dataType = IntegerType)()
   private val col1IntegerOther = AttributeReference(name = "col1", dataType = IntegerType)()
   private val col2Integer = AttributeReference(name = "col2", dataType = IntegerType)()
```
