Skip to content

Commit

Permalink
[SPARK-51181][SQL] Enforce determinism when pulling out non-deterministic expressions from logical plan
Browse files Browse the repository at this point in the history

### What changes were proposed in this pull request?
Enforce determinism when pulling out non-deterministic expressions from the logical plan by collecting them into an insertion-ordered `LinkedHashMap` instead of an unordered `Map`.

### Why are the changes needed?
This is needed to avoid a plan-normalization problem when comparing single-pass and fixed-point analyzer results: an unordered `Map` can yield the pulled-out attributes in a nondeterministic order, producing spurious differences between otherwise equivalent plans.

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
Existing tests.

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes #49935 from mihailoale-db/enforcedeterminismlinkedhashmap.

Authored-by: mihailoale-db <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
  • Loading branch information
mihailoale-db authored and cloud-fan committed Feb 13, 2025
1 parent 80fb9d0 commit 18ff7b3
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 7 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,14 @@

package org.apache.spark.sql.catalyst.analysis

import java.util.LinkedHashMap

import org.apache.spark.sql.catalyst.expressions._

object NondeterministicExpressionCollection {
def getNondeterministicToAttributes(
expressions: Seq[Expression]): Map[Expression, NamedExpression] = {
expressions: Seq[Expression]): LinkedHashMap[Expression, NamedExpression] = {
val nonDeterministicToAttributes = new LinkedHashMap[Expression, NamedExpression]
expressions
.filterNot(_.deterministic)
.flatMap { expr =>
Expand All @@ -34,9 +37,9 @@ object NondeterministicExpressionCollection {
case n: NamedExpression => n
case _ => Alias(e, "_nondeterministic")()
}
e -> ne
nonDeterministicToAttributes.put(e, ne)
}
}
.toMap
nonDeterministicToAttributes
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

package org.apache.spark.sql.catalyst.analysis

import scala.jdk.CollectionConverters._

import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.rules.Rule
Expand All @@ -36,9 +38,9 @@ object PullOutNondeterministic extends Rule[LogicalPlan] {
case a: Aggregate if a.groupingExpressions.exists(!_.deterministic) =>
val nondeterToAttr =
NondeterministicExpressionCollection.getNondeterministicToAttributes(a.groupingExpressions)
val newChild = Project(a.child.output ++ nondeterToAttr.values, a.child)
val newChild = Project(a.child.output ++ nondeterToAttr.values.asScala.toSeq, a.child)
a.transformExpressions { case e =>
nondeterToAttr.get(e).map(_.toAttribute).getOrElse(e)
Option(nondeterToAttr.get(e)).map(_.toAttribute).getOrElse(e)
}.copy(child = newChild)

// Don't touch collect metrics. Top-level metrics are not supported (check analysis will fail)
Expand All @@ -55,9 +57,9 @@ object PullOutNondeterministic extends Rule[LogicalPlan] {
val nondeterToAttr =
NondeterministicExpressionCollection.getNondeterministicToAttributes(p.expressions)
val newPlan = p.transformExpressions { case e =>
nondeterToAttr.get(e).map(_.toAttribute).getOrElse(e)
Option(nondeterToAttr.get(e)).map(_.toAttribute).getOrElse(e)
}
val newChild = Project(p.child.output ++ nondeterToAttr.values, p.child)
val newChild = Project(p.child.output ++ nondeterToAttr.values.asScala.toSeq, p.child)
Project(p.output, newPlan.withNewChildren(newChild :: Nil))
}
}

0 comments on commit 18ff7b3

Please sign in to comment.