[SPARK-51034][SQL] Reformat Describe As JSON statistics dict for parse-ability #49728

Closed · 6 commits · Changes from all commits
@@ -28,7 +28,7 @@ import com.fasterxml.jackson.annotation.JsonInclude.Include
 import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
 import com.fasterxml.jackson.module.scala.{ClassTagExtensions, DefaultScalaModule}
 import org.apache.commons.lang3.StringUtils
-import org.json4s.JsonAST.{JArray, JBool, JDouble, JInt, JLong, JNull, JObject, JString, JValue}
+import org.json4s.JsonAST.{JArray, JBool, JDecimal, JDouble, JInt, JLong, JNull, JObject, JString, JValue}
 import org.json4s.jackson.JsonMethods._
 
 import org.apache.spark.SparkException
@@ -61,36 +61,60 @@ trait MetadataMapSupport {
     jsonToString(toJsonLinkedHashMap)
   }
 
+  /**
+   * Some fields from JsonLinkedHashMap are reformatted for human readability in `describe table`.
+   * If a field does not require special re-formatting, it is simply handled by `jsonToString`.
+   */
+  private def jsonToStringReformat(key: String, jValue: JValue): Option[(String, String)] = {
+    val reformattedValue: Option[String] = key match {
+      case "Statistics" =>
+        jValue match {
+          case JObject(fields) =>
+            Some(fields.flatMap {
+              case ("size_in_bytes", JDecimal(bytes)) => Some(s"$bytes bytes")
+              case ("num_rows", JDecimal(rows)) => Some(s"$rows rows")
+              case _ => None
+            }.mkString(", "))
+          case _ => Some(jValue.values.toString)
+        }
+      case "Created Time" | "Last Access" =>
+        jValue match {
+          case JLong(value) => Some(new Date(value).toString)
+          case _ => Some(jValue.values.toString)
+        }
+      case _ => None
+    }
+    reformattedValue.map(value => key -> value)
+  }
+
   protected def jsonToString(
       jsonMap: mutable.LinkedHashMap[String, JValue]): mutable.LinkedHashMap[String, String] = {
     val map = new mutable.LinkedHashMap[String, String]()
-    val timestampKeys = Set("Created Time", "Last Access")
     jsonMap.foreach { case (key, jValue) =>
-      val stringValue = jValue match {
-        case JString(value) => value
-        case JArray(values) =>
-          values.map(_.values)
-            .map {
-              case str: String => quoteIdentifier(str)
-              case other => other.toString
-            }
-            .mkString("[", ", ", "]")
-        case JObject(fields) =>
-          fields.map { case (k, v) =>
-            s"$k=${v.values.toString}"
-          }
-            .mkString("[", ", ", "]")
-        case JInt(value) => value.toString
-        case JDouble(value) => value.toString
-        case JLong(value) =>
-          if (timestampKeys.contains(key)) {
-            new Date(value).toString
-          } else {
-            value.toString
-          }
-        case _ => jValue.values.toString
-      }
-      map.put(key, stringValue)
+      jsonToStringReformat(key, jValue) match {
+        case Some((formattedKey, formattedValue)) =>
+          map.put(formattedKey, formattedValue)
+        case None =>
+          val stringValue = jValue match {
+            case JString(value) => value
+            case JArray(values) =>
+              values.map(_.values)
+                .map {
+                  case str: String => quoteIdentifier(str)
+                  case other => other.toString
+                }
+                .mkString("[", ", ", "]")
+            case JObject(fields) =>
+              fields.map { case (k, v) =>
+                s"$k=${v.values.toString}"
+              }.mkString("[", ", ", "]")
+            case JInt(value) => value.toString
+            case JDouble(value) => value.toString
+            case JLong(value) => value.toString
+            case _ => jValue.values.toString
+          }
+          map.put(key, stringValue)
+      }
     }
     map
   }
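For context, here is a small self-contained sketch of the display-side behavior introduced above. It is not part of the patch and depends only on json4s; `StatisticsReformatSketch` and `reformatStatistics` are hypothetical names standing in for the `"Statistics"` branch of `jsonToStringReformat`: a JObject of `size_in_bytes`/`num_rows` is collapsed back into the familiar `N bytes, M rows` string for the plain DESCRIBE output, while anything else falls through to a generic rendering.

```scala
import org.json4s.JsonAST.{JDecimal, JObject, JValue}

object StatisticsReformatSketch {
  // Hypothetical stand-alone analogue of the "Statistics" branch in jsonToStringReformat:
  // a JObject of size_in_bytes/num_rows is rendered as "<size> bytes, <rows> rows".
  def reformatStatistics(jValue: JValue): String = jValue match {
    case JObject(fields) =>
      fields.flatMap {
        case ("size_in_bytes", JDecimal(bytes)) => Some(s"$bytes bytes")
        case ("num_rows", JDecimal(rows)) => Some(s"$rows rows")
        case _ => None
      }.mkString(", ")
    case other => other.values.toString
  }

  def main(args: Array[String]): Unit = {
    val stats = JObject(List(
      "size_in_bytes" -> JDecimal(BigDecimal(1024)),
      "num_rows" -> JDecimal(BigDecimal(10))
    ))
    println(reformatStatistics(stats)) // 1024 bytes, 10 rows
  }
}
```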
@@ -642,7 +666,9 @@ case class CatalogTable(
       map += "View Query Output Columns" -> viewQueryOutputColumns
     }
     if (tableProperties != JNull) map += "Table Properties" -> tableProperties
-    if (stats.isDefined) map += "Statistics" -> JString(stats.get.simpleString)
+    stats.foreach { s =>
+      map += "Statistics" -> JObject(s.jsonString.toList)
+    }
     map ++= storage.toJsonLinkedHashMap.map { case (k, v) => k -> v }
     if (tracksPartitionsInCatalog) map += "Partition Provider" -> JString("Catalog")
     if (partitionColumns != JNull) map += "Partition Columns" -> partitionColumns
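To illustrate why this change helps parse-ability, the hedged sketch below (plain json4s, outside Spark) contrasts the old opaque string value with the nested object now emitted for the statistics entry; the lower-cased top-level key mirrors the `statistics` field exercised in the test change further down, and the numbers are placeholders.

```scala
import org.json4s.JsonAST.{JDecimal, JObject, JString}
import org.json4s.jackson.JsonMethods.{compact, render}

object StatisticsShapeSketch {
  def main(args: Array[String]): Unit = {
    // Before: the whole statistics entry was a single pre-formatted string.
    val before = JObject(List("statistics" -> JString("1024 bytes, 10 rows")))

    // After: a nested object whose fields JSON consumers can read directly.
    val after = JObject(List(
      "statistics" -> JObject(List(
        "size_in_bytes" -> JDecimal(BigDecimal(1024)),
        "num_rows" -> JDecimal(BigDecimal(10))
      ))
    ))

    println(compact(render(before))) // e.g. {"statistics":"1024 bytes, 10 rows"}
    println(compact(render(after)))  // e.g. {"statistics":{"size_in_bytes":1024,"num_rows":10}}
  }
}
```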
@@ -811,6 +837,14 @@ case class CatalogStatistics(
     val rowCountString = if (rowCount.isDefined) s", ${rowCount.get} rows" else ""
     s"$sizeInBytes bytes$rowCountString"
   }
+
+  def jsonString: Map[String, JValue] = {
+    val rowCountInt: BigInt = rowCount.getOrElse(0L)
+    Map(
+      "size_in_bytes" -> JDecimal(BigDecimal(sizeInBytes)),
+      "num_rows" -> JDecimal(BigDecimal(rowCountInt))
+    )
+  }
 }
 
 /**
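A usage note on the new method: `rowCount` is optional on `CatalogStatistics`, and `jsonString` reports a missing row count as 0 rather than omitting the key, which keeps the JSON shape stable for consumers. The minimal analogue below (hypothetical `StatsSketch` class, reduced to the same two fields) shows the paired human-readable and machine-readable forms.

```scala
import org.json4s.JsonAST.{JDecimal, JValue}

// Hypothetical stand-in for CatalogStatistics, reduced to the fields used here.
case class StatsSketch(sizeInBytes: BigInt, rowCount: Option[BigInt] = None) {
  // Human-readable form (mirrors simpleString).
  def simpleString: String =
    s"$sizeInBytes bytes" + rowCount.map(r => s", $r rows").getOrElse("")

  // Machine-readable form (mirrors the jsonString added in this patch);
  // a missing row count is reported as 0.
  def jsonString: Map[String, JValue] = Map(
    "size_in_bytes" -> JDecimal(BigDecimal(sizeInBytes)),
    "num_rows" -> JDecimal(BigDecimal(rowCount.getOrElse(BigInt(0))))
  )
}

object StatsSketchDemo {
  def main(args: Array[String]): Unit = {
    println(StatsSketch(BigInt(4096), Some(BigInt(42))).simpleString) // 4096 bytes, 42 rows
    println(StatsSketch(BigInt(4096)).jsonString("num_rows"))         // JDecimal(0)
  }
}
```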
@@ -273,7 +273,7 @@ trait DescribeTableSuiteBase extends command.DescribeTableSuiteBase
     }
   }
 
-  test("DESCRIBE AS JSON partition spec") {
+  test("DESCRIBE AS JSON partition spec and statistics") {
     withNamespaceAndTable("ns", "table") { t =>
       val tableCreationStr =
         s"""
@@ -289,6 +289,7 @@ trait DescribeTableSuiteBase extends command.DescribeTableSuiteBase
            |""".stripMargin
       spark.sql(tableCreationStr)
       spark.sql(s"ALTER TABLE $t ADD PARTITION (region='USA', category='tech')")
+      spark.sql(s"ANALYZE TABLE $t COMPUTE STATISTICS FOR ALL COLUMNS")
 
       val descriptionDf =
         spark.sql(s"DESCRIBE FORMATTED $t PARTITION (region='USA', category='tech') AS JSON")
@@ -324,7 +325,11 @@ trait DescribeTableSuiteBase extends command.DescribeTableSuiteBase
         },
         partition_provider = Some("Catalog"),
         partition_columns = Some(List("region", "category")),
-        partition_values = Some(Map("region" -> "USA", "category" -> "tech"))
+        partition_values = Some(Map("region" -> "USA", "category" -> "tech")),
+        statistics = Some(Map(
+          "size_in_bytes" -> 0,
+          "num_rows" -> 0
+        ))
       )
 
       assert(parsedOutput.location.isDefined)
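For end users, the practical effect exercised above is that statistics can be read straight out of the DESCRIBE ... AS JSON document instead of being re-parsed from a display string. The end-to-end sketch below is hedged: the table name and local-mode session are placeholders, and it assumes the AS JSON output arrives as a single JSON string column, as in the suite above.

```scala
import org.apache.spark.sql.SparkSession
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods.parse

object DescribeStatisticsDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("describe-json-stats").getOrCreate()

    spark.sql("CREATE TABLE demo_tbl (id INT) USING parquet")
    spark.sql("ANALYZE TABLE demo_tbl COMPUTE STATISTICS")

    // DESCRIBE ... AS JSON emits one row holding the JSON document.
    val json = spark.sql("DESCRIBE FORMATTED demo_tbl AS JSON").head().getString(0)

    (parse(json) \ "statistics") match {
      case JObject(fields) => fields.foreach { case (k, v) => println(s"$k = ${v.values}") }
      case other => println(s"no statistics object found: ${other.values}")
    }

    spark.stop()
  }
}
```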
@@ -726,6 +731,7 @@ case class DescribeTableJson(
     partition_provider: Option[String] = None,
     partition_columns: Option[List[String]] = Some(Nil),
     partition_values: Option[Map[String, String]] = None,
+    statistics: Option[Map[String, Any]] = None,
     view_text: Option[String] = None,
     view_original_text: Option[String] = None,
     view_schema_mode: Option[String] = None,
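Finally, the new `statistics` field on the test helper `DescribeTableJson` is what lets the suite deserialize the object form directly. Below is a trimmed-down, hypothetical version of that round trip using jackson-module-scala (the same library the main change already imports); `DescribeJsonSketch` keeps only the fields relevant here, and the inline JSON is a hand-written stand-in for real DESCRIBE output.

```scala
import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper}
import com.fasterxml.jackson.module.scala.DefaultScalaModule

// Hypothetical, trimmed-down analogue of the test helper DescribeTableJson,
// keeping only the fields relevant to this change.
case class DescribeJsonSketch(
    table_name: Option[String] = None,
    statistics: Option[Map[String, Any]] = None)

object DescribeJsonParseDemo {
  def main(args: Array[String]): Unit = {
    val mapper = new ObjectMapper()
    mapper.registerModule(DefaultScalaModule)
    // DESCRIBE AS JSON emits many more fields than this sketch models.
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false)

    val json = """{"table_name":"table","statistics":{"size_in_bytes":0,"num_rows":0}}"""
    val parsed = mapper.readValue(json, classOf[DescribeJsonSketch])
    println(parsed.statistics) // Some(Map(size_in_bytes -> 0, num_rows -> 0))
  }
}
```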