Skip to content

Commit 1b229b6

Browse files
committed
[KYUUBI #5323] [AUTHZ] Drop Hive and Iceberg tables with PURGE option in tests
### _Why are the changes needed?_ - `DROP TABLE` for Iceberg tables only removes the table from the catalog by default, which may contaminate other tests using the same table - Enable the PURGE option for dropping Iceberg and Hive tables - Iceberg Spark DDL `DROP TABLE ... PURGE` - To drop the table from the catalog and delete the table's contents ### _How was this patch tested?_ - [ ] Add some test cases that check the changes thoroughly including negative and positive cases if possible - [ ] Add screenshots for manual tests if appropriate - [ ] [Run test](https://kyuubi.readthedocs.io/en/master/contributing/code/testing.html#running-tests) locally before making a pull request ### _Was this patch authored or co-authored using generative AI tooling?_ Closes #5323 from bowenliang123/iceberg-purge. Closes #5323 ce4188d [Bowen Liang] purge Authored-by: Bowen Liang <[email protected]> Signed-off-by: Bowen Liang <[email protected]>
1 parent 74e52f6 commit 1b229b6

File tree

6 files changed

+33
-14
lines changed

6 files changed

+33
-14
lines changed

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/SparkSessionProvider.scala

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,7 @@ import org.scalatest.Assertions._
2727

2828
import org.apache.kyuubi.Utils
2929
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
30+
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
3031
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
3132

3233
trait SparkSessionProvider {
@@ -79,7 +80,15 @@ trait SparkSessionProvider {
7980
f
8081
} finally {
8182
res.foreach {
82-
case (t, "table") => doAs(admin, sql(s"DROP TABLE IF EXISTS $t"))
83+
case (t, "table") => doAs(
84+
admin, {
85+
val purgeOption =
86+
if (isSparkV32OrGreater && isCatalogSupportPurge(
87+
spark.sessionState.catalogManager.currentCatalog.name())) {
88+
"PURGE"
89+
} else ""
90+
sql(s"DROP TABLE IF EXISTS $t $purgeOption")
91+
})
8392
case (db, "database") => doAs(admin, sql(s"DROP DATABASE IF EXISTS $db"))
8493
case (fn, "function") => doAs(admin, sql(s"DROP FUNCTION IF EXISTS $fn"))
8594
case (view, "view") => doAs(admin, sql(s"DROP VIEW IF EXISTS $view"))
@@ -96,4 +105,11 @@ trait SparkSessionProvider {
96105
doAs(user, assert(sql(query).collect() === result))
97106
}
98107

108+
private def isCatalogSupportPurge(catalogName: String): Boolean = {
109+
val unsupportedCatalogs = Set(v2JdbcTableCatalogClassName)
110+
spark.conf.getOption(s"spark.sql.catalog.$catalogName") match {
111+
case Some(catalog) if !unsupportedCatalogs.contains(catalog) => true
112+
case _ => false
113+
}
114+
}
99115
}

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/V2JdbcTableCatalogPrivilegesBuilderSuite.scala

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import scala.util.Try
2222

2323
import org.scalatest.Outcome
2424

25+
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
2526
import org.apache.kyuubi.plugin.spark.authz.serde._
2627
import org.apache.kyuubi.util.AssertionUtils._
2728

@@ -38,9 +39,7 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
3839
val jdbcUrl: String = s"$dbUrl;create=true"
3940

4041
override def beforeAll(): Unit = {
41-
spark.conf.set(
42-
s"spark.sql.catalog.$catalogV2",
43-
"org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
42+
spark.conf.set(s"spark.sql.catalog.$catalogV2", v2JdbcTableCatalogClassName)
4443
spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
4544
spark.conf.set(
4645
s"spark.sql.catalog.$catalogV2.driver",
@@ -170,3 +169,8 @@ class V2JdbcTableCatalogPrivilegesBuilderSuite extends V2CommandsPrivilegesSuite
170169
}
171170
}
172171
}
172+
173+
object V2JdbcTableCatalogPrivilegesBuilderSuite {
174+
val v2JdbcTableCatalogClassName: String =
175+
"org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog"
176+
}

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/V2JdbcTableCatalogRangerSparkExtensionSuite.scala

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@ import scala.util.Try
2424
import org.apache.kyuubi.plugin.spark.authz.AccessControlException
2525
import org.apache.kyuubi.plugin.spark.authz.RangerTestNamespace._
2626
import org.apache.kyuubi.plugin.spark.authz.RangerTestUsers._
27+
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
2728
import org.apache.kyuubi.plugin.spark.authz.util.AuthZUtils._
2829

2930
/**
@@ -44,9 +45,7 @@ class V2JdbcTableCatalogRangerSparkExtensionSuite extends RangerSparkExtensionSu
4445
val jdbcUrl: String = s"$dbUrl;create=true"
4546

4647
override def beforeAll(): Unit = {
47-
spark.conf.set(
48-
s"spark.sql.catalog.$catalogV2",
49-
"org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
48+
spark.conf.set(s"spark.sql.catalog.$catalogV2", v2JdbcTableCatalogClassName)
5049
spark.conf.set(s"spark.sql.catalog.$catalogV2.url", jdbcUrl)
5150
spark.conf.set(
5251
s"spark.sql.catalog.$catalogV2.driver",

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/datamasking/DataMaskingForJDBCV2Suite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,13 +23,13 @@ import scala.util.Try
2323
import org.apache.spark.SparkConf
2424
import org.scalatest.Outcome
2525

26+
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
27+
2628
class DataMaskingForJDBCV2Suite extends DataMaskingTestBase {
2729
override protected val extraSparkConf: SparkConf = {
2830
new SparkConf()
2931
.set("spark.sql.defaultCatalog", "testcat")
30-
.set(
31-
"spark.sql.catalog.testcat",
32-
"org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
32+
.set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
3333
.set(s"spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
3434
.set(
3535
s"spark.sql.catalog.testcat.driver",

extensions/spark/kyuubi-spark-authz/src/test/scala/org/apache/kyuubi/plugin/spark/authz/ranger/rowfiltering/RowFilteringForJDBCV2Suite.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,13 +24,13 @@ import scala.util.Try
2424
import org.apache.spark.SparkConf
2525
import org.scalatest.Outcome
2626

27+
import org.apache.kyuubi.plugin.spark.authz.V2JdbcTableCatalogPrivilegesBuilderSuite._
28+
2729
class RowFilteringForJDBCV2Suite extends RowFilteringTestBase {
2830
override protected val extraSparkConf: SparkConf = {
2931
new SparkConf()
3032
.set("spark.sql.defaultCatalog", "testcat")
31-
.set(
32-
"spark.sql.catalog.testcat",
33-
"org.apache.spark.sql.execution.datasources.v2.jdbc.JDBCTableCatalog")
33+
.set("spark.sql.catalog.testcat", v2JdbcTableCatalogClassName)
3434
.set(s"spark.sql.catalog.testcat.url", "jdbc:derby:memory:testcat;create=true")
3535
.set(
3636
s"spark.sql.catalog.testcat.driver",

kyuubi-common/src/test/scala/org/apache/kyuubi/operation/IcebergMetadataTests.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,7 @@ trait IcebergMetadataTests extends HiveJDBCTestHelper with IcebergSuiteMixin wit
133133
}
134134
assert(!rs1.next())
135135
} finally {
136-
statement.execute(s"DROP TABLE IF EXISTS $cg.$db.tbl")
136+
statement.execute(s"DROP TABLE IF EXISTS $cg.$db.tbl PURGE")
137137
}
138138
}
139139
}

0 commit comments

Comments
 (0)