From 2c3f96a7a0845fbb00d1d2565122a69de274b73b Mon Sep 17 00:00:00 2001
From: cindyyuanjiang
Date: Mon, 18 Sep 2023 17:58:57 -0700
Subject: [PATCH] removed unit test since conv is marked unsupported

Signed-off-by: cindyyuanjiang
---
 .../tool/planparser/SqlPlanParserSuite.scala | 30 -------------------
 1 file changed, 30 deletions(-)

diff --git a/core/src/test/scala/com/nvidia/spark/rapids/tool/planparser/SqlPlanParserSuite.scala b/core/src/test/scala/com/nvidia/spark/rapids/tool/planparser/SqlPlanParserSuite.scala
index b60b6a914..a5a7c8262 100644
--- a/core/src/test/scala/com/nvidia/spark/rapids/tool/planparser/SqlPlanParserSuite.scala
+++ b/core/src/test/scala/com/nvidia/spark/rapids/tool/planparser/SqlPlanParserSuite.scala
@@ -976,36 +976,6 @@ class SQLPlanParserSuite extends BaseTestSuite {
     }
   }
 
-  test("conv is supported in ProjectExec") {
-    TrampolineUtil.withTempDir { parquetoutputLoc =>
-      TrampolineUtil.withTempDir { eventLogDir =>
-        val (eventLog, _) = ToolTestUtils.generateEventLog(eventLogDir,
-          "ProjectExprsSupported") { spark =>
-          import spark.implicits._
-          val df1 = Seq("10", "-10", "-1", "FFFFFFFFFFFFFFFF").toDF("value")
-          // write df1 to parquet to transform LocalTableScan to ProjectExec
-          df1.write.parquet(s"$parquetoutputLoc/testtext")
-          val df2 = spark.read.parquet(s"$parquetoutputLoc/testtext")
-          // conv should be part of ProjectExec
-          df2.select(conv(df2("value"), 16, 10))
-        }
-        val pluginTypeChecker = new PluginTypeChecker()
-        val app = createAppFromEventlog(eventLog)
-        assert(app.sqlPlans.size == 2)
-        val parsedPlans = app.sqlPlans.map { case (sqlID, plan) =>
-          SQLPlanParser.parseSQLPlan(app.appId, plan, sqlID, "", pluginTypeChecker, app)
-        }
-        val allExecInfo = getAllExecsFromPlan(parsedPlans.toSeq)
-        val wholeStages = allExecInfo.filter(_.exec.contains("WholeStageCodegen"))
-        assert(wholeStages.size == 1)
-        assert(wholeStages.forall(_.duration.nonEmpty))
-        val allChildren = wholeStages.flatMap(_.children).flatten
-        val projects = allChildren.filter(_.exec == "Project")
-        assertSizeAndSupported(1, projects)
-      }
-    }
-  }
-
   test("Parse SQL function Name in HashAggregateExec") {
     TrampolineUtil.withTempDir { eventLogDir =>
       val (eventLog, _) = ToolTestUtils.generateEventLog(eventLogDir, "sqlmetric") { spark =>