From 2416b8ee597cc1985afbfd7b2a4b09b7e0cbb115 Mon Sep 17 00:00:00 2001
From: NickEdwards7502
Date: Thu, 19 Sep 2024 15:45:44 +1000
Subject: [PATCH] REFACTOR: Update pairwise operation tests based on import changes (#237)

Reference changed to importTransposedCSV
---
 .../variantspark/api/CommonPairwiseOperationTest.scala | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/src/test/scala/au/csiro/variantspark/api/CommonPairwiseOperationTest.scala b/src/test/scala/au/csiro/variantspark/api/CommonPairwiseOperationTest.scala
index 98efa0d9..275e683b 100644
--- a/src/test/scala/au/csiro/variantspark/api/CommonPairwiseOperationTest.scala
+++ b/src/test/scala/au/csiro/variantspark/api/CommonPairwiseOperationTest.scala
@@ -24,7 +24,7 @@ class CommonPairwiseOperationTest extends SparkTest {
   @Test
   def testManhattanPaiwiseOperation() {
     implicit val vsContext = VSContext(spark)
-    val features = vsContext.importCSV("src/test/data/synthetic_100x10k.csv");
+    val features = vsContext.importTransposedCSV("src/test/data/synthetic_100x10k.csv");
     val result = features.pairwiseOperation("manhattan").value
     val expected = TestCsvUtils.readColumnToDoubleArray(
       "src/test/data/synthetic_100x10k_metrics.csv", "manhattan")
@@ -34,7 +34,7 @@
   @Test
   def testEuclideanPaiwiseOperation() {
     implicit val vsContext = VSContext(spark)
-    val features = vsContext.importCSV("src/test/data/synthetic_100x10k.csv");
+    val features = vsContext.importTransposedCSV("src/test/data/synthetic_100x10k.csv");
     val result = features.pairwiseOperation("euclidean").value
     val expected = TestCsvUtils.readColumnToDoubleArray(
       "src/test/data/synthetic_100x10k_metrics.csv", "euclidean")
@@ -44,7 +44,7 @@
   @Test
   def testAnySharedAltCountPaiwiseOperation() {
     implicit val vsContext = VSContext(spark)
-    val features = vsContext.importCSV("src/test/data/synthetic_100x10k.csv");
+    val features = vsContext.importTransposedCSV("src/test/data/synthetic_100x10k.csv");
     val result = features.pairwiseOperation("anySharedAltAlleleCount").value
     val expected = TestCsvUtils.readColumnToDoubleArray(
       "src/test/data/synthetic_100x10k_metrics.csv", "anySharedCount")
@@ -54,7 +54,7 @@
   @Test
   def testAllSharedAltCountPaiwiseOperation() {
     implicit val vsContext = VSContext(spark)
-    val features = vsContext.importCSV("src/test/data/synthetic_100x10k.csv");
+    val features = vsContext.importTransposedCSV("src/test/data/synthetic_100x10k.csv");
     val result = features.pairwiseOperation("sharedAltAlleleCount").value
     val expected = TestCsvUtils.readColumnToDoubleArray(
       "src/test/data/synthetic_100x10k_metrics.csv", "allSharedCount")
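
Reviewer note (not part of the patch): the sketch below illustrates the call
pattern these tests now rely on, with importTransposedCSV replacing importCSV.
It is a minimal sketch based only on the hunks above; the JUnit import, the
Array[Double] type of `.value`, and the comparison tolerance are assumptions,
not variant-spark's documented API surface.

    // Illustrative sketch only; names and types outside the hunks are assumed.
    import org.junit.Assert.assertArrayEquals

    implicit val vsContext = VSContext(spark) // same setup as the tests above
    // transposed CSV import replaces the old importCSV call
    val features = vsContext.importTransposedCSV("src/test/data/synthetic_100x10k.csv")
    // compute one of the pairwise metrics exercised by the tests
    val result = features.pairwiseOperation("manhattan").value
    // expected values come from the pre-computed metrics CSV
    val expected = TestCsvUtils.readColumnToDoubleArray(
      "src/test/data/synthetic_100x10k_metrics.csv", "manhattan")
    // assumes both sides are Array[Double]; the tolerance is illustrative
    assertArrayEquals(expected, result, 1e-6)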