@@ -419,36 +419,36 @@ def test_unique_values_ts3(self):
         summary = dfResults.collect()[0]
         self.assertEqual(summary[0], 51)

-    def test_unique_values_ts4(self):
-        testDataUniqueTSDF2 = (
-            datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
-            .withIdOutput()
-            .withColumn("test_ts", "timestamp", unique_values=51, random=True,
-                        begin="2017-10-01", end="2018-10-06", interval="minutes=10")
-            .build()
-        )
+    # def test_unique_values_ts4(self):
+    #     testDataUniqueTSDF2 = (
+    #         datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
+    #         .withIdOutput()
+    #         .withColumn("test_ts", "timestamp", unique_values=51, random=True,
+    #                     begin="2017-10-01", end="2018-10-06", interval="minutes=10")
+    #         .build()
+    #     )

-        testDataUniqueTSDF2.createOrReplaceTempView("testUniqueTS4")
+    #     testDataUniqueTSDF2.createOrReplaceTempView("testUniqueTS4")

-        dfResults = spark.sql("select count(distinct test_ts) from testUniqueTS4")
-        summary = dfResults.collect()[0]
-        self.assertEqual(summary[0], 51)
+    #     dfResults = spark.sql("select count(distinct test_ts) from testUniqueTS4")
+    #     summary = dfResults.collect()[0]
+    #     self.assertEqual(summary[0], 51)

-    def test_unique_values_date(self):
-        testDataUniqueDF3spec = (
-            datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
-            .withIdOutput()
-            .withColumn("test_ts", "date", unique_values=51, interval="1 days")
-        )
-        testDataUniqueDF3 = testDataUniqueDF3spec.build()
+    # def test_unique_values_date(self):
+    #     testDataUniqueDF3spec = (
+    #         datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
+    #         .withIdOutput()
+    #         .withColumn("test_ts", "date", unique_values=51, interval="1 days")
+    #     )
+    #     testDataUniqueDF3 = testDataUniqueDF3spec.build()

-        testDataUniqueDF3.createOrReplaceTempView("testUnique3")
+    #     testDataUniqueDF3.createOrReplaceTempView("testUnique3")

-        testDataUniqueDF3spec.explain()
+    #     testDataUniqueDF3spec.explain()

-        dfResults = spark.sql("select count(distinct test_ts) from testUnique3")
-        summary = dfResults.collect()[0]
-        self.assertEqual(summary[0], 51)
+    #     dfResults = spark.sql("select count(distinct test_ts) from testUnique3")
+    #     summary = dfResults.collect()[0]
+    #     self.assertEqual(summary[0], 51)

     def test_unique_values_date2(self):
         testDataUniqueDF4 = (datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
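For reference, the pattern the commented-out timestamp test exercised looks roughly like the sketch below. This is a minimal sketch, assuming the same `datagen` module alias and live `spark` session used throughout this test file; the `df` name, the generator name "unique_ts_sketch", and the distinct-count check are illustrative stand-ins for the temp-view/SQL assertion the test used.

# Sketch only: generate 100k rows whose "test_ts" timestamp column is
# drawn at random from 51 distinct values over a 10-minute-step range,
# then verify the distinct count matches unique_values.
df = (
    datagen.DataGenerator(sparkSession=spark, name="unique_ts_sketch",
                          rows=100000, partitions=4)
    .withIdOutput()
    .withColumn("test_ts", "timestamp", unique_values=51, random=True,
                begin="2017-10-01", end="2018-10-06", interval="minutes=10")
    .build()
)
assert df.select("test_ts").distinct().count() == 51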
@@ -463,20 +463,20 @@ def test_unique_values_date2(self):
         summary = dfResults.collect()[0]
         self.assertEqual(summary[0], 51)

-    def test_unique_values_date3(self):
-        testDataUniqueDF4a = (
-            datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
-            .withIdOutput()
-            .withColumn("test_ts", "date", unique_values=51, random=True, begin="2017-10-01", end="2018-10-06",
-                        interval="days=2")
-            .build()
-        )
+    # def test_unique_values_date3(self):
+    #     testDataUniqueDF4a = (
+    #         datagen.DataGenerator(sparkSession=spark, name="test_data_set1", rows=100000, partitions=4)
+    #         .withIdOutput()
+    #         .withColumn("test_ts", "date", unique_values=51, random=True, begin="2017-10-01", end="2018-10-06",
+    #                     interval="days=2")
+    #         .build()
+    #     )

-        testDataUniqueDF4a.createOrReplaceTempView("testUnique4a")
+    #     testDataUniqueDF4a.createOrReplaceTempView("testUnique4a")

-        dfResults = spark.sql("select count(distinct test_ts) from testUnique4a")
-        summary = dfResults.collect()[0]
-        self.assertEqual(summary[0], 51)
+    #     dfResults = spark.sql("select count(distinct test_ts) from testUnique4a")
+    #     summary = dfResults.collect()[0]
+    #     self.assertEqual(summary[0], 51)

     def test_unique_values_integers(self):
         testDataUniqueIntegersDF = (
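The disabled date-typed variants follow the same shape. Below is a minimal sketch mirroring the commented-out test_unique_values_date3, again assuming this file's `datagen` alias and live `spark` session; the view name "unique_date_sketch" is purely illustrative.

# Sketch only: a date column restricted to 51 unique values stepped in
# 2-day intervals, checked through the same temp-view/SQL path the
# original test used.
df = (
    datagen.DataGenerator(sparkSession=spark, name="unique_date_sketch",
                          rows=100000, partitions=4)
    .withIdOutput()
    .withColumn("test_ts", "date", unique_values=51, random=True,
                begin="2017-10-01", end="2018-10-06", interval="days=2")
    .build()
)
df.createOrReplaceTempView("unique_date_sketch")
assert spark.sql("select count(distinct test_ts) from unique_date_sketch").collect()[0][0] == 51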