Commit
Added representation and validation tests
canimus committed Oct 1, 2023
1 parent ede094e commit e6cd8cc
Showing 4 changed files with 52 additions and 3 deletions.
37 changes: 37 additions & 0 deletions test/unit/class_check/test_representation.py
@@ -0,0 +1,37 @@
from cuallee import Check, CheckLevel

def test_representation():
    check = Check(CheckLevel.WARNING, "Repr")
    out = repr(check)
    assert "Check(level:CheckLevel.WARNING" in out

def test_numeric_level():
    check = Check(1, "Repr")
    out = repr(check)
    assert "Check(level:CheckLevel.ERROR" in out

def test_with_table():
    check = Check(0, "Repr", table_name="users")
    out = repr(check)
    assert ("Check(level:CheckLevel.WARNING" in out) and ("table:users" in out)

def test_sum():
    check = Check(0, "Sum")
    check.is_complete("id")
    check.is_unique("id")
    assert check.sum == 2

def test_keys():
    check1 = Check(0, "Sum")
    check1.is_complete("id")
    check1.is_unique("id")
    check2 = Check(0, "Sum")
    check2.is_complete("id")
    check2.is_unique("id")
    assert set(check1.keys) == set(check2.keys)

def test_adjust_coverage():
    check = Check(0, "Sum")
    check.is_complete("id", pct=0.8)
    check.adjust_rule_coverage(0, 0.9)
    assert check.rules[0].coverage == 0.9
3 changes: 1 addition & 2 deletions test/unit/class_rule/test_rule_update_functions.py
@@ -1,8 +1,6 @@
import pyspark.sql.functions as F

from cuallee import Check, CheckLevel


def test_add_rule():
    c = Check(CheckLevel.WARNING, "test_add_rule").is_complete("id")
    assert len(c._rule) == 1
@@ -104,3 +102,4 @@ def test_delete_rule_by_coverage(spark):

    c.delete_rule_by_attribute("coverage", 1.0)
    assert len(c._rule) == 0

13 changes: 13 additions & 0 deletions test/unit/class_rule/test_rule_validations.py
@@ -0,0 +1,13 @@
import pytest
from cuallee import Rule, CheckDataType

def test_negative_coverage():
    with pytest.raises(
        ValueError, match="Coverage should be between 0 and 1"
    ):
        Rule("is_unique", "id", None, CheckDataType.NUMERIC, -1)

def test_representation():
    rule = Rule("is_unique", "id", None, CheckDataType.NUMERIC)
    out = str(repr(rule))
    assert "Rule(method:is_unique" in out
2 changes: 1 addition & 1 deletion test/unit/pyspark_dataframe/test_are_unique.py
@@ -14,7 +14,7 @@ def test_positive(spark):
def test_is_composite_key(spark):
    df = spark.range(10).withColumn("id2", F.col("id") + 10)
    check = Check(CheckLevel.WARNING, "pytest")
-    check.are_unique(("id", "id2"))
+    check.is_composite_key(("id", "id2"))
    rs = check.validate(df)
    assert rs.first().status == "PASS"
