
Commit 8fa5114

add separate scaled score column option
1 parent: 7ba3c9b


countess/plugins/score_scale.py

Lines changed: 4 additions & 2 deletions
@@ -47,23 +47,25 @@ class ScoreScalingPlugin(DuckdbSimplePlugin):
     version = VERSION
 
     score_col = NumericColumnChoiceParam("Score Column")
+    scaled_col = StringParam("Scaled Score Column", "scaled_score")
     classifiers = ArrayParam("Variant Classifiers", ScaleClassParam("Class"), min_size=2, max_size=2, read_only=True)
     group_col = ColumnOrNoneChoiceParam("Group By")
 
     def execute(
         self, ddbc: DuckDBPyConnection, source: DuckDBPyRelation, row_limit: Optional[int] = None
     ) -> Optional[DuckDBPyRelation]:
         score_col_id = duckdb_escape_identifier(self.score_col.value)
+        scaled_col_id = duckdb_escape_identifier(self.scaled_col.value)
 
-        all_columns = ",".join(duckdb_escape_identifier(c) for c in source.columns if c != self.score_col.value)
+        all_columns = ",".join("T0." + duckdb_escape_identifier(c) for c in source.columns if c != self.scaled_col.value)
 
         if self.group_col.is_not_none():
             group_col_id = "T0." + duckdb_escape_identifier(self.group_col.value)
         else:
             group_col_id = "1"  # dummy value for one big group.
 
         sql = f"""
-            select {all_columns}, ({score_col_id} - T1.y) / (T1.z - T1.y) as {score_col_id}
+            select {all_columns}, ({score_col_id} - T1.y) / (T1.z - T1.y) as {scaled_col_id}
             from {source.alias} T0 join (
                 select {group_col_id} as x,
                     median({score_col_id}) filter ({self.classifiers[0].filter()}) as y,
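For context on the change: the generated SQL rescales each score linearly so that the median score of the first classifier class (T1.y) maps to 0 and the median of the second class (T1.z, presumably defined just below the hunk shown) maps to 1. With this commit the result is written to a separate column named by the new "Scaled Score Column" parameter instead of replacing the score column. Below is a minimal Python sketch of the same transformation for a single group, using illustrative column and class names rather than the plugin's actual parameters:

    from statistics import median

    def scale_scores(rows, score_col="score", scaled_col="scaled_score",
                     class_col="class", classes=("synonymous", "nonsense")):
        # Median score of each classifier class: classes[0] anchors 0.0 and
        # classes[1] anchors 1.0 (mirrors T1.y and T1.z in the SQL above).
        y = median(r[score_col] for r in rows if r[class_col] == classes[0])
        z = median(r[score_col] for r in rows if r[class_col] == classes[1])
        # Keep the original score column and add the scaled value as a new column.
        return [{**r, scaled_col: (r[score_col] - y) / (z - y)} for r in rows]

A row scoring exactly at the first class's median comes out as 0.0, one at the second class's median as 1.0, and all other rows fall on (or beyond) the line between them.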
