-
Notifications
You must be signed in to change notification settings - Fork 0
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Add validation for tagging type #23
Changes from 7 commits
857e539
3b6d091
a0f1566
29efa8b
6f9dba2
f53e63f
dd8d2ab
3cbc67e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -10,7 +10,7 @@ | |
from datetime import datetime | ||
import pandas as pd | ||
from typing import Union | ||
|
||
from skit_labels import constants as const | ||
|
||
LOG_LEVELS = ["CRITICAL", "ERROR", "WARNING", "SUCCESS", "INFO", "DEBUG", "TRACE"] | ||
|
||
|
@@ -110,3 +110,37 @@ def add_data_label(input_file: str, data_label: Optional[str] = None) -> str: | |
df = df.assign(data_label=data_label) | ||
df.to_csv(input_file, index=False) | ||
return input_file | ||
|
||
|
||
def validate_headers(input_file, tagging_type): | ||
expected_columns_mapping = const.EXPECTED_COLUMNS_MAPPING | ||
expected_headers = expected_columns_mapping.get(tagging_type) | ||
|
||
df = pd.read_csv(input_file) | ||
column_headers = df.columns.to_list() | ||
column_headers = [header.lower() for header in column_headers] | ||
column_headers = sorted(column_headers) | ||
expected_headers = sorted(expected_headers) | ||
logger.info(f"column_headers: {column_headers}") | ||
logger.info(f"expected_headers: {expected_headers}") | ||
|
||
is_match = column_headers == expected_headers | ||
mismatch_headers = [] | ||
logger.info(f"Is match: {is_match}") | ||
|
||
if not is_match: | ||
mismatch_headers_set =set(column_headers).symmetric_difference(set(expected_headers)) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Instead lets just remove non-required columns and ensure that the required ones are there There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Have added that change |
||
mismatch_headers = list(mismatch_headers_set) | ||
return is_match, mismatch_headers | ||
|
||
|
||
def validate_input_data(tagging_type, input_file): | ||
is_valid = True | ||
error = '' | ||
if tagging_type == const.CONVERSATION_TAGGING: | ||
is_match, mismatch_headers = validate_headers(input_file, tagging_type) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. General nitpick: These functions can work with just one return value. Instead of passing is_match checking that mismatch_headers has values should suffice and make the code more readable There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Have fixed this |
||
if not is_match: | ||
error = f'Headers in the input file does not match the expected fields. Mismatched fields = {mismatch_headers}' | ||
is_valid = False | ||
|
||
return is_valid, error |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
This works for now but ideally even field type will be checked in the future. So something like:
"conversation_tagging": {'scenario': [str], 'scenario_category': [int], 'situation_str': [str], 'call': [str], 'data_label': [str]}
But this is good for now
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Will add this in the next iteration, when I add the changes for the download functions.