From 3821b0ecf8df5c5b036c5d3b2b4d4bd765b66966 Mon Sep 17 00:00:00 2001 From: Rhyann Clarke <146747548+rclarke0@users.noreply.github.com> Date: Thu, 7 Mar 2024 15:31:00 -0500 Subject: [PATCH] Uploads ABR Runs to Google Sheet (#14607) # Overview Uploads ABR runs to google sheet # Test Plan 1. Uploaded to empty google sheet 2. Uploaded additional lines to filled google sheet 3. Uploaded to sheet during temp/humidity sensor data collection and verified that the other google sheet was not affected. # Changelog - Added time conversion to abr_run_logs.py to convert from UTC to EST - Added check in error logging if error was not documented in final command - Removed addition of instrument model from serial number since model information can be extracted from serial. - Added lines to connect to google sheet - Added lines to write to google sheet - Changed try/except block when reading json files to an if statement: read if ends with .json and if run_id exists to avoid reading google_sheets_tools.py and credentials file - In liquid measurement analysis script (analyze_abr.py) changed from reading the entire file to just the last line because the file stops logging when scale is stable. The last line will always be the stable line. 
# Review requests # Risk assessment Users need a credentials file and google_sheets_tools.py saved in the folder given as an argument when running ABR_read_logs --- .../abr_tools/abr_read_logs.py | 76 +++++++++++++------ .../abr_tools/abr_run_logs.py | 14 ++-- .../scripts/abr_asair_sensor.py | 2 +- .../hardware_testing/scripts/analyze_abr.py | 36 ++++----- 4 files changed, 78 insertions(+), 50 deletions(-) diff --git a/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py b/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py index e2d98859661..2da0ed088d8 100644 --- a/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py +++ b/hardware-testing/hardware_testing/abr_tools/abr_read_logs.py @@ -1,4 +1,4 @@ -"""Read ABR run logs and save data to ABR testing csv.""" +"""Read ABR run logs and save data to ABR testing csv and google sheet.""" from .abr_run_logs import get_run_ids_from_storage, get_unseen_run_ids from .error_levels import ERROR_LEVELS_PATH from typing import Set, Dict, Tuple, Any, List @@ -6,7 +6,9 @@ import os import csv import json -from datetime import datetime +import sys +from datetime import datetime, timedelta +import time as t def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]: @@ -17,10 +19,14 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]: "magneticBlockV1", "thermocyclerModuleV2", ) - all_modules = {key: None for key in modList} + all_modules = {key: "" for key in modList} for module in file_results.get("modules", []): if isinstance(module, dict) and module.get("model") in modList: - all_modules[module["model"]] = module.get("serialNumber", "") + try: + all_modules[module["model"]] = module["serialNumber"] + except KeyError: + all_modules[module["model"]] = "EMPTYSN" + return all_modules @@ -41,18 +47,24 @@ def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, st run_command_error: Dict[str, Any] = commands_of_run[-1] error_str: int = 
len(run_command_error.get("error", "")) if error_str > 1: - error_type = run_command_error["error"].get("errorType", None) - error_code = run_command_error["error"].get("errorCode", None) + error_type = run_command_error["error"].get("errorType", "") + error_code = run_command_error["error"].get("errorCode", "") try: # Instrument Error error_instrument = run_command_error["error"]["errorInfo"]["node"] except KeyError: # Module Error - error_instrument = run_command_error["error"]["errorInfo"].get("port", None) - for error in error_levels: - code_error = error[1] - if code_error == error_code: - error_level = error[4] + error_instrument = run_command_error["error"]["errorInfo"].get("port", "") + else: + error_type = file_results["errors"][0]["errorType"] + print(error_type) + error_code = file_results["errors"][0]["errorCode"] + error_instrument = file_results["errors"][0]["detail"] + for error in error_levels: + code_error = error[1] + if code_error == error_code: + error_level = error[4] + return num_of_errors, error_type, error_code, error_instrument, error_level @@ -97,13 +109,11 @@ def create_data_dictionary( runs_and_robots = {} for filename in os.listdir(storage_directory): file_path = os.path.join(storage_directory, filename) - try: + if file_path.endswith(".json"): with open(file_path) as file: file_results = json.load(file) - except (json.JSONDecodeError, KeyError): - print(f"Ignoring unparsable file {file_path}.") + else: continue - run_id = file_results.get("run_id") if run_id in runs_to_save: robot = file_results.get("robot_name") @@ -131,12 +141,14 @@ def create_data_dictionary( start_time = datetime.strptime( file_results.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z" ) - start_date = str(start_time.date()) - start_time_str = str(start_time).split("+")[0] + adjusted_start_time = start_time - timedelta(hours=5) + start_date = str(adjusted_start_time.date()) + start_time_str = str(adjusted_start_time).split("+")[0] complete_time = datetime.strptime( 
file_results.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z" ) - complete_time_str = str(complete_time).split("+")[0] + adjusted_complete_time = complete_time - timedelta(hours=5) + complete_time_str = str(adjusted_complete_time).split("+")[0] run_time = complete_time - start_time run_time_min = run_time.total_seconds() / 60 except ValueError: @@ -164,9 +176,8 @@ def create_data_dictionary( row_2 = {**row, **all_modules} runs_and_robots[run_id] = row_2 else: - print( - f"Run ID: {run_id} has a run time of 0 minutes. Run not recorded." - ) + os.remove(file_path) + print(f"Run ID: {run_id} has a run time of 0 minutes. Run removed.") return runs_and_robots @@ -183,6 +194,9 @@ def read_abr_data_sheet(storage_directory: str) -> Set[str]: run_id = row[headers[1]] runs_in_sheet.add(run_id) print(f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet.") + # Read Google Sheet + google_sheet.write_header(headers) + google_sheet.update_row_index() return runs_in_sheet @@ -196,7 +210,11 @@ def write_to_abr_sheet( writer = csv.writer(f) for run in range(len(list_of_runs)): row = runs_and_robots[list_of_runs[run]].values() - writer.writerow(row) + row_list = list(row) + writer.writerow(row_list) + google_sheet.update_row_index() + google_sheet.write_to_row(row_list) + t.sleep(5) if __name__ == "__main__": @@ -210,6 +228,20 @@ def write_to_abr_sheet( ) args = parser.parse_args() storage_directory = args.storage_directory[0] + try: + sys.path.insert(0, storage_directory) + import google_sheets_tool # type: ignore[import] + + credentials_path = os.path.join(storage_directory, "abr.json") + except ImportError: + raise ImportError("Make sure google_sheets_tool.py is in storage directory.") + try: + google_sheet = google_sheets_tool.google_sheet( + credentials_path, "ABR Run Data", tab_number=0 + ) + print("Connected to google sheet.") + except FileNotFoundError: + print("No google sheets credentials. 
Add credentials to storage notebook.") runs_from_storage = get_run_ids_from_storage(storage_directory) create_abr_data_sheet(storage_directory) runs_in_sheet = read_abr_data_sheet(storage_directory) diff --git a/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py b/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py index c625436127f..0e802ef2d12 100644 --- a/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py +++ b/hardware-testing/hardware_testing/abr_tools/abr_run_logs.py @@ -15,13 +15,11 @@ def get_run_ids_from_storage(storage_directory: str) -> Set[str]: run_ids = set() for this_file in list_of_files: read_file = os.path.join(storage_directory, this_file) - try: + if read_file.endswith(".json"): file_results = json.load(open(read_file)) - except json.JSONDecodeError: - print(f"Ignoring unparsable file {read_file}.") - continue - run_id = file_results["run_id"] - run_ids.add(run_id) + run_id = file_results.get("run_id", "") + if len(run_id) > 0: + run_ids.add(run_id) return run_ids @@ -94,9 +92,7 @@ def get_run_data(one_run: Any, ip: str) -> Dict[str, Any]: ) instrument_data = response.json() for instrument in instrument_data["data"]: - run[instrument["mount"]] = ( - instrument["serialNumber"] + "_" + instrument["instrumentModel"] - ) + run[instrument["mount"]] = instrument["serialNumber"] return run diff --git a/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py b/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py index db977dabbd9..10d62b345f3 100644 --- a/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py +++ b/hardware-testing/hardware_testing/scripts/abr_asair_sensor.py @@ -55,7 +55,7 @@ def __init__(self, robot: str, duration: int, frequency: int) -> None: print("Connected to the google sheet.") except FileNotFoundError: print( - "There is no google sheets credentials. Make sure credentials in jupyter notebook." + "There are no google sheets credentials. Make sure credentials in jupyter notebook." 
) results_list = [] # type: List start_time = datetime.datetime.now() diff --git a/hardware-testing/hardware_testing/scripts/analyze_abr.py b/hardware-testing/hardware_testing/scripts/analyze_abr.py index 2ee84c28c3c..f6e7ec0a9b7 100644 --- a/hardware-testing/hardware_testing/scripts/analyze_abr.py +++ b/hardware-testing/hardware_testing/scripts/analyze_abr.py @@ -44,26 +44,26 @@ def _get_user_input(list: List, some_string: str) -> str: results_list = [] try: with open(raw_data_file_csv_path, "r") as f: - for line in f: - # Process the file here - columns = line.split(",") - if len(columns) >= 2: - stable_value = columns[4] - date_of_measurement = columns[0] - date = str(date_of_measurement).split(" ")[0] - row_data = ( - date, - raw_data_file_csv, - plate_state, - robot, - stable_value, - sample, - ) - results_list.append(row_data) - - pass + csvreader = csv.reader(f) + rows = list(csvreader) except Exception as e: print(f"Error opening file: {e}") + last_row = rows[-1] + # Process the file here + stable_value = last_row[-2] + print(stable_value) + date_of_measurement = last_row[0] + date = str(date_of_measurement).split(" ")[0] + row_data = ( + date, + raw_data_file_csv, + plate_state, + robot, + stable_value, + sample, + ) + results_list.append(row_data) + with open(new_csv_file_path, "a", newline="") as csv_file: csv_writer = csv.writer(csv_file) # Write data