Uploads ABR Runs to Google Sheet (#14607)

# Overview

Uploads ABR run data to a Google Sheet.

# Test Plan

1. Uploaded to an empty Google Sheet
2. Uploaded additional lines to a Google Sheet that already contained data
3. Uploaded to the sheet during temp/humidity sensor data collection and verified that the other Google Sheet was not affected

# Changelog

- Added a time conversion in abr_read_logs.py to shift timestamps from UTC to EST
- Added a check in error logging for runs whose error was not documented in the final command
- Removed the instrument model from the serial number entry, since the model information can be extracted from the serial number
- Added lines to connect to the Google Sheet
- Added lines to write to the Google Sheet
- Changed the try/except block when reading JSON files to an if statement: a file is read only if it ends with .json and contains a run_id, which avoids reading google_sheets_tool.py and the credentials file (a minimal sketch of this filtering, together with the time conversion, follows this list)
- In the liquid measurement analysis script (analyze_abr.py), changed from reading the entire file to reading only the last line, because the file stops logging once the scale is stable, so the last line is always the stable reading
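A minimal sketch of the JSON filtering and the UTC-to-EST shift described above, assuming each run log contains `run_id` and `startedAt` fields as in the diff; the `load_run_times` helper name is illustrative and not part of this change.

```python
import json
import os
from datetime import datetime, timedelta


def load_run_times(storage_directory: str) -> dict:
    """Illustrative sketch: filter run logs and shift start times from UTC to EST."""
    run_times = {}
    for filename in os.listdir(storage_directory):
        file_path = os.path.join(storage_directory, filename)
        # The if statement replaces the old try/except: only .json files are read,
        # so google_sheets_tool.py and the credentials file are skipped.
        if not file_path.endswith(".json"):
            continue
        with open(file_path) as f:
            results = json.load(f)
        run_id = results.get("run_id", "")
        if not run_id:
            continue
        # Fixed UTC -> EST offset, matching the timedelta(hours=5) in the diff.
        start_time = datetime.strptime(results["startedAt"], "%Y-%m-%dT%H:%M:%S.%f%z")
        run_times[run_id] = start_time - timedelta(hours=5)
    return run_times
```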

# Review requests


# Risk assessment

Users need a credentials file and google_sheets_tool.py saved in the folder given as the argument when running abr_read_logs; a sketch of the expected setup follows.
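A rough sketch of that setup, assuming a hypothetical storage path; the `abr.json` credentials filename and the `google_sheets_tool` import mirror the diff.

```python
import os
import sys

# Hypothetical path; pass the same folder to abr_read_logs as its argument.
storage_directory = "/home/user/abr_storage"

# google_sheets_tool.py must live in the storage directory so it can be imported.
sys.path.insert(0, storage_directory)
import google_sheets_tool  # noqa: E402

# The credentials file is expected alongside it, named abr.json in this change.
credentials_path = os.path.join(storage_directory, "abr.json")
if not os.path.exists(credentials_path):
    print("No google sheets credentials. Add credentials to the storage folder.")
```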
rclarke0 authored Mar 7, 2024
1 parent fb23b41 commit 3821b0e
Showing 4 changed files with 78 additions and 50 deletions.
76 changes: 54 additions & 22 deletions hardware-testing/hardware_testing/abr_tools/abr_read_logs.py
@@ -1,12 +1,14 @@
"""Read ABR run logs and save data to ABR testing csv."""
"""Read ABR run logs and save data to ABR testing csv and google sheet."""
from .abr_run_logs import get_run_ids_from_storage, get_unseen_run_ids
from .error_levels import ERROR_LEVELS_PATH
from typing import Set, Dict, Tuple, Any, List
import argparse
import os
import csv
import json
from datetime import datetime
import sys
from datetime import datetime, timedelta
import time as t


def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
@@ -17,10 +19,14 @@ def get_modules(file_results: Dict[str, str]) -> Dict[str, Any]:
"magneticBlockV1",
"thermocyclerModuleV2",
)
all_modules = {key: None for key in modList}
all_modules = {key: "" for key in modList}
for module in file_results.get("modules", []):
if isinstance(module, dict) and module.get("model") in modList:
all_modules[module["model"]] = module.get("serialNumber", "")
try:
all_modules[module["model"]] = module["serialNumber"]
except KeyError:
all_modules[module["model"]] = "EMPTYSN"

return all_modules


@@ -41,18 +47,24 @@ def get_error_info(file_results: Dict[str, Any]) -> Tuple[int, str, str, str, st
run_command_error: Dict[str, Any] = commands_of_run[-1]
error_str: int = len(run_command_error.get("error", ""))
if error_str > 1:
error_type = run_command_error["error"].get("errorType", None)
error_code = run_command_error["error"].get("errorCode", None)
error_type = run_command_error["error"].get("errorType", "")
error_code = run_command_error["error"].get("errorCode", "")
try:
# Instrument Error
error_instrument = run_command_error["error"]["errorInfo"]["node"]
except KeyError:
# Module Error
error_instrument = run_command_error["error"]["errorInfo"].get("port", None)
for error in error_levels:
code_error = error[1]
if code_error == error_code:
error_level = error[4]
error_instrument = run_command_error["error"]["errorInfo"].get("port", "")
else:
error_type = file_results["errors"][0]["errorType"]
print(error_type)
error_code = file_results["errors"][0]["errorCode"]
error_instrument = file_results["errors"][0]["detail"]
for error in error_levels:
code_error = error[1]
if code_error == error_code:
error_level = error[4]

return num_of_errors, error_type, error_code, error_instrument, error_level


@@ -97,13 +109,11 @@ def create_data_dictionary(
runs_and_robots = {}
for filename in os.listdir(storage_directory):
file_path = os.path.join(storage_directory, filename)
try:
if file_path.endswith(".json"):
with open(file_path) as file:
file_results = json.load(file)
except (json.JSONDecodeError, KeyError):
print(f"Ignoring unparsable file {file_path}.")
else:
continue

run_id = file_results.get("run_id")
if run_id in runs_to_save:
robot = file_results.get("robot_name")
@@ -131,12 +141,14 @@ def create_data_dictionary(
start_time = datetime.strptime(
file_results.get("startedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
)
start_date = str(start_time.date())
start_time_str = str(start_time).split("+")[0]
adjusted_start_time = start_time - timedelta(hours=5)
start_date = str(adjusted_start_time.date())
start_time_str = str(adjusted_start_time).split("+")[0]
complete_time = datetime.strptime(
file_results.get("completedAt", ""), "%Y-%m-%dT%H:%M:%S.%f%z"
)
complete_time_str = str(complete_time).split("+")[0]
adjusted_complete_time = complete_time - timedelta(hours=5)
complete_time_str = str(adjusted_complete_time).split("+")[0]
run_time = complete_time - start_time
run_time_min = run_time.total_seconds() / 60
except ValueError:
@@ -164,9 +176,8 @@ def create_data_dictionary(
row_2 = {**row, **all_modules}
runs_and_robots[run_id] = row_2
else:
print(
f"Run ID: {run_id} has a run time of 0 minutes. Run not recorded."
)
os.remove(file_path)
print(f"Run ID: {run_id} has a run time of 0 minutes. Run removed.")
return runs_and_robots


@@ -183,6 +194,9 @@ def read_abr_data_sheet(storage_directory: str) -> Set[str]:
run_id = row[headers[1]]
runs_in_sheet.add(run_id)
print(f"There are {str(len(runs_in_sheet))} runs documented in the ABR sheet.")
# Read Google Sheet
google_sheet.write_header(headers)
google_sheet.update_row_index()
return runs_in_sheet


@@ -196,7 +210,11 @@ def write_to_abr_sheet(
writer = csv.writer(f)
for run in range(len(list_of_runs)):
row = runs_and_robots[list_of_runs[run]].values()
writer.writerow(row)
row_list = list(row)
writer.writerow(row_list)
google_sheet.update_row_index()
google_sheet.write_to_row(row_list)
t.sleep(5)


if __name__ == "__main__":
@@ -210,6 +228,20 @@ def write_to_abr_sheet(
)
args = parser.parse_args()
storage_directory = args.storage_directory[0]
try:
sys.path.insert(0, storage_directory)
import google_sheets_tool # type: ignore[import]

credentials_path = os.path.join(storage_directory, "abr.json")
except ImportError:
raise ImportError("Make sure google_sheets_tool.py is in storage directory.")
try:
google_sheet = google_sheets_tool.google_sheet(
credentials_path, "ABR Run Data", tab_number=0
)
print("Connected to google sheet.")
except FileNotFoundError:
print("No google sheets credentials. Add credentials to storage notebook.")
runs_from_storage = get_run_ids_from_storage(storage_directory)
create_abr_data_sheet(storage_directory)
runs_in_sheet = read_abr_data_sheet(storage_directory)
14 changes: 5 additions & 9 deletions hardware-testing/hardware_testing/abr_tools/abr_run_logs.py
@@ -15,13 +15,11 @@ def get_run_ids_from_storage(storage_directory: str) -> Set[str]:
run_ids = set()
for this_file in list_of_files:
read_file = os.path.join(storage_directory, this_file)
try:
if read_file.endswith(".json"):
file_results = json.load(open(read_file))
except json.JSONDecodeError:
print(f"Ignoring unparsable file {read_file}.")
continue
run_id = file_results["run_id"]
run_ids.add(run_id)
run_id = file_results.get("run_id", "")
if len(run_id) > 0:
run_ids.add(run_id)
return run_ids


@@ -94,9 +92,7 @@ def get_run_data(one_run: Any, ip: str) -> Dict[str, Any]:
)
instrument_data = response.json()
for instrument in instrument_data["data"]:
run[instrument["mount"]] = (
instrument["serialNumber"] + "_" + instrument["instrumentModel"]
)
run[instrument["mount"]] = instrument["serialNumber"]
return run


@@ -55,7 +55,7 @@ def __init__(self, robot: str, duration: int, frequency: int) -> None:
print("Connected to the google sheet.")
except FileNotFoundError:
print(
"There is no google sheets credentials. Make sure credentials in jupyter notebook."
"There are no google sheets credentials. Make sure credentials in jupyter notebook."
)
results_list = [] # type: List
start_time = datetime.datetime.now()
36 changes: 18 additions & 18 deletions hardware-testing/hardware_testing/scripts/analyze_abr.py
@@ -44,26 +44,26 @@ def _get_user_input(list: List, some_string: str) -> str:
results_list = []
try:
with open(raw_data_file_csv_path, "r") as f:
for line in f:
# Process the file here
columns = line.split(",")
if len(columns) >= 2:
stable_value = columns[4]
date_of_measurement = columns[0]
date = str(date_of_measurement).split(" ")[0]
row_data = (
date,
raw_data_file_csv,
plate_state,
robot,
stable_value,
sample,
)
results_list.append(row_data)

pass
csvreader = csv.reader(f)
rows = list(csvreader)
except Exception as e:
print(f"Error opening file: {e}")
last_row = rows[-1]
# Process the file here
stable_value = last_row[-2]
print(stable_value)
date_of_measurement = last_row[0]
date = str(date_of_measurement).split(" ")[0]
row_data = (
date,
raw_data_file_csv,
plate_state,
robot,
stable_value,
sample,
)
results_list.append(row_data)

with open(new_csv_file_path, "a", newline="") as csv_file:
csv_writer = csv.writer(csv_file)
# Write data