Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(robot-server, api): accept RTP overrides via /protocols and inject them in python executor #14688

Merged
4 changes: 3 additions & 1 deletion api/src/opentrons/execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -600,7 +600,9 @@ def _run_file_non_pe(

context.home()
try:
execute_apiv2.run_protocol(protocol, context)
# TODO (spp, 2024-03-18): use true run-time param overrides once enabled
# for cli protocol simulation/ execution
execute_apiv2.run_protocol(protocol, context, run_time_param_overrides=None)
finally:
context.cleanup()

Expand Down
4 changes: 4 additions & 0 deletions api/src/opentrons/protocol_engine/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -909,3 +909,7 @@ class EnumParameter(RTPBase):


RunTimeParameter = Union[IntParameter, FloatParameter, EnumParameter]

RunTimeParamValuesType = Dict[
str, Union[float, bool, str]
] # update value types as more RTP types are added
5 changes: 4 additions & 1 deletion api/src/opentrons/protocol_reader/protocol_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,10 @@ def __init__(
self._file_hasher = file_hasher or FileHasher()

async def save(
self, files: Sequence[BufferedFile], directory: Path, content_hash: str
self,
files: Sequence[BufferedFile],
directory: Path,
content_hash: str,
) -> ProtocolSource:
"""Compute a `ProtocolSource` from buffered files and save them as files.

Expand Down
9 changes: 7 additions & 2 deletions api/src/opentrons/protocol_runner/legacy_wrappers.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
)
from opentrons.legacy_broker import LegacyBroker
from opentrons.protocol_engine import ProtocolEngine
from opentrons.protocol_engine.types import RunTimeParamValuesType
from opentrons.protocol_reader import ProtocolSource, ProtocolFileRole
from opentrons.util.broker import Broker

Expand Down Expand Up @@ -168,9 +169,13 @@ class LegacyExecutor:
"""Interface to execute Protocol API v2 protocols in a child thread."""

@staticmethod
async def execute(protocol: LegacyProtocol, context: LegacyProtocolContext) -> None:
async def execute(
protocol: LegacyProtocol,
context: LegacyProtocolContext,
run_time_param_values: Optional[RunTimeParamValuesType],
) -> None:
"""Execute a PAPIv2 protocol with a given ProtocolContext in a child thread."""
await to_thread.run_sync(run_protocol, protocol, context)
await to_thread.run_sync(run_protocol, protocol, context, run_time_param_values)


__all__ = [
Expand Down
28 changes: 20 additions & 8 deletions api/src/opentrons/protocol_runner/protocol_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,11 @@
LegacyExecutor,
LegacyLoadInfo,
)
from ..protocol_engine.types import PostRunHardwareState, DeckConfigurationType
from ..protocol_engine.types import (
PostRunHardwareState,
DeckConfigurationType,
RunTimeParamValuesType,
)


class RunResult(NamedTuple):
Expand Down Expand Up @@ -106,6 +110,7 @@ async def run(
self,
deck_configuration: DeckConfigurationType,
protocol_source: Optional[ProtocolSource] = None,
run_time_param_values: Optional[RunTimeParamValuesType] = None,
) -> RunResult:
"""Run a given protocol to completion."""

Expand Down Expand Up @@ -135,17 +140,18 @@ def __init__(
self._legacy_executor = legacy_executor or LegacyExecutor()
# TODO(mc, 2022-01-11): replace task queue with specific implementations
# of runner interface
self._task_queue = (
task_queue or TaskQueue()
) # cleanup_func=protocol_engine.finish))
self._task_queue = task_queue or TaskQueue()
self._task_queue.set_cleanup_func(
func=protocol_engine.finish,
drop_tips_after_run=drop_tips_after_run,
post_run_hardware_state=post_run_hardware_state,
)

async def load(
self, protocol_source: ProtocolSource, python_parse_mode: PythonParseMode
self,
protocol_source: ProtocolSource,
python_parse_mode: PythonParseMode,
run_time_param_values: Optional[RunTimeParamValuesType],
) -> None:
"""Load a Python or JSONv5(& older) ProtocolSource into managed ProtocolEngine."""
labware_definitions = await protocol_reader.extract_labware_definitions(
Expand Down Expand Up @@ -182,26 +188,30 @@ async def load(
initial_home_command = pe_commands.HomeCreate(
params=pe_commands.HomeParams(axes=None)
)
# this command homes all axes, including pipette plugner and gripper jaw
# this command homes all axes, including pipette plunger and gripper jaw
self._protocol_engine.add_command(request=initial_home_command)

self._task_queue.set_run_func(
func=self._legacy_executor.execute,
protocol=protocol,
context=context,
run_time_param_values=run_time_param_values,
)

async def run( # noqa: D102
self,
deck_configuration: DeckConfigurationType,
protocol_source: Optional[ProtocolSource] = None,
run_time_param_values: Optional[RunTimeParamValuesType] = None,
python_parse_mode: PythonParseMode = PythonParseMode.NORMAL,
) -> RunResult:
# TODO(mc, 2022-01-11): move load to runner creation, remove from `run`
# currently `protocol_source` arg is only used by tests
# currently `protocol_source` arg is only used by tests & protocol analyzer
if protocol_source:
await self.load(
protocol_source=protocol_source, python_parse_mode=python_parse_mode
protocol_source=protocol_source,
python_parse_mode=python_parse_mode,
run_time_param_values=run_time_param_values,
)

self.play(deck_configuration=deck_configuration)
Expand Down Expand Up @@ -301,6 +311,7 @@ async def run( # noqa: D102
self,
deck_configuration: DeckConfigurationType,
protocol_source: Optional[ProtocolSource] = None,
run_time_param_values: Optional[RunTimeParamValuesType] = None,
) -> RunResult:
# TODO(mc, 2022-01-11): move load to runner creation, remove from `run`
# currently `protocol_source` arg is only used by tests
Expand Down Expand Up @@ -344,6 +355,7 @@ async def run( # noqa: D102
self,
deck_configuration: DeckConfigurationType,
protocol_source: Optional[ProtocolSource] = None,
run_time_param_values: Optional[RunTimeParamValuesType] = None,
) -> RunResult:
assert protocol_source is None
await self._hardware_api.home()
Expand Down
7 changes: 6 additions & 1 deletion api/src/opentrons/protocols/execution/execute.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import logging
from typing import Optional, Dict, Union

from opentrons.protocol_api import ProtocolContext
from opentrons.protocols.execution.execute_python import run_python
Expand All @@ -16,7 +17,11 @@
MODULE_LOG = logging.getLogger(__name__)


def run_protocol(protocol: Protocol, context: ProtocolContext) -> None:
def run_protocol(
protocol: Protocol,
context: ProtocolContext,
run_time_param_overrides: Optional[Dict[str, Union[float, bool, str]]] = None,
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should use the PE type here if possible

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I had a TODO for this exact thing. I/ pycharm must have removed it during refactor. Putting it back in.

) -> None:
"""Run a protocol.

:param protocol: The :py:class:`.protocols.types.Protocol` to execute
Expand Down
4 changes: 3 additions & 1 deletion api/src/opentrons/simulate.py
Original file line number Diff line number Diff line change
Expand Up @@ -866,7 +866,9 @@ def _run_file_non_pe(
context.home()
with scraper.scrape():
try:
execute.run_protocol(protocol, context)
# TODO (spp, 2024-03-18): use true run-time param overrides once enabled
# for cli protocol simulation/ execution
execute.run_protocol(protocol, context, run_time_param_overrides=None)
if (
isinstance(protocol, PythonProtocol)
and protocol.api_level >= APIVersion(2, 0)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,11 @@ async def test_runner_with_python(
robot_type="OT-2 Standard",
protocol_config=protocol_source.config,
)
result = await subject.run(deck_configuration=[], protocol_source=protocol_source)
result = await subject.run(
deck_configuration=[],
protocol_source=protocol_source,
run_time_param_values=None,
)
commands_result = result.commands
pipettes_result = result.state_summary.pipettes
labware_result = result.state_summary.labware
Expand Down Expand Up @@ -176,7 +180,11 @@ async def test_runner_with_legacy_python(legacy_python_protocol_file: Path) -> N
robot_type="OT-2 Standard",
protocol_config=protocol_source.config,
)
result = await subject.run(deck_configuration=[], protocol_source=protocol_source)
result = await subject.run(
deck_configuration=[],
protocol_source=protocol_source,
run_time_param_values=None,
)

commands_result = result.commands
pipettes_result = result.state_summary.pipettes
Expand Down Expand Up @@ -235,7 +243,11 @@ async def test_runner_with_legacy_json(legacy_json_protocol_file: Path) -> None:
subject = await create_simulating_runner(
robot_type="OT-2 Standard", protocol_config=protocol_source.config
)
result = await subject.run(deck_configuration=[], protocol_source=protocol_source)
result = await subject.run(
deck_configuration=[],
protocol_source=protocol_source,
run_time_param_values=None,
)

commands_result = result.commands
pipettes_result = result.state_summary.pipettes
Expand Down
5 changes: 5 additions & 0 deletions api/tests/opentrons/protocol_runner/test_protocol_runner.py
Original file line number Diff line number Diff line change
Expand Up @@ -456,6 +456,7 @@ async def test_load_legacy_python(
await legacy_python_runner_subject.load(
legacy_protocol_source,
python_parse_mode=PythonParseMode.ALLOW_LEGACY_METADATA_AND_REQUIREMENTS,
run_time_param_values=None,
)

decoy.verify(
Expand All @@ -468,6 +469,7 @@ async def test_load_legacy_python(
func=legacy_executor.execute,
protocol=legacy_protocol,
context=legacy_context,
run_time_param_values=None,
),
)
assert broker_captor.value is legacy_python_runner_subject.broker
Expand Down Expand Up @@ -526,6 +528,7 @@ async def test_load_python_with_pe_papi_core(
await legacy_python_runner_subject.load(
legacy_protocol_source,
python_parse_mode=PythonParseMode.ALLOW_LEGACY_METADATA_AND_REQUIREMENTS,
run_time_param_values=None,
)

decoy.verify(protocol_engine.add_plugin(matchers.IsA(LegacyContextPlugin)), times=0)
Expand Down Expand Up @@ -587,6 +590,7 @@ async def test_load_legacy_json(
await legacy_python_runner_subject.load(
legacy_protocol_source,
python_parse_mode=PythonParseMode.ALLOW_LEGACY_METADATA_AND_REQUIREMENTS,
run_time_param_values=None,
)

decoy.verify(
Expand All @@ -599,6 +603,7 @@ async def test_load_legacy_json(
func=legacy_executor.execute,
protocol=legacy_protocol,
context=legacy_context,
run_time_param_values=None,
),
)

Expand Down
2 changes: 2 additions & 0 deletions robot-server/robot_server/protocols/analysis_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,8 @@ def add_pending(self, protocol_id: str, analysis_id: str) -> AnalysisSummary:
Returns:
A summary of the just-added analysis.
"""
# TODO (spp, 2024-03-19): cap the number of analyses being stored by
# auto-deleting old ones
new_pending_analysis = self._pending_store.add(
protocol_id=protocol_id, analysis_id=analysis_id
)
Expand Down
7 changes: 6 additions & 1 deletion robot-server/robot_server/protocols/protocol_analyzer.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
"""Protocol analysis module."""
import logging
from typing import Optional

from opentrons import protocol_runner
from opentrons.protocol_engine.errors import ErrorOccurrence
from opentrons.protocol_engine.types import RunTimeParamValuesType
import opentrons.util.helpers as datetime_helper

import robot_server.errors.error_mappers as em
Expand All @@ -27,6 +29,7 @@ async def analyze(
self,
protocol_resource: ProtocolResource,
analysis_id: str,
run_time_param_values: Optional[RunTimeParamValuesType],
) -> None:
"""Analyze a given protocol, storing the analysis when complete."""
runner = await protocol_runner.create_simulating_runner(
Expand All @@ -35,7 +38,9 @@ async def analyze(
)
try:
result = await runner.run(
protocol_source=protocol_resource.source, deck_configuration=[]
protocol_source=protocol_resource.source,
deck_configuration=[],
run_time_param_values=run_time_param_values,
)
except BaseException as error:
internal_error = em.map_unexpected_error(error=error)
Expand Down
32 changes: 30 additions & 2 deletions robot-server/robot_server/protocols/router.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
"""Router for /protocols endpoints."""
import json
import logging
from textwrap import dedent
from datetime import datetime
Expand Down Expand Up @@ -165,6 +166,10 @@ async def create_protocol(
" protocol resources on the robot."
),
),
runTimeParameterValues: Optional[str] = Form(
default=None,
description="Key value pairs of run-time parameters defined in a protocol.",
),
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It might be worth clarifying in this description that:

  • This is a string holding a JSON object.
  • This affects the auto-started analysis resource (GET /protocols/{id}/analyses/{id}).

protocol_directory: Path = Depends(get_protocol_directory),
protocol_store: ProtocolStore = Depends(get_protocol_store),
analysis_store: AnalysisStore = Depends(get_analysis_store),
Expand All @@ -184,6 +189,7 @@ async def create_protocol(
Arguments:
files: List of uploaded files, from form-data.
key: Optional key for client-side tracking
runTimeParameterValues: Key value pairs of run-time parameters defined in a protocol.
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Let's make this snake_case for consistency

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's actually camel case for robot-server HTTP API consistency purposes.

protocol_directory: Location to store uploaded files.
protocol_store: In-memory database of protocol resources.
analysis_store: In-memory database of protocol analyses.
Expand All @@ -205,11 +211,32 @@ async def create_protocol(
assert file.filename is not None
buffered_files = await file_reader_writer.read(files=files) # type: ignore[arg-type]

if isinstance(runTimeParameterValues, str):
# We have to do this isinstance check because if `runTimeParameterValues` is
# not specified in the request, then it gets assigned a Form(None) value
# instead of just a None. \(O.o)/
parsed_rtp = json.loads(runTimeParameterValues)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Nit, doesn't have to happen in this PR: This will raise a 500 error if the client passes invalid JSON. Ideally, it would return a 422 instead. I wonder if there's a way to make a custom "run-time parameter multipart form field" Pydantic type so FastAPI can do that for us.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hmm.. interesting idea. I'll add a TODO

else:
parsed_rtp = None
content_hash = await file_hasher.hash(buffered_files)
cached_protocol_id = protocol_store.get_id_by_hash(content_hash)

if cached_protocol_id is not None:
# Protocol exists in database
resource = protocol_store.get(protocol_id=cached_protocol_id)
if parsed_rtp:
# This protocol exists in database but needs to be re-analyzed with the
# passed-in RTP overrides
task_runner.run(
protocol_analyzer.analyze,
protocol_resource=resource,
analysis_id=analysis_id,
run_time_param_values=parsed_rtp,
)
analysis_store.add_pending(
protocol_id=cached_protocol_id,
analysis_id=analysis_id,
)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This will probably need to be implemented in a different PR, but at some point before releasing this, we'll need to cap the maximum number of protocol analyses. I'm curious if you have any thoughts on what the limit will be?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Ya, we'll have to figure that out. How did we land on the max number of protocols and runs to store? Is there a hard cut off for memory availability we need to keep in mind?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's mostly how long it would take to parse them all into Pydantic models if we had to do some kind of migration.

analyses = analysis_store.get_summaries_by_protocol(
protocol_id=cached_protocol_id
)
Expand All @@ -228,7 +255,8 @@ async def create_protocol(
)

log.info(
f'Protocol with id "{cached_protocol_id}" with same contents already exists. returning existing protocol data in response payload'
f'Protocol with id "{cached_protocol_id}" with same contents already exists.'
f" Returning existing protocol data in response payload."
)

return await PydanticResponse.create(
Expand All @@ -238,7 +266,6 @@ async def create_protocol(
)

try:
# Can make the passed in RTPs as part of protocolSource returned here
source = await protocol_reader.save(
files=buffered_files,
directory=protocol_directory / protocol_id,
Expand Down Expand Up @@ -272,6 +299,7 @@ async def create_protocol(
protocol_analyzer.analyze,
protocol_resource=protocol_resource,
analysis_id=analysis_id,
run_time_param_values=parsed_rtp,
)
pending_analysis = analysis_store.add_pending(
protocol_id=protocol_id,
Expand Down
1 change: 1 addition & 0 deletions robot-server/robot_server/runs/engine_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,7 @@ async def create(
# was uploaded before we added stricter validation, and that
# doesn't conform to the new rules.
python_parse_mode=PythonParseMode.ALLOW_LEGACY_METADATA_AND_REQUIREMENTS,
run_time_param_values=None,
)
elif isinstance(runner, JsonRunner):
assert (
Expand Down
Loading
Loading