Merge remote-tracking branch 'origin/chore_release-7.2.0' into pick_up_tip_testing
Carlos-fernandez committed Feb 15, 2024
2 parents 413d816 + 9dc8f2d commit eab5f68
Showing 131 changed files with 1,850 additions and 714 deletions.
2 changes: 1 addition & 1 deletion .github/actions/python/pypi-deploy/action.yaml
@@ -28,7 +28,7 @@ runs:
fi
fi
status=0
CI=1 QUIET=1 BUILD_NUMBER=${OT_BUILD} make -C ${{ inputs.project }} clean deploy twine_repository_url=${{ inputs.repository_url }} pypi_username=opentrons pypi_password=${{ inputs.password }} || status=$?
CI=1 QUIET=1 BUILD_NUMBER=${OT_BUILD} make -C ${{ inputs.project }} clean deploy twine_repository_url=${{ inputs.repository_url }} pypi_username=__token__ pypi_password=${{ inputs.password }} || status=$?
if [[ ${status} != 0 ]] && [[ ${{ inputs.repository_url }} =~ "test.pypi.org" ]]; then
echo "upload failures allowed to test pypi"
exit 0
3 changes: 2 additions & 1 deletion .github/actions/python/setup/action.yaml
@@ -27,6 +27,7 @@ runs:
- shell: bash
run: |
npm install --global [email protected]
$OT_PYTHON -m pip install pipenv==2023.11.15
$OT_PYTHON -m pip install --upgrade pip
$OT_PYTHON -m pip install pipenv==2023.12.1
- shell: bash
run: 'make -C ${{ inputs.project }} setup'
4 changes: 2 additions & 2 deletions .github/workflows/api-test-lint-deploy.yaml
@@ -165,11 +165,11 @@ jobs:
with:
project: 'api'
repository_url: 'https://test.pypi.org/legacy/'
password: '${{ secrets.OT_TEST_PYPI_PASSWORD }}'
password: '${{ secrets.TEST_PYPI_DEPLOY_TOKEN_OPENTRONS }}'
- if: startsWith(env.OT_TAG, 'v')
name: 'upload to real pypi'
uses: './.github/actions/python/pypi-deploy'
with:
project: 'api'
repository_url: 'https://upload.pypi.org/legacy/'
password: '${{ secrets.OT_PYPI_PASSWORD }}'
password: '${{ secrets.PYPI_DEPLOY_TOKEN_OPENTRONS }}'
4 changes: 2 additions & 2 deletions .github/workflows/shared-data-test-lint-deploy.yaml
@@ -179,14 +179,14 @@ jobs:
with:
project: 'shared-data/python'
repository_url: 'https://test.pypi.org/legacy/'
password: '${{ secrets.OT_TEST_PYPI_PASSWORD }}'
password: '${{ secrets.TEST_PYPI_DEPLOY_TOKEN_OPENTRONS_SHARED_DATA }}'
- if: startsWith(env.OT_TAG, 'v')
name: 'upload to pypi'
uses: './.github/actions/python/pypi-deploy'
with:
project: 'shared-data/python'
repository_url: 'https://upload.pypi.org/legacy/'
password: '${{ secrets.OT_PYPI_PASSWORD }}'
password: '${{ secrets.PYPI_DEPLOY_TOKEN_OPENTRONS_SHARED_DATA }}'

publish-switch:
runs-on: 'ubuntu-latest'
8 changes: 4 additions & 4 deletions DEV_SETUP.md
@@ -13,7 +13,7 @@ You will need the following tools installed to develop on the Opentrons platform
- curl
- ssh
- Python v3.10
- Node.js v16
- Node.js v18

### macOS

@@ -82,10 +82,10 @@ Close and re-open your terminal to confirm `nvs` is installed.
nvs --version
```

Now we can use nvs to install Node.js v16 and switch on `auto` mode, which will make sure Node.js v16 is used any time we're in the `opentrons` project directory.
Now we can use nvs to install Node.js v18 and switch on `auto` mode, which will make sure Node.js v18 is used any time we're in the `opentrons` project directory.

```shell
nvs add 16
nvs add 18
nvs auto on
```

@@ -202,7 +202,7 @@ Once you are inside the repository for the first time, you should do two things:
3. Run `python --version` to confirm your chosen version. If you get the incorrect version and you're using an Apple silicon Mac, try running `eval "$(pyenv init --path)"` and then `pyenv local 3.10.13`. Then check `python --version` again.

```shell
# confirm Node v16
# confirm Node v18
node --version

# set Python version, and confirm
3 changes: 2 additions & 1 deletion Makefile
@@ -65,7 +65,8 @@ PYTHON_SETUP_TARGETS := $(addsuffix -py-setup, $(PYTHON_DIRS))

.PHONY: setup-py
setup-py:
$(OT_PYTHON) -m pip install pipenv==2023.11.15
$(OT_PYTHON) -m pip install --upgrade pip
$(OT_PYTHON) -m pip install pipenv==2023.12.1
$(MAKE) $(PYTHON_SETUP_TARGETS)


8 changes: 4 additions & 4 deletions api/Makefile
@@ -19,18 +19,18 @@ sphinx_build_allow_warnings := $(pipenv) run sphinx-build

ot_project := $(OPENTRONS_PROJECT)
project_rs_default = $(if $(ot_project),$(ot_project),robot-stack)
project_ot3_default = $(if $(ot_project),$(ot_project),ot3)
project_ir_default = $(if $(ot_project),$(ot_project),ot3)

# Find the version of the wheel from git using a helper script. We
# use python here so we can use the same version normalization that will be
# used to create the wheel.
wheel_file = dist/$(call python_get_wheelname,api,$(project_rs_default),opentrons,$(BUILD_NUMBER))

# Find the version of the sdist file from git using a helper script.
sdist_file = dist/$(call python_get_sdistname,api,$(project_ot3_default),opentrons)
sdist_file = dist/$(call python_get_sdistname,api,$(project_rs_default),opentrons)

# Find the branch, sha, version that will be used to update the VERSION.json file
version_file = $(call python_get_git_version,api,$(project_ot3_default),opentrons)
version_file = $(call python_get_git_version,api,$(project_rs_default),opentrons)

# These variables are for simulating python protocols
sim_log_level ?= info
@@ -100,7 +100,7 @@ wheel:


.PHONY: sdist
sdist: export OPENTRONS_PROJECT=$(project_ot3_default)
sdist: export OPENTRONS_PROJECT=$(project_rs_default)
sdist:
$(clean_sdist_cmd)
$(python) setup.py sdist
1 change: 1 addition & 0 deletions api/docs/v2/index.rst
@@ -75,6 +75,7 @@ For example, if we wanted to transfer liquid from well A1 to well B1 on a plate,
# protocol run function
def run(protocol: protocol_api.ProtocolContext):
# labware
trash = protocol.load_trash_bin("A3")
plate = protocol.load_labware(
"corning_96_wellplate_360ul_flat", location="D1"
)
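To see the documented change in context, here is a minimal protocol sketch built around the docs snippet above. It assumes a Flex at API level 2.16; the tip-rack and pipette load names are illustrative stand-ins, not taken from this commit.

```python
from opentrons import protocol_api

requirements = {"robotType": "Flex", "apiLevel": "2.16"}


def run(protocol: protocol_api.ProtocolContext):
    # labware -- the trash bin line is the addition documented above
    trash = protocol.load_trash_bin("A3")
    plate = protocol.load_labware("corning_96_wellplate_360ul_flat", location="D1")
    tiprack = protocol.load_labware("opentrons_flex_96_tiprack_200ul", location="D2")

    # pipette (illustrative load name)
    pipette = protocol.load_instrument("flex_1channel_1000", mount="left", tip_racks=[tiprack])

    # transfer liquid from well A1 to well B1, as in the surrounding docs example
    pipette.pick_up_tip()
    pipette.aspirate(100, plate["A1"])
    pipette.dispense(100, plate["B1"])
    pipette.drop_tip()  # tips go to the explicitly loaded trash bin
```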
6 changes: 4 additions & 2 deletions api/src/opentrons/execute.py
@@ -41,7 +41,7 @@
from opentrons.protocols.api_support.deck_type import (
guess_from_global_config as guess_deck_type_from_global_config,
should_load_fixed_trash,
should_load_fixed_trash_for_python_protocol,
should_load_fixed_trash_labware_for_python_protocol,
)
from opentrons.protocols.api_support.types import APIVersion
from opentrons.protocols.execution import execute as execute_apiv2
@@ -540,7 +540,9 @@ def _create_live_context_pe(
config=_get_protocol_engine_config(),
drop_tips_after_run=False,
post_run_hardware_state=PostRunHardwareState.STAY_ENGAGED_IN_PLACE,
load_fixed_trash=should_load_fixed_trash_for_python_protocol(api_version),
load_fixed_trash=should_load_fixed_trash_labware_for_python_protocol(
api_version
),
)
)

5 changes: 5 additions & 0 deletions api/src/opentrons/hardware_control/backends/ot3utils.py
@@ -351,8 +351,13 @@ def motor_nodes(devices: Set[FirmwareTarget]) -> Set[NodeId]:
NodeId.head_bootloader,
NodeId.gripper_bootloader,
}
hepa_uv_nodes = {
NodeId.hepa_uv,
NodeId.hepa_uv_bootloader,
}
# remove any bootloader nodes
motor_nodes -= bootloader_nodes
motor_nodes -= hepa_uv_nodes
# filter out usb nodes
return {NodeId(target) for target in motor_nodes if target in NodeId}

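A toy sketch of the filtering this hunk performs: HEPA/UV nodes, like bootloader nodes, are excluded from the motor-node set before non-NodeId (USB) targets are dropped. The enum members and the standalone function below are illustrative, not the real NodeId definitions.

```python
from enum import Enum, auto


class NodeId(Enum):
    # reduced, illustrative subset of firmware targets
    gantry_x = auto()
    head = auto()
    head_bootloader = auto()
    hepa_uv = auto()
    hepa_uv_bootloader = auto()


def motor_nodes_sketch(devices: set) -> set:
    bootloader_nodes = {NodeId.head_bootloader, NodeId.hepa_uv_bootloader}
    hepa_uv_nodes = {NodeId.hepa_uv, NodeId.hepa_uv_bootloader}
    # remove any bootloader nodes, then the HEPA/UV nodes added in this commit
    candidates = set(devices) - bootloader_nodes - hepa_uv_nodes
    # filter out anything that is not a NodeId (e.g. USB targets)
    return {node for node in candidates if isinstance(node, NodeId)}


print(motor_nodes_sketch({NodeId.gantry_x, NodeId.head, NodeId.hepa_uv, "usb-target"}))
# -> {NodeId.gantry_x, NodeId.head}
```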
17 changes: 10 additions & 7 deletions api/src/opentrons/protocol_api/core/engine/protocol.py
@@ -134,9 +134,14 @@ def _load_fixed_trash(self) -> None:
def append_disposal_location(
self,
disposal_location: Union[Labware, TrashBin, WasteChute],
skip_add_to_engine: bool = False,
) -> None:
"""Append a disposal location object to the core"""
"""Append a disposal location object to the core."""
self._disposal_locations.append(disposal_location)

def add_disposal_location_to_engine(
self, disposal_location: Union[TrashBin, WasteChute]
) -> None:
"""Verify and add disposal location to engine store and append it to the core."""
if isinstance(disposal_location, TrashBin):
self._engine_client.state.addressable_areas.raise_if_area_not_in_deck_configuration(
disposal_location.area_name
@@ -152,8 +157,7 @@ def append_disposal_location(
existing_labware_ids=list(self._labware_cores_by_id.keys()),
existing_module_ids=list(self._module_cores_by_id.keys()),
)
if not skip_add_to_engine:
self._engine_client.add_addressable_area(disposal_location.area_name)
self._engine_client.add_addressable_area(disposal_location.area_name)
elif isinstance(disposal_location, WasteChute):
# TODO(jbl 2024-01-25) hardcoding this specific addressable area should be refactored
# when analysis is fixed up
@@ -166,9 +170,8 @@
self._engine_client.state.addressable_areas.raise_if_area_not_in_deck_configuration(
"1ChannelWasteChute"
)
if not skip_add_to_engine:
self._engine_client.add_addressable_area("1ChannelWasteChute")
self._disposal_locations.append(disposal_location)
self._engine_client.add_addressable_area("1ChannelWasteChute")
self.append_disposal_location(disposal_location)

def get_disposal_locations(self) -> List[Union[Labware, TrashBin, WasteChute]]:
"""Get disposal locations."""
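A condensed sketch of the split introduced here, using stand-in classes rather than the real core: `append_disposal_location` is now pure bookkeeping, while `add_disposal_location_to_engine` validates against the deck configuration (elided below), registers the addressable area with the engine, and then appends.

```python
from dataclasses import dataclass, field
from typing import List, Union


@dataclass
class TrashBin:
    area_name: str


@dataclass
class WasteChute:
    area_name: str = "1ChannelWasteChute"


class EngineClientStub:
    def add_addressable_area(self, area_name: str) -> None:
        print(f"addressable area registered: {area_name}")


@dataclass
class ProtocolCoreSketch:
    engine_client: EngineClientStub
    disposal_locations: List[Union[TrashBin, WasteChute]] = field(default_factory=list)

    def append_disposal_location(self, location: Union[TrashBin, WasteChute]) -> None:
        # list bookkeeping only; safe to call while the engine is unavailable
        self.disposal_locations.append(location)

    def add_disposal_location_to_engine(self, location: Union[TrashBin, WasteChute]) -> None:
        # deck-configuration validation elided; then register and append
        self.engine_client.add_addressable_area(location.area_name)
        self.append_disposal_location(location)
```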
@@ -136,14 +136,18 @@ def is_simulating(self) -> bool:
def append_disposal_location(
self,
disposal_location: Union[Labware, TrashBin, WasteChute],
skip_add_to_engine: bool = False,
) -> None:
if isinstance(disposal_location, (TrashBin, WasteChute)):
raise APIVersionError(
"Trash Bin and Waste Chute Disposal locations are not supported in this API Version."
)
self._disposal_locations.append(disposal_location)

def add_disposal_location_to_engine(
self, disposal_location: Union[TrashBin, WasteChute]
) -> None:
assert False, "add_disposal_location_to_engine only supported on engine core"

def add_labware_definition(
self,
definition: LabwareDefinition,
8 changes: 7 additions & 1 deletion api/src/opentrons/protocol_api/core/protocol.py
@@ -65,11 +65,17 @@ def add_labware_definition(
def append_disposal_location(
self,
disposal_location: Union[Labware, TrashBin, WasteChute],
skip_add_to_engine: bool = False,
) -> None:
"""Append a disposal location object to the core"""
...

@abstractmethod
def add_disposal_location_to_engine(
self, disposal_location: Union[TrashBin, WasteChute]
) -> None:
"""Verify and add disposal location to engine store and append it to the core."""
...

@abstractmethod
def load_labware(
self,
21 changes: 20 additions & 1 deletion api/src/opentrons/protocol_api/create_protocol_context.py
@@ -15,10 +15,14 @@
from opentrons.protocol_engine import ProtocolEngine
from opentrons.protocol_engine.clients import SyncClient, ChildThreadTransport
from opentrons.protocols.api_support.types import APIVersion
from opentrons.protocols.api_support.deck_type import (
should_load_fixed_trash_area_for_python_protocol,
)
from opentrons.protocols.api_support.definitions import MAX_SUPPORTED_VERSION

from .protocol_context import ProtocolContext
from .deck import Deck
from ._trash_bin import TrashBin

from .core.common import ProtocolCore as AbstractProtocolCore
from .core.legacy.deck import Deck as LegacyDeck
@@ -148,7 +152,7 @@ def create_protocol_context(
# this swap may happen once `ctx.move_labware` off-deck is implemented
deck = None if isinstance(core, ProtocolCore) else cast(Deck, core.get_deck())

return ProtocolContext(
context = ProtocolContext(
api_version=api_version,
# TODO(mm, 2023-05-11): This cast shouldn't be necessary.
# Fix this by making the appropriate TypeVars covariant?
@@ -158,3 +162,18 @@
deck=deck,
bundled_data=bundled_data,
)
# If we're loading an engine based core into the context, and we're on api level 2.16 or above, on an OT-2 we need
# to insert a fixed trash addressable area into the protocol engine, for correctness in anything that relies on
# knowing what addressable areas have been loaded (and any checks involving trash geometry). Because the method
# that uses this in the core relies on the sync client and this code will run in the main thread (which if called
# will cause a deadlock), we're directly calling the protocol engine method here where we have access to it.
if (
protocol_engine is not None
and should_load_fixed_trash_area_for_python_protocol(
api_version=api_version,
robot_type=protocol_engine.state_view.config.robot_type,
)
):
assert isinstance(context.fixed_trash, TrashBin)
protocol_engine.add_addressable_area(context.fixed_trash.area_name)
return context
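The body of `should_load_fixed_trash_area_for_python_protocol` is not shown in this diff; based on the inline condition it replaces in protocol_context.py (next file), it presumably reduces to something like this sketch:

```python
from opentrons.protocols.api_support.types import APIVersion


def should_load_fixed_trash_area_sketch(api_version: APIVersion, robot_type: str) -> bool:
    # OT-2 protocols at API 2.16+ get a "fixedTrash" addressable area instead of
    # fixed-trash labware (guessed from the condition this helper replaces)
    return api_version >= APIVersion(2, 16) and robot_type == "OT-2 Standard"
```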
26 changes: 12 additions & 14 deletions api/src/opentrons/protocol_api/protocol_context.py
@@ -26,7 +26,8 @@
from opentrons.protocols.api_support import instrument as instrument_support
from opentrons.protocols.api_support.deck_type import (
NoTrashDefinedError,
should_load_fixed_trash_for_python_protocol,
should_load_fixed_trash_labware_for_python_protocol,
should_load_fixed_trash_area_for_python_protocol,
)
from opentrons.protocols.api_support.types import APIVersion
from opentrons.protocols.api_support.util import (
@@ -159,22 +160,19 @@ def __init__(
# protocols after 2.16 expect trash to exist as either a TrashBin or WasteChute object.

self._load_fixed_trash()
if should_load_fixed_trash_for_python_protocol(self._api_version):
if should_load_fixed_trash_labware_for_python_protocol(self._api_version):
self._core.append_disposal_location(self.fixed_trash)
elif (
self._api_version >= APIVersion(2, 16)
and self._core.robot_type == "OT-2 Standard"
elif should_load_fixed_trash_area_for_python_protocol(
self._api_version, self._core.robot_type
):
_fixed_trash_trashbin = TrashBin(
location=DeckSlotName.FIXED_TRASH, addressable_area_name="fixedTrash"
)
# We have to skip adding this fixed trash bin to engine because this __init__ is called in the main thread
# and any calls to sync client will cause a deadlock. This means that OT-2 fixed trashes are not added to
# the engine store until one is first referenced. This should have minimal consequences for OT-2 given that
# we do not need to worry about the 96 channel pipette and partial tip configuration with that pipette.
self._core.append_disposal_location(
_fixed_trash_trashbin, skip_add_to_engine=True
)
# We are just appending the fixed trash to the core's internal list here, not adding it to the engine via
# the core, since that method works through the SyncClient and if called from here, will cause protocols
# to deadlock. Instead, that method is called in protocol engine directly in create_protocol_context after
# ProtocolContext is initialized.
self._core.append_disposal_location(_fixed_trash_trashbin)

self._commands: List[str] = []
self._unsubscribe_commands: Optional[Callable[[], None]] = None
@@ -517,7 +515,7 @@ def load_trash_bin(self, location: DeckLocation) -> TrashBin:
trash_bin = TrashBin(
location=slot_name, addressable_area_name=addressable_area_name
)
self._core.append_disposal_location(trash_bin)
self._core.add_disposal_location_to_engine(trash_bin)
return trash_bin

@requires_version(2, 16)
@@ -534,7 +532,7 @@ def load_waste_chute(
API will raise an error.
"""
waste_chute = WasteChute()
self._core.append_disposal_location(waste_chute)
self._core.add_disposal_location_to_engine(waste_chute)
return waste_chute

@requires_version(2, 15)
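Both public entry points touched above now go through `add_disposal_location_to_engine`. A hypothetical API 2.16 Flex snippet exercising them:

```python
from opentrons import protocol_api

requirements = {"robotType": "Flex", "apiLevel": "2.16"}


def run(protocol: protocol_api.ProtocolContext):
    trash = protocol.load_trash_bin("A3")  # TrashBin registered with the engine
    chute = protocol.load_waste_chute()    # WasteChute registered with the engine
```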
11 changes: 11 additions & 0 deletions api/src/opentrons/protocol_engine/state/commands.py
@@ -152,6 +152,9 @@ class CommandState:
are stored on the individual commands themselves.
"""

failed_command: Optional[CommandEntry]
"""The command, if any, that made the run fail and the index in the command list."""

finish_error: Optional[ErrorOccurrence]
"""The error that happened during the post-run finish steps (homing & dropping tips), if any."""

@@ -189,6 +192,7 @@ def __init__(
commands_by_id=OrderedDict(),
run_error=None,
finish_error=None,
failed_command=None,
run_completed_at=None,
run_started_at=None,
latest_command_hash=None,
@@ -281,6 +285,7 @@ def handle_action(self, action: Action) -> None: # noqa: C901
),
)

self._state.failed_command = self._state.commands_by_id[action.command_id]
if prev_entry.command.intent == CommandIntent.SETUP:
other_command_ids_to_fail = [
*[i for i in self._state.queued_setup_command_ids],
@@ -464,6 +469,12 @@ def get_slice(
cursor = commands_by_id[running_command_id].index
elif len(queued_command_ids) > 0:
cursor = commands_by_id[queued_command_ids.head()].index - 1
elif (
self._state.run_result
and self._state.run_result == RunResult.FAILED
and self._state.failed_command
):
cursor = self._state.failed_command.index
else:
cursor = total_length - length

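A simplified, self-contained sketch of the cursor fallback added to `get_slice`: when the run has failed and the failing command was recorded, the default page of commands now starts at that command rather than at the tail of the list. Names here are stand-ins for the real state class.

```python
from dataclasses import dataclass
from enum import Enum
from typing import Optional


class RunResult(Enum):
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    STOPPED = "stopped"


@dataclass
class CommandEntry:
    index: int
    command_id: str


def default_cursor(
    running_command_index: Optional[int],
    first_queued_index: Optional[int],
    run_result: Optional[RunResult],
    failed_command: Optional[CommandEntry],
    total_length: int,
    length: int,
) -> int:
    if running_command_index is not None:
        return running_command_index
    if first_queued_index is not None:
        return first_queued_index - 1
    if run_result == RunResult.FAILED and failed_command is not None:
        return failed_command.index  # the new branch: land on the failure
    return total_length - length


# a failed run of 10 commands, failure recorded at index 3, page length 5
assert default_cursor(None, None, RunResult.FAILED, CommandEntry(3, "command-4"), 10, 5) == 3
```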