diff --git a/nimbus-dev/nimbus_aspirate_dispense_demo.ipynb b/nimbus-dev/nimbus_aspirate_dispense_demo.ipynb new file mode 100644 index 00000000000..a9b8e7e658f --- /dev/null +++ b/nimbus-dev/nimbus_aspirate_dispense_demo.ipynb @@ -0,0 +1,762 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Nimbus Aspirate and Dispense Demo\n", + "\n", + "This notebook demonstrates aspirate and dispense operations with the Hamilton Nimbus backend.\n", + "\n", + "The demo covers:\n", + "1. Creating a Nimbus Deck and assigning resources\n", + "2. Setting up the NimbusBackend and LiquidHandler\n", + "3. Picking up tips from the tip rack\n", + "4. Aspirating 50 µL from wells (2mm above bottom)\n", + "5. Dispensing to wells (2mm above bottom)\n", + "6. Dropping tips to waste\n", + "7. Cleaning up and closing the connection\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Setup\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Deck created: deck\n", + " Size: 831.85 x 424.18 x 300.0 mm\n", + " Rails: 30\n", + "\n", + "Tip rack assigned: HAM_FTR_300_0001\n", + "Wellplate assigned: Cor_96_wellplate_2mL_Vb_0001\n", + " Waste block: default_long_block\n", + "LiquidHandler created successfully\n", + "INFO - Connecting to TCP server 192.168.100.100:2000...\n", + "INFO - Connected to TCP server 192.168.100.100:2000\n", + "INFO - Initializing Hamilton connection...\n", + "INFO - [INIT] Sending Protocol 7 initialization packet:\n", + "INFO - [INIT] Length: 28 bytes\n", + "INFO - [INIT] Hex: 1a 00 07 30 00 00 00 00 03 00 01 10 00 00 00 00 02 10 00 00 01 00 04 10 00 00 1e 00\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO - [INIT] Received response:\n", + "INFO - [INIT] Length: 28 bytes\n", + "INFO - [INIT] Hex: 1a 00 07 30 00 00 00 00 03 00 01 11 00 00 02 00 02 11 07 00 01 00 04 11 
00 00 1e 00\n", + "INFO - [INIT] ✓ Client ID: 2, Address: 2:2:65535\n", + "INFO - Registering Hamilton client...\n", + "INFO - [REGISTER] Sending registration packet:\n", + "INFO - [REGISTER] Length: 48 bytes, Seq: 1\n", + "INFO - [REGISTER] Hex: 2e 00 06 30 00 00 02 00 02 00 ff ff 00 00 00 00 fe ff 01 00 03 03 2a 00 00 00 00 00 00 00 00 00 00 00 02 00 02 00 ff ff 00 00 00 00 00 00 00 00\n", + "INFO - [REGISTER] Src: 2:2:65535, Dst: 0:0:65534\n", + "INFO - [REGISTER] Received response:\n", + "INFO - [REGISTER] Length: 48 bytes\n", + "INFO - [REGISTER] ✓ Registration complete\n", + "INFO - Discovering Hamilton root objects...\n", + "INFO - [DISCOVER_ROOT] Sending root object discovery:\n", + "INFO - [DISCOVER_ROOT] Length: 52 bytes, Seq: 2\n", + "INFO - [DISCOVER_ROOT] Hex: 32 00 06 30 00 00 02 00 02 00 ff ff 00 00 00 00 fe ff 02 00 03 13 2e 00 00 00 00 00 0c 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 04 00 05 02 02 01\n", + "INFO - [DISCOVER_ROOT] ✓ Found 1 root objects\n", + "INFO - ✓ Discovery complete: 1 root objects\n", + "INFO - Hamilton backend setup complete. 
Client ID: 2\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 0\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:259\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 1\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:263\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 2\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:768\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 3\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:260\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 4\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:257\n", + "INFO - Found Pipette at 1:1:257\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 5\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:262\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 6\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:261\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 7\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:265\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 8\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 
1:1:266\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 9\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:258\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 10\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:48880\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 11\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:270\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 12\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:271\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 13\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:269\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 14\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:384\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 15\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:49152\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 16\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:49408\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 17\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:272\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - 
subobject_index: 18\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:49409\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 19\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:273\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 20\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:49410\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 21\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:274\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 22\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:49411\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 23\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:275\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 24\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:264\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 25\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:1:268\n", + "INFO - Found DoorLock at 1:1:268\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 26\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:128:48896\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 27\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - 
object_address: 1:129:48896\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 28\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:96:48896\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 29\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 1:32:48896\n", + "INFO - GetSubobjectAddressCommand parameters:\n", + "INFO - object_address: 1:1:48896\n", + "INFO - subobject_index: 30\n", + "INFO - GetObjectCommand parameters:\n", + "INFO - object_address: 96:1:48896\n", + "INFO - GetChannelConfiguration_1 parameters:\n", + "INFO - Channel configuration: 4 channels\n", + "INFO - IsTipPresent parameters:\n", + "INFO - Tip presence: [0, 0, 0, 0]\n", + "INFO - IsInitialized parameters:\n", + "INFO - Instrument initialized: False\n", + "INFO - IsDoorLocked parameters:\n", + "INFO - LockDoor parameters:\n", + "INFO - Door locked successfully\n", + "INFO - SetChannelConfiguration parameters:\n", + "INFO - channel: 1\n", + "INFO - indexes: [1, 3, 4]\n", + "INFO - enables: [True, False, False, False]\n", + "INFO - SetChannelConfiguration parameters:\n", + "INFO - channel: 2\n", + "INFO - indexes: [1, 3, 4]\n", + "INFO - enables: [True, False, False, False]\n", + "INFO - SetChannelConfiguration parameters:\n", + "INFO - channel: 3\n", + "INFO - indexes: [1, 3, 4]\n", + "INFO - enables: [True, False, False, False]\n", + "INFO - SetChannelConfiguration parameters:\n", + "INFO - channel: 4\n", + "INFO - indexes: [1, 3, 4]\n", + "INFO - enables: [True, False, False, False]\n", + "INFO - Channel configuration set for 4 channels\n", + "INFO - InitializeSmartRoll parameters:\n", + "INFO - x_positions: [55375, 55375, 55375, 55375]\n", + "INFO - y_positions: [1986, 188, -7615, -9413]\n", + "INFO - z_start_positions: [13539, 13539, 13539, 13539]\n", + "INFO - z_stop_positions: [13139, 13139, 13139, 
13139]\n", + "INFO - z_final_positions: [14600, 14600, 14600, 14600]\n", + "INFO - roll_distances: [900, 900, 900, 900]\n", + "INFO - NimbusCore initialized with InitializeSmartRoll successfully\n", + "\n", + "============================================================\n", + "SETUP COMPLETE\n", + "============================================================\n", + "Setup finished: True\n", + "\n", + "Instrument Configuration:\n", + " Number of channels: 4\n" + ] + } + ], + "source": [ + "# Import necessary modules\n", + "import sys\n", + "import logging\n", + "\n", + "from pylabrobot.liquid_handling import LiquidHandler\n", + "from pylabrobot.liquid_handling.backends.hamilton.nimbus_backend import NimbusBackend\n", + "from pylabrobot.resources.hamilton.nimbus_decks import NimbusDeck\n", + "from pylabrobot.resources.hamilton.tip_racks import hamilton_96_tiprack_300uL_filter\n", + "from pylabrobot.resources.corning import Cor_96_wellplate_2mL_Vb\n", + "from pylabrobot.resources.coordinate import Coordinate\n", + "\n", + "# Setup logging\n", + "plr_logger = logging.getLogger('pylabrobot')\n", + "plr_logger.setLevel(logging.INFO) # INFO for normal use, DEBUG for troubleshooting\n", + "plr_logger.handlers.clear()\n", + "console_handler = logging.StreamHandler(sys.stdout)\n", + "console_handler.setFormatter(logging.Formatter('%(levelname)s - %(message)s'))\n", + "plr_logger.addHandler(console_handler)\n", + "\n", + "# ========================================================================\n", + "# CREATE DECK AND RESOURCES (using coordinates from nimbus_deck_setup.ipynb)\n", + "# ========================================================================\n", + "\n", + "# Create NimbusDeck using default values (layout 8 dimensions)\n", + "deck = NimbusDeck()\n", + "\n", + "print(f\"Deck created: {deck.name}\")\n", + "print(f\" Size: {deck.get_size_x()} x {deck.get_size_y()} x {deck.get_size_z()} mm\")\n", + "print(f\" Rails: {deck.num_rails}\")\n", + "\n", + "# Create and 
assign tip rack (HAM_FTR_300_0001)\n", + "# Using pre-calculated origin from nimbus_deck_setup.ipynb output:\n", + "# Tip rack origin (PyLabRobot): Coordinate(305.750, 126.537, 128.620)\n", + "tip_rack = hamilton_96_tiprack_300uL_filter(name=\"HAM_FTR_300_0001\", with_tips=True)\n", + "deck.assign_child_resource(tip_rack, location=Coordinate(x=305.750, y=126.537, z=128.620))\n", + "\n", + "print(f\"\\nTip rack assigned: {tip_rack.name}\")\n", + "\n", + "# Create and assign wellplate (Cor_96_wellplate_2mL_Vb_0001)\n", + "# Using pre-calculated origin from nimbus_deck_setup.ipynb output:\n", + "# Wellplate origin (PyLabRobot): Coordinate(438.070, 124.837, 101.490)\n", + "wellplate = Cor_96_wellplate_2mL_Vb(name=\"Cor_96_wellplate_2mL_Vb_0001\", with_lid=False)\n", + "deck.assign_child_resource(wellplate, location=Coordinate(x=438.070, y=124.837, z=101.490))\n", + "\n", + "print(f\"Wellplate assigned: {wellplate.name}\")\n", + "print(f\" Waste block: {deck.get_resource('default_long_block').name}\")\n", + "\n", + "# Serialize the deck #\n", + "#serialized = deck.serialize()\n", + "#with open(\"test_nimbus_deck.json\", \"w\") as f:\n", + "# json.dump(serialized, f, indent=2)\n", + "\n", + "# Load from file and deserialize\n", + "#with open(\"test_nimbus_deck.json\", \"r\") as f:\n", + "# deck_data = json.load(f)\n", + "# Read deck from file example\n", + "# loaded_deck = NimbusDeck.deserialize(deck_data)\n", + "\n", + "# Create NimbusBackend instance\n", + "# Replace with your instrument's IP address\n", + "backend = NimbusBackend(\n", + " host=\"192.168.100.100\", # Replace with your instrument's IP\n", + " port=2000,\n", + " read_timeout=30,\n", + " write_timeout=30\n", + ")\n", + "\n", + "# Create LiquidHandler with backend and deck\n", + "lh = LiquidHandler(backend=backend, deck=deck)\n", + "\n", + "print(\"LiquidHandler created successfully\")\n", + "\n", + "# Setup the robot\n", + "await lh.setup(unlock_door=False)\n", + "\n", + "print(\"\\n\" + \"=\"*60)\n", + 
"print(\"SETUP COMPLETE\")\n", + "print(\"=\"*60)\n", + "print(f\"Setup finished: {backend.setup_finished}\")\n", + "print(f\"\\nInstrument Configuration:\")\n", + "print(f\" Number of channels: {backend.num_channels}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Define Resources" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Tip rack: HAM_FTR_300_0001 (96 tips)\n", + "Source/Destination plate: Cor_96_wellplate_2mL_Vb_0001 (using same plate, different wells)\n", + "Waste positions: ['default_long_1', 'default_long_2', 'default_long_3', 'default_long_4']\n" + ] + } + ], + "source": [ + "# Resources are already created in the setup cell above\n", + "# tip_rack and wellplate variables are available\n", + "\n", + "print(f\"Tip rack: {tip_rack.name} ({tip_rack.num_items} tips)\")\n", + "print(f\"Source/Destination plate: {wellplate.name} (using same plate, different wells)\")\n", + "\n", + "# Use wellplate as both source and destination\n", + "source_plate = wellplate\n", + "destination_plate = wellplate\n", + "\n", + "# Get waste positions\n", + "waste_block = deck.get_resource(\"default_long_block\")\n", + "waste_positions = waste_block.children[:4]\n", + "\n", + "print(f\"Waste positions: {[wp.name for wp in waste_positions]}\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Pick Up Tips\n", + "\n", + "Pick up tips from positions A1-D1.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Picking up tips from positions: ['E4', 'F4', 'G4', 'H4']\n", + "INFO - IsTipPresent parameters:\n", + "INFO - PickupTips parameters:\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - x_positions: [18844, 18844, 18844, 18844]\n", + "INFO - y_positions: [-20499, -21399, -22299, -23199]\n", + 
"INFO - traverse_height: 14600\n", + "INFO - z_start_positions: [13802, 13802, 13802, 13802]\n", + "INFO - z_stop_positions: [13002, 13002, 13002, 13002]\n", + "INFO - tip_types: [, , , ]\n", + "INFO - num_channels: 4\n", + "INFO - PickupTips parameters:\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - x_positions: [18844, 18844, 18844, 18844]\n", + "INFO - y_positions: [-20499, -21399, -22299, -23199]\n", + "INFO - traverse_height: 14600\n", + "INFO - z_start_positions: [13802, 13802, 13802, 13802]\n", + "INFO - z_stop_positions: [13002, 13002, 13002, 13002]\n", + "INFO - tip_types: [, , , ]\n", + "INFO - Picked up tips on channels [0, 1, 2, 3]\n", + "✓ Tips picked up successfully!\n" + ] + } + ], + "source": [ + "# Get the first 4 tip spots (A1, B1, C1, D1)\n", + "tip_spots = tip_rack[\"E4\":\"A5\"]\n", + "\n", + "print(f\"Picking up tips from positions: {[ts.get_identifier() for ts in tip_spots]}\")\n", + "await lh.pick_up_tips(tip_spots)\n", + "\n", + "print(\"✓ Tips picked up successfully!\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Aspirate Operation\n", + "\n", + "Aspirate 50 µL from wells A1-D1, 2mm above the bottom of the well.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Aspirating 50 µL from wells: ['A7', 'B7', 'C7', 'D7']\n", + " Liquid height: 2.0 mm above bottom\n", + "INFO - DisableADC parameters:\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - Disabled ADC before aspirate\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 1\n", + "INFO - indexes: [2]\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 2\n", + "INFO - indexes: [2]\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 3\n", + "INFO - indexes: [2]\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 4\n", + "INFO - indexes: [2]\n", + "INFO - 
Aspirate parameters:\n", + "INFO - aspirate_type: [0, 0, 0, 0]\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - x_positions: [35016, 35016, 35016, 35016]\n", + "INFO - y_positions: [-16899, -17799, -18699, -19599]\n", + "INFO - traverse_height: 14600\n", + "INFO - liquid_seek_height: [500, 500, 500, 500]\n", + "INFO - liquid_surface_height: [10594, 10594, 10594, 10594]\n", + "INFO - submerge_depth: [0, 0, 0, 0]\n", + "INFO - follow_depth: [0, 0, 0, 0]\n", + "INFO - z_min_position: [10394, 10394, 10394, 10394]\n", + "INFO - clot_check_height: [0, 0, 0, 0]\n", + "INFO - z_final: 14600\n", + "INFO - liquid_exit_speed: [200, 200, 200, 200]\n", + "INFO - blowout_volume: [400, 400, 400, 400]\n", + "INFO - prewet_volume: [0, 0, 0, 0]\n", + "INFO - aspirate_volume: [500, 500, 500, 500]\n", + "INFO - transport_air_volume: [50, 50, 50, 50]\n", + "INFO - aspirate_speed: [2500, 2500, 2500, 2500]\n", + "INFO - settling_time: [10, 10, 10, 10]\n", + "INFO - mix_volume: [0, 0, 0, 0]\n", + "INFO - mix_cycles: [0, 0, 0, 0]\n", + "INFO - mix_position: [0, 0, 0, 0]\n", + "INFO - mix_follow_distance: [0, 0, 0, 0]\n", + "INFO - mix_speed: [2500, 2500, 2500, 2500]\n", + "INFO - tube_section_height: [0, 0, 0, 0]\n", + "INFO - tube_section_ratio: [0, 0, 0, 0]\n", + "INFO - lld_mode: [0, 0, 0, 0]\n", + "INFO - capacitive_lld_sensitivity: [0, 0, 0, 0]\n", + "INFO - pressure_lld_sensitivity: [0, 0, 0, 0]\n", + "INFO - lld_height_difference: [0, 0, 0, 0]\n", + "INFO - tadm_enabled: False\n", + "INFO - limit_curve_index: [0, 0, 0, 0]\n", + "INFO - recording_mode: 0\n", + "INFO - Aspirated on channels [0, 1, 2, 3]\n", + "✓ Aspiration complete!\n" + ] + } + ], + "source": [ + "# Get source wells (A1, B1, C1, D1)\n", + "source_wells = source_plate[\"A7\":\"E7\"]\n", + "\n", + "print(f\"Aspirating 50 µL from wells: {[w.get_identifier() for w in source_wells]}\")\n", + "print(f\" Liquid height: 2.0 mm above bottom\")\n", + "\n", + "# Aspirate with liquid_height=2.0mm\n", + "# Tips are already 
picked up, so LiquidHandler will use them automatically\n", + "await lh.aspirate(\n", + " source_wells,\n", + " vols=[50.0, 50.0, 50.0, 50.0], # Can be a single number (applies to all channels) or a list\n", + " liquid_height=[2.0, 2.0, 2.0, 2.0], # 2mm above bottom of well (can be a single float or list)\n", + " flow_rates=[250.0, 250.0, 250.0, 250.0],\n", + " liquid_seek_height=[5.0, 5.0, 5.0, 5.0],\n", + ")\n", + "\n", + "print(\"✓ Aspiration complete!\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Dispense Operation\n", + "\n", + "Dispense 50 µL to wells A2-D2, 2mm above the bottom of the well.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dispensing 50 µL to wells: ['A12', 'B12', 'C12', 'D12']\n", + " Liquid height: 2.0 mm above bottom\n", + "INFO - DisableADC parameters:\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - Disabled ADC before dispense\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 1\n", + "INFO - indexes: [2]\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 2\n", + "INFO - indexes: [2]\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 3\n", + "INFO - indexes: [2]\n", + "INFO - GetChannelConfiguration parameters:\n", + "INFO - channel: 4\n", + "INFO - indexes: [2]\n", + "INFO - Dispense parameters:\n", + "INFO - dispense_type: [0, 0, 0, 0]\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - x_positions: [39516, 39516, 39516, 39516]\n", + "INFO - y_positions: [-16899, -17799, -18699, -19599]\n", + "INFO - traverse_height: 14600\n", + "INFO - liquid_seek_height: [500, 500, 500, 500]\n", + "INFO - dispense_height: [10594, 10594, 10594, 10594]\n", + "INFO - submerge_depth: [0, 0, 0, 0]\n", + "INFO - follow_depth: [0, 0, 0, 0]\n", + "INFO - z_min_position: [10394, 10394, 10394, 10394]\n", + "INFO - z_final: 14600\n", + "INFO - 
liquid_exit_speed: [200, 200, 200, 200]\n", + "INFO - transport_air_volume: [50, 50, 50, 50]\n", + "INFO - dispense_volume: [500, 500, 500, 500]\n", + "INFO - stop_back_volume: [0, 0, 0, 0]\n", + "INFO - blowout_volume: [400, 400, 400, 400]\n", + "INFO - dispense_speed: [4000, 4000, 4000, 4000]\n", + "INFO - cutoff_speed: [250, 250, 250, 250]\n", + "INFO - settling_time: [10, 10, 10, 10]\n", + "INFO - mix_volume: [0, 0, 0, 0]\n", + "INFO - mix_cycles: [0, 0, 0, 0]\n", + "INFO - mix_position: [0, 0, 0, 0]\n", + "INFO - mix_follow_distance: [0, 0, 0, 0]\n", + "INFO - mix_speed: [4000, 4000, 4000, 4000]\n", + "INFO - touch_off_distance: 0\n", + "INFO - dispense_offset: [0, 0, 0, 0]\n", + "INFO - tube_section_height: [0, 0, 0, 0]\n", + "INFO - tube_section_ratio: [0, 0, 0, 0]\n", + "INFO - lld_mode: [0, 0, 0, 0]\n", + "INFO - capacitive_lld_sensitivity: [0, 0, 0, 0]\n", + "INFO - tadm_enabled: False\n", + "INFO - limit_curve_index: [0, 0, 0, 0]\n", + "INFO - recording_mode: 0\n", + "INFO - Dispensed on channels [0, 1, 2, 3]\n", + "✓ Dispense complete!\n" + ] + } + ], + "source": [ + "# Get destination wells (A2, B2, C2, D2)\n", + "dest_wells = destination_plate[\"A12\":\"E12\"]\n", + "\n", + "print(f\"Dispensing 50 µL to wells: {[w.get_identifier() for w in dest_wells]}\")\n", + "print(f\" Liquid height: 2.0 mm above bottom\")\n", + "\n", + "# Dispense with liquid_height=2.0mm\n", + "# Tips are already picked up, so LiquidHandler will use them automatically\n", + "await lh.dispense(\n", + " dest_wells,\n", + " vols=[50.0, 50.0, 50.0, 50.0], # Can be a single number (applies to all channels) or a list\n", + " liquid_height=[2.0, 2.0, 2.0, 2.0], # 2mm above bottom of well (can be a single float or list)\n", + " flow_rates=[400.0, 400.0, 400.0, 400.0],\n", + " liquid_seek_height=[5.0, 5.0, 5.0, 5.0],\n", + ")\n", + "\n", + "print(\"✓ Dispense complete!\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Drop Tips\n", + "\n", + "Drop tips to 
waste positions.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Dropping tips at waste positions: ['default_long_1', 'default_long_2', 'default_long_3', 'default_long_4']\n", + "INFO - DropTipsRoll parameters:\n", + "INFO - tips_used: [1, 1, 1, 1]\n", + "INFO - x_positions: [55375, 55375, 55375, 55375]\n", + "INFO - y_positions: [1986, 188, -7615, -9413]\n", + "INFO - traverse_height: 14600\n", + "INFO - z_start_positions: [13539, 13539, 13539, 13539]\n", + "INFO - z_stop_positions: [13139, 13139, 13139, 13139]\n", + "INFO - z_final_positions: [14600, 14600, 14600, 14600]\n", + "INFO - roll_distances: [900, 900, 900, 900]\n", + "INFO - Dropped tips on channels [0, 1, 2, 3]\n", + "✓ Tips dropped successfully!\n" + ] + } + ], + "source": [ + "print(f\"Dropping tips at waste positions: {[wp.name for wp in waste_positions]}\")\n", + "await lh.drop_tips(waste_positions)\n", + "\n", + "print(\"✓ Tips dropped successfully!\")\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Cleanup\n", + "\n", + "Finally, we'll stop the liquid handler and close the connection.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO - Park parameters:\n", + "INFO - Instrument parked successfully\n", + "INFO - UnlockDoor parameters:\n", + "INFO - Door unlocked successfully\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING:root:Closing connection to TCP server.\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "INFO - Hamilton backend stopped\n", + "Connection closed successfully\n" + ] + } + ], + "source": [ + "# Stop and close connection\n", + "await lh.backend.park()\n", + "await lh.backend.unlock_door()\n", + "await lh.stop()\n", + "\n", + "print(\"Connection closed 
successfully\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.18" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/pylabrobot/io/socket.py b/pylabrobot/io/socket.py index d7ee727b23e..9d6ab01857e 100644 --- a/pylabrobot/io/socket.py +++ b/pylabrobot/io/socket.py @@ -199,6 +199,50 @@ async def readuntil(self, separator: bytes = b"\n", timeout: Optional[float] = N ) return data + async def read_exact(self, num_bytes: int, timeout: Optional[float] = None) -> bytes: + """Read exactly num_bytes, blocking until all bytes are received. + + Args: + num_bytes: The exact number of bytes to read. + timeout: Maximum time to wait for data before raising a timeout. + Note: The timeout is applied per-chunk read operation, not cumulatively + for the entire read. For small reads (typical use case), this is acceptable. + For large reads, consider that the total time may exceed the timeout value. + + Returns: + Exactly num_bytes of data. + + Raises: + ConnectionError: If the connection is closed before num_bytes are read. + TimeoutError: If timeout is reached before num_bytes are read. 
+ """ + if self._reader is None: + raise RuntimeError("Socket not set up; call setup() first") + timeout = self._read_timeout if timeout is None else timeout + data = bytearray() + async with self._read_lock: + while len(data) < num_bytes: + remaining = num_bytes - len(data) + try: + chunk = await asyncio.wait_for(self._reader.read(remaining), timeout=timeout) + except asyncio.TimeoutError as exc: + logger.error("read_exact timeout: %r", exc) + raise TimeoutError(f"Timeout while reading from socket after {timeout} seconds") from exc + if len(chunk) == 0: + raise ConnectionError("Connection closed before num_bytes are read") + data.extend(chunk) + + result = bytes(data) + logger.log(LOG_LEVEL_IO, "[%s:%d] read_exact %s", self._host, self._port, result.hex()) + capturer.record( + SocketCommand( + device_id=self._unique_id, + action="read_exact", + data=result.hex(), + ) + ) + return result + async def read_until_eof(self, chunk_size: int = 1024, timeout: Optional[float] = None) -> bytes: """Read until EOF is reached. Do not retry on timeouts. 
class HamiltonCommand:
  """Base class for Hamilton commands using new simplified architecture.

  This replaces the old HamiltonCommand from tcp_codec.py with a cleaner design:
  - Explicitly uses CommandMessage for building packets
  - build_parameters() returns a HoiParams object (not bytes)
  - Uses Address instead of ObjectAddress
  - Cleaner separation of concerns

  Example:
      class MyCommand(HamiltonCommand):
          protocol = HamiltonProtocol.OBJECT_DISCOVERY
          interface_id = 0
          command_id = 42

          def __init__(self, dest: Address, value: int):
              super().__init__(dest)
              self.value = value

          def build_parameters(self) -> HoiParams:
              return HoiParams().i32(self.value)
  """

  # Class-level attributes that subclasses must override.
  protocol: Optional[HamiltonProtocol] = None
  interface_id: Optional[int] = None
  command_id: Optional[int] = None

  # Action configuration (can be overridden by subclasses).
  action_code: int = 3  # Default: COMMAND_REQUEST
  harp_protocol: int = 2  # Default: HOI2
  ip_protocol: int = 6  # Default: OBJECT_DISCOVERY

  def __init__(self, dest: Address):
    """Initialize Hamilton command.

    Args:
      dest: Destination address for this command.

    Raises:
      ValueError: If the subclass did not define protocol, interface_id or
        command_id.
    """
    if self.protocol is None:
      raise ValueError(f"{self.__class__.__name__} must define protocol")
    if self.interface_id is None:
      raise ValueError(f"{self.__class__.__name__} must define interface_id")
    if self.command_id is None:
      raise ValueError(f"{self.__class__.__name__} must define command_id")

    self.dest = dest
    self.dest_address = dest  # Alias kept for backward compatibility.
    self.sequence_number = 0
    self.source_address: Optional[Address] = None
    # Populated by _assign_params() if a subclass opts into parameter logging.
    self._log_params: dict = {}

  def _assign_params(self, exclude: Optional[set] = None):
    """Build the logging dict from the calling __init__'s parameters.

    Inspects the subclass __init__ signature and snapshots the caller's local
    variables for logging. Attributes must be assigned explicitly in __init__
    before calling this method; it performs no attribute assignments itself.

    Args:
      exclude: Parameter names to leave out of the log. Defaults to
        {'self', 'dest'}. An explicitly passed set (including an empty one)
        is honored as-is.

    Note:
      Must be called directly from within __init__ (after super().__init__()
      and the explicit attribute assignments) so the caller's frame locals
      are visible.
    """
    # BUG FIX: the previous `exclude or {...}` silently replaced an explicitly
    # passed empty set with the default; only substitute on None.
    if exclude is None:
      exclude = {'self', 'dest'}
    # Use type(self).__init__ to avoid a mypy error about accessing __init__
    # on an instance.
    sig = inspect.signature(type(self).__init__)
    current_frame = inspect.currentframe()
    if current_frame is None or current_frame.f_back is None:
      # Frame inspection unavailable (e.g. some non-CPython runtimes): log nothing.
      self._log_params = {}
      return
    frame_locals = current_frame.f_back.f_locals

    # Snapshot values for logging only -- no attribute assignment here.
    self._log_params = {
      name: frame_locals[name]
      for name in sig.parameters
      if name not in exclude and name in frame_locals
    }

  def build_parameters(self) -> HoiParams:
    """Build HOI parameters for this command.

    Override in subclasses to provide command-specific parameters. Return a
    HoiParams object (not bytes!).
    """
    return HoiParams()

  def get_log_params(self) -> dict:
    """Return the parameters captured by _assign_params() for logging.

    Empty if _assign_params() was never called. Subclasses can override to
    customize formatting (e.g. unit conversions, array truncation).
    """
    return self._log_params

  def build(self, src: Optional[Address] = None, seq: Optional[int] = None, response_required: bool = True) -> bytes:
    """Build the complete Hamilton packet via CommandMessage.

    Args:
      src: Source address (falls back to self.source_address).
      seq: Sequence number (falls back to self.sequence_number).
      response_required: Whether a HARP response is expected.

    Returns:
      Complete packet bytes ready to send over TCP.

    Raises:
      ValueError: If no source address is available, or required class
        attributes are missing.
    """
    source = src if src is not None else self.source_address
    sequence = seq if seq is not None else self.sequence_number

    if source is None:
      raise ValueError("Source address not set - backend should set this before building")
    if self.interface_id is None:
      raise ValueError(f"{self.__class__.__name__} must define interface_id")
    if self.command_id is None:
      raise ValueError(f"{self.__class__.__name__} must define command_id")

    msg = CommandMessage(
      dest=self.dest,
      interface_id=self.interface_id,
      method_id=self.command_id,
      action_code=self.action_code,
      harp_protocol=self.harp_protocol,
      ip_protocol=self.ip_protocol
    )
    # Hand the HoiParams object over directly -- avoids a wasteful
    # serialize/parse round-trip through bytes.
    msg.set_params(self.build_parameters())

    return msg.build(source, sequence, harp_response_required=response_required)

  def interpret_response(self, response: 'SuccessResponse') -> dict:
    """Interpret a typed SuccessResponse (new backend interface).

    Default implementation delegates directly to parse_response_parameters
    on the raw HOI parameter bytes.
    """
    return self.parse_response_parameters(response.raw_params)

  def parse_response_from_message(self, message: CommandResponse) -> dict:
    """Parse response from a CommandResponse (legacy interface)."""
    return self.parse_response_parameters(message.hoi_params)

  @classmethod
  def parse_response_parameters(cls, data: bytes) -> dict:
    """Parse command-specific response parameters from the HOI payload.

    Subclasses must override this.

    Args:
      data: Raw bytes from the HOI fragments field.

    Returns:
      Dictionary with parsed response data.

    Raises:
      NotImplementedError: Always, unless overridden by the subclass.
    """
    raise NotImplementedError(f"{cls.__name__} must implement parse_response_parameters()")
class HoiParams:
  """Builder for HOI parameters with automatic DataFragment wrapping.

  Every parameter is wrapped with a DataFragment header before being stored:
  [type_id:1][flags:1][length:2][data:n]

  This guarantees HOI parameters are always correctly framed and makes it
  impossible to forget the DataFragment header.

  Example:
      params = (HoiParams()
                .i32(100)
                .string("test")
                .u32_array([1, 2, 3])
                .build())
  """

  def __init__(self):
    # Fully-encoded DataFragments, kept in insertion order.
    self._fragments: list[bytes] = []

  def _add_fragment(self, type_id: int, data: bytes, flags: int = 0) -> 'HoiParams':
    """Append one DataFragment: [type_id:1][flags:1][length:2][data:n].

    Args:
      type_id: Hamilton data type ID.
      data: Encoded payload bytes.
      flags: Fragment flags (0 for all types except BOOL_ARRAY, which uses 0x01).
    """
    fragment = (Wire.write()
                .u8(type_id)
                .u8(flags)
                .u16(len(data))
                .raw_bytes(data)
                .finish())
    self._fragments.append(fragment)
    return self

  def _scalar(self, type_id: int, write_method: str, value) -> 'HoiParams':
    """Encode one scalar with the named Wire writer method and append it."""
    data = getattr(Wire.write(), write_method)(value).finish()
    return self._add_fragment(type_id, data)

  def _numeric_array(self, type_id: int, write_method: str, values, flags: int = 0) -> 'HoiParams':
    """Encode elements back-to-back with the named Wire writer method.

    Format: [element0][element1]... -- NO count prefix; the parser derives the
    element count from the DataFragment length.
    """
    writer = Wire.write()
    write = getattr(writer, write_method)
    for val in values:
      write(val)
    return self._add_fragment(type_id, writer.finish(), flags=flags)

  # Scalar integer types
  def i8(self, value: int) -> 'HoiParams':
    """Add signed 8-bit integer parameter."""
    return self._scalar(HamiltonDataType.I8, 'i8', value)

  def i16(self, value: int) -> 'HoiParams':
    """Add signed 16-bit integer parameter."""
    return self._scalar(HamiltonDataType.I16, 'i16', value)

  def i32(self, value: int) -> 'HoiParams':
    """Add signed 32-bit integer parameter."""
    return self._scalar(HamiltonDataType.I32, 'i32', value)

  def i64(self, value: int) -> 'HoiParams':
    """Add signed 64-bit integer parameter."""
    return self._scalar(HamiltonDataType.I64, 'i64', value)

  def u8(self, value: int) -> 'HoiParams':
    """Add unsigned 8-bit integer parameter."""
    return self._scalar(HamiltonDataType.U8, 'u8', value)

  def u16(self, value: int) -> 'HoiParams':
    """Add unsigned 16-bit integer parameter."""
    return self._scalar(HamiltonDataType.U16, 'u16', value)

  def u32(self, value: int) -> 'HoiParams':
    """Add unsigned 32-bit integer parameter."""
    return self._scalar(HamiltonDataType.U32, 'u32', value)

  def u64(self, value: int) -> 'HoiParams':
    """Add unsigned 64-bit integer parameter."""
    return self._scalar(HamiltonDataType.U64, 'u64', value)

  # Floating-point types
  def f32(self, value: float) -> 'HoiParams':
    """Add 32-bit float parameter."""
    return self._scalar(HamiltonDataType.F32, 'f32', value)

  def f64(self, value: float) -> 'HoiParams':
    """Add 64-bit double parameter."""
    return self._scalar(HamiltonDataType.F64, 'f64', value)

  # String and bool
  def string(self, value: str) -> 'HoiParams':
    """Add null-terminated string parameter."""
    return self._scalar(HamiltonDataType.STRING, 'string', value)

  def bool_value(self, value: bool) -> 'HoiParams':
    """Add boolean parameter (encoded as u8: 0 or 1)."""
    return self._scalar(HamiltonDataType.BOOL, 'u8', 1 if value else 0)

  # Array types: [element0][element1]... (no count prefix)
  def i8_array(self, values: list[int]) -> 'HoiParams':
    """Add array of signed 8-bit integers."""
    return self._numeric_array(HamiltonDataType.I8_ARRAY, 'i8', values)

  def i16_array(self, values: list[int]) -> 'HoiParams':
    """Add array of signed 16-bit integers."""
    return self._numeric_array(HamiltonDataType.I16_ARRAY, 'i16', values)

  def i32_array(self, values: list[int]) -> 'HoiParams':
    """Add array of signed 32-bit integers."""
    return self._numeric_array(HamiltonDataType.I32_ARRAY, 'i32', values)

  def i64_array(self, values: list[int]) -> 'HoiParams':
    """Add array of signed 64-bit integers."""
    return self._numeric_array(HamiltonDataType.I64_ARRAY, 'i64', values)

  def u8_array(self, values: list[int]) -> 'HoiParams':
    """Add array of unsigned 8-bit integers."""
    return self._numeric_array(HamiltonDataType.U8_ARRAY, 'u8', values)

  def u16_array(self, values: list[int]) -> 'HoiParams':
    """Add array of unsigned 16-bit integers."""
    return self._numeric_array(HamiltonDataType.U16_ARRAY, 'u16', values)

  def u32_array(self, values: list[int]) -> 'HoiParams':
    """Add array of unsigned 32-bit integers."""
    return self._numeric_array(HamiltonDataType.U32_ARRAY, 'u32', values)

  def u64_array(self, values: list[int]) -> 'HoiParams':
    """Add array of unsigned 64-bit integers."""
    return self._numeric_array(HamiltonDataType.U64_ARRAY, 'u64', values)

  def f32_array(self, values: list[float]) -> 'HoiParams':
    """Add array of 32-bit floats."""
    return self._numeric_array(HamiltonDataType.F32_ARRAY, 'f32', values)

  def f64_array(self, values: list[float]) -> 'HoiParams':
    """Add array of 64-bit doubles."""
    return self._numeric_array(HamiltonDataType.F64_ARRAY, 'f64', values)

  def bool_array(self, values: list[bool]) -> 'HoiParams':
    """Add array of booleans (each stored as u8: 0 or 1).

    Note: BOOL_ARRAY uses flags=0x01 in the DataFragment header, unlike the
    other types which use 0x00.
    """
    return self._numeric_array(
      HamiltonDataType.BOOL_ARRAY, 'u8', [1 if v else 0 for v in values], flags=0x01
    )

  def string_array(self, values: list[str]) -> 'HoiParams':
    """Add array of null-terminated strings.

    Format: [count:4][str0\\0][str1\\0]...

    NOTE(review): HoiParamsParser decodes STRING_ARRAY *without* a count
    prefix (it simply splits on NUL bytes), so builder and parser disagree
    on this format -- confirm against the actual wire protocol before
    relying on round-tripping string arrays.
    """
    writer = Wire.write().u32(len(values))
    for val in values:
      writer.string(val)
    return self._add_fragment(HamiltonDataType.STRING_ARRAY, writer.finish())

  def build(self) -> bytes:
    """Return the concatenated DataFragments."""
    return b''.join(self._fragments)

  def count(self) -> int:
    """Return the number of fragments (parameters) added so far."""
    return len(self._fragments)
+ + Returns: + Tuple of (type_id, parsed_value) + + Raises: + ValueError: If data is malformed or insufficient + """ + if self._offset + 4 > len(self._data): + raise ValueError(f"Insufficient data for DataFragment header at offset {self._offset}") + + # Parse DataFragment header + reader = Wire.read(self._data[self._offset:]) + type_id = reader.u8() + _flags = reader.u8() # Read but unused + length = reader.u16() + + data_start = self._offset + 4 + data_end = data_start + length + + if data_end > len(self._data): + raise ValueError(f"DataFragment data extends beyond buffer: need {data_end}, have {len(self._data)}") + + # Extract data payload + fragment_data = self._data[data_start:data_end] + value = self._parse_value(type_id, fragment_data) + + # Move offset past this fragment + self._offset = data_end + + return (type_id, value) + + def _parse_value(self, type_id: int, data: bytes) -> Any: + """Parse value based on type_id using dispatch table.""" + reader = Wire.read(data) + + # Dispatch table for scalar types + scalar_parsers = { + HamiltonDataType.I8: reader.i8, + HamiltonDataType.I16: reader.i16, + HamiltonDataType.I32: reader.i32, + HamiltonDataType.I64: reader.i64, + HamiltonDataType.U8: reader.u8, + HamiltonDataType.U16: reader.u16, + HamiltonDataType.U32: reader.u32, + HamiltonDataType.U64: reader.u64, + HamiltonDataType.F32: reader.f32, + HamiltonDataType.F64: reader.f64, + HamiltonDataType.STRING: reader.string, + } + + # Check scalar types first + # Cast int to HamiltonDataType enum for dict lookup + try: + data_type = HamiltonDataType(type_id) + if data_type in scalar_parsers: + return scalar_parsers[data_type]() + except ValueError: + pass # Not a valid enum value, continue to other checks + + # Special case: bool + if type_id == HamiltonDataType.BOOL: + return reader.u8() == 1 + + # Dispatch table for array element parsers + array_element_parsers = { + HamiltonDataType.I8_ARRAY: reader.i8, + HamiltonDataType.I16_ARRAY: reader.i16, + 
HamiltonDataType.I32_ARRAY: reader.i32, + HamiltonDataType.I64_ARRAY: reader.i64, + HamiltonDataType.U8_ARRAY: reader.u8, + HamiltonDataType.U16_ARRAY: reader.u16, + HamiltonDataType.U32_ARRAY: reader.u32, + HamiltonDataType.U64_ARRAY: reader.u64, + HamiltonDataType.F32_ARRAY: reader.f32, + HamiltonDataType.F64_ARRAY: reader.f64, + HamiltonDataType.STRING_ARRAY: reader.string, + } + + # Handle arrays + # Arrays don't have a count prefix - count is derived from DataFragment length + # Calculate element size based on type + element_sizes = { + HamiltonDataType.I8_ARRAY: 1, + HamiltonDataType.I16_ARRAY: 2, + HamiltonDataType.I32_ARRAY: 4, + HamiltonDataType.I64_ARRAY: 8, + HamiltonDataType.U8_ARRAY: 1, + HamiltonDataType.U16_ARRAY: 2, + HamiltonDataType.U32_ARRAY: 4, + HamiltonDataType.U64_ARRAY: 8, + HamiltonDataType.F32_ARRAY: 4, + HamiltonDataType.F64_ARRAY: 8, + HamiltonDataType.STRING_ARRAY: None, # Variable length, handled separately + } + + # Cast int to HamiltonDataType enum for dict lookup + try: + data_type = HamiltonDataType(type_id) + if data_type in array_element_parsers: + element_size = element_sizes.get(data_type) + if element_size is not None: + # Fixed-size elements: calculate count from data length + count = len(data) // element_size + return [array_element_parsers[data_type]() for _ in range(count)] + elif data_type == HamiltonDataType.STRING_ARRAY: + # String arrays: null-terminated strings concatenated, no count prefix + # Parse by splitting on null bytes + strings = [] + current_string = bytearray() + for byte in data: + if byte == 0: + if current_string: + strings.append(current_string.decode('utf-8', errors='replace')) + current_string = bytearray() + else: + current_string.append(byte) + # Handle case where last string doesn't end with null (shouldn't happen, but be safe) + if current_string: + strings.append(current_string.decode('utf-8', errors='replace')) + return strings + except ValueError: + # Not a valid enum value, continue to other 
class CommandMessage:
  """Build HOI command messages for method calls.

  Produces complete IP[HARP[HOI]] packets with the proper protocols and
  actions. Parameters are automatically wrapped with DataFragment headers
  via HoiParams.

  Example:
      msg = CommandMessage(dest, interface_id=0, method_id=42)
      msg.add_i32(100).add_string("test")
      packet_bytes = msg.build(src, seq=1)
  """

  def __init__(
    self,
    dest: Address,
    interface_id: int,
    method_id: int,
    action_code: int = 3,  # Default: COMMAND_REQUEST
    harp_protocol: int = 2,  # Default: HOI2
    ip_protocol: int = 6  # Default: OBJECT_DISCOVERY
  ):
    """Initialize command message.

    Args:
      dest: Destination object address.
      interface_id: Interface ID (typically 0 for main interface, 1 for extended).
      method_id: Method/action ID to invoke.
      action_code: HOI action code (default 3=COMMAND_REQUEST).
      harp_protocol: HARP protocol identifier (default 2=HOI2).
      ip_protocol: IP protocol identifier (default 6=OBJECT_DISCOVERY).
    """
    self.dest = dest
    self.interface_id = interface_id
    self.method_id = method_id
    self.action_code = action_code
    self.harp_protocol = harp_protocol
    self.ip_protocol = ip_protocol
    self.params = HoiParams()

  def _chain(self, adder, value) -> 'CommandMessage':
    """Apply one HoiParams adder and return self for fluent chaining."""
    adder(value)
    return self

  # Convenience methods for adding parameters, all fluent.
  def add_i8(self, value: int) -> 'CommandMessage':
    """Add signed 8-bit integer parameter."""
    return self._chain(self.params.i8, value)

  def add_i16(self, value: int) -> 'CommandMessage':
    """Add signed 16-bit integer parameter."""
    return self._chain(self.params.i16, value)

  def add_i32(self, value: int) -> 'CommandMessage':
    """Add signed 32-bit integer parameter."""
    return self._chain(self.params.i32, value)

  def add_i64(self, value: int) -> 'CommandMessage':
    """Add signed 64-bit integer parameter."""
    return self._chain(self.params.i64, value)

  def add_u8(self, value: int) -> 'CommandMessage':
    """Add unsigned 8-bit integer parameter."""
    return self._chain(self.params.u8, value)

  def add_u16(self, value: int) -> 'CommandMessage':
    """Add unsigned 16-bit integer parameter."""
    return self._chain(self.params.u16, value)

  def add_u32(self, value: int) -> 'CommandMessage':
    """Add unsigned 32-bit integer parameter."""
    return self._chain(self.params.u32, value)

  def add_u64(self, value: int) -> 'CommandMessage':
    """Add unsigned 64-bit integer parameter."""
    return self._chain(self.params.u64, value)

  def add_f32(self, value: float) -> 'CommandMessage':
    """Add 32-bit float parameter."""
    return self._chain(self.params.f32, value)

  def add_f64(self, value: float) -> 'CommandMessage':
    """Add 64-bit double parameter."""
    return self._chain(self.params.f64, value)

  def add_string(self, value: str) -> 'CommandMessage':
    """Add string parameter."""
    return self._chain(self.params.string, value)

  def add_bool(self, value: bool) -> 'CommandMessage':
    """Add boolean parameter."""
    return self._chain(self.params.bool_value, value)

  def add_i32_array(self, values: list[int]) -> 'CommandMessage':
    """Add array of signed 32-bit integers."""
    return self._chain(self.params.i32_array, values)

  def add_u32_array(self, values: list[int]) -> 'CommandMessage':
    """Add array of unsigned 32-bit integers."""
    return self._chain(self.params.u32_array, values)

  def add_string_array(self, values: list[str]) -> 'CommandMessage':
    """Add array of strings."""
    return self._chain(self.params.string_array, values)

  def set_params(self, params: HoiParams) -> 'CommandMessage':
    """Replace the message parameters with a pre-built HoiParams object.

    Allows efficient parameter transfer without a serialize/parse round-trip;
    used internally by HamiltonCommand.

    Args:
      params: HoiParams object with pre-built parameters.

    Returns:
      Self for method chaining.
    """
    self.params = params
    return self

  def build(self, src: Address, seq: int,
            harp_response_required: bool = True,
            hoi_response_required: bool = False) -> bytes:
    """Build the complete IP[HARP[HOI]] packet.

    Args:
      src: Source address (client address).
      seq: Sequence number for this request.
      harp_response_required: Set bit 4 in the HARP action byte (default True).
      hoi_response_required: Set bit 4 in the HOI action byte (default False).

    Returns:
      Complete packet bytes ready to send over TCP.
    """
    # Innermost layer: HOI carries the method call and its parameters.
    hoi_payload = HoiPacket(
      interface_id=self.interface_id,
      action_code=self.action_code,
      action_id=self.method_id,
      params=self.params.build(),
      response_required=hoi_response_required,
    ).pack()

    # Middle layer: HARP routes between addresses and carries the sequence.
    harp_payload = HarpPacket(
      src=src,
      dst=self.dest,
      seq=seq,
      protocol=self.harp_protocol,
      action_code=self.action_code,
      payload=hoi_payload,
      response_required=harp_response_required,
    ).pack()

    # Outermost layer: IP framing.
    return IpPacket(protocol=self.ip_protocol, payload=harp_payload).pack()
+ + Args: + dest: Destination address (typically 0:0:65534 for registration service) + action_code: Registration action code (e.g., 12=HARP_PROTOCOL_REQUEST) + response_code: Response code (default 0=no error) + harp_protocol: HARP protocol identifier (default 3=Registration) + ip_protocol: IP protocol identifier (default 6=OBJECT_DISCOVERY) + """ + self.dest = dest + self.action_code = action_code + self.response_code = response_code + self.harp_protocol = harp_protocol + self.ip_protocol = ip_protocol + self.options = bytearray() + + def add_registration_option( + self, + option_type: RegistrationOptionType, + protocol: int = 2, + request_id: int = 1 + ) -> 'RegistrationMessage': + """Add a registration packet option. + + Args: + option_type: Type of registration option (from RegistrationOptionType enum) + protocol: For HARP_PROTOCOL_REQUEST: protocol type (2=HOI, default) + request_id: For HARP_PROTOCOL_REQUEST: what to discover (1=root, 2=global) + + Returns: + Self for method chaining + """ + # Registration option format: [option_id:1][length:1][data...] + # For HARP_PROTOCOL_REQUEST (option 5): data is [protocol:1][request_id:1] + data = Wire.write().u8(protocol).u8(request_id).finish() + option = Wire.write().u8(option_type).u8(len(data)).raw_bytes(data).finish() + self.options.extend(option) + return self + + def build( + self, + src: Address, + req_addr: Address, + res_addr: Address, + seq: int, + harp_action_code: int = 3, # Default: COMMAND_REQUEST + harp_response_required: bool = True # Default: request with response + ) -> bytes: + """Build complete IP[HARP[Registration]] packet. 
class InitMessage:
  """Build Connection initialization messages.

  Produces complete IP[Connection] packets for establishing a connection with
  the Hamilton instrument. Uses Protocol 7 (INITIALIZATION), whose structure
  differs from the HARP-based messages.

  Example:
      msg = InitMessage(timeout=30)
      packet_bytes = msg.build()
  """

  def __init__(
    self,
    timeout: int = 30,
    connection_type: int = 1,  # Default: standard connection
    protocol_version: int = 0x30,  # Default: 3.0
    ip_protocol: int = 7  # Default: INITIALIZATION
  ):
    """Initialize connection message.

    Args:
      timeout: Connection timeout in seconds (default 30).
      connection_type: Connection type (default 1=standard).
      protocol_version: Protocol version byte (default 0x30=3.0).
      ip_protocol: IP protocol identifier (default 7=INITIALIZATION).
    """
    self.timeout = timeout
    self.connection_type = connection_type
    self.protocol_version = protocol_version
    self.ip_protocol = ip_protocol

  def build(self) -> bytes:
    """Build the complete IP[Connection] packet.

    Returns:
      Complete packet bytes ready to send over TCP.
    """
    # Connection parameters are raw (NOT DataFragments).
    # Frame: [version:1][message_id:1][count:1][unknown:1]
    writer = Wire.write().u8(0).u8(0).u8(3).u8(0)
    # Each parameter: [id:1][type:1][reserved:2][value:2]
    for param_id, value in (
      (1, 0),                     # connection_id: 0 = request allocation
      (2, self.connection_type),  # connection_type
      (4, self.timeout),          # timeout (seconds)
    ):
      writer.u8(param_id).u8(16).u16(0).u16(value)
    params = writer.finish()

    # IP header: [size:2][protocol:1][version:1][options_len:2]; the size
    # field counts everything after itself (protocol + version + options_len
    # + params = 4 + len(params)).
    packet_size = 4 + len(params)
    return (Wire.write()
            .u16(packet_size)
            .u8(self.ip_protocol)
            .u8(self.protocol_version)
            .u16(0)  # options_length
            .raw_bytes(params)
            .finish())
+ + Args: + data: Raw bytes from TCP socket + + Returns: + Parsed InitResponse with connection parameters + """ + # Skip IP header (size + protocol + version + opts_len = 6 bytes) + parser = Wire.read(data[6:]) + + # Parse frame + _version = parser.u8() # Read but unused + _message_id = parser.u8() # Read but unused + _count = parser.u8() # Read but unused + _unknown = parser.u8() # Read but unused + + # Parse parameter 1 (client_id) + _param1_id = parser.u8() # Read but unused + _param1_type = parser.u8() # Read but unused + _param1_reserved = parser.u16() # Read but unused + client_id = parser.u16() + + # Parse parameter 2 (connection_type) + _param2_id = parser.u8() # Read but unused + _param2_type = parser.u8() # Read but unused + _param2_reserved = parser.u16() # Read but unused + connection_type = parser.u16() + + # Parse parameter 4 (timeout) + _param4_id = parser.u8() # Read but unused + _param4_type = parser.u8() # Read but unused + _param4_reserved = parser.u16() # Read but unused + timeout = parser.u16() + + return cls( + raw_bytes=data, + client_id=client_id, + connection_type=connection_type, + timeout=timeout + ) + + +@dataclass +class RegistrationResponse: + """Parsed registration response. + + Pairs with RegistrationMessage - parses IP[HARP[Registration]] responses. + """ + raw_bytes: bytes + ip: IpPacket + harp: HarpPacket + registration: RegistrationPacket + + @classmethod + def from_bytes(cls, data: bytes) -> 'RegistrationResponse': + """Parse registration response. 
+ + Args: + data: Raw bytes from TCP socket + + Returns: + Parsed RegistrationResponse with all layers + """ + ip = IpPacket.unpack(data) + harp = HarpPacket.unpack(ip.payload) + registration = RegistrationPacket.unpack(harp.payload) + + return cls( + raw_bytes=data, + ip=ip, + harp=harp, + registration=registration + ) + + @property + def sequence_number(self) -> int: + """Get sequence number from HARP layer.""" + return self.harp.seq + + +@dataclass +class CommandResponse: + """Parsed command response. + + Pairs with CommandMessage - parses IP[HARP[HOI]] responses. + """ + raw_bytes: bytes + ip: IpPacket + harp: HarpPacket + hoi: HoiPacket + + @classmethod + def from_bytes(cls, data: bytes) -> 'CommandResponse': + """Parse command response. + + Args: + data: Raw bytes from TCP socket + + Returns: + Parsed CommandResponse with all layers + + Raises: + ValueError: If response is not HOI protocol + """ + ip = IpPacket.unpack(data) + harp = HarpPacket.unpack(ip.payload) + + if harp.protocol != HarpTransportableProtocol.HOI2: + raise ValueError(f"Expected HOI2 protocol, got {harp.protocol}") + + hoi = HoiPacket.unpack(harp.payload) + + return cls( + raw_bytes=data, + ip=ip, + harp=harp, + hoi=hoi + ) + + @property + def sequence_number(self) -> int: + """Get sequence number from HARP layer.""" + return self.harp.seq + + @property + def hoi_params(self) -> bytes: + """Get HOI parameters (DataFragment-wrapped).""" + return self.hoi.params + + +# ============================================================================ +# TYPED HOI RESPONSE CLASSES - For response dispatch +# ============================================================================ + + +@dataclass +class HoiResponse: + """Base class for typed HOI responses with action-based dispatch. + + Provides type-safe access to response data with proper error handling. 
+ """ + action: int # Hoi2Action enum value + interface_id: int + action_id: int + raw_params: bytes + response_required: bool # Extracted from bit 4 of action byte + + +@dataclass +class SuccessResponse(HoiResponse): + """Successful HOI response (action 0x01 or 0x04).""" + pass + + +@dataclass +class ErrorResponse(HoiResponse): + """Error HOI response (action 0x02, 0x05, or 0x0a). + + Contains parsed error details from the response. + """ + error_code: int + error_message: str + + +class ResponseParser: + """Parse CommandResponse into typed HoiResponse objects. + + Provides action-based dispatch with automatic error detection. + + Example: + parser = ResponseParser() + response = parser.parse(command_response) + if isinstance(response, ErrorResponse): + raise RuntimeError(f"Error {response.error_code}: {response.error_message}") + """ + + def parse(self, cmd_response: CommandResponse) -> HoiResponse: + """Parse CommandResponse and dispatch based on HOI action code. + + Args: + cmd_response: Parsed CommandResponse from network + + Returns: + Typed HoiResponse (SuccessResponse or ErrorResponse) + + Raises: + ValueError: If action code is unexpected + """ + from .protocol import Hoi2Action + + # Get action code (lower 4 bits) + action = Hoi2Action(cmd_response.hoi.action_code) + + # Dispatch based on action type + if action in (Hoi2Action.STATUS_EXCEPTION, + Hoi2Action.COMMAND_EXCEPTION, + Hoi2Action.INVALID_ACTION_RESPONSE): + return self._parse_error(cmd_response, action) + elif action in (Hoi2Action.STATUS_RESPONSE, + Hoi2Action.COMMAND_RESPONSE): + return SuccessResponse( + action=action, + interface_id=cmd_response.hoi.interface_id, + action_id=cmd_response.hoi.action_id, + raw_params=cmd_response.hoi.params, + response_required=cmd_response.hoi.response_required + ) + else: + raise ValueError(f"Unexpected HOI action: {action} (0x{action:02x})") + + def _parse_error(self, cmd_response: CommandResponse, action: int) -> ErrorResponse: + """Parse error response. 
+ + Error responses may have custom formats that don't follow standard + DataFragment encoding. Return the raw payload as hex for debugging. + + Args: + cmd_response: Raw command response + action: HOI action code + + Returns: + ErrorResponse with error details + """ + # Error responses don't follow standard DataFragment format + # Just return the raw data as hex for inspection + error_code = action # Use action code as error code + error_message = f"Error response (action={action:#x}): {cmd_response.hoi.params.hex()}" + + return ErrorResponse( + action=action, + interface_id=cmd_response.hoi.interface_id, + action_id=cmd_response.hoi.action_id, + raw_params=cmd_response.hoi.params, + response_required=cmd_response.hoi.response_required, + error_code=error_code, + error_message=error_message + ) + diff --git a/pylabrobot/liquid_handling/backends/hamilton/nimbus_backend.py b/pylabrobot/liquid_handling/backends/hamilton/nimbus_backend.py new file mode 100644 index 00000000000..e43eb79e3c7 --- /dev/null +++ b/pylabrobot/liquid_handling/backends/hamilton/nimbus_backend.py @@ -0,0 +1,2807 @@ +"""Hamilton Nimbus backend implementation. + +This module provides the NimbusBackend class for controlling Hamilton Nimbus +instruments via TCP communication using the Hamilton protocol. 
+""" + +from __future__ import annotations + +import enum +import logging +from typing import Dict, List, Optional, TypeVar, Union + +from pylabrobot.resources.coordinate import Coordinate + +from pylabrobot.liquid_handling.backends.backend import LiquidHandlerBackend +from pylabrobot.liquid_handling.backends.hamilton.commands import HamiltonCommand +from pylabrobot.liquid_handling.backends.hamilton.messages import ( + HoiParams, + HoiParamsParser, +) +from pylabrobot.liquid_handling.backends.hamilton.packets import Address +from pylabrobot.liquid_handling.backends.hamilton.protocol import ( + HamiltonProtocol, +) +from pylabrobot.liquid_handling.backends.hamilton.tcp_backend import TCPBackend +from pylabrobot.liquid_handling.backends.hamilton.tcp_introspection import ( + HamiltonIntrospection, +) +from pylabrobot.liquid_handling.standard import ( + Drop, + DropTipRack, + MultiHeadAspirationContainer, + MultiHeadAspirationPlate, + MultiHeadDispenseContainer, + MultiHeadDispensePlate, + Pickup, + PickupTipRack, + ResourceDrop, + ResourceMove, + ResourcePickup, + SingleChannelAspiration, + SingleChannelDispense, +) +from pylabrobot.resources import Tip +from pylabrobot.resources.container import Container +from pylabrobot.resources.hamilton import HamiltonTip, TipSize +from pylabrobot.resources.hamilton.nimbus_decks import NimbusDeck +from pylabrobot.resources.trash import Trash + +logger = logging.getLogger(__name__) + +T = TypeVar("T") + + +# ============================================================================ +# HELPER FUNCTIONS +# ============================================================================ + + +def _fill_in_defaults(val: Optional[List[T]], default: List[T]) -> List[T]: + """Util for converting an argument to the appropriate format for low level methods. 
+ + Args: + val: Optional list of values (None means use default) + default: Default list of values + + Returns: + List of values with defaults filled in + + Raises: + ValueError: If val is provided but length doesn't match default length + """ + # if the val is None, use the default. + if val is None: + return default + # if the val is a list, it must be of the correct length. + if len(val) != len(default): + raise ValueError( + f"Value length must equal num operations ({len(default)}), but is {len(val)}" + ) + # replace None values in list with default values. + val = [v if v is not None else d for v, d in zip(val, default)] + # the value is ready to be used. + return val + + +# ============================================================================ +# TIP TYPE ENUM +# ============================================================================ + + +class NimbusTipType(enum.IntEnum): + """Hamilton Nimbus tip type enumeration. + + Maps tip type names to their integer values used in Hamilton protocol commands. + """ + + STANDARD_300UL = 0 # "300ul Standard Volume Tip" + STANDARD_300UL_FILTER = 1 # "300ul Standard Volume Tip with filter" + LOW_VOLUME_10UL = 2 # "10ul Low Volume Tip" + LOW_VOLUME_10UL_FILTER = 3 # "10ul Low Volume Tip with filter" + HIGH_VOLUME_1000UL = 4 # "1000ul High Volume Tip" + HIGH_VOLUME_1000UL_FILTER = 5 # "1000ul High Volume Tip with filter" + TIP_50UL = 22 # "50ul Tip" + TIP_50UL_FILTER = 23 # "50ul Tip with filter" + SLIM_CORE_300UL = 36 # "SLIM CO-RE Tip 300ul" + + +def _get_tip_type_from_tip(tip: Tip) -> int: # TODO: Map these to Hamilton Tip Rack Resources rather than inferring from tip characteristics + """Map Tip object characteristics to Hamilton tip type integer. + + Args: + tip: Tip object with volume and filter information. + + Returns: + Hamilton tip type integer value. + + Raises: + ValueError: If tip characteristics don't match any known tip type. 
+ """ + # Match based on volume and filter + if tip.maximal_volume <= 15: # 10ul tip + if tip.has_filter: + return NimbusTipType.LOW_VOLUME_10UL_FILTER + else: + return NimbusTipType.LOW_VOLUME_10UL + elif tip.maximal_volume <= 60: # 50ul tip + if tip.has_filter: + return NimbusTipType.TIP_50UL_FILTER + else: + return NimbusTipType.TIP_50UL + elif tip.maximal_volume <= 500: # 300ul tip (increased threshold to catch 360µL filtered tips) + if tip.has_filter: + return NimbusTipType.STANDARD_300UL_FILTER + else: + return NimbusTipType.STANDARD_300UL + elif tip.maximal_volume <= 1100: # 1000ul tip + if tip.has_filter: + return NimbusTipType.HIGH_VOLUME_1000UL_FILTER + else: + return NimbusTipType.HIGH_VOLUME_1000UL + else: + raise ValueError( + f"Cannot determine tip type for tip with volume {tip.maximal_volume}µL " + f"and filter={tip.has_filter}. No matching Hamilton tip type found." + ) + + +# ============================================================================ +# COMMAND CLASSES +# ============================================================================ + + +class LockDoor(HamiltonCommand): + """Lock door command (DoorLock at 1:1:268, interface_id=1, command_id=1).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 1 + + def build_parameters(self) -> HoiParams: + """Build parameters for LockDoor command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse LockDoor response.""" + return {"success": True} + + +class UnlockDoor(HamiltonCommand): + """Unlock door command (DoorLock at 1:1:268, interface_id=1, command_id=2).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 2 + + def build_parameters(self) -> HoiParams: + """Build parameters for UnlockDoor command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse UnlockDoor response.""" + return {"success": True} + + 
+class IsDoorLocked(HamiltonCommand): + """Check if door is locked (DoorLock at 1:1:268, interface_id=1, command_id=3).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 3 + action_code = 0 # Must be 0 (STATUS_REQUEST), default is 3 (COMMAND_REQUEST) + + def build_parameters(self) -> HoiParams: + """Build parameters for IsDoorLocked command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse IsDoorLocked response.""" + parser = HoiParamsParser(data) + _, locked = parser.parse_next() + return {"locked": bool(locked)} + + +class PreInitializeSmart(HamiltonCommand): + """Pre-initialize smart command (Pipette at 1:1:257, interface_id=1, command_id=32).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 32 + + def build_parameters(self) -> HoiParams: + """Build parameters for PreInitializeSmart command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse PreInitializeSmart response.""" + return {"success": True} + + +class InitializeSmartRoll(HamiltonCommand): + """Initialize smart roll command (NimbusCore at 1:1:48896, interface_id=1, command_id=29).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 29 + + def __init__( + self, + dest: Address, + x_positions: List[int], + y_positions: List[int], + z_start_positions: List[int], + z_stop_positions: List[int], + z_final_positions: List[int], + roll_distances: List[int], + ): + """Initialize InitializeSmartRoll command. 
+ + Args: + dest: Destination address (NimbusCore) + x_positions: X positions in 0.01mm units + y_positions: Y positions in 0.01mm units + z_start_positions: Z start positions in 0.01mm units + z_stop_positions: Z stop positions in 0.01mm units + z_final_positions: Z final positions in 0.01mm units + roll_distances: Roll distances in 0.01mm units + """ + super().__init__(dest) + self.x_positions = x_positions + self.y_positions = y_positions + self.z_start_positions = z_start_positions + self.z_stop_positions = z_stop_positions + self.z_final_positions = z_final_positions + self.roll_distances = roll_distances + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for InitializeSmartRoll command.""" + return ( + HoiParams() + .i32_array(self.x_positions) + .i32_array(self.y_positions) + .i32_array(self.z_start_positions) + .i32_array(self.z_stop_positions) + .i32_array(self.z_final_positions) + .i32_array(self.roll_distances) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse InitializeSmartRoll response (void return).""" + return {"success": True} + + +class IsInitialized(HamiltonCommand): + """Check if instrument is initialized (NimbusCore at 1:1:48896, interface_id=1, command_id=14).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 14 + action_code = 0 # Must be 0 (STATUS_REQUEST), default is 3 (COMMAND_REQUEST) + + def build_parameters(self) -> HoiParams: + """Build parameters for IsInitialized command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse IsInitialized response.""" + parser = HoiParamsParser(data) + _, initialized = parser.parse_next() + return {"initialized": bool(initialized)} + + +class IsTipPresent(HamiltonCommand): + """Check tip presence (Pipette at 1:1:257, interface_id=1, command_id=16).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + 
command_id = 16 + action_code = 0 + + def build_parameters(self) -> HoiParams: + """Build parameters for IsTipPresent command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse IsTipPresent response - returns List[i16].""" + parser = HoiParamsParser(data) + # Parse array of i16 values representing tip presence per channel + _, tip_presence = parser.parse_next() + return {"tip_present": tip_presence} + + +class GetChannelConfiguration_1(HamiltonCommand): + """Get channel configuration (NimbusCore root, interface_id=1, command_id=15).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 15 + action_code = 0 + + def build_parameters(self) -> HoiParams: + """Build parameters for GetChannelConfiguration_1 command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse GetChannelConfiguration_1 response. + + Returns: (channels: u16, channel_types: List[i16]) + """ + parser = HoiParamsParser(data) + _, channels = parser.parse_next() + _, channel_types = parser.parse_next() + return {"channels": channels, "channel_types": channel_types} + + +class SetChannelConfiguration(HamiltonCommand): + """Set channel configuration (Pipette at 1:1:257, interface_id=1, command_id=67).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 67 + + def __init__( + self, + dest: Address, + channel: int, + indexes: List[int], + enables: List[bool], + ): + """Initialize SetChannelConfiguration command. 
+ + Args: + dest: Destination address (Pipette) + channel: Channel number (1-based) + indexes: List of configuration indexes (e.g., [1, 3, 4]) + 1: Tip Recognition, 2: Aspirate and clot monitoring pLLD, + 3: Aspirate monitoring with cLLD, 4: Clot monitoring with cLLD + enables: List of enable flags (e.g., [True, False, False, False]) + """ + super().__init__(dest) + self.channel = channel + self.indexes = indexes + self.enables = enables + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for SetChannelConfiguration command.""" + return ( + HoiParams() + .u16(self.channel) + .i16_array(self.indexes) + .bool_array(self.enables) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse SetChannelConfiguration response (void return).""" + return {"success": True} + + +class Park(HamiltonCommand): + """Park command (NimbusCore at 1:1:48896, interface_id=1, command_id=3).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 3 + + def build_parameters(self) -> HoiParams: + """Build parameters for Park command.""" + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse Park response.""" + return {"success": True} + + +class PickupTips(HamiltonCommand): + """Pick up tips command (Pipette at 1:1:257, interface_id=1, command_id=4).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 4 + + def __init__( + self, + dest: Address, + tips_used: List[int], + x_positions: List[int], + y_positions: List[int], + traverse_height: int, + z_start_positions: List[int], + z_stop_positions: List[int], + tip_types: List[int], + ): + """Initialize PickupTips command. 
+ + Args: + dest: Destination address (Pipette) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + x_positions: X positions in 0.01mm units + y_positions: Y positions in 0.01mm units + traverse_height: Traverse height in 0.01mm units + z_start_positions: Z start positions in 0.01mm units + z_stop_positions: Z stop positions in 0.01mm units + tip_types: Tip type integers for each channel + """ + super().__init__(dest) + self.tips_used = tips_used + self.x_positions = x_positions + self.y_positions = y_positions + self.traverse_height = traverse_height + self.z_start_positions = z_start_positions + self.z_stop_positions = z_stop_positions + self.tip_types = tip_types + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for PickupTips command.""" + return ( + HoiParams() + .u16_array(self.tips_used) + .i32_array(self.x_positions) + .i32_array(self.y_positions) + .i32(self.traverse_height) + .i32_array(self.z_start_positions) + .i32_array(self.z_stop_positions) + .u16_array(self.tip_types) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse PickupTips response (void return).""" + return {"success": True} + + +class DropTips(HamiltonCommand): + """Drop tips command (Pipette at 1:1:257, interface_id=1, command_id=5).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 5 + + def __init__( + self, + dest: Address, + tips_used: List[int], + x_positions: List[int], + y_positions: List[int], + traverse_height: int, + z_start_positions: List[int], + z_stop_positions: List[int], + z_final_positions: List[int], + default_waste: bool, + ): + """Initialize DropTips command. 
+ + Args: + dest: Destination address (Pipette) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + x_positions: X positions in 0.01mm units + y_positions: Y positions in 0.01mm units + traverse_height: Traverse height in 0.01mm units + z_start_positions: Z start positions in 0.01mm units + z_stop_positions: Z stop positions in 0.01mm units + z_final_positions: Z final positions in 0.01mm units + default_waste: If True, drop to default waste (positions may be ignored) + """ + super().__init__(dest) + self.tips_used = tips_used + self.x_positions = x_positions + self.y_positions = y_positions + self.traverse_height = traverse_height + self.z_start_positions = z_start_positions + self.z_stop_positions = z_stop_positions + self.z_final_positions = z_final_positions + self.default_waste = default_waste + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for DropTips command.""" + return ( + HoiParams() + .u16_array(self.tips_used) + .i32_array(self.x_positions) + .i32_array(self.y_positions) + .i32(self.traverse_height) + .i32_array(self.z_start_positions) + .i32_array(self.z_stop_positions) + .i32_array(self.z_final_positions) + .bool_value(self.default_waste) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse DropTips response (void return).""" + return {"success": True} + + +class DropTipsRoll(HamiltonCommand): + """Drop tips with roll command (Pipette at 1:1:257, interface_id=1, command_id=82).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 82 + + def __init__( + self, + dest: Address, + tips_used: List[int], + x_positions: List[int], + y_positions: List[int], + traverse_height: int, + z_start_positions: List[int], + z_stop_positions: List[int], + z_final_positions: List[int], + roll_distances: List[int], + ): + """Initialize DropTipsRoll command. 
+ + Args: + dest: Destination address (Pipette) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + x_positions: X positions in 0.01mm units + y_positions: Y positions in 0.01mm units + traverse_height: Traverse height in 0.01mm units + z_start_positions: Z start positions in 0.01mm units + z_stop_positions: Z stop positions in 0.01mm units + z_final_positions: Z final positions in 0.01mm units + roll_distances: Roll distance for each channel in 0.01mm units + """ + super().__init__(dest) + self.tips_used = tips_used + self.x_positions = x_positions + self.y_positions = y_positions + self.traverse_height = traverse_height + self.z_start_positions = z_start_positions + self.z_stop_positions = z_stop_positions + self.z_final_positions = z_final_positions + self.roll_distances = roll_distances + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for DropTipsRoll command.""" + return ( + HoiParams() + .u16_array(self.tips_used) + .i32_array(self.x_positions) + .i32_array(self.y_positions) + .i32(self.traverse_height) + .i32_array(self.z_start_positions) + .i32_array(self.z_stop_positions) + .i32_array(self.z_final_positions) + .i32_array(self.roll_distances) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse DropTipsRoll response (void return).""" + return {"success": True} + + +class EnableADC(HamiltonCommand): + """Enable ADC command (Pipette at 1:1:257, interface_id=1, command_id=43).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 43 + + def __init__( + self, + dest: Address, + tips_used: List[int], + ): + """Initialize EnableADC command. 
+ + Args: + dest: Destination address (Pipette) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + """ + super().__init__(dest) + self.tips_used = tips_used + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for EnableADC command.""" + return HoiParams().u16_array(self.tips_used) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse EnableADC response (void return).""" + return {"success": True} + + +class DisableADC(HamiltonCommand): + """Disable ADC command (Pipette at 1:1:257, interface_id=1, command_id=44).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 44 + + def __init__( + self, + dest: Address, + tips_used: List[int], + ): + """Initialize DisableADC command. + + Args: + dest: Destination address (Pipette) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + """ + super().__init__(dest) + self.tips_used = tips_used + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for DisableADC command.""" + return HoiParams().u16_array(self.tips_used) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse DisableADC response (void return).""" + return {"success": True} + + +class GetChannelConfiguration(HamiltonCommand): + """Get channel configuration command (Pipette at 1:1:257, interface_id=1, command_id=66).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 66 + action_code = 0 # Must be 0 (STATUS_REQUEST), default is 3 (COMMAND_REQUEST) + + def __init__( + self, + dest: Address, + channel: int, + indexes: List[int], + ): + """Initialize GetChannelConfiguration command. 
+ + Args: + dest: Destination address (Pipette) + channel: Channel number (1-based) + indexes: List of configuration indexes (e.g., [2] for "Aspirate monitoring with cLLD") + """ + super().__init__(dest) + self.channel = channel + self.indexes = indexes + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for GetChannelConfiguration command.""" + return ( + HoiParams() + .u16(self.channel) + .i16_array(self.indexes) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse GetChannelConfiguration response. + + Returns: { enabled: List[bool] } + """ + parser = HoiParamsParser(data) + _, enabled = parser.parse_next() + return {"enabled": enabled} + + +class Aspirate(HamiltonCommand): + """Aspirate command (Pipette at 1:1:257, interface_id=1, command_id=6).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 6 + + def __init__( + self, + dest: Address, + aspirate_type: List[int], + tips_used: List[int], + x_positions: List[int], + y_positions: List[int], + traverse_height: int, + liquid_seek_height: List[int], + liquid_surface_height: List[int], + submerge_depth: List[int], + follow_depth: List[int], + z_min_position: List[int], + clot_check_height: List[int], + z_final: int, + liquid_exit_speed: List[int], + blowout_volume: List[int], + prewet_volume: List[int], + aspirate_volume: List[int], + transport_air_volume: List[int], + aspirate_speed: List[int], + settling_time: List[int], + mix_volume: List[int], + mix_cycles: List[int], + mix_position: List[int], + mix_follow_distance: List[int], + mix_speed: List[int], + tube_section_height: List[int], + tube_section_ratio: List[int], + lld_mode: List[int], + capacitive_lld_sensitivity: List[int], + pressure_lld_sensitivity: List[int], + lld_height_difference: List[int], + tadm_enabled: bool, + limit_curve_index: List[int], + recording_mode: int, + ): + """Initialize Aspirate command. 
+ + Args: + dest: Destination address (Pipette) + aspirate_type: Aspirate type for each channel (List[i16]) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + x_positions: X positions in 0.01mm units + y_positions: Y positions in 0.01mm units + traverse_height: Traverse height in 0.01mm units + liquid_seek_height: Liquid seek height for each channel in 0.01mm units + liquid_surface_height: Liquid surface height for each channel in 0.01mm units + submerge_depth: Submerge depth for each channel in 0.01mm units + follow_depth: Follow depth for each channel in 0.01mm units + z_min_position: Z minimum position for each channel in 0.01mm units + clot_check_height: Clot check height for each channel in 0.01mm units + z_final: Z final position in 0.01mm units + liquid_exit_speed: Liquid exit speed for each channel in 0.1µL/s units + blowout_volume: Blowout volume for each channel in 0.1µL units + prewet_volume: Prewet volume for each channel in 0.1µL units + aspirate_volume: Aspirate volume for each channel in 0.1µL units + transport_air_volume: Transport air volume for each channel in 0.1µL units + aspirate_speed: Aspirate speed for each channel in 0.1µL/s units + settling_time: Settling time for each channel in 0.1s units + mix_volume: Mix volume for each channel in 0.1µL units + mix_cycles: Mix cycles for each channel + mix_position: Mix position for each channel in 0.01mm units + mix_follow_distance: Mix follow distance for each channel in 0.01mm units + mix_speed: Mix speed for each channel in 0.1µL/s units + tube_section_height: Tube section height for each channel in 0.01mm units + tube_section_ratio: Tube section ratio for each channel + lld_mode: LLD mode for each channel (List[i16]) + capacitive_lld_sensitivity: Capacitive LLD sensitivity for each channel (List[i16]) + pressure_lld_sensitivity: Pressure LLD sensitivity for each channel (List[i16]) + lld_height_difference: LLD height difference for each channel in 0.01mm units + tadm_enabled: TADM 
enabled flag + limit_curve_index: Limit curve index for each channel + recording_mode: Recording mode (u16) + """ + super().__init__(dest) + self.aspirate_type = aspirate_type + self.tips_used = tips_used + self.x_positions = x_positions + self.y_positions = y_positions + self.traverse_height = traverse_height + self.liquid_seek_height = liquid_seek_height + self.liquid_surface_height = liquid_surface_height + self.submerge_depth = submerge_depth + self.follow_depth = follow_depth + self.z_min_position = z_min_position + self.clot_check_height = clot_check_height + self.z_final = z_final + self.liquid_exit_speed = liquid_exit_speed + self.blowout_volume = blowout_volume + self.prewet_volume = prewet_volume + self.aspirate_volume = aspirate_volume + self.transport_air_volume = transport_air_volume + self.aspirate_speed = aspirate_speed + self.settling_time = settling_time + self.mix_volume = mix_volume + self.mix_cycles = mix_cycles + self.mix_position = mix_position + self.mix_follow_distance = mix_follow_distance + self.mix_speed = mix_speed + self.tube_section_height = tube_section_height + self.tube_section_ratio = tube_section_ratio + self.lld_mode = lld_mode + self.capacitive_lld_sensitivity = capacitive_lld_sensitivity + self.pressure_lld_sensitivity = pressure_lld_sensitivity + self.lld_height_difference = lld_height_difference + self.tadm_enabled = tadm_enabled + self.limit_curve_index = limit_curve_index + self.recording_mode = recording_mode + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for Aspirate command.""" + return ( + HoiParams() + .i16_array(self.aspirate_type) + .u16_array(self.tips_used) + .i32_array(self.x_positions) + .i32_array(self.y_positions) + .i32(self.traverse_height) + .i32_array(self.liquid_seek_height) + .i32_array(self.liquid_surface_height) + .i32_array(self.submerge_depth) + .i32_array(self.follow_depth) + .i32_array(self.z_min_position) + .i32_array(self.clot_check_height) + 
.i32(self.z_final) + .u32_array(self.liquid_exit_speed) + .u32_array(self.blowout_volume) + .u32_array(self.prewet_volume) + .u32_array(self.aspirate_volume) + .u32_array(self.transport_air_volume) + .u32_array(self.aspirate_speed) + .u32_array(self.settling_time) + .u32_array(self.mix_volume) + .u32_array(self.mix_cycles) + .i32_array(self.mix_position) + .i32_array(self.mix_follow_distance) + .u32_array(self.mix_speed) + .i32_array(self.tube_section_height) + .i32_array(self.tube_section_ratio) + .i16_array(self.lld_mode) + .i16_array(self.capacitive_lld_sensitivity) + .i16_array(self.pressure_lld_sensitivity) + .i32_array(self.lld_height_difference) + .bool_value(self.tadm_enabled) + .u32_array(self.limit_curve_index) + .u16(self.recording_mode) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse Aspirate response (void return).""" + return {"success": True} + + +class Dispense(HamiltonCommand): + """Dispense command (Pipette at 1:1:257, interface_id=1, command_id=7).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 1 + command_id = 7 + + def __init__( + self, + dest: Address, + dispense_type: List[int], + tips_used: List[int], + x_positions: List[int], + y_positions: List[int], + traverse_height: int, + liquid_seek_height: List[int], + dispense_height: List[int], + submerge_depth: List[int], + follow_depth: List[int], + z_min_position: List[int], + z_final: int, + liquid_exit_speed: List[int], + transport_air_volume: List[int], + dispense_volume: List[int], + stop_back_volume: List[int], + blowout_volume: List[int], + dispense_speed: List[int], + cutoff_speed: List[int], + settling_time: List[int], + mix_volume: List[int], + mix_cycles: List[int], + mix_position: List[int], + mix_follow_distance: List[int], + mix_speed: List[int], + touch_off_distance: int, + dispense_offset: List[int], + tube_section_height: List[int], + tube_section_ratio: List[int], + lld_mode: List[int], + 
capacitive_lld_sensitivity: List[int], + tadm_enabled: bool, + limit_curve_index: List[int], + recording_mode: int, + ): + """Initialize Dispense command. + + Args: + dest: Destination address (Pipette) + dispense_type: Dispense type for each channel (List[i16]) + tips_used: Tip pattern (1 for active channels, 0 for inactive) + x_positions: X positions in 0.01mm units + y_positions: Y positions in 0.01mm units + traverse_height: Traverse height in 0.01mm units + liquid_seek_height: Liquid seek height for each channel in 0.01mm units + dispense_height: Dispense height for each channel in 0.01mm units + submerge_depth: Submerge depth for each channel in 0.01mm units + follow_depth: Follow depth for each channel in 0.01mm units + z_min_position: Z minimum position for each channel in 0.01mm units + z_final: Z final position in 0.01mm units + liquid_exit_speed: Liquid exit speed for each channel in 0.1µL/s units + transport_air_volume: Transport air volume for each channel in 0.1µL units + dispense_volume: Dispense volume for each channel in 0.1µL units + stop_back_volume: Stop back volume for each channel in 0.1µL units + blowout_volume: Blowout volume for each channel in 0.1µL units + dispense_speed: Dispense speed for each channel in 0.1µL/s units + cutoff_speed: Cutoff speed for each channel in 0.1µL/s units + settling_time: Settling time for each channel in 0.1s units + mix_volume: Mix volume for each channel in 0.1µL units + mix_cycles: Mix cycles for each channel + mix_position: Mix position for each channel in 0.01mm units + mix_follow_distance: Mix follow distance for each channel in 0.01mm units + mix_speed: Mix speed for each channel in 0.1µL/s units + touch_off_distance: Touch off distance in 0.01mm units + dispense_offset: Dispense offset for each channel in 0.01mm units + tube_section_height: Tube section height for each channel in 0.01mm units + tube_section_ratio: Tube section ratio for each channel + lld_mode: LLD mode for each channel (List[i16]) + 
capacitive_lld_sensitivity: Capacitive LLD sensitivity for each channel (List[i16]) + tadm_enabled: TADM enabled flag + limit_curve_index: Limit curve index for each channel + recording_mode: Recording mode (u16) + """ + super().__init__(dest) + self.dispense_type = dispense_type + self.tips_used = tips_used + self.x_positions = x_positions + self.y_positions = y_positions + self.traverse_height = traverse_height + self.liquid_seek_height = liquid_seek_height + self.dispense_height = dispense_height + self.submerge_depth = submerge_depth + self.follow_depth = follow_depth + self.z_min_position = z_min_position + self.z_final = z_final + self.liquid_exit_speed = liquid_exit_speed + self.transport_air_volume = transport_air_volume + self.dispense_volume = dispense_volume + self.stop_back_volume = stop_back_volume + self.blowout_volume = blowout_volume + self.dispense_speed = dispense_speed + self.cutoff_speed = cutoff_speed + self.settling_time = settling_time + self.mix_volume = mix_volume + self.mix_cycles = mix_cycles + self.mix_position = mix_position + self.mix_follow_distance = mix_follow_distance + self.mix_speed = mix_speed + self.touch_off_distance = touch_off_distance + self.dispense_offset = dispense_offset + self.tube_section_height = tube_section_height + self.tube_section_ratio = tube_section_ratio + self.lld_mode = lld_mode + self.capacitive_lld_sensitivity = capacitive_lld_sensitivity + self.tadm_enabled = tadm_enabled + self.limit_curve_index = limit_curve_index + self.recording_mode = recording_mode + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for Dispense command.""" + return ( + HoiParams() + .i16_array(self.dispense_type) + .u16_array(self.tips_used) + .i32_array(self.x_positions) + .i32_array(self.y_positions) + .i32(self.traverse_height) + .i32_array(self.liquid_seek_height) + .i32_array(self.dispense_height) + .i32_array(self.submerge_depth) + .i32_array(self.follow_depth) + 
.i32_array(self.z_min_position) + .i32(self.z_final) + .u32_array(self.liquid_exit_speed) + .u32_array(self.transport_air_volume) + .u32_array(self.dispense_volume) + .u32_array(self.stop_back_volume) + .u32_array(self.blowout_volume) + .u32_array(self.dispense_speed) + .u32_array(self.cutoff_speed) + .u32_array(self.settling_time) + .u32_array(self.mix_volume) + .u32_array(self.mix_cycles) + .i32_array(self.mix_position) + .i32_array(self.mix_follow_distance) + .u32_array(self.mix_speed) + .i32(self.touch_off_distance) + .i32_array(self.dispense_offset) + .i32_array(self.tube_section_height) + .i32_array(self.tube_section_ratio) + .i16_array(self.lld_mode) + .i16_array(self.capacitive_lld_sensitivity) + .bool_value(self.tadm_enabled) + .u32_array(self.limit_curve_index) + .u16(self.recording_mode) + ) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse Dispense response (void return).""" + return {"success": True} + + +# ============================================================================ +# MAIN BACKEND CLASS +# ============================================================================ + + +class NimbusBackend(TCPBackend, LiquidHandlerBackend): + """Backend for Hamilton Nimbus liquid handling instruments. + + This backend uses TCP communication with the Hamilton protocol to control + Nimbus instruments. It inherits from both TCPBackend (for communication) + and LiquidHandlerBackend (for liquid handling interface). + + Attributes: + setup_finished: Whether the backend has been set up. + _num_channels: Cached number of channels (queried from instrument). + _door_lock_available: Whether door lock is available on this instrument. + """ + + def __init__( + self, + host: str, + port: int = 2000, + read_timeout: float = 30.0, + write_timeout: float = 30.0, + buffer_size: int = 1024, + auto_reconnect: bool = True, + max_reconnect_attempts: int = 3, + ): + """Initialize Nimbus backend. 
+ + Args: + host: Hamilton instrument IP address + port: Hamilton instrument port (default: 2000) + read_timeout: Read timeout in seconds + write_timeout: Write timeout in seconds + buffer_size: Buffer size (kept for compatibility) + auto_reconnect: Enable automatic reconnection + max_reconnect_attempts: Maximum reconnection attempts + """ + TCPBackend.__init__( + self, + host=host, + port=port, + read_timeout=read_timeout, + write_timeout=write_timeout, + buffer_size=buffer_size, + auto_reconnect=auto_reconnect, + max_reconnect_attempts=max_reconnect_attempts, + ) + LiquidHandlerBackend.__init__(self) + + self._num_channels: Optional[int] = None + self._pipette_address: Optional[Address] = None + self._door_lock_address: Optional[Address] = None + self._nimbus_core_address: Optional[Address] = None + self._is_initialized: Optional[bool] = None + self._tips_present: Optional[List[int]] = None + self._channel_configurations: Optional[Dict[int, Dict[int, bool]]] = None + + async def setup(self, unlock_door: bool = False, force_initialize: bool = False): + """Set up the Nimbus backend. + + This method: + 1. Establishes TCP connection and performs protocol initialization + 2. Discovers instrument objects + 3. Queries channel configuration to get num_channels + 4. Queries tip presence + 5. Queries initialization status + 6. Locks door if available + 7. Conditionally initializes NimbusCore with InitializeSmartRoll (only if not initialized) + 8. 
        Optionally unlocks door after initialization

        Args:
            unlock_door: If True, unlock door after initialization (default: False)
            force_initialize: If True, run channel configuration and
                InitializeSmartRoll even when the instrument reports it is
                already initialized (default: False)
        """
        # Call parent setup (TCP connection, Protocol 7 init, Protocol 3 registration)
        await TCPBackend.setup(self)

        # Ensure deck is set
        assert self._deck is not None, "Deck must be set before setup"

        # Discover instrument objects
        await self._discover_instrument_objects()

        # Ensure required objects are discovered
        if self._pipette_address is None:
            raise RuntimeError(
                "Pipette object not discovered. Cannot proceed with setup."
            )
        if self._nimbus_core_address is None:
            raise RuntimeError(
                "NimbusCore root object not discovered. Cannot proceed with setup."
            )

        # Query channel configuration to get num_channels (use discovered address only)
        try:
            config = await self.send_command(GetChannelConfiguration_1(self._nimbus_core_address))
            self._num_channels = config["channels"]
            logger.info(f"Channel configuration: {config['channels']} channels")
        except Exception as e:
            logger.error(f"Failed to query channel configuration: {e}")
            raise

        # Query tip presence (use discovered address only)
        try:
            tip_status = await self.send_command(IsTipPresent(self._pipette_address))
            tip_present = tip_status.get("tip_present", [])
            self._tips_present = tip_present
            logger.info(f"Tip presence: {tip_present}")
        except Exception as e:
            logger.warning(f"Failed to query tip presence: {e}")

        # Query initialization status (use discovered address only)
        try:
            init_status = await self.send_command(IsInitialized(self._nimbus_core_address))
            self._is_initialized = init_status.get("initialized", False)
            logger.info(f"Instrument initialized: {self._is_initialized}")
        except Exception as e:
            logger.error(f"Failed to query initialization status: {e}")
            raise

        # Lock door if available (optional - no error if not found)
        # This happens before initialization
        if self._door_lock_address is not None:
            try:
                if not await
self.is_door_locked(): + await self.lock_door() + else: + logger.info("Door already locked") + except RuntimeError: + # Door lock not available or not set up - this is okay + logger.warning("Door lock operations skipped (not available or not set up)") + except Exception as e: + logger.warning(f"Failed to lock door: {e}") + + # Conditional initialization - only if not already initialized + if not self._is_initialized or force_initialize: + # Set channel configuration for each channel (required before InitializeSmartRoll) + try: + # Configure all channels (1 to num_channels) - one SetChannelConfiguration call per channel + # Parameters: channel (1-based), indexes=[1, 3, 4], enables=[True, False, False, False] + for channel in range(1, self._num_channels + 1): + await self.send_command( + SetChannelConfiguration( + dest=self._pipette_address, + channel=channel, + indexes=[1, 3, 4], + enables=[True, False, False, False], + ) + ) + logger.info(f"Channel configuration set for {self._num_channels} channels") + except Exception as e: + logger.error(f"Failed to set channel configuration: {e}") + raise + + # Initialize NimbusCore with InitializeSmartRoll using waste positions + try: + # Build waste position parameters using helper method + # Use all channels (0 to num_channels-1) for setup + all_channels = list(range(self._num_channels)) + traverse_height = 146.0 # TODO: Access deck z_max property properly instead of hardcoded literal + + # Use same logic as DropTipsRoll: z_start = waste_z + 4.0mm, z_stop = waste_z, z_final = traverse_height + waste_params = self._build_waste_position_params( + use_channels=all_channels, + traverse_height=traverse_height, + z_start_offset=None, # Will be calculated as waste_z + 4.0mm + z_stop_offset=None, # Will be calculated as waste_z + z_final_offset=None, # Will default to traverse_height + roll_distance=None, # Will default to 9.0mm + ) + + await self.send_command( + InitializeSmartRoll( + dest=self._nimbus_core_address, + 
x_positions=waste_params["x_positions"], + y_positions=waste_params["y_positions"], + z_start_positions=waste_params["z_start_positions"], + z_stop_positions=waste_params["z_stop_positions"], + z_final_positions=waste_params["z_final_positions"], + roll_distances=waste_params["roll_distances"], + ) + ) + logger.info("NimbusCore initialized with InitializeSmartRoll successfully") + self._is_initialized = True + except Exception as e: + logger.error(f"Failed to initialize NimbusCore with InitializeSmartRoll: {e}") + raise + else: + logger.info("Instrument already initialized, skipping initialization") + + # Unlock door if requested (optional - no error if not found) + if unlock_door and self._door_lock_address is not None: + try: + await self.unlock_door() + except RuntimeError: + # Door lock not available or not set up - this is okay + logger.warning("Door unlock requested but not available or not set up") + except Exception as e: + logger.warning(f"Failed to unlock door: {e}") + + self.setup_finished = True + + async def _discover_instrument_objects(self): + """Discover instrument-specific objects using introspection.""" + introspection = HamiltonIntrospection(self) + + # Get root objects (already discovered in setup) + root_objects = self._discovered_objects.get('root', []) + if not root_objects: + logger.warning("No root objects discovered") + return + + # Use first root object as NimbusCore + nimbus_core_addr = root_objects[0] + self._nimbus_core_address = nimbus_core_addr + + try: + # Get NimbusCore object info + core_info = await introspection.get_object(nimbus_core_addr) + + # Discover subobjects to find Pipette and DoorLock + for i in range(core_info.subobject_count): + try: + sub_addr = await introspection.get_subobject_address(nimbus_core_addr, i) + sub_info = await introspection.get_object(sub_addr) + + # Check if this is the Pipette by interface name + if sub_info.name == "Pipette": + self._pipette_address = sub_addr + logger.info(f"Found Pipette at 
{sub_addr}") + + # Check if this is the DoorLock by interface name + if sub_info.name == "DoorLock": + self._door_lock_address = sub_addr + logger.info(f"Found DoorLock at {sub_addr}") + + except Exception as e: + logger.debug(f"Failed to get subobject {i}: {e}") + + except Exception as e: + logger.warning(f"Failed to discover instrument objects: {e}") + + # If door lock not found via introspection, it's not available + if self._door_lock_address is None: + logger.info("DoorLock not available on this instrument") + + @property + def num_channels(self) -> int: + """The number of channels that the robot has.""" + if self._num_channels is None: + raise RuntimeError( + "num_channels not set. Call setup() first to query from instrument." + ) + return self._num_channels + + async def park(self): + """Park the instrument. + + This command moves the instrument to its parked position. + + Raises: + RuntimeError: If NimbusCore address was not discovered during setup. + """ + if self._nimbus_core_address is None: + raise RuntimeError( + "NimbusCore address not discovered. Call setup() first." + ) + + try: + await self.send_command(Park(self._nimbus_core_address)) + logger.info("Instrument parked successfully") + except Exception as e: + logger.error(f"Failed to park instrument: {e}") + raise + + async def is_door_locked(self) -> bool: + """Check if the door is locked. + + Returns: + True if door is locked, False if unlocked. + + Raises: + RuntimeError: If door lock is not available on this instrument, + or if setup() has not been called yet. + """ + if self._door_lock_address is None: + raise RuntimeError( + "Door lock is not available on this instrument or setup() has not been called." + ) + + try: + status = await self.send_command(IsDoorLocked(self._door_lock_address)) + return bool(status["locked"]) + except Exception as e: + logger.error(f"Failed to check door lock status: {e}") + raise + + async def lock_door(self) -> None: + """Lock the door. 
+ + Raises: + RuntimeError: If door lock is not available on this instrument, + or if setup() has not been called yet. + """ + if self._door_lock_address is None: + raise RuntimeError( + "Door lock is not available on this instrument or setup() has not been called." + ) + + try: + await self.send_command(LockDoor(self._door_lock_address)) + logger.info("Door locked successfully") + except Exception as e: + logger.error(f"Failed to lock door: {e}") + raise + + async def unlock_door(self) -> None: + """Unlock the door. + + Raises: + RuntimeError: If door lock is not available on this instrument, + or if setup() has not been called yet. + """ + if self._door_lock_address is None: + raise RuntimeError( + "Door lock is not available on this instrument or setup() has not been called." + ) + + try: + await self.send_command(UnlockDoor(self._door_lock_address)) + logger.info("Door unlocked successfully") + except Exception as e: + logger.error(f"Failed to unlock door: {e}") + raise + + async def stop(self): + """Stop the backend and close connection.""" + await TCPBackend.stop(self) + self.setup_finished = False + + def _build_waste_position_params( + self, + use_channels: List[int], + traverse_height: float, + z_start_offset: Optional[float] = None, + z_stop_offset: Optional[float] = None, + z_final_offset: Optional[float] = None, + roll_distance: Optional[float] = None, + ) -> dict: + """Build waste position parameters for InitializeSmartRoll or DropTipsRoll. 
+ + Args: + use_channels: List of channel indices to use + traverse_height: Traverse height in mm + z_start_offset: Z start position in mm (absolute, optional, calculated from waste position) + z_stop_offset: Z stop position in mm (absolute, optional, calculated from waste position) + z_final_offset: Z final position in mm (absolute, optional, defaults to traverse_height) + roll_distance: Roll distance in mm (optional, defaults to 9.0 mm) + + Returns: + Dictionary with x_positions, y_positions, z_start_positions, z_stop_positions, + z_final_positions, roll_distances (all in 0.01mm units as lists matching num_channels) + + Raises: + RuntimeError: If deck is not set or waste position not found + """ + if self._deck is None: + raise RuntimeError("Deck must be set before building waste position parameters") + + # Validate we have a NimbusDeck for coordinate conversion + if not isinstance(self._deck, NimbusDeck): + raise RuntimeError( + "Deck must be a NimbusDeck for coordinate conversion" + ) + + # Extract coordinates for each channel + x_positions_mm: List[float] = [] + y_positions_mm: List[float] = [] + z_positions_mm: List[float] = [] + + for channel_idx in use_channels: + # Get waste position from deck based on channel index + # Use waste_type attribute from deck to construct waste position name + if not hasattr(self._deck, 'waste_type') or self._deck.waste_type is None: + raise RuntimeError( + f"Deck does not have waste_type attribute or waste_type is None. " + f"Cannot determine waste position name for channel {channel_idx}." 
+ ) + waste_pos_name = f"{self._deck.waste_type}_{channel_idx + 1}" + try: + waste_pos = self._deck.get_resource(waste_pos_name) + abs_location = waste_pos.get_absolute_location() + except Exception as e: + raise RuntimeError( + f"Failed to get waste position {waste_pos_name} for channel {channel_idx}: {e}" + ) + + # Convert to Hamilton coordinates (returns in mm) + hamilton_coord = self._deck.to_hamilton_coordinate(abs_location) + + x_positions_mm.append(hamilton_coord.x) + y_positions_mm.append(hamilton_coord.y) + z_positions_mm.append(hamilton_coord.z) + + # Convert positions to 0.01mm units (multiply by 100) + x_positions = [int(round(x * 100)) for x in x_positions_mm] + y_positions = [int(round(y * 100)) for y in y_positions_mm] + + # Calculate Z positions from waste position coordinates + max_z_hamilton = max(z_positions_mm) # Highest waste position Z in Hamilton coordinates + waste_z_hamilton = max_z_hamilton + + if z_start_offset is None: + # Calculate from waste position: start above waste position + z_start_absolute_mm = waste_z_hamilton + 4.0 # Start 4mm above waste position + else: + z_start_absolute_mm = z_start_offset + + if z_stop_offset is None: + # Calculate from waste position: stop at waste position + z_stop_absolute_mm = waste_z_hamilton # Stop at waste position + else: + z_stop_absolute_mm = z_stop_offset + + if z_final_offset is None: + z_final_offset_mm = traverse_height # Use traverse height as final position + else: + z_final_offset_mm = z_final_offset + + if roll_distance is None: + roll_distance_mm = 9.0 # Default roll distance from log + else: + roll_distance_mm = roll_distance + + # Use absolute Z positions (same for all channels) + z_start_positions = [ + int(round(z_start_absolute_mm * 100)) + ] * len(use_channels) # Absolute Z start position + z_stop_positions = [ + int(round(z_stop_absolute_mm * 100)) + ] * len(use_channels) # Absolute Z stop position + z_final_positions = [ + int(round(z_final_offset_mm * 100)) + ] * 
len(use_channels) # Absolute Z final position + roll_distances = [int(round(roll_distance_mm * 100))] * len(use_channels) + + # Ensure arrays match num_channels length (with zeros for inactive channels) + x_positions_full = [0] * self.num_channels + y_positions_full = [0] * self.num_channels + z_start_positions_full = [0] * self.num_channels + z_stop_positions_full = [0] * self.num_channels + z_final_positions_full = [0] * self.num_channels + roll_distances_full = [0] * self.num_channels + + for i, channel_idx in enumerate(use_channels): + x_positions_full[channel_idx] = x_positions[i] + y_positions_full[channel_idx] = y_positions[i] + z_start_positions_full[channel_idx] = z_start_positions[i] + z_stop_positions_full[channel_idx] = z_stop_positions[i] + z_final_positions_full[channel_idx] = z_final_positions[i] + roll_distances_full[channel_idx] = roll_distances[i] + + return { + "x_positions": x_positions_full, + "y_positions": y_positions_full, + "z_start_positions": z_start_positions_full, + "z_stop_positions": z_stop_positions_full, + "z_final_positions": z_final_positions_full, + "roll_distances": roll_distances_full, + } + + # ============== Abstract methods from LiquidHandlerBackend ============== + + async def pick_up_tips( + self, + ops: List[Pickup], + use_channels: List[int], + traverse_height: float = 146.0, # TODO: Access deck z_max property properly instead of hardcoded literal + z_start_offset: Optional[float] = None, + z_stop_offset: Optional[float] = None, + ): + """Pick up tips from the specified resource. 
+ + Z positions and traverse height are calculated from the resource locations and tip + properties if not explicitly provided: + - traverse_height: Uses deck z_max if not provided + - z_start_offset: Calculated as max(resource Z) + max(tip total_tip_length) + - z_stop_offset: Calculated as max(resource Z) + max(tip total_tip_length - tip fitting_depth) + + Args: + ops: List of Pickup operations, one per channel + use_channels: List of channel indices to use + traverse_height: Traverse height in mm (optional, defaults to deck z_max) + z_start_offset: Z start position in mm (absolute, optional, calculated from resources) + z_stop_offset: Z stop position in mm (absolute, optional, calculated from resources) + + Raises: + RuntimeError: If pipette address or deck is not set + ValueError: If deck is not a NimbusDeck and traverse_height is not provided + """ + if self._pipette_address is None: + raise RuntimeError( + "Pipette address not discovered. Call setup() first." + ) + if self._deck is None: + raise RuntimeError("Deck must be set before pick_up_tips") + + # Validate we have a NimbusDeck for coordinate conversion + if not isinstance(self._deck, NimbusDeck): + raise RuntimeError( + "Deck must be a NimbusDeck for coordinate conversion" + ) + + # Extract coordinates and tip types for each operation + x_positions_mm: List[float] = [] + y_positions_mm: List[float] = [] + z_positions_mm: List[float] = [] + tip_types: List[int] = [] + + for op in ops: + # Get absolute location from resource + abs_location = op.resource.get_absolute_location() + # Add offset + final_location = Coordinate( + x=abs_location.x + op.offset.x, + y=abs_location.y + op.offset.y, + z=abs_location.z + op.offset.z, + ) + # Convert to Hamilton coordinates (returns in mm) + hamilton_coord = self._deck.to_hamilton_coordinate(final_location) + + x_positions_mm.append(hamilton_coord.x) + y_positions_mm.append(hamilton_coord.y) + z_positions_mm.append(hamilton_coord.z) + + # Get tip type from tip object + 
tip_type = _get_tip_type_from_tip(op.tip) + tip_types.append(tip_type) + + # Build tip pattern array (1 for active channels, 0 for inactive) + # Array length should match num_channels + tips_used = [0] * self.num_channels + for channel_idx in use_channels: + if channel_idx >= self.num_channels: + raise ValueError( + f"Channel index {channel_idx} exceeds num_channels {self.num_channels}" + ) + tips_used[channel_idx] = 1 + + # Convert positions to 0.01mm units (multiply by 100) + x_positions = [int(round(x * 100)) for x in x_positions_mm] + y_positions = [int(round(y * 100)) for y in y_positions_mm] + + # Calculate Z positions from resource locations and tip properties + # Similar to STAR backend: z_start = max_z + max_total_tip_length, z_stop = max_z + max_tip_length + max_z_hamilton = max(z_positions_mm) # Highest resource Z in Hamilton coordinates + max_total_tip_length = max(op.tip.total_tip_length for op in ops) + max_tip_length = max((op.tip.total_tip_length - op.tip.fitting_depth) for op in ops) + + # Calculate absolute Z positions in Hamilton coordinates + # z_start: resource Z + total tip length (where tip pickup starts) + # z_stop: resource Z + (tip length - fitting depth) (where tip pickup stops) + z_start_absolute_mm = max_z_hamilton + max_total_tip_length + z_stop_absolute_mm = max_z_hamilton + max_tip_length + + # Traverse height: use provided value (defaults to 146.0 mm from function signature) + traverse_height_mm = traverse_height + + # Allow override of Z positions if explicitly provided + if z_start_offset is not None: + z_start_absolute_mm = z_start_offset + if z_stop_offset is not None: + z_stop_absolute_mm = z_stop_offset + + # Convert to 0.01mm units + traverse_height_units = int(round(traverse_height_mm * 100)) + + # For Z positions, use absolute positions (same for all channels) + z_start_positions = [ + int(round(z_start_absolute_mm * 100)) + ] * len(ops) # Absolute Z start position + z_stop_positions = [ + int(round(z_stop_absolute_mm * 
100)) + ] * len(ops) # Absolute Z stop position + + # Ensure arrays match num_channels length (pad with 0s for inactive channels) + # We need to map use_channels to the correct positions + x_positions_full = [0] * self.num_channels + y_positions_full = [0] * self.num_channels + z_start_positions_full = [0] * self.num_channels + z_stop_positions_full = [0] * self.num_channels + tip_types_full = [0] * self.num_channels + + for i, channel_idx in enumerate(use_channels): + x_positions_full[channel_idx] = x_positions[i] + y_positions_full[channel_idx] = y_positions[i] + z_start_positions_full[channel_idx] = z_start_positions[i] + z_stop_positions_full[channel_idx] = z_stop_positions[i] + tip_types_full[channel_idx] = tip_types[i] + + # Create and send command + command = PickupTips( + dest=self._pipette_address, + tips_used=tips_used, + x_positions=x_positions_full, + y_positions=y_positions_full, + traverse_height=traverse_height_units, + z_start_positions=z_start_positions_full, + z_stop_positions=z_stop_positions_full, + tip_types=tip_types_full, + ) + + # Check tip presence before picking up tips + try: + tip_status = await self.send_command(IsTipPresent(self._pipette_address)) + tip_present = tip_status.get("tip_present", []) + # Check if any channels we're trying to use already have tips + channels_with_tips = [ + i for i, present in enumerate(tip_present) + if i in use_channels and present != 0 + ] + if channels_with_tips: + raise RuntimeError( + f"Cannot pick up tips: channels {channels_with_tips} already have tips mounted. " + f"Drop existing tips first." 
      )
    except Exception as e:
      # If tip presence check fails, log warning but continue
      logger.warning(f"Could not check tip presence before pickup: {e}")

    # Log parameters for debugging
    logger.info("PickupTips parameters:")
    logger.info(f" tips_used: {tips_used}")
    logger.info(f" x_positions: {x_positions_full}")
    logger.info(f" y_positions: {y_positions_full}")
    logger.info(f" traverse_height: {traverse_height_units}")
    logger.info(f" z_start_positions: {z_start_positions_full}")
    logger.info(f" z_stop_positions: {z_stop_positions_full}")
    logger.info(f" tip_types: {tip_types_full}")
    logger.info(f" num_channels: {self.num_channels}")

    try:
      await self.send_command(command)
      logger.info(f"Picked up tips on channels {use_channels}")
    except Exception as e:
      # Log the full parameter set on failure so the failing frame can be reproduced.
      logger.error(f"Failed to pick up tips: {e}")
      logger.error(f"Parameters sent: tips_used={tips_used}, "
                   f"x_positions={x_positions_full}, y_positions={y_positions_full}, "
                   f"traverse_height={traverse_height_units}, "
                   f"z_start_positions={z_start_positions_full}, "
                   f"z_stop_positions={z_stop_positions_full}, tip_types={tip_types_full}")
      raise

  async def drop_tips(
    self,
    ops: List[Drop],
    use_channels: List[int],
    default_waste: bool = False,
    traverse_height: float = 146.0,  # TODO: Access deck z_max property properly instead of hardcoded literal
    z_start_offset: Optional[float] = None,
    z_stop_offset: Optional[float] = None,
    z_final_offset: Optional[float] = None,
    roll_distance: Optional[float] = None,
  ):
    """Drop tips to the specified resource.

    Auto-detects waste positions and uses appropriate command:
    - If resource is a waste position (Trash with category="waste_position"), uses DropTipsRoll
    - Otherwise, uses DropTips command

    Z positions are calculated from resource locations if not explicitly provided:
    - traverse_height: Defaults to 146.0 mm (deck z_max)
    - z_start_offset: Calculated from resources (for waste: 135.39 mm, for regular: resource Z + offset)
    - z_stop_offset: Calculated from resources (for waste: 131.39 mm, for regular: resource Z + offset)
    - z_final_offset: Calculated from resources (defaults to traverse_height)
    - roll_distance: Defaults to 9.0 mm for waste positions

    Args:
      ops: List of Drop operations, one per channel
      use_channels: List of channel indices to use
      default_waste: For DropTips command, if True, drop to default waste (positions may be ignored)
      traverse_height: Traverse height in mm (optional, defaults to 146.0 mm)
      z_start_offset: Z start position in mm (absolute, optional, calculated from resources)
      z_stop_offset: Z stop position in mm (absolute, optional, calculated from resources)
      z_final_offset: Z final position in mm (absolute, optional, calculated from resources)
      roll_distance: Roll distance in mm (optional, defaults to 9.0 mm for waste positions)

    Raises:
      RuntimeError: If pipette address or deck is not set
      ValueError: If operations mix waste and regular resources
    """
    if self._pipette_address is None:
      raise RuntimeError(
        "Pipette address not discovered. Call setup() first."
      )
    if self._deck is None:
      raise RuntimeError("Deck must be set before drop_tips")

    # Validate we have a NimbusDeck for coordinate conversion
    if not isinstance(self._deck, NimbusDeck):
      raise RuntimeError(
        "Deck must be a NimbusDeck for coordinate conversion"
      )

    # Check if resources are waste positions (Trash objects with category="waste_position")
    is_waste_positions = [
      isinstance(op.resource, Trash) and getattr(op.resource, "category", None) == "waste_position"
      for op in ops
    ]

    # Check if all operations are waste positions or all are regular.
    # NOTE(review): for empty `ops`, all_waste and all_regular are both True and the
    # DropTipsRoll branch is taken — confirm empty `ops` cannot reach this method.
    all_waste = all(is_waste_positions)
    all_regular = not any(is_waste_positions)

    if not (all_waste or all_regular):
      raise ValueError(
        "Cannot mix waste positions and regular resources in a single drop_tips call. "
        "All operations must be either waste positions or regular resources."
      )

    # Build tip pattern array (1 for active channels, 0 for inactive)
    tips_used = [0] * self.num_channels
    for channel_idx in use_channels:
      if channel_idx >= self.num_channels:
        raise ValueError(
          f"Channel index {channel_idx} exceeds num_channels {self.num_channels}"
        )
      tips_used[channel_idx] = 1

    # Traverse height: use provided value (defaults to 146.0 mm from function signature)
    traverse_height_mm = traverse_height

    # Convert to 0.01mm units (the firmware's native Z unit)
    traverse_height_units = int(round(traverse_height_mm * 100))

    # Type annotation for command variable (can be either DropTips or DropTipsRoll)
    command: Union[DropTips, DropTipsRoll]

    if all_waste:
      # Use DropTipsRoll for waste positions
      # Build waste position parameters using helper method
      waste_params = self._build_waste_position_params(
        use_channels=use_channels,
        traverse_height=traverse_height_mm,
        z_start_offset=z_start_offset,
        z_stop_offset=z_stop_offset,
        z_final_offset=z_final_offset,
        roll_distance=roll_distance,
      )

      x_positions_full = waste_params["x_positions"]
      y_positions_full = waste_params["y_positions"]
      z_start_positions_full = waste_params["z_start_positions"]
      z_stop_positions_full = waste_params["z_stop_positions"]
      z_final_positions_full = waste_params["z_final_positions"]
      roll_distances_full = waste_params["roll_distances"]

      # Create and send DropTipsRoll command
      command = DropTipsRoll(
        dest=self._pipette_address,
        tips_used=tips_used,
        x_positions=x_positions_full,
        y_positions=y_positions_full,
        traverse_height=traverse_height_units,
        z_start_positions=z_start_positions_full,
        z_stop_positions=z_stop_positions_full,
        z_final_positions=z_final_positions_full,
        roll_distances=roll_distances_full,
      )
    else:
      # Use DropTips for regular resources
      # Extract coordinates for each operation
      x_positions_mm: List[float] = []
      y_positions_mm: List[float] = []
      z_positions_mm: List[float] = []

      for i, op in enumerate(ops):
        # Get absolute location from resource
        abs_location = op.resource.get_absolute_location()

        # Add offset
        final_location = Coordinate(
          x=abs_location.x + op.offset.x,
          y=abs_location.y + op.offset.y,
          z=abs_location.z + op.offset.z,
        )
        # Convert to Hamilton coordinates (returns in mm)
        hamilton_coord = self._deck.to_hamilton_coordinate(final_location)

        x_positions_mm.append(hamilton_coord.x)
        y_positions_mm.append(hamilton_coord.y)
        z_positions_mm.append(hamilton_coord.z)

      # Convert positions to 0.01mm units (multiply by 100)
      x_positions = [int(round(x * 100)) for x in x_positions_mm]
      y_positions = [int(round(y * 100)) for y in y_positions_mm]

      # Calculate Z positions from resource locations
      max_z_hamilton = max(z_positions_mm)  # Highest resource Z in Hamilton coordinates

      # Z positions are absolute, not relative to resource position
      # Calculate from resource locations if not provided
      if z_start_offset is None:
        # TODO: Calculate from resources properly (resource Z + offset)
        z_start_absolute_mm = max_z_hamilton + 10.0  # Placeholder: resource Z + safety margin
      else:
        z_start_absolute_mm = z_start_offset

      if z_stop_offset is None:
        # TODO: Calculate from resources properly (resource Z + offset)
        z_stop_absolute_mm = max_z_hamilton  # Placeholder: resource Z
      else:
        z_stop_absolute_mm = z_stop_offset

      # NOTE(review): despite the "_offset" suffix, z_final_offset_mm holds an absolute
      # Z position (it defaults to the traverse height) — consider renaming.
      if z_final_offset is None:
        z_final_offset_mm = traverse_height_mm  # Use traverse height as final position
      else:
        z_final_offset_mm = z_final_offset

      # Use absolute Z positions (same for all channels)
      z_start_positions = [
        int(round(z_start_absolute_mm * 100))
      ] * len(ops)  # Absolute Z start position
      z_stop_positions = [
        int(round(z_stop_absolute_mm * 100))
      ] * len(ops)  # Absolute Z stop position
      z_final_positions = [
        int(round(z_final_offset_mm * 100))
      ] * len(ops)  # Absolute Z final position

      # Ensure arrays match num_channels length
      x_positions_full = [0] * self.num_channels
      y_positions_full = [0] * self.num_channels
      z_start_positions_full = [0] * self.num_channels
      z_stop_positions_full = [0] * self.num_channels
      z_final_positions_full = [0] * self.num_channels

      for i, channel_idx in enumerate(use_channels):
        x_positions_full[channel_idx] = x_positions[i]
        y_positions_full[channel_idx] = y_positions[i]
        z_start_positions_full[channel_idx] = z_start_positions[i]
        z_stop_positions_full[channel_idx] = z_stop_positions[i]
        z_final_positions_full[channel_idx] = z_final_positions[i]

      # Create and send DropTips command
      command = DropTips(
        dest=self._pipette_address,
        tips_used=tips_used,
        x_positions=x_positions_full,
        y_positions=y_positions_full,
        traverse_height=traverse_height_units,
        z_start_positions=z_start_positions_full,
        z_stop_positions=z_stop_positions_full,
        z_final_positions=z_final_positions_full,
        default_waste=default_waste,
      )

    try:
      await self.send_command(command)
      logger.info(f"Dropped tips on channels {use_channels}")
    except Exception as e:
      logger.error(f"Failed to drop tips: {e}")
      raise

  async def aspirate(
    self,
    ops:
    List[SingleChannelAspiration],
    use_channels: List[int],
    adc_enabled: bool = False,
    # Advanced kwargs (Optional, default to zeros/nulls)
    lld_mode: Optional[List[int]] = None,
    liquid_seek_height: Optional[List[float]] = None,
    immersion_depth: Optional[List[float]] = None,
    surface_following_distance: Optional[List[float]] = None,
    capacitive_lld_sensitivity: Optional[List[int]] = None,
    pressure_lld_sensitivity: Optional[List[int]] = None,
    settling_time: Optional[List[float]] = None,
    transport_air_volume: Optional[List[float]] = None,
    prewet_volume: Optional[List[float]] = None,
    liquid_exit_speed: Optional[List[float]] = None,
    mix_volume: Optional[List[float]] = None,
    mix_cycles: Optional[List[int]] = None,
    mix_speed: Optional[List[float]] = None,
    mix_position: Optional[List[float]] = None,
    limit_curve_index: Optional[List[int]] = None,
    tadm_enabled: Optional[bool] = None,
  ):
    """Aspirate liquid from the specified resource using pip.

    Args:
      ops: List of SingleChannelAspiration operations, one per channel
      use_channels: List of channel indices to use
      adc_enabled: If True, enable ADC (Automatic Drip Control), else disable (default: False)
      lld_mode: LLD mode (0=OFF, 1=cLLD, 2=pLLD, 3=DUAL), default: [0] * n
      liquid_seek_height: Relative offset from well bottom for LLD search start position (mm).
        This is a RELATIVE OFFSET, not an absolute coordinate. The instrument adds this to
        z_min_position (well bottom) to determine where to start the LLD search.
        If None, defaults to the well's size_z (depth), meaning "start search at top of well".
        When provided, should be a list of offsets in mm, one per channel.
      immersion_depth: Depth to submerge into liquid (mm), default: [0.0] * n
      surface_following_distance: Distance to follow liquid surface (mm), default: [0.0] * n
      capacitive_lld_sensitivity: cLLD sensitivity (1-4), default: [0] * n
      pressure_lld_sensitivity: pLLD sensitivity (1-4), default: [0] * n
      settling_time: Settling time (s), default: [1.0] * n
      transport_air_volume: Transport air volume (µL), default: [5.0] * n
      prewet_volume: Prewet volume (µL), default: [0.0] * n
      liquid_exit_speed: Liquid exit speed (µL/s), default: [20.0] * n
      mix_volume: Mix volume (µL). Extracted from op.mix if available, else default: [0.0] * n
      mix_cycles: Mix cycles. Extracted from op.mix if available, else default: [0] * n
      mix_speed: Mix speed (µL/s). Extracted from op.mix if available, else default: [0.0] * n
      mix_position: Mix position relative to liquid (mm), default: [0.0] * n
      limit_curve_index: Limit curve index, default: [0] * n
      tadm_enabled: TADM enabled flag, default: False

    Raises:
      RuntimeError: If pipette address or deck is not set
    """
    if self._pipette_address is None:
      raise RuntimeError(
        "Pipette address not discovered. Call setup() first."
      )
    if self._deck is None:
      raise RuntimeError("Deck must be set before aspirate")

    # Validate we have a NimbusDeck for coordinate conversion
    if not isinstance(self._deck, NimbusDeck):
      raise RuntimeError(
        "Deck must be a NimbusDeck for coordinate conversion"
      )

    # Number of operations; used to size the per-channel default lists below.
    n = len(ops)

    # Build tip pattern array (1 for active channels, 0 for inactive)
    tips_used = [0] * self.num_channels
    for channel_idx in use_channels:
      if channel_idx >= self.num_channels:
        raise ValueError(
          f"Channel index {channel_idx} exceeds num_channels {self.num_channels}"
        )
      tips_used[channel_idx] = 1

    # Call ADC command (EnableADC or DisableADC)
    if adc_enabled:
      await self.send_command(EnableADC(self._pipette_address, tips_used))
      logger.info("Enabled ADC before aspirate")
    else:
      await self.send_command(DisableADC(self._pipette_address, tips_used))
      logger.info("Disabled ADC before aspirate")

    # Call GetChannelConfiguration for each active channel (index 2 = "Aspirate monitoring with cLLD")
    if self._channel_configurations is None:
      self._channel_configurations = {}
    for channel_idx in use_channels:
      channel_num = channel_idx + 1  # Convert to 1-based
      try:
        config = await self.send_command(
          GetChannelConfiguration(
            self._pipette_address,
            channel=channel_num,
            indexes=[2],  # Index 2 = "Aspirate monitoring with cLLD"
          )
        )
        enabled = config["enabled"][0] if config["enabled"] else False
        if channel_num not in self._channel_configurations:
          self._channel_configurations[channel_num] = {}
        self._channel_configurations[channel_num][2] = enabled
        logger.debug(f"Channel {channel_num} configuration (index 2): enabled={enabled}")
      except Exception as e:
        # Best-effort query; an unavailable configuration must not abort the aspirate.
        logger.warning(f"Failed to get channel configuration for channel {channel_num}: {e}")

    # ========================================================================
    # MINIMAL SET: Calculate from resources (NOT kwargs)
    # ========================================================================

    # Extract coordinates and convert to Hamilton coordinates
    x_positions_mm: List[float] = []
    y_positions_mm: List[float] = []
    z_positions_mm: List[float] = []

    for op in ops:
      # Get absolute location from resource
      abs_location = op.resource.get_absolute_location()
      # Add offset
      final_location = Coordinate(
        x=abs_location.x + op.offset.x,
        y=abs_location.y + op.offset.y,
        z=abs_location.z + op.offset.z,
      )
      # Convert to Hamilton coordinates (returns in mm)
      hamilton_coord = self._deck.to_hamilton_coordinate(final_location)

      x_positions_mm.append(hamilton_coord.x)
      y_positions_mm.append(hamilton_coord.y)
      z_positions_mm.append(hamilton_coord.z)

    # Convert positions to 0.01mm units (multiply by 100)
    x_positions = [int(round(x * 100)) for x in x_positions_mm]
    y_positions = [int(round(y * 100)) for y in y_positions_mm]

    # Traverse height: use deck z_max or default 146.0 mm
    traverse_height_mm = 146.0  # TODO: Access deck z_max property properly
    traverse_height_units = int(round(traverse_height_mm * 100))

    # Calculate well_bottoms: resource Z + offset Z + material_z_thickness
    well_bottoms: List[float] = []
    for op in ops:
      abs_location = op.resource.get_absolute_location()
      well_bottom = abs_location.z + op.offset.z
      if isinstance(op.resource, Container):
        well_bottom += op.resource.material_z_thickness
      well_bottoms.append(well_bottom)

    # Convert well_bottoms to Hamilton coordinates
    well_bottoms_hamilton: List[float] = []
    for i, op in enumerate(ops):
      abs_location = op.resource.get_absolute_location()
      well_bottom_location = Coordinate(
        x=abs_location.x + op.offset.x,
        y=abs_location.y + op.offset.y,
        z=well_bottoms[i],
      )
      hamilton_coord = self._deck.to_hamilton_coordinate(well_bottom_location)
      well_bottoms_hamilton.append(hamilton_coord.z)

    # Calculate liquid_surface_height: well_bottom + (op.liquid_height or 0)
    # This is the fixed Z-height when LLD is OFF
    liquid_surface_heights_mm: List[float] = []
    for i, op in enumerate(ops):
      liquid_height = getattr(op, "liquid_height", None) or 0.0
      liquid_surface_height = well_bottoms_hamilton[i] + liquid_height
      liquid_surface_heights_mm.append(liquid_surface_height)

    # Calculate liquid_seek_height if not provided as kwarg
    #
    # IMPORTANT: liquid_seek_height is a RELATIVE OFFSET (in mm), not an absolute coordinate.
    # It represents the height offset from the well bottom where the LLD (Liquid Level Detection)
    # search should start. The Hamilton instrument will add this offset to z_min_position
    # (well bottom) to determine the absolute Z position where the search begins.
    #
    # Default behavior: Use the well's size_z (depth) as the offset, which means
    # "start the LLD search at the top of the well" (well_bottom + well_size).
    # This is a reasonable default since we want to search from the top downward.
    #
    # When provided as a kwarg, it should be a list of relative offsets in mm.
    # The instrument will internally add these to z_min_position to get absolute coordinates.
    if liquid_seek_height is None:
      # Default: use well size_z as the offset (start search at top of well)
      liquid_seek_height = []
      for op in ops:
        well_size_z = op.resource.get_absolute_size_z()
        liquid_seek_height.append(well_size_z)
    else:
      # If provided, it's already a relative offset in mm, use as-is
      # The instrument will add this to z_min_position internally
      pass

    # Calculate z_min_position: default to well_bottom
    z_min_positions_mm = well_bottoms_hamilton.copy()

    # Extract volumes and speeds from operations
    volumes = [op.volume for op in ops]  # in µL
    # flow_rate should not be None - if it is, it's an error (no hardcoded fallback)
    flow_rates: List[float] = []
    for op in ops:
      if op.flow_rate is None:
        raise ValueError(f"flow_rate cannot be None for operation {op}")
      flow_rates.append(op.flow_rate)
    blowout_volumes = [op.blow_out_air_volume if op.blow_out_air_volume is not None else 40.0 for op in ops]  # in µL, default 40

    # Extract mix parameters from op.mix if available
    mix_volumes_from_op: List[float] = []
    mix_cycles_from_op: List[int] = []
    mix_speeds_from_op: List[float] = []
    for op in ops:
      if hasattr(op, "mix") and op.mix is not None:
        mix_volumes_from_op.append(op.mix.volume if hasattr(op.mix, "volume") else 0.0)
        mix_cycles_from_op.append(op.mix.repetitions if hasattr(op.mix, "repetitions") else 0)
        # If mix has flow_rate, use it; otherwise default to aspirate speed
        if hasattr(op.mix, "flow_rate") and op.mix.flow_rate is not None:
          mix_speeds_from_op.append(op.mix.flow_rate)
        else:
          # Default to aspirate speed (flow_rate) when mix speed not specified
          if op.flow_rate is None:
            raise ValueError(f"flow_rate cannot be None for operation {op}")
          mix_speeds_from_op.append(op.flow_rate)
      else:
        mix_volumes_from_op.append(0.0)
        mix_cycles_from_op.append(0)
        # Default to aspirate speed (flow_rate) when no mix operation
        if op.flow_rate is None:
          raise ValueError(f"flow_rate cannot be None for operation {op}")
        mix_speeds_from_op.append(op.flow_rate)

    # ========================================================================
    # ADVANCED PARAMETERS: Fill in defaults using _fill_in_defaults()
    # ========================================================================

    # LLD mode: default to [0] * n (OFF)
    lld_mode = _fill_in_defaults(lld_mode, [0] * n)

    # Immersion depth: default to [0.0] * n
    immersion_depth = _fill_in_defaults(immersion_depth, [0.0] * n)

    # Surface following distance: default to [0.0] * n
    surface_following_distance = _fill_in_defaults(surface_following_distance, [0.0] * n)

    # LLD sensitivities: default to [0] * n
    capacitive_lld_sensitivity = _fill_in_defaults(capacitive_lld_sensitivity, [0] * n)
    pressure_lld_sensitivity = _fill_in_defaults(pressure_lld_sensitivity, [0] * n)

    # Settling time: default to [1.0] * n (from log: 10 in 0.1s units = 1.0s)
    settling_time = _fill_in_defaults(settling_time, [1.0] * n)

    # Transport air volume: default to [5.0] * n (from log: 50 in 0.1µL units = 5.0 µL)
    transport_air_volume = _fill_in_defaults(transport_air_volume, [5.0] * n)

    # Prewet volume: default to [0.0] * n
    prewet_volume = _fill_in_defaults(prewet_volume, [0.0] * n)

    # Liquid exit speed: default to [20.0] * n (from log: 200 in 0.1µL/s units = 20.0 µL/s)
    liquid_exit_speed = _fill_in_defaults(liquid_exit_speed, [20.0] * n)

    # Mix parameters: use op.mix if available, else use kwargs/defaults
    mix_volume = _fill_in_defaults(mix_volume, mix_volumes_from_op)
    mix_cycles = _fill_in_defaults(mix_cycles, mix_cycles_from_op)
    # mix_speed defaults to aspirate_speed (flow_rates) if not specified
    # This matches the log file behavior where mix_speed = aspirate_speed even when mix_volume = 0
    if mix_speed is None:
      mix_speed = flow_rates.copy()  # Default to aspirate speed
    else:
      mix_speed = _fill_in_defaults(mix_speed, mix_speeds_from_op)
    mix_position = _fill_in_defaults(mix_position, [0.0] * n)

    # Limit curve index: default to [0] * n
    limit_curve_index = _fill_in_defaults(limit_curve_index, [0] * n)

    # TADM enabled: default to False
    if tadm_enabled is None:
      tadm_enabled = False

    # ========================================================================
    # CONVERT UNITS AND BUILD FULL ARRAYS
    # ========================================================================

    # Convert volumes: µL → 0.1µL units (multiply by 10)
    aspirate_volumes = [int(round(vol * 10)) for vol in volumes]
    blowout_volumes_units = [int(round(vol * 10)) for vol in blowout_volumes]

    # Convert speeds: µL/s → 0.1µL/s units (multiply by 10)
    aspirate_speeds = [int(round(fr * 10)) for fr in flow_rates]

    # Convert heights: mm → 0.01mm units (multiply by 100)
    liquid_seek_height_units = [int(round(h * 100)) for h in liquid_seek_height]
    liquid_surface_height_units = [int(round(h * 100)) for h in liquid_surface_heights_mm]
    immersion_depth_units = [int(round(d * 100)) for d in immersion_depth]
    surface_following_distance_units = [int(round(d * 100)) for d in surface_following_distance]
    z_min_position_units = [int(round(z * 100)) for z in z_min_positions_mm]

    # Convert settling time: s → 0.1s units (multiply by 10)
    settling_time_units = [int(round(t * 10)) for t in settling_time]

    # Convert transport air volume: µL → 0.1µL units (multiply by 10)
    transport_air_volume_units = [int(round(v * 10)) for v in transport_air_volume]

    # Convert prewet volume: µL → 0.1µL units (multiply by 10)
    prewet_volume_units = [int(round(v * 10)) for v in prewet_volume]

    # Convert liquid exit speed: µL/s → 0.1µL/s units (multiply by 10)
    liquid_exit_speed_units = [int(round(s * 10)) for s in liquid_exit_speed]

    # Convert mix volume: µL → 0.1µL units (multiply by 10)
    mix_volume_units = [int(round(v * 10)) for v in mix_volume]

    # Convert mix speed: µL/s → 0.1µL/s units (multiply by 10)
    mix_speed_units = [int(round(s * 10)) for s in mix_speed]

    # Convert mix position: mm → 0.01mm units (multiply by 100)
    mix_position_units = [int(round(p * 100)) for p in mix_position]

    # Build arrays for all channels (pad with 0s for inactive channels)
    x_positions_full = [0] * self.num_channels
    y_positions_full = [0] * self.num_channels
    aspirate_volumes_full = [0] * self.num_channels
    blowout_volumes_full = [0] * self.num_channels
    aspirate_speeds_full = [0] * self.num_channels
    liquid_seek_height_full = [0] * self.num_channels
    liquid_surface_height_full = [0] * self.num_channels
    immersion_depth_full = [0] * self.num_channels
    surface_following_distance_full = [0] * self.num_channels
    z_min_position_full = [0] * self.num_channels
    settling_time_full = [0] * self.num_channels
    transport_air_volume_full = [0] * self.num_channels
    prewet_volume_full = [0] * self.num_channels
    liquid_exit_speed_full = [0] * self.num_channels
    mix_volume_full = [0] * self.num_channels
    mix_cycles_full = [0] * self.num_channels
    mix_speed_full = [0] * self.num_channels
    mix_position_full = [0] * self.num_channels
    capacitive_lld_sensitivity_full = [0] * self.num_channels
    pressure_lld_sensitivity_full = [0] * self.num_channels
    limit_curve_index_full = [0] * self.num_channels
    lld_mode_full = [0] * self.num_channels

    # Scatter the per-op values into the per-channel arrays; inactive channels stay 0.
    for i, channel_idx in enumerate(use_channels):
      x_positions_full[channel_idx] = x_positions[i]
      y_positions_full[channel_idx] = y_positions[i]
      aspirate_volumes_full[channel_idx] = aspirate_volumes[i]
      blowout_volumes_full[channel_idx] = blowout_volumes_units[i]
      aspirate_speeds_full[channel_idx] = aspirate_speeds[i]
      liquid_seek_height_full[channel_idx] = liquid_seek_height_units[i]
      liquid_surface_height_full[channel_idx] = liquid_surface_height_units[i]
      immersion_depth_full[channel_idx] = immersion_depth_units[i]
      surface_following_distance_full[channel_idx] = surface_following_distance_units[i]
      z_min_position_full[channel_idx] = z_min_position_units[i]
      settling_time_full[channel_idx] = settling_time_units[i]
      transport_air_volume_full[channel_idx] = transport_air_volume_units[i]
      prewet_volume_full[channel_idx] = prewet_volume_units[i]
      liquid_exit_speed_full[channel_idx] = liquid_exit_speed_units[i]
      mix_volume_full[channel_idx] = mix_volume_units[i]
      mix_cycles_full[channel_idx] = mix_cycles[i]
      mix_speed_full[channel_idx] = mix_speed_units[i]
      mix_position_full[channel_idx] = mix_position_units[i]
      capacitive_lld_sensitivity_full[channel_idx] = capacitive_lld_sensitivity[i]
      pressure_lld_sensitivity_full[channel_idx] = pressure_lld_sensitivity[i]
      limit_curve_index_full[channel_idx] = limit_curve_index[i]
      lld_mode_full[channel_idx] = lld_mode[i]

    # Default values for remaining parameters
    aspirate_type = [0] * self.num_channels
    clot_check_height = [0] * self.num_channels
    z_final = traverse_height_units
    mix_follow_distance = [0] * self.num_channels
    tube_section_height = [0] * self.num_channels
    tube_section_ratio = [0] * self.num_channels
    lld_height_difference = [0] * self.num_channels
    recording_mode = 0

    # Create and send Aspirate command
    command = Aspirate(
      dest=self._pipette_address,
      aspirate_type=aspirate_type,
      tips_used=tips_used,
      x_positions=x_positions_full,
      y_positions=y_positions_full,
      traverse_height=traverse_height_units,
      liquid_seek_height=liquid_seek_height_full,
      liquid_surface_height=liquid_surface_height_full,
      submerge_depth=immersion_depth_full,
      follow_depth=surface_following_distance_full,
      z_min_position=z_min_position_full,
      clot_check_height=clot_check_height,
      z_final=z_final,
      liquid_exit_speed=liquid_exit_speed_full,
      blowout_volume=blowout_volumes_full,
      prewet_volume=prewet_volume_full,
      aspirate_volume=aspirate_volumes_full,
      transport_air_volume=transport_air_volume_full,
      aspirate_speed=aspirate_speeds_full,
      settling_time=settling_time_full,
      mix_volume=mix_volume_full,
      mix_cycles=mix_cycles_full,
      mix_position=mix_position_full,
      mix_follow_distance=mix_follow_distance,
      mix_speed=mix_speed_full,
      tube_section_height=tube_section_height,
      tube_section_ratio=tube_section_ratio,
      lld_mode=lld_mode_full,
      capacitive_lld_sensitivity=capacitive_lld_sensitivity_full,
      pressure_lld_sensitivity=pressure_lld_sensitivity_full,
      lld_height_difference=lld_height_difference,
      tadm_enabled=tadm_enabled,
      limit_curve_index=limit_curve_index_full,
      recording_mode=recording_mode,
    )

    try:
      await self.send_command(command)
      logger.info(f"Aspirated on channels {use_channels}")
    except Exception as e:
      logger.error(f"Failed to aspirate: {e}")
      raise

  async def dispense(
    self,
    ops: List[SingleChannelDispense],
    use_channels: List[int],
    adc_enabled: bool = False,
    # Advanced kwargs (Optional, default to zeros/nulls)
    lld_mode: Optional[List[int]] = None,
    liquid_seek_height: Optional[List[float]] = None,
    immersion_depth: Optional[List[float]] = None,
    surface_following_distance: Optional[List[float]] = None,
    capacitive_lld_sensitivity: Optional[List[int]] = None,
    settling_time: Optional[List[float]] = None,
    transport_air_volume: Optional[List[float]] = None,
    prewet_volume: Optional[List[float]] = None,
    liquid_exit_speed: Optional[List[float]] = None,
    mix_volume: Optional[List[float]] = None,
    mix_cycles: Optional[List[int]] = None,
    mix_speed: Optional[List[float]] = None,
    mix_position: Optional[List[float]] = None,
    limit_curve_index: Optional[List[int]] = None,
    tadm_enabled: Optional[bool] = None,
    cutoff_speed: Optional[List[float]] = None,
    stop_back_volume: Optional[List[float]] = None,
    touch_off_distance: Optional[float] = None,
    dispense_offset: Optional[List[float]] = None,
  ):
    """Dispense liquid from the specified resource using pip.
+ + Args: + ops: List of SingleChannelDispense operations, one per channel + use_channels: List of channel indices to use + adc_enabled: If True, enable ADC (Automatic Drip Control), else disable (default: False) + lld_mode: LLD mode (0=OFF, 1=cLLD, 2=pLLD, 3=DUAL), default: [0] * n + liquid_seek_height: Override calculated LLD search height (mm). If None, calculated from well_bottom + resource size + immersion_depth: Depth to submerge into liquid (mm), default: [0.0] * n + surface_following_distance: Distance to follow liquid surface (mm), default: [0.0] * n + capacitive_lld_sensitivity: cLLD sensitivity (1-4), default: [0] * n + settling_time: Settling time (s), default: [1.0] * n + transport_air_volume: Transport air volume (µL), default: [5.0] * n + prewet_volume: Prewet volume (µL), default: [0.0] * n + liquid_exit_speed: Liquid exit speed (µL/s), default: [20.0] * n + mix_volume: Mix volume (µL). Extracted from op.mix if available, else default: [0.0] * n + mix_cycles: Mix cycles. Extracted from op.mix if available, else default: [0] * n + mix_speed: Mix speed (µL/s). Extracted from op.mix if available, else default: [0.0] * n + mix_position: Mix position relative to liquid (mm), default: [0.0] * n + limit_curve_index: Limit curve index, default: [0] * n + tadm_enabled: TADM enabled flag, default: False + cutoff_speed: Cutoff speed (µL/s), default: [25.0] * n + stop_back_volume: Stop back volume (µL), default: [0.0] * n + touch_off_distance: Touch off distance (mm), default: 0.0 + dispense_offset: Dispense offset (mm), default: [0.0] * n + + Raises: + RuntimeError: If pipette address or deck is not set + """ + if self._pipette_address is None: + raise RuntimeError( + "Pipette address not discovered. Call setup() first." 
+ ) + if self._deck is None: + raise RuntimeError("Deck must be set before dispense") + + # Validate we have a NimbusDeck for coordinate conversion + if not isinstance(self._deck, NimbusDeck): + raise RuntimeError( + "Deck must be a NimbusDeck for coordinate conversion" + ) + + n = len(ops) + + # Build tip pattern array (1 for active channels, 0 for inactive) + tips_used = [0] * self.num_channels + for channel_idx in use_channels: + if channel_idx >= self.num_channels: + raise ValueError( + f"Channel index {channel_idx} exceeds num_channels {self.num_channels}" + ) + tips_used[channel_idx] = 1 + + # Call ADC command (EnableADC or DisableADC) + if adc_enabled: + await self.send_command(EnableADC(self._pipette_address, tips_used)) + logger.info("Enabled ADC before dispense") + else: + await self.send_command(DisableADC(self._pipette_address, tips_used)) + logger.info("Disabled ADC before dispense") + + # Call GetChannelConfiguration for each active channel (index 2 = "Aspirate monitoring with cLLD") + if self._channel_configurations is None: + self._channel_configurations = {} + for channel_idx in use_channels: + channel_num = channel_idx + 1 # Convert to 1-based + try: + config = await self.send_command( + GetChannelConfiguration( + self._pipette_address, + channel=channel_num, + indexes=[2], # Index 2 = "Aspirate monitoring with cLLD" + ) + ) + enabled = config["enabled"][0] if config["enabled"] else False + if channel_num not in self._channel_configurations: + self._channel_configurations[channel_num] = {} + self._channel_configurations[channel_num][2] = enabled + logger.debug(f"Channel {channel_num} configuration (index 2): enabled={enabled}") + except Exception as e: + logger.warning(f"Failed to get channel configuration for channel {channel_num}: {e}") + + # ======================================================================== + # MINIMAL SET: Calculate from resources (NOT kwargs) + # ======================================================================== + 
+ # Extract coordinates and convert to Hamilton coordinates + x_positions_mm: List[float] = [] + y_positions_mm: List[float] = [] + z_positions_mm: List[float] = [] + + for op in ops: + # Get absolute location from resource + abs_location = op.resource.get_absolute_location() + # Add offset + final_location = Coordinate( + x=abs_location.x + op.offset.x, + y=abs_location.y + op.offset.y, + z=abs_location.z + op.offset.z, + ) + # Convert to Hamilton coordinates (returns in mm) + hamilton_coord = self._deck.to_hamilton_coordinate(final_location) + + x_positions_mm.append(hamilton_coord.x) + y_positions_mm.append(hamilton_coord.y) + z_positions_mm.append(hamilton_coord.z) + + # Convert positions to 0.01mm units (multiply by 100) + x_positions = [int(round(x * 100)) for x in x_positions_mm] + y_positions = [int(round(y * 100)) for y in y_positions_mm] + + # Traverse height: use deck z_max or default 146.0 mm + traverse_height_mm = 146.0 # TODO: Access deck z_max property properly + traverse_height_units = int(round(traverse_height_mm * 100)) + + # Calculate well_bottoms: resource Z + offset Z + material_z_thickness + well_bottoms: List[float] = [] + for op in ops: + abs_location = op.resource.get_absolute_location() + well_bottom = abs_location.z + op.offset.z + if isinstance(op.resource, Container): + well_bottom += op.resource.material_z_thickness + well_bottoms.append(well_bottom) + + # Convert well_bottoms to Hamilton coordinates + well_bottoms_hamilton: List[float] = [] + for i, op in enumerate(ops): + abs_location = op.resource.get_absolute_location() + well_bottom_location = Coordinate( + x=abs_location.x + op.offset.x, + y=abs_location.y + op.offset.y, + z=well_bottoms[i], + ) + hamilton_coord = self._deck.to_hamilton_coordinate(well_bottom_location) + well_bottoms_hamilton.append(hamilton_coord.z) + + # Calculate dispense_height: well_bottom + (op.liquid_height or 0) + # This is the fixed Z-height when LLD is OFF + dispense_heights_mm: List[float] = [] + for 
i, op in enumerate(ops): + liquid_height = getattr(op, "liquid_height", None) or 0.0 + dispense_height = well_bottoms_hamilton[i] + liquid_height + dispense_heights_mm.append(dispense_height) + + # Calculate liquid_seek_height if not provided as kwarg + # + # IMPORTANT: liquid_seek_height is a RELATIVE OFFSET (in mm), not an absolute coordinate. + # It represents the height offset from the well bottom where the LLD (Liquid Level Detection) + # search should start. The Hamilton instrument will add this offset to z_min_position + # (well bottom) to determine the absolute Z position where the search begins. + # + # Default behavior: Use the well's size_z (depth) as the offset, which means + # "start the LLD search at the top of the well" (well_bottom + well_size). + # This is a reasonable default since we want to search from the top downward. + # + # When provided as a kwarg, it should be a list of relative offsets in mm. + # The instrument will internally add these to z_min_position to get absolute coordinates. 
+ if liquid_seek_height is None: + # Default: use well size_z as the offset (start search at top of well) + liquid_seek_height = [] + for op in ops: + well_size_z = op.resource.get_absolute_size_z() + liquid_seek_height.append(well_size_z) + else: + # If provided, it's already a relative offset in mm, use as-is + # The instrument will add this to z_min_position internally + pass + + # Calculate z_min_position: default to well_bottom + z_min_positions_mm = well_bottoms_hamilton.copy() + + # Extract volumes and speeds from operations + volumes = [op.volume for op in ops] # in µL + # flow_rate should not be None - if it is, it's an error (no hardcoded fallback) + flow_rates: List[float] = [] + for op in ops: + if op.flow_rate is None: + raise ValueError(f"flow_rate cannot be None for operation {op}") + flow_rates.append(op.flow_rate) + blowout_volumes = [op.blow_out_air_volume if op.blow_out_air_volume is not None else 40.0 for op in ops] # in µL, default 40 + + # Extract mix parameters from op.mix if available + mix_volumes_from_op: List[float] = [] + mix_cycles_from_op: List[int] = [] + mix_speeds_from_op: List[float] = [] + for op in ops: + if hasattr(op, "mix") and op.mix is not None: + mix_volumes_from_op.append(op.mix.volume if hasattr(op.mix, "volume") else 0.0) + mix_cycles_from_op.append(op.mix.repetitions if hasattr(op.mix, "repetitions") else 0) + # If mix has flow_rate, use it; otherwise default to dispense speed + if hasattr(op.mix, "flow_rate") and op.mix.flow_rate is not None: + mix_speeds_from_op.append(op.mix.flow_rate) + else: + # Default to dispense speed (flow_rate) when mix speed not specified + if op.flow_rate is None: + raise ValueError(f"flow_rate cannot be None for operation {op}") + mix_speeds_from_op.append(op.flow_rate) + else: + mix_volumes_from_op.append(0.0) + mix_cycles_from_op.append(0) + # Default to dispense speed (flow_rate) when no mix operation + if op.flow_rate is None: + raise ValueError(f"flow_rate cannot be None for operation 
{op}") + mix_speeds_from_op.append(op.flow_rate) + + # ======================================================================== + # ADVANCED PARAMETERS: Fill in defaults using _fill_in_defaults() + # ======================================================================== + + # LLD mode: default to [0] * n (OFF) + lld_mode = _fill_in_defaults(lld_mode, [0] * n) + + # Immersion depth: default to [0.0] * n + immersion_depth = _fill_in_defaults(immersion_depth, [0.0] * n) + + # Surface following distance: default to [0.0] * n + surface_following_distance = _fill_in_defaults(surface_following_distance, [0.0] * n) + + # LLD sensitivities: default to [0] * n + capacitive_lld_sensitivity = _fill_in_defaults(capacitive_lld_sensitivity, [0] * n) + + # Settling time: default to [1.0] * n (from log: 10 in 0.1s units = 1.0s) + settling_time = _fill_in_defaults(settling_time, [1.0] * n) + + # Transport air volume: default to [5.0] * n (from log: 50 in 0.1µL units = 5.0 µL) + transport_air_volume = _fill_in_defaults(transport_air_volume, [5.0] * n) + + # Prewet volume: default to [0.0] * n + prewet_volume = _fill_in_defaults(prewet_volume, [0.0] * n) + + # Liquid exit speed: default to [20.0] * n (from log: 200 in 0.1µL/s units = 20.0 µL/s) + liquid_exit_speed = _fill_in_defaults(liquid_exit_speed, [20.0] * n) + + # Mix parameters: use op.mix if available, else use kwargs/defaults + mix_volume = _fill_in_defaults(mix_volume, mix_volumes_from_op) + mix_cycles = _fill_in_defaults(mix_cycles, mix_cycles_from_op) + # mix_speed defaults to dispense_speed (flow_rates) if not specified + # This matches the log file behavior where mix_speed = dispense_speed even when mix_volume = 0 + if mix_speed is None: + mix_speed = flow_rates.copy() # Default to dispense speed + else: + mix_speed = _fill_in_defaults(mix_speed, mix_speeds_from_op) + mix_position = _fill_in_defaults(mix_position, [0.0] * n) + + # Limit curve index: default to [0] * n + limit_curve_index = 
_fill_in_defaults(limit_curve_index, [0] * n) + + # TADM enabled: default to False + if tadm_enabled is None: + tadm_enabled = False + + # Dispense-specific parameters + cutoff_speed = _fill_in_defaults(cutoff_speed, [25.0] * n) + stop_back_volume = _fill_in_defaults(stop_back_volume, [0.0] * n) + dispense_offset = _fill_in_defaults(dispense_offset, [0.0] * n) + + # Touch off distance: default to 0.0 (not a list) + if touch_off_distance is None: + touch_off_distance = 0.0 + + # ======================================================================== + # CONVERT UNITS AND BUILD FULL ARRAYS + # ======================================================================== + + # Convert volumes: µL → 0.1µL units (multiply by 10) + dispense_volumes = [int(round(vol * 10)) for vol in volumes] + blowout_volumes_units = [int(round(vol * 10)) for vol in blowout_volumes] + + # Convert speeds: µL/s → 0.1µL/s units (multiply by 10) + dispense_speeds = [int(round(fr * 10)) for fr in flow_rates] + + # Convert heights: mm → 0.01mm units (multiply by 100) + liquid_seek_height_units = [int(round(h * 100)) for h in liquid_seek_height] + dispense_height_units = [int(round(h * 100)) for h in dispense_heights_mm] + immersion_depth_units = [int(round(d * 100)) for d in immersion_depth] + surface_following_distance_units = [int(round(d * 100)) for d in surface_following_distance] + z_min_position_units = [int(round(z * 100)) for z in z_min_positions_mm] + + # Convert settling time: s → 0.1s units (multiply by 10) + settling_time_units = [int(round(t * 10)) for t in settling_time] + + # Convert transport air volume: µL → 0.1µL units (multiply by 10) + transport_air_volume_units = [int(round(v * 10)) for v in transport_air_volume] + + # Convert prewet volume: µL → 0.1µL units (multiply by 10) + prewet_volume_units = [int(round(v * 10)) for v in prewet_volume] + + # Convert liquid exit speed: µL/s → 0.1µL/s units (multiply by 10) + liquid_exit_speed_units = [int(round(s * 10)) for s in 
liquid_exit_speed] + + # Convert mix volume: µL → 0.1µL units (multiply by 10) + mix_volume_units = [int(round(v * 10)) for v in mix_volume] + + # Convert mix speed: µL/s → 0.1µL/s units (multiply by 10) + mix_speed_units = [int(round(s * 10)) for s in mix_speed] + + # Convert mix position: mm → 0.01mm units (multiply by 100) + mix_position_units = [int(round(p * 100)) for p in mix_position] + + # Convert cutoff speed: µL/s → 0.1µL/s units (multiply by 10) + cutoff_speed_units = [int(round(s * 10)) for s in cutoff_speed] + + # Convert stop back volume: µL → 0.1µL units (multiply by 10) + stop_back_volume_units = [int(round(v * 10)) for v in stop_back_volume] + + # Convert dispense offset: mm → 0.01mm units (multiply by 100) + dispense_offset_units = [int(round(o * 100)) for o in dispense_offset] + + # Convert touch off distance: mm → 0.01mm units (multiply by 100) + touch_off_distance_units = int(round(touch_off_distance * 100)) + + # Build arrays for all channels (pad with 0s for inactive channels) + x_positions_full = [0] * self.num_channels + y_positions_full = [0] * self.num_channels + dispense_volumes_full = [0] * self.num_channels + blowout_volumes_full = [0] * self.num_channels + dispense_speeds_full = [0] * self.num_channels + liquid_seek_height_full = [0] * self.num_channels + dispense_height_full = [0] * self.num_channels + immersion_depth_full = [0] * self.num_channels + surface_following_distance_full = [0] * self.num_channels + z_min_position_full = [0] * self.num_channels + settling_time_full = [0] * self.num_channels + transport_air_volume_full = [0] * self.num_channels + prewet_volume_full = [0] * self.num_channels + liquid_exit_speed_full = [0] * self.num_channels + mix_volume_full = [0] * self.num_channels + mix_cycles_full = [0] * self.num_channels + mix_speed_full = [0] * self.num_channels + mix_position_full = [0] * self.num_channels + capacitive_lld_sensitivity_full = [0] * self.num_channels + limit_curve_index_full = [0] * self.num_channels + 
lld_mode_full = [0] * self.num_channels + cutoff_speed_full = [0] * self.num_channels + stop_back_volume_full = [0] * self.num_channels + dispense_offset_full = [0] * self.num_channels + + for i, channel_idx in enumerate(use_channels): + x_positions_full[channel_idx] = x_positions[i] + y_positions_full[channel_idx] = y_positions[i] + dispense_volumes_full[channel_idx] = dispense_volumes[i] + blowout_volumes_full[channel_idx] = blowout_volumes_units[i] + dispense_speeds_full[channel_idx] = dispense_speeds[i] + liquid_seek_height_full[channel_idx] = liquid_seek_height_units[i] + dispense_height_full[channel_idx] = dispense_height_units[i] + immersion_depth_full[channel_idx] = immersion_depth_units[i] + surface_following_distance_full[channel_idx] = surface_following_distance_units[i] + z_min_position_full[channel_idx] = z_min_position_units[i] + settling_time_full[channel_idx] = settling_time_units[i] + transport_air_volume_full[channel_idx] = transport_air_volume_units[i] + prewet_volume_full[channel_idx] = prewet_volume_units[i] + liquid_exit_speed_full[channel_idx] = liquid_exit_speed_units[i] + mix_volume_full[channel_idx] = mix_volume_units[i] + mix_cycles_full[channel_idx] = mix_cycles[i] + mix_speed_full[channel_idx] = mix_speed_units[i] + mix_position_full[channel_idx] = mix_position_units[i] + capacitive_lld_sensitivity_full[channel_idx] = capacitive_lld_sensitivity[i] + limit_curve_index_full[channel_idx] = limit_curve_index[i] + lld_mode_full[channel_idx] = lld_mode[i] + cutoff_speed_full[channel_idx] = cutoff_speed_units[i] + stop_back_volume_full[channel_idx] = stop_back_volume_units[i] + dispense_offset_full[channel_idx] = dispense_offset_units[i] + + # Default values for remaining parameters + dispense_type = [0] * self.num_channels + z_final = traverse_height_units + mix_follow_distance = [0] * self.num_channels + tube_section_height = [0] * self.num_channels + tube_section_ratio = [0] * self.num_channels + recording_mode = 0 + + # Create and send 
Dispense command + command = Dispense( + dest=self._pipette_address, + dispense_type=dispense_type, + tips_used=tips_used, + x_positions=x_positions_full, + y_positions=y_positions_full, + traverse_height=traverse_height_units, + liquid_seek_height=liquid_seek_height_full, + dispense_height=dispense_height_full, + submerge_depth=immersion_depth_full, + follow_depth=surface_following_distance_full, + z_min_position=z_min_position_full, + z_final=z_final, + liquid_exit_speed=liquid_exit_speed_full, + transport_air_volume=transport_air_volume_full, + dispense_volume=dispense_volumes_full, + stop_back_volume=stop_back_volume_full, + blowout_volume=blowout_volumes_full, + dispense_speed=dispense_speeds_full, + cutoff_speed=cutoff_speed_full, + settling_time=settling_time_full, + mix_volume=mix_volume_full, + mix_cycles=mix_cycles_full, + mix_position=mix_position_full, + mix_follow_distance=mix_follow_distance, + mix_speed=mix_speed_full, + touch_off_distance=touch_off_distance_units, + dispense_offset=dispense_offset_full, + tube_section_height=tube_section_height, + tube_section_ratio=tube_section_ratio, + lld_mode=lld_mode_full, + capacitive_lld_sensitivity=capacitive_lld_sensitivity_full, + tadm_enabled=tadm_enabled, + limit_curve_index=limit_curve_index_full, + recording_mode=recording_mode, + ) + + try: + await self.send_command(command) + logger.info(f"Dispensed on channels {use_channels}") + except Exception as e: + logger.error(f"Failed to dispense: {e}") + raise + + async def pick_up_tips96(self, pickup: PickupTipRack): + """Pick up tips from the specified resource using CoRe 96.""" + raise NotImplementedError("pick_up_tips96 not yet implemented") + + async def drop_tips96(self, drop: DropTipRack): + """Drop tips to the specified resource using CoRe 96.""" + raise NotImplementedError("drop_tips96 not yet implemented") + + async def aspirate96( + self, aspiration: MultiHeadAspirationPlate | MultiHeadAspirationContainer + ): + """Aspirate from all wells in 96 
well plate.""" + raise NotImplementedError("aspirate96 not yet implemented") + + async def dispense96( + self, dispense: MultiHeadDispensePlate | MultiHeadDispenseContainer + ): + """Dispense to all wells in 96 well plate.""" + raise NotImplementedError("dispense96 not yet implemented") + + async def pick_up_resource(self, pickup: ResourcePickup): + """Pick up a resource like a plate or a lid using the integrated robotic arm.""" + raise NotImplementedError("pick_up_resource not yet implemented") + + async def move_picked_up_resource(self, move: ResourceMove): + """Move a picked up resource like a plate or a lid using the integrated robotic arm.""" + raise NotImplementedError("move_picked_up_resource not yet implemented") + + async def drop_resource(self, drop: ResourceDrop): + """Drop a resource like a plate or a lid using the integrated robotic arm.""" + raise NotImplementedError("drop_resource not yet implemented") + + def can_pick_up_tip(self, channel_idx: int, tip: Tip) -> bool: + """Check if the tip can be picked up by the specified channel. + + Args: + channel_idx: Channel index (0-based) + tip: Tip object to check + + Returns: + True if the tip can be picked up, False otherwise + """ + # Only Hamilton tips are supported + if not isinstance(tip, HamiltonTip): + return False + + # XL tips are not supported on Nimbus + if tip.tip_size in {TipSize.XL}: + return False + + # Check if channel index is valid + if self._num_channels is not None and channel_idx >= self._num_channels: + return False + + return True + diff --git a/pylabrobot/liquid_handling/backends/hamilton/packets.py b/pylabrobot/liquid_handling/backends/hamilton/packets.py new file mode 100644 index 00000000000..072af1f93e2 --- /dev/null +++ b/pylabrobot/liquid_handling/backends/hamilton/packets.py @@ -0,0 +1,379 @@ +"""Hamilton TCP packet structures. 
@dataclass(frozen=True)
class Address:
  """Hamilton network address (module_id, node_id, object_id)."""
  module: int  # u16
  node: int  # u16
  object: int  # u16

  def pack(self) -> bytes:
    """Serialize this address as three u16 fields (6 bytes total)."""
    writer = Wire.write().u16(self.module).u16(self.node)
    return writer.u16(self.object).finish()

  @classmethod
  def unpack(cls, data: bytes) -> 'Address':
    """Deserialize an address from the first 6 bytes of *data*."""
    reader = Wire.read(data)
    module_id = reader.u16()
    node_id = reader.u16()
    object_id = reader.u16()
    return cls(module=module_id, node=node_id, object=object_id)

  def __str__(self) -> str:
    # Human-readable "module:node:object" form used in log messages.
    return f"{self.module}:{self.node}:{self.object}"
@dataclass
class IpPacket:
  """Hamilton IpPacket2 - Transport layer.

  Wire layout:
    bytes 00-01: size (u16)
    byte  02:    protocol identifier
    byte  03:    packed version byte (major.minor)
    bytes 04-05: options length (u16)
    bytes 06+:   options (variable), then payload
  """
  protocol: int  # Protocol identifier (6=OBJECT_DISCOVERY, 7=INITIALIZATION)
  payload: bytes
  options: bytes = b''

  def pack(self) -> bytes:
    """Serialize this IP packet to bytes."""
    # The size field counts everything after itself:
    # protocol(1) + version(1) + opts_len(2) + options + payload.
    packet_size = 4 + len(self.options) + len(self.payload)
    w = Wire.write().u16(packet_size).u8(self.protocol)
    w = w.version_byte(HAMILTON_PROTOCOL_VERSION_MAJOR, HAMILTON_PROTOCOL_VERSION_MINOR)
    w = w.u16(len(self.options)).raw_bytes(self.options)
    return w.raw_bytes(self.payload).finish()

  @classmethod
  def unpack(cls, data: bytes) -> 'IpPacket':
    """Deserialize an IP packet from bytes."""
    r = Wire.read(data)
    r.u16()  # declared size; the buffer length is trusted instead
    protocol = r.u8()
    major, minor = r.version_byte()
    if (major, minor) != (HAMILTON_PROTOCOL_VERSION_MAJOR, HAMILTON_PROTOCOL_VERSION_MINOR):
      # Version mismatch is tolerated: treated as a warning-level condition,
      # not a fatal parse error.
      pass
    opts_len = r.u16()
    opts = r.raw_bytes(opts_len) if opts_len > 0 else b''
    return cls(protocol=protocol, payload=r.remaining(), options=opts)
@dataclass
class HarpPacket:
  """Hamilton HarpPacket2 - Protocol layer.

  Wire layout:
    bytes 00-05: src address (module, node, object)
    bytes 06-11: dst address (module, node, object)
    byte  12:    sequence number
    byte  13:    reserved
    byte  14:    protocol (2=HOI, 3=Registration)
    byte  15:    action byte (action_code | response bit)
    bytes 16-17: message length (u16)
    bytes 18-19: options length (u16)
    bytes 20+:   options, then version byte, reserved byte, payload
  """
  src: Address
  dst: Address
  seq: int
  protocol: int  # 2=HOI, 3=Registration
  action_code: int  # Base action code (0-15), lower nibble of the action byte
  payload: bytes
  options: bytes = b''
  response_required: bool = True  # Controls bit 4 of action byte

  @property
  def action(self) -> int:
    """Compute the action byte from action_code and response_required.

    Returns:
      Action byte with bit 4 (0x10) set when a response is required.
    """
    return self.action_code | (0x10 if self.response_required else 0x00)

  def pack(self) -> bytes:
    """Serialize HARP packet."""
    # Message length counts the fixed 20-byte header plus options, the
    # trailing version byte, the trailing reserved byte, and the payload:
    # src(6) + dst(6) + seq(1) + reserved(1) + protocol(1) + action(1) +
    # msg_len(2) + opts_len(2) = 20 fixed bytes.
    msg_len = 20 + len(self.options) + 1 + 1 + len(self.payload)

    return (Wire.write()
            .raw_bytes(self.src.pack())
            .raw_bytes(self.dst.pack())
            .u8(self.seq)
            .u8(0)  # reserved
            .u8(self.protocol)
            .u8(self.action)  # computed property: action_code | response bit
            .u16(msg_len)
            .u16(len(self.options))
            .raw_bytes(self.options)
            .u8(0)  # version byte - C# DLL uses 0, not packed 3.0
            .u8(0)  # reserved2
            .raw_bytes(self.payload)
            .finish())

  @classmethod
  def unpack(cls, data: bytes) -> 'HarpPacket':
    """Deserialize HARP packet."""
    r = Wire.read(data)

    # Parse the two 6-byte addresses first.
    src = Address.unpack(r.raw_bytes(6))
    dst = Address.unpack(r.raw_bytes(6))

    seq = r.u8()
    _reserved = r.u8()  # Read but unused
    protocol = r.u8()
    action_byte = r.u8()
    _msg_len = r.u16()  # Read but unused; buffer length is trusted
    opts_len = r.u16()

    options = r.raw_bytes(opts_len) if opts_len > 0 else b''
    _version = r.u8()  # version byte (C# DLL uses 0) - Read but unused
    _reserved2 = r.u8()  # Read but unused
    payload = r.remaining()

    # Decompose action byte into action_code (low nibble) and the
    # response_required flag (bit 4).
    action_code = action_byte & 0x0F
    response_required = bool(action_byte & 0x10)

    return cls(
      src=src,
      dst=dst,
      seq=seq,
      protocol=protocol,
      action_code=action_code,
      payload=payload,
      options=options,
      response_required=response_required
    )
@dataclass
class HoiPacket:
  """Hamilton HoiPacket2 - HOI application layer.

  Wire layout:
    byte  00:    interface_id
    byte  01:    action byte (action_code | response bit)
    bytes 02-03: action_id (u16)
    byte  04:    version byte (always 0 for HOI packets)
    byte  05:    number of DataFragments
    bytes 06+:   DataFragments

  Note: params must be DataFragment-wrapped (use HoiParams to build).
  """
  interface_id: int
  action_code: int  # Base action code (0-15)
  action_id: int
  params: bytes  # Already DataFragment-wrapped via HoiParams
  response_required: bool = False  # Controls bit 4 of action byte

  @property
  def action(self) -> int:
    """Compute the action byte from action_code and response_required.

    Returns:
      Action byte with bit 4 (0x10) set when a response is required.
    """
    return self.action_code | (0x10 if self.response_required else 0x00)

  def pack(self) -> bytes:
    """Serialize HOI packet."""
    num_fragments = self._count_fragments(self.params)

    return (Wire.write()
            .u8(self.interface_id)
            .u8(self.action)  # computed property
            .u16(self.action_id)
            .u8(0)  # version byte - always 0 for HOI packets (not 0x30!)
            .u8(num_fragments)
            .raw_bytes(self.params)
            .finish())

  @classmethod
  def unpack(cls, data: bytes) -> 'HoiPacket':
    """Deserialize HOI packet."""
    r = Wire.read(data)

    interface_id = r.u8()
    action_byte = r.u8()
    action_id = r.u16()
    _major, _minor = r.version_byte()  # Read but unused
    _num_fragments = r.u8()  # Read but unused; fragments re-counted on pack
    params = r.remaining()

    # Decompose action byte into action_code and response_required flag.
    return cls(
      interface_id=interface_id,
      action_code=action_byte & 0x0F,
      action_id=action_id,
      params=params,
      response_required=bool(action_byte & 0x10)
    )

  @staticmethod
  def _count_fragments(data: bytes) -> int:
    """Count DataFragments in params.

    Each DataFragment has format: [type_id:1][flags:1][length:2][data:n],
    with the length field little-endian. Counting stops at the first
    truncated fragment header (fewer than 4 bytes remaining).
    """
    count = 0
    offset = 0
    while offset + 4 <= len(data):
      # Fragment length lives at bytes 2-3 of the fragment header;
      # unpack_from avoids building an intermediate slice.
      fragment_length = struct.unpack_from('<H', data, offset + 2)[0]
      offset += 4 + fragment_length
      count += 1
    return count
@dataclass
class ConnectionPacket:
  """Hamilton ConnectionPacket - Connection initialization payload.

  Used for Protocol 7 (INITIALIZATION). Has a different structure than
  HARP-based packets - it uses raw parameter encoding, NOT DataFragments.

  Structure:
    byte 00:  version
    byte 01:  message_id
    byte 02:  count (number of parameters)
    byte 03:  unknown
    bytes 04+: raw parameters [id|type|reserved|value] repeated
  """
  params: bytes  # Raw parameter bytes (NOT DataFragments)

  def pack_into_ip(self) -> bytes:
    """Build the complete IP packet (protocol=7) for connection initialization."""
    # Size field counts protocol(1) + version(1) + opts_len(2) + params;
    # the connection frame itself is already included in params.
    size = 4 + len(self.params)
    w = Wire.write().u16(size)
    w = w.u8(7)  # INITIALIZATION protocol
    w = w.version_byte(HAMILTON_PROTOCOL_VERSION_MAJOR, HAMILTON_PROTOCOL_VERSION_MINOR)
    w = w.u16(0)  # no options
    return w.raw_bytes(self.params).finish()

  @classmethod
  def unpack_from_ip_payload(cls, data: bytes) -> 'ConnectionPacket':
    """Wrap an IP packet payload whose IP header has already been parsed."""
    return cls(params=data)
from __future__ import annotations

from enum import IntEnum


# Hamilton protocol version (from Piglet: version byte 0x30 = major 3, minor 0)
HAMILTON_PROTOCOL_VERSION_MAJOR = 3
HAMILTON_PROTOCOL_VERSION_MINOR = 0


class HamiltonProtocol(IntEnum):
  """Hamilton protocol identifiers (the protocol byte of the IP packet).

  Values derived from the piglet Rust implementation:
  - PIPETTE (2): pipette-specific operations
  - REGISTRATION (3): object registration and discovery
  - OBJECT_DISCOVERY (6): general object discovery and method calls
  - INITIALIZATION (7): connection initialization and client ID negotiation
  """
  PIPETTE = 0x02
  REGISTRATION = 0x03
  OBJECT_DISCOVERY = 0x06
  INITIALIZATION = 0x07


class Hoi2Action(IntEnum):
  """HOI2/HARP2 action codes (bits 0-3 of the action field).

  Values from Hamilton.Components.TransportLayer.Protocols.HoiPacket2Constants.Hoi2Action.

  The action byte combines the action code (lower 4 bits) with the
  response_required flag (bit 4):
  - action_byte = action_code | (0x10 if response_required else 0x00)
  - Example: COMMAND_REQUEST with response = 3 | 0x10 = 0x13
  - Example: STATUS_REQUEST without response = 0 | 0x00 = 0x00

  COMMAND_REQUEST (3) is the most common code for method calls.

  NOTE: According to Hamilton documentation, both HARP2 and HOI2 use the same
  action enumeration values. This needs verification through TCP introspection.
  """
  STATUS_REQUEST = 0
  STATUS_RESPONSE = 1
  STATUS_EXCEPTION = 2
  COMMAND_REQUEST = 3
  COMMAND_RESPONSE = 4
  COMMAND_EXCEPTION = 5
  COMMAND_ACK = 6
  UPSTREAM_SYSTEM_EVENT = 7
  DOWNSTREAM_SYSTEM_EVENT = 8
  EVENT = 9
  INVALID_ACTION_RESPONSE = 10
  STATUS_WARNING = 11
  COMMAND_WARNING = 12


class HarpTransportableProtocol(IntEnum):
  """HARP2 protocol field values - determines the payload type.

  From Hamilton.Components.TransportLayer.Protocols.HarpTransportableProtocol.
  The protocol field at byte 14 in HARP2 selects which payload parser to use.
  """
  HOI2 = 2  # Payload is Hoi2 structure (Protocol 2)
  REGISTRATION2 = 3  # Payload is Registration2 structure (Protocol 3)
  NOT_DEFINED = 0xFF  # Invalid/unknown protocol


class RegistrationActionCode(IntEnum):
  """Registration2 action codes (bytes 0-1 in a Registration2 packet).

  From Hamilton.Components.TransportLayer.Protocols.RegistrationPacket2Constants.RegistrationActionCode2.

  Note: HARP action byte values for Registration packets differ from HOI
  action codes:
  - 0x13 (19): request with response required (typical for HARP_PROTOCOL_REQUEST)
  - 0x14 (20): response with data (typical for HARP_PROTOCOL_RESPONSE)
  - 0x03 (3): request without response
  """
  REGISTRATION_REQUEST = 0  # Initial registration handshake
  REGISTRATION_RESPONSE = 1  # Response to registration
  DEREGISTRATION_REQUEST = 2  # Cleanup on disconnect
  DEREGISTRATION_RESPONSE = 3  # Deregistration acknowledgment
  NODE_RESET_INDICATION = 4  # Node will reset
  BRIDGE_REGISTRATION_REQUEST = 5  # Bridge registration
  START_NODE_IDENTIFICATION = 6  # Start identification
  START_NODE_IDENTIFICATION_RESPONSE = 7
  STOP_NODE_IDENTIFICATION = 8  # Stop identification
  STOP_NODE_IDENTIFICATION_RESPONSE = 9
  LIST_OF_REGISTERED_MODULES_REQUEST = 10  # Request registered modules
  LIST_OF_REGISTERED_MODULES_RESPONSE = 11
  HARP_PROTOCOL_REQUEST = 12  # Request objects (most important!)
  HARP_PROTOCOL_RESPONSE = 13  # Response with object list
  HARP_NODE_REMOVED_FROM_NETWORK = 14
  LIST_OF_REGISTERED_NODES_REQUEST = 15
  LIST_OF_REGISTERED_NODES_RESPONSE = 16


class RegistrationOptionType(IntEnum):
  """Registration2 option types (byte 0 of each option).

  From Hamilton.Components.TransportLayer.Protocols.RegistrationPacket2Constants.Option.

  These are semantic labels for the TYPE of information (what it means), while
  the actual data inside uses Hamilton type_ids (how it's encoded).
  """
  RESERVED = 0  # Padding for 16-bit alignment when odd number of unsupported options
  INCOMPATIBLE_VERSION = 1  # Version mismatch error (HARP version too high)
  UNSUPPORTED_OPTIONS = 2  # Unknown options error
  START_NODE_IDENTIFICATION = 3  # Identification timeout (seconds)
  HARP_NETWORK_ADDRESS = 4  # Registered module/node IDs
  HARP_PROTOCOL_REQUEST = 5  # Protocol request
  HARP_PROTOCOL_RESPONSE = 6  # PRIMARY: Contains object ID lists (most commonly used)


class HamiltonDataType(IntEnum):
  """Hamilton parameter data types for wire encoding in DataFragments.

  Type identifiers used in Hamilton DataFragments for HOI2 command
  parameters; each id corresponds to a specific wire encoding.

  From Hamilton.Components.TransportLayer.Protocols.Parameter.ParameterTypes.
  """
  # Scalar integer types
  I8 = 1
  I16 = 2
  I32 = 3
  U8 = 4
  U16 = 5
  U32 = 6
  I64 = 36
  U64 = 37

  # Floating-point types
  F32 = 40
  F64 = 41

  # String and boolean
  STRING = 15
  BOOL = 23

  # Array types
  U8_ARRAY = 22
  I8_ARRAY = 24
  I16_ARRAY = 25
  U16_ARRAY = 26
  I32_ARRAY = 27
  U32_ARRAY = 28
  BOOL_ARRAY = 29
  STRING_ARRAY = 34
  I64_ARRAY = 38
  U64_ARRAY = 39
  F32_ARRAY = 42
  F64_ARRAY = 43
class HoiRequestId(IntEnum):
  """Request types for HarpProtocolRequest (byte 3 in command_data).

  From Hamilton.Components.TransportLayer.Protocols.RegistrationPacket2Constants.HarpProtocolRequest.HoiRequestId.
  """
  ROOT_OBJECT_OBJECT_ID = 1  # Request root objects (pipette, deck, etc.)
  GLOBAL_OBJECT_ADDRESS = 2  # Request global objects
  CPU_OBJECT_ADDRESS = 3  # Request CPU objects


@dataclass
class HamiltonError:
  """Hamilton error response."""
  error_code: int  # Numeric error code reported by the instrument
  error_message: str  # Decoded error text (may be replacement-decoded UTF-8)
  interface_id: int  # HOI interface the error relates to (0 when unknown)
  action_id: int  # HOI action the error relates to (0 when unknown)


class ErrorParser:
  """Parse Hamilton error responses."""

  @staticmethod
  def parse_error(data: bytes) -> HamiltonError:
    """Parse an error response from a Hamilton instrument.

    Args:
      data: Raw error payload bytes.

    Returns:
      A HamiltonError with the code and message extracted.

    Raises:
      ValueError: If the payload is shorter than 8 bytes.

    NOTE(review): simplified implementation - real error payloads may vary;
    interface_id/action_id are not recoverable from this format and are
    reported as 0.
    """
    # Error responses have a specific format
    # This is a simplified implementation - real errors may vary
    if len(data) < 8:
      raise ValueError("Error response too short")

    # First 4 bytes: error code; remainder decoded as UTF-8 text with
    # replacement for undecodable bytes.
    error_code = Wire.read(data).u32()
    error_message = data[4:].decode('utf-8', errors='replace')

    return HamiltonError(
      error_code=error_code,
      error_message=error_message,
      interface_id=0,
      action_id=0
    )
class TCPBackend(Socket):
  """Base backend for all Hamilton TCP instruments.

  This class provides:
  - Connection management via Socket (wrapped with state tracking)
  - Protocol 7 initialization
  - Protocol 3 registration
  - Generic command execution
  - Object discovery via introspection

  Hamilton uses a strict request-response protocol (no unsolicited messages),
  so simple direct read/write is used instead of complex routing.
  """

  def __init__(
    self,
    host: str,
    port: int,
    read_timeout: float = 30.0,
    write_timeout: float = 30.0,
    buffer_size: int = 1024,
    auto_reconnect: bool = True,
    max_reconnect_attempts: int = 3,
  ):
    """Initialize Hamilton TCP backend.

    Args:
      host: Hamilton instrument IP address
      port: Hamilton instrument port (usually 50007)
      read_timeout: Read timeout in seconds
      write_timeout: Write timeout in seconds
      buffer_size: Buffer size (not used by Socket, kept for compatibility)
      auto_reconnect: Enable automatic reconnection
      max_reconnect_attempts: Maximum reconnection attempts
    """
    super().__init__(
      host=host,
      port=port,
      read_timeout=read_timeout,
      write_timeout=write_timeout,
    )

    # Connection state tracking (wrapping Socket). State is one of
    # "disconnected"/"connected"; updated by read/write/_reconnect.
    self._connection_state = "disconnected"
    self._last_error: Optional[Exception] = None
    self._reconnect_attempts = 0
    self.auto_reconnect = auto_reconnect
    self.max_reconnect_attempts = max_reconnect_attempts
    self.buffer_size = buffer_size  # Kept for compatibility, not used by Socket

    # Hamilton-specific state: negotiated client id/address, per-destination
    # sequence numbers, and the objects discovered via registration.
    self._client_id: Optional[int] = None
    self.client_address: Optional[Address] = None
    self._sequence_numbers: Dict[Address, int] = {}
    self._discovered_objects: Dict[str, list[Address]] = {}

    # Instrument-specific addresses (populated by subclasses)
    self._instrument_addresses: Dict[str, Address] = {}

  async def _ensure_connected(self):
    """Ensure connection is healthy before operations.

    Reconnects when auto_reconnect is enabled; otherwise raises
    ConnectionError.
    """
    if self._connection_state != "connected":
      if self.auto_reconnect:
        # NOTE(review): self._unique_id is presumably provided by Socket —
        # confirm against the Socket base class.
        logger.info(f"{self._unique_id} Connection not established, attempting to reconnect...")
        await self._reconnect()
      else:
        raise ConnectionError(
          f"{self._unique_id} Connection not established and auto-reconnect disabled"
        )

  async def _reconnect(self):
    """Attempt to reconnect with exponential backoff.

    Raises:
      ConnectionError: If auto-reconnect is disabled or all attempts fail.
    """
    if not self.auto_reconnect:
      raise ConnectionError(f"{self._unique_id} Auto-reconnect disabled")

    for attempt in range(self.max_reconnect_attempts):
      try:
        logger.info(
          f"{self._unique_id} Reconnection attempt {attempt + 1}/{self.max_reconnect_attempts}"
        )

        # Clean up any existing connection; failures here are ignored since
        # the socket may already be closed.
        try:
          await self.stop()
        except Exception:
          pass

        # Wait before reconnecting (exponential backoff): 1s, 2s, 4s, ...
        # No wait before the first attempt.
        if attempt > 0:
          wait_time = 1.0 * (2 ** (attempt - 1))  # 1s, 2s, 4s, etc.
          await asyncio.sleep(wait_time)

        # Attempt to reconnect
        await self.setup()
        self._reconnect_attempts = 0
        logger.info(f"{self._unique_id} Reconnection successful")
        return

      except Exception as e:
        self._last_error = e
        logger.warning(f"{self._unique_id} Reconnection attempt {attempt + 1} failed: {e}")

    # All reconnection attempts failed
    self._connection_state = "disconnected"
    raise ConnectionError(
      f"{self._unique_id} Failed to reconnect after {self.max_reconnect_attempts} attempts"
    )
Wait before reconnecting (exponential backoff) + if attempt > 0: + wait_time = 1.0 * (2 ** (attempt - 1)) # 1s, 2s, 4s, etc. + await asyncio.sleep(wait_time) + + # Attempt to reconnect + await self.setup() + self._reconnect_attempts = 0 + logger.info(f"{self._unique_id} Reconnection successful") + return + + except Exception as e: + self._last_error = e + logger.warning(f"{self._unique_id} Reconnection attempt {attempt + 1} failed: {e}") + + # All reconnection attempts failed + self._connection_state = "disconnected" + raise ConnectionError( + f"{self._unique_id} Failed to reconnect after {self.max_reconnect_attempts} attempts" + ) + + async def write(self, data: bytes, timeout: Optional[float] = None): + """Write data to the socket with connection state tracking. + + Args: + data: The data to write. + timeout: The timeout for writing to the server in seconds. If `None`, use the default timeout. + """ + await self._ensure_connected() + + try: + await super().write(data, timeout=timeout) + self._connection_state = "connected" + except (ConnectionError, OSError, TimeoutError) as e: + self._connection_state = "disconnected" + self._last_error = e + raise + + async def read(self, num_bytes: int = 128, timeout: Optional[float] = None) -> bytes: + """Read data from the socket with connection state tracking. + + Args: + num_bytes: Maximum number of bytes to read. Defaults to 128. + timeout: The timeout for reading from the server in seconds. If `None`, use the default timeout. + + Returns: + The data read from the socket. + """ + await self._ensure_connected() + + try: + data = await super().read(num_bytes, timeout=timeout) + self._connection_state = "connected" + return data + except (ConnectionError, OSError, TimeoutError) as e: + self._connection_state = "disconnected" + self._last_error = e + raise + + async def read_exact(self, num_bytes: int, timeout: Optional[float] = None) -> bytes: + """Read exactly num_bytes with connection state tracking. 
Args:
      num_bytes: The exact number of bytes to read.
      timeout: The timeout for reading from the server in seconds. If `None`, use the default timeout.

    Returns:
      Exactly num_bytes of data.

    Raises:
      ConnectionError: If the connection is closed before num_bytes are read.
    """
    await self._ensure_connected()

    try:
      data = await super().read_exact(num_bytes, timeout=timeout)
      # Any successful read proves the link is alive, so refresh the state flag.
      self._connection_state = "connected"
      return data
    except (ConnectionError, OSError, TimeoutError) as e:
      # Mark the connection dead so the next operation triggers _ensure_connected's
      # reconnect path; keep the exception for diagnostics via `last_error`.
      self._connection_state = "disconnected"
      self._last_error = e
      raise


  @property
  def connection_state(self) -> str:
    """Get the current connection state ("connected" or "disconnected")."""
    return self._connection_state

  @property
  def is_connected(self) -> bool:
    """Check if the connection is currently established."""
    return self._connection_state == "connected"

  @property
  def last_error(self) -> Optional[Exception]:
    """Get the last connection error (set by write/read/read_exact on failure)."""
    return self._last_error

  async def _read_one_message(self):
    """Read one complete Hamilton packet and parse based on protocol.

    Hamilton packets are length-prefixed:
    - First 2 bytes: packet size (little-endian)
    - Next packet_size bytes: packet payload

    The method inspects the IP protocol field and, for Protocol 6 (HARP),
    also checks the HARP protocol field to dispatch correctly.
+ + Returns: + Union[RegistrationResponse, CommandResponse]: Parsed response + + Raises: + ConnectionError: If connection is lost + TimeoutError: If no message received within timeout + ValueError: If protocol type is unknown + """ + # Read packet size (2 bytes, little-endian) + size_data = await self.read_exact(2) + packet_size = Wire.read(size_data).u16() + + # Read packet payload + payload_data = await self.read_exact(packet_size) + complete_data = size_data + payload_data + + # Parse IP packet to get protocol field (byte 2) + # Format: [size:2][ip_protocol:1][version:1][options_len:2][options:x][payload:n] + ip_protocol = complete_data[2] + + # Dispatch based on IP protocol + if ip_protocol == 6: + # Protocol 6: HARP wrapper - need to check HARP protocol field + # IP header: [size:2][protocol:1][version:1][options_len:2] + ip_options_len = int.from_bytes(complete_data[4:6], 'little') + harp_start = 6 + ip_options_len + + # HARP header: [src:6][dst:6][seq:1][unk:1][harp_protocol:1][action:1]... + # HARP protocol is at offset 14 within HARP packet + harp_protocol_offset = harp_start + 14 + harp_protocol = complete_data[harp_protocol_offset] + + if harp_protocol == 2: + # HARP Protocol 2: HOI2 + return CommandResponse.from_bytes(complete_data) + elif harp_protocol == 3: + # HARP Protocol 3: Registration2 + return RegistrationResponse.from_bytes(complete_data) + else: + logger.warning(f"Unknown HARP protocol: {harp_protocol}, attempting CommandResponse parse") + return CommandResponse.from_bytes(complete_data) + else: + logger.warning(f"Unknown IP protocol: {ip_protocol}, attempting CommandResponse parse") + return CommandResponse.from_bytes(complete_data) + + async def setup(self): + """Initialize Hamilton connection and discover objects. + + Hamilton uses strict request-response protocol: + 1. Establish TCP connection + 2. Protocol 7 initialization (get client ID) + 3. Protocol 3 registration + 4. 
Discover objects via Protocol 3 introspection
    """
    # Step 1: Establish TCP connection
    await super().setup()
    # Set connection state after successful connection
    self._connection_state = "connected"
    self._last_error = None
    self._reconnect_attempts = 0

    # Step 2: Initialize connection (Protocol 7)
    await self._initialize_connection()

    # Step 3: Register client (Protocol 3)
    await self._register_client()

    # Step 4: Discover root objects
    await self._discover_root()

    logger.info(f"Hamilton backend setup complete. Client ID: {self._client_id}")

  async def _initialize_connection(self):
    """Initialize connection using Protocol 7 (ConnectionPacket).

    Assigns `self._client_id` and `self.client_address` from the instrument's
    response; both are required by the later registration/discovery steps.

    Note: Protocol 7 doesn't have sequence numbers, so we send the packet
    and read the response directly (blocking) rather than using the
    normal routing mechanism.
    """
    logger.info("Initializing Hamilton connection...")

    # Build Protocol 7 ConnectionPacket using new InitMessage
    # NOTE(review): timeout=30 appears to mirror the vendor DLL default — confirm
    # units/semantics against InitMessage before changing.
    packet = InitMessage(timeout=30).build()

    logger.info("[INIT] Sending Protocol 7 initialization packet:")
    logger.info(f"[INIT] Length: {len(packet)} bytes")
    logger.info(f"[INIT] Hex: {packet.hex(' ')}")

    # Send packet
    await self.write(packet)

    # Read response directly (blocking - safe because this is first communication)
    # Read packet size (2 bytes, little-endian)
    size_data = await self.read_exact(2)
    packet_size = Wire.read(size_data).u16()

    # Read packet payload
    payload_data = await self.read_exact(packet_size)
    response_bytes = size_data + payload_data

    logger.info("[INIT] Received response:")
    logger.info(f"[INIT] Length: {len(response_bytes)} bytes")
    logger.info(f"[INIT] Hex: {response_bytes.hex(' ')}")

    # Parse response using InitResponse
    response = InitResponse.from_bytes(response_bytes)

    self._client_id = response.client_id
    # Controller module is 2, node is client_id, object 65535 for general addressing
    self.client_address = Address(2, response.client_id, 65535)
+ + logger.info(f"[INIT] ✓ Client ID: {self._client_id}, Address: {self.client_address}") + + async def _register_client(self): + """Register client using Protocol 3.""" + logger.info("Registering Hamilton client...") + + # Registration service address (DLL uses 0:0:65534, Piglet comment confirms) + registration_service = Address(0, 0, 65534) + + # Step 1: Initial registration (action_code=0) + reg_msg = RegistrationMessage( + dest=registration_service, + action_code=RegistrationActionCode.REGISTRATION_REQUEST + ) + + # Ensure client is initialized + if self.client_address is None or self._client_id is None: + raise RuntimeError("Client not initialized - call _initialize_connection() first") + + # Build and send registration packet + seq = self._allocate_sequence_number(registration_service) + packet = reg_msg.build( + src=self.client_address, + req_addr=Address(2, self._client_id, 65535), # C# DLL: 2:{client_id}:65535 + res_addr=Address(0, 0, 0), # C# DLL: 0:0:0 + seq=seq, + harp_action_code=3, # COMMAND_REQUEST + harp_response_required=False # DLL uses 0x03 (no response flag) + ) + + logger.info("[REGISTER] Sending registration packet:") + logger.info(f"[REGISTER] Length: {len(packet)} bytes, Seq: {seq}") + logger.info(f"[REGISTER] Hex: {packet.hex(' ')}") + logger.info(f"[REGISTER] Src: {self.client_address}, Dst: {registration_service}") + + # Send registration packet + await self.write(packet) + + # Read response + response = await self._read_one_message() + + logger.info("[REGISTER] Received response:") + logger.info(f"[REGISTER] Length: {len(response.raw_bytes)} bytes") + logger.debug(f"[REGISTER] Hex: {response.raw_bytes.hex(' ')}") + + logger.info("[REGISTER] ✓ Registration complete") + + async def _discover_root(self): + """Discover root objects via Protocol 3 HARP_PROTOCOL_REQUEST""" + logger.info("Discovering Hamilton root objects...") + + registration_service = Address(0, 0, 65534) + + # Request root objects (request_id=1) + root_msg = 
RegistrationMessage( + dest=registration_service, + action_code=RegistrationActionCode.HARP_PROTOCOL_REQUEST + ) + root_msg.add_registration_option( + RegistrationOptionType.HARP_PROTOCOL_REQUEST, + protocol=2, + request_id=HoiRequestId.ROOT_OBJECT_OBJECT_ID + ) + + # Ensure client is initialized + if self.client_address is None or self._client_id is None: + raise RuntimeError("Client not initialized - call _initialize_connection() first") + + seq = self._allocate_sequence_number(registration_service) + packet = root_msg.build( + src=self.client_address, + req_addr=Address(0, 0, 0), + res_addr=Address(0, 0, 0), + seq=seq, + harp_action_code=3, # COMMAND_REQUEST + harp_response_required=True # Request with response + ) + + logger.info("[DISCOVER_ROOT] Sending root object discovery:") + logger.info(f"[DISCOVER_ROOT] Length: {len(packet)} bytes, Seq: {seq}") + logger.info(f"[DISCOVER_ROOT] Hex: {packet.hex(' ')}") + + # Send request + await self.write(packet) + + # Read response + response = await self._read_one_message() + + logger.debug(f"[DISCOVER_ROOT] Received response: {len(response.raw_bytes)} bytes") + + # Parse registration response to extract root object IDs + root_objects = self._parse_registration_response(response) + logger.info(f"[DISCOVER_ROOT] ✓ Found {len(root_objects)} root objects") + + # Store discovered root objects + self._discovered_objects['root'] = root_objects + + logger.info(f"✓ Discovery complete: {len(root_objects)} root objects") + + def _parse_registration_response(self, response: RegistrationResponse) -> list[Address]: + """Parse registration response options to extract object addresses. + + From Piglet: Option type 6 (HARP_PROTOCOL_RESPONSE) contains object IDs + as a packed list of u16 values. 
+ + Args: + response: Parsed RegistrationResponse + + Returns: + List of discovered object addresses + """ + objects: list[Address] = [] + options_data = response.registration.options + + if not options_data: + logger.debug("No options in registration response (no objects found)") + return objects + + # Parse options: [option_id:1][length:1][data:x] + reader = Wire.read(options_data) + + while reader.has_remaining(): + option_id = reader.u8() + length = reader.u8() + + if option_id == RegistrationOptionType.HARP_PROTOCOL_RESPONSE: + if length > 0: + # Skip padding u16 + _ = reader.u16() + + # Read object IDs (u16 each) + num_objects = (length - 2) // 2 + for _ in range(num_objects): + object_id = reader.u16() + # Objects are at Address(1, 1, object_id) + objects.append(Address(1, 1, object_id)) + else: + logger.warning(f"Unknown registration option ID: {option_id}, skipping {length} bytes") + # Skip unknown option data + reader.raw_bytes(length) + + return objects + + def _allocate_sequence_number(self, dest_address: Address) -> int: + """Allocate next sequence number for destination. + + Args: + dest_address: Destination object address + + Returns: + Next sequence number for this destination + """ + current = self._sequence_numbers.get(dest_address, 0) + next_seq = (current + 1) % 256 # Wrap at 8 bits (1 byte) + self._sequence_numbers[dest_address] = next_seq + return next_seq + + async def send_command(self, command: HamiltonCommand, timeout: float = 10.0) -> dict: + """Send Hamilton command and wait for response. + + Sets source_address if not already set by caller (for testing). + Uses backend's client_address assigned during Protocol 7 initialization. 
+ + Args: + command: Hamilton command to execute + timeout: Maximum time to wait for response + + Returns: + Parsed response dictionary + + Raises: + TimeoutError: If no response received within timeout + HamiltonError: If command returned an error + """ + # Set source address with smart fallback + if command.source_address is None: + if self.client_address is None: + raise RuntimeError( + "Backend not initialized - call setup() first to assign client_address" + ) + command.source_address = self.client_address + + # Allocate sequence number for this command + command.sequence_number = self._allocate_sequence_number(command.dest_address) + + # Build command message + message = command.build() + + # Log command parameters for debugging + log_params = command.get_log_params() + logger.info(f"{command.__class__.__name__} parameters:") + for key, value in log_params.items(): + # Format arrays nicely if very long + if isinstance(value, list) and len(value) > 8: + logger.info(f" {key}: {value[:4]}... 
({len(value)} items)") + else: + logger.info(f" {key}: {value}") + + # Send command + await self.write(message) + + # Read response (timeout handled by TCP layer) + response_message = await self._read_one_message() + + # Parse response with type dispatch + parser = ResponseParser() + hoi_response = parser.parse(response_message) + + # Handle errors + if isinstance(hoi_response, ErrorResponse): + logger.error(f"Hamilton error {hoi_response.error_code}: {hoi_response.error_message}") + raise RuntimeError( + f"Hamilton error {hoi_response.error_code}: {hoi_response.error_message}" + ) + + # Let command interpret success response + # Type narrowing: we know it's SuccessResponse after ErrorResponse check + if not isinstance(hoi_response, SuccessResponse): + raise RuntimeError(f"Unexpected response type: {type(hoi_response)}") + return command.interpret_response(hoi_response) + + async def stop(self): + """Stop the backend and close connection.""" + try: + await super().stop() + except Exception as e: + logger.warning(f"Error during stop: {e}") + finally: + self._connection_state = "disconnected" + logger.info("Hamilton backend stopped") + + def serialize(self) -> dict: + """Serialize backend configuration.""" + return { + **super().serialize(), + "client_id": self._client_id, + "instrument_addresses": {k: str(v) for k, v in self._instrument_addresses.items()}, + } diff --git a/pylabrobot/liquid_handling/backends/hamilton/tcp_introspection.py b/pylabrobot/liquid_handling/backends/hamilton/tcp_introspection.py new file mode 100644 index 00000000000..286941c3fa9 --- /dev/null +++ b/pylabrobot/liquid_handling/backends/hamilton/tcp_introspection.py @@ -0,0 +1,846 @@ +"""Hamilton TCP Introspection API. + +This module provides dynamic discovery of Hamilton instrument capabilities +using Interface 0 introspection methods. It allows discovering available +objects, methods, interfaces, enums, and structs at runtime. 
+""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass, field +from typing import Any, Dict, List + +from pylabrobot.liquid_handling.backends.hamilton.protocol import HamiltonProtocol, HamiltonDataType +from pylabrobot.liquid_handling.backends.hamilton.packets import Address +from pylabrobot.liquid_handling.backends.hamilton.commands import HamiltonCommand +from pylabrobot.liquid_handling.backends.hamilton.messages import HoiParams, HoiParamsParser + +logger = logging.getLogger(__name__) + + +# ============================================================================ +# TYPE RESOLUTION HELPERS +# ============================================================================ + +def resolve_type_id(type_id: int) -> str: + """Resolve Hamilton type ID to readable name. + + Args: + type_id: Hamilton data type ID + + Returns: + Human-readable type name + """ + try: + return HamiltonDataType(type_id).name + except ValueError: + return f"UNKNOWN_TYPE_{type_id}" + + +def resolve_type_ids(type_ids: List[int]) -> List[str]: + """Resolve list of Hamilton type IDs to readable names. + + Args: + type_ids: List of Hamilton data type IDs + + Returns: + List of human-readable type names + """ + return [resolve_type_id(tid) for tid in type_ids] + + +# ============================================================================ +# INTROSPECTION TYPE MAPPING +# ============================================================================ +# Introspection type IDs are separate from HamiltonDataType wire encoding types. +# These are used for method signature display/metadata, not binary encoding. 
# Type ID ranges for categorization:
# - Argument types: Method parameters (input)
# - ReturnElement types: Multiple return values (struct fields)
# - ReturnValue types: Single return value

# Per-category name tables. They are merged below into the flat lookup that
# resolve_introspection_type_name() uses, and the category-membership sets are
# derived from their keys so the two can never drift apart.
_ARGUMENT_TYPES: dict[int, str] = {
  1: "i8", 2: "u8", 3: "i16", 4: "u16", 5: "i32", 6: "u32", 7: "str", 8: "bytes",
  33: "bool",
  41: "List[i16]", 45: "List[u16]", 49: "List[i32]", 53: "List[u32]",
  61: "List[struct]",  # Complex type, needs source_id + struct_id
  66: "List[bool]",
  82: "List[enum]",  # Complex type, needs source_id + enum_id
  102: "f32",
}

_RETURN_ELEMENT_TYPES: dict[int, str] = {
  18: "u8", 19: "i16", 20: "u16", 21: "i32", 22: "u32", 23: "str", 24: "bytes",
  35: "bool",
  43: "List[i16]", 47: "List[u16]", 51: "List[i32]", 55: "List[u32]",
  68: "List[bool]", 76: "List[str]",
}

_RETURN_VALUE_TYPES: dict[int, str] = {
  25: "i8", 26: "u8", 27: "i16", 28: "u16", 29: "i32", 30: "u32", 31: "str", 32: "bytes",
  36: "bool",
  44: "List[i16]", 48: "List[u16]", 52: "List[i32]", 56: "List[u32]",
  69: "List[bool]",
  81: "enum",  # Complex type, needs source_id + enum_id
  85: "enum",  # Complex type, needs source_id + enum_id
  104: "f32", 105: "f32",
}

# Complex types (60, 64, 78) that are not members of any category table.
_EXTRA_COMPLEX_TYPES: dict[int, str] = {
  60: "struct",  # ReturnValue, needs source_id + struct_id
  64: "struct",  # ReturnValue, needs source_id + struct_id
  78: "enum",  # Argument, needs source_id + enum_id
}

# Flat name lookup used by resolve_introspection_type_name().
_INTROSPECTION_TYPE_NAMES: dict[int, str] = {
  **_ARGUMENT_TYPES,
  **_RETURN_ELEMENT_TYPES,
  **_RETURN_VALUE_TYPES,
  **_EXTRA_COMPLEX_TYPES,
}

# Type ID sets for categorization, derived from the tables above.
_ARGUMENT_TYPE_IDS = set(_ARGUMENT_TYPES)
_RETURN_ELEMENT_TYPE_IDS = set(_RETURN_ELEMENT_TYPES)
_RETURN_VALUE_TYPE_IDS = set(_RETURN_VALUE_TYPES)
_COMPLEX_TYPE_IDS = {60, 61, 64, 78, 81, 82, 85}  # Types that need additional bytes


def get_introspection_type_category(type_id: int) -> str:
  """Classify an introspection type ID.

  Args:
    type_id: Introspection type ID

  Returns:
    Category: "Argument", "ReturnElement", "ReturnValue", or "Unknown"
  """
  for category, members in (
    ("Argument", _ARGUMENT_TYPE_IDS),
    ("ReturnElement", _RETURN_ELEMENT_TYPE_IDS),
    ("ReturnValue", _RETURN_VALUE_TYPE_IDS),
  ):
    if type_id in members:
      return category
  return "Unknown"


def resolve_introspection_type_name(type_id: int) -> str:
  """Resolve introspection type ID to readable name.

  Args:
    type_id: Introspection type ID

  Returns:
    Human-readable type name
  """
  return _INTROSPECTION_TYPE_NAMES.get(type_id, f"UNKNOWN_TYPE_{type_id}")


def is_complex_introspection_type(type_id: int) -> bool:
  """Check if introspection type is complex (needs additional bytes).

  Complex types require 3 bytes total: type_id, source_id, struct_id/enum_id

  Args:
    type_id: Introspection type ID

  Returns:
    True if type is complex
  """
  return type_id in _COMPLEX_TYPE_IDS


# ============================================================================
# DATA STRUCTURES
# ============================================================================

@dataclass
class ObjectInfo:
  """Object metadata from introspection."""
  name: str
  version: str
  method_count: int
  subobject_count: int
  address: Address


@dataclass
class MethodInfo:
  """Method signature from introspection."""
  interface_id: int
  call_type: int
  method_id: int
  name: str
  parameter_types: list[int] = field(default_factory=list)  # Decoded parameter type IDs (Argument category)
  parameter_labels: list[str] = field(default_factory=list)  # Parameter names (if available)
  return_types: list[int] = field(default_factory=list)  # Decoded return type IDs (ReturnElement/ReturnValue category)
  return_labels: list[str] = field(default_factory=list)  # Return names (if available)

  def get_signature_string(self) -> str:
    """Render this method as a human-readable signature string."""
    # Parameters: "name: type" pairs when labels line up, bare types otherwise.
    if not self.parameter_types:
      param_str = "void"
    else:
      param_names = [resolve_introspection_type_name(tid) for tid in self.parameter_types]
      if self.parameter_labels and len(self.parameter_labels) == len(param_names):
        param_str = ", ".join(
          f"{lbl}: {tname}" for lbl, tname in zip(self.parameter_labels, param_names)
        )
      else:
        param_str = ", ".join(param_names)

    # Return value: struct-style "{ ... }" when any ReturnElement is present,
    # a single value when exactly one type, otherwise "void".
    if not self.return_types:
      return_str = "void"
    else:
      ret_names = [resolve_introspection_type_name(tid) for tid in self.return_types]
      ret_cats = [get_introspection_type_category(tid) for tid in self.return_types]
      if "ReturnElement" in ret_cats:
        if self.return_labels and len(self.return_labels) == len(ret_names):
          inner = ", ".join(
            f"{lbl}: {tname}" for lbl, tname in zip(self.return_labels, ret_names)
          )
        else:
          inner = ", ".join(ret_names)
        return_str = f"{{ {inner} }}"
      elif len(ret_names) == 1:
        if self.return_labels and len(self.return_labels) == 1:
          return_str = f"{self.return_labels[0]}: {ret_names[0]}"
        else:
          return_str = ret_names[0]
      else:
        # Several plain ReturnValue IDs without ReturnElement markers:
        # treated as no declared return, matching the wire convention.
        return_str = "void"

    return f"{self.name}({param_str}) -> {return_str}"


@dataclass
class InterfaceInfo:
  """Interface metadata from introspection."""
  interface_id: int
  name: str
  version: str


@dataclass
class EnumInfo:
  """Enum definition from introspection."""
  enum_id: int
name: str + values: Dict[str, int] + + +@dataclass +class StructInfo: + """Struct definition from introspection.""" + struct_id: int + name: str + fields: Dict[str, int] # field_name -> type_id + + @property + def field_type_names(self) -> Dict[str, str]: + """Get human-readable field type names.""" + return {field_name: resolve_type_id(type_id) for field_name, type_id in self.fields.items()} + + def get_struct_string(self) -> str: + """Get struct definition as a readable string.""" + field_strs = [f"{field_name}: {resolve_type_id(type_id)}" + for field_name, type_id in self.fields.items()] + fields_str = "\n ".join(field_strs) if field_strs else " (empty)" + return f"struct {self.name} {{\n {fields_str}\n}}" + + +# ============================================================================ +# INTROSPECTION COMMAND CLASSES +# ============================================================================ + +class GetObjectCommand(HamiltonCommand): + """Get object metadata (command_id=1).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 0 + command_id = 1 + action_code = 0 # QUERY + + def __init__(self, object_address: Address): + super().__init__(object_address) + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for get_object command.""" + # No parameters needed for get_object + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse get_object response.""" + # Parse HOI2 DataFragments + parser = HoiParamsParser(data) + + _, name = parser.parse_next() + _, version = parser.parse_next() + _, method_count = parser.parse_next() + _, subobject_count = parser.parse_next() + + return { + 'name': name, + 'version': version, + 'method_count': method_count, + 'subobject_count': subobject_count + } + + +class GetMethodCommand(HamiltonCommand): + """Get method signature (command_id=2).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 0 + command_id 
= 2 + action_code = 0 # QUERY + + def __init__(self, object_address: Address, method_index: int): + super().__init__(object_address) + self.method_index = method_index + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for get_method command.""" + return HoiParams().u32(self.method_index) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse get_method response.""" + parser = HoiParamsParser(data) + + _, interface_id = parser.parse_next() + _, call_type = parser.parse_next() + _, method_id = parser.parse_next() + _, name = parser.parse_next() + + # The remaining fragments are STRING types containing type IDs as bytes + # Hamilton sends ONE combined list where type IDs encode category (Argument/ReturnElement/ReturnValue) + # First STRING after method name is parameter_types (each byte is a type ID - can be Argument or Return) + # Second STRING (if present) is parameter_labels (comma-separated names - includes both params and returns) + parameter_types_str = None + parameter_labels_str = None + + if parser.has_remaining(): + _, parameter_types_str = parser.parse_next() + + if parser.has_remaining(): + _, parameter_labels_str = parser.parse_next() + + # Decode string bytes to type IDs (like piglet does: .as_bytes().to_vec()) + all_type_ids: list[int] = [] + if parameter_types_str: + all_type_ids = [ord(c) for c in parameter_types_str] + + # Parse all labels (comma-separated - includes both parameters and returns) + all_labels: list[str] = [] + if parameter_labels_str: + all_labels = [label.strip() for label in parameter_labels_str.split(',') if label.strip()] + + # Categorize by type ID ranges (like piglet does) + # Split into arguments vs returns based on type ID category + parameter_types: list[int] = [] + parameter_labels: list[str] = [] + return_types: list[int] = [] + return_labels: list[str] = [] + + for i, type_id in enumerate(all_type_ids): + category = 
get_introspection_type_category(type_id) + label = all_labels[i] if i < len(all_labels) else None + + if category == "Argument": + parameter_types.append(type_id) + if label: + parameter_labels.append(label) + elif category in ("ReturnElement", "ReturnValue"): + return_types.append(type_id) + if label: + return_labels.append(label) + # Unknown types - could be parameters or returns, default to parameters + else: + parameter_types.append(type_id) + if label: + parameter_labels.append(label) + + return { + 'interface_id': interface_id, + 'call_type': call_type, + 'method_id': method_id, + 'name': name, + 'parameter_types': parameter_types, # Decoded type IDs (Argument category only) + 'parameter_labels': parameter_labels, # Parameter names only + 'return_types': return_types, # Decoded type IDs (ReturnElement/ReturnValue only) + 'return_labels': return_labels, # Return names only + } + + +class GetSubobjectAddressCommand(HamiltonCommand): + """Get subobject address (command_id=3).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 0 + command_id = 3 + action_code = 0 # QUERY + + def __init__(self, object_address: Address, subobject_index: int): + super().__init__(object_address) + self.subobject_index = subobject_index + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for get_subobject_address command.""" + return HoiParams().u16(self.subobject_index) # Use u16, not u32 + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse get_subobject_address response.""" + parser = HoiParamsParser(data) + + _, module_id = parser.parse_next() + _, node_id = parser.parse_next() + _, object_id = parser.parse_next() + + return { + 'address': Address(module_id, node_id, object_id) + } + + +class GetInterfacesCommand(HamiltonCommand): + """Get available interfaces (command_id=4).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 0 + command_id = 4 + action_code = 0 # QUERY + + 
def __init__(self, object_address: Address): + super().__init__(object_address) + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for get_interfaces command.""" + # No parameters needed + return HoiParams() + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse get_interfaces response.""" + parser = HoiParamsParser(data) + + interfaces = [] + _, interface_count = parser.parse_next() + + for _ in range(interface_count): + _, interface_id = parser.parse_next() + _, name = parser.parse_next() + _, version = parser.parse_next() + interfaces.append({ + 'interface_id': interface_id, + 'name': name, + 'version': version + }) + + return {'interfaces': interfaces} + + +class GetEnumsCommand(HamiltonCommand): + """Get enum definitions (command_id=5).""" + + protocol = HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 0 + command_id = 5 + action_code = 0 # QUERY + + def __init__(self, object_address: Address, target_interface_id: int): + super().__init__(object_address) + self.target_interface_id = target_interface_id + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for get_enums command.""" + return HoiParams().u8(self.target_interface_id) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse get_enums response.""" + parser = HoiParamsParser(data) + + enums = [] + _, enum_count = parser.parse_next() + + for _ in range(enum_count): + _, enum_id = parser.parse_next() + _, name = parser.parse_next() + + # Parse enum values + _, value_count = parser.parse_next() + values = {} + for _ in range(value_count): + _, value_name = parser.parse_next() + _, value_value = parser.parse_next() + values[value_name] = value_value + + enums.append({ + 'enum_id': enum_id, + 'name': name, + 'values': values + }) + + return {'enums': enums} + + +class GetStructsCommand(HamiltonCommand): + """Get struct definitions (command_id=6).""" + + protocol = 
HamiltonProtocol.OBJECT_DISCOVERY + interface_id = 0 + command_id = 6 + action_code = 0 # QUERY + + def __init__(self, object_address: Address, target_interface_id: int): + super().__init__(object_address) + self.target_interface_id = target_interface_id + self._assign_params() + + def build_parameters(self) -> HoiParams: + """Build parameters for get_structs command.""" + return HoiParams().u8(self.target_interface_id) + + @classmethod + def parse_response_parameters(cls, data: bytes) -> dict: + """Parse get_structs response.""" + parser = HoiParamsParser(data) + + structs = [] + _, struct_count = parser.parse_next() + + for _ in range(struct_count): + _, struct_id = parser.parse_next() + _, name = parser.parse_next() + + # Parse struct fields + _, field_count = parser.parse_next() + fields = {} + for _ in range(field_count): + _, field_name = parser.parse_next() + _, field_type = parser.parse_next() + fields[field_name] = field_type + + structs.append({ + 'struct_id': struct_id, + 'name': name, + 'fields': fields + }) + + return {'structs': structs} + + +# ============================================================================ +# HIGH-LEVEL INTROSPECTION API +# ============================================================================ + +class HamiltonIntrospection: + """High-level API for Hamilton introspection.""" + + def __init__(self, backend): + """Initialize introspection API. + + Args: + backend: TCPBackend instance + """ + self.backend = backend + + async def get_object(self, address: Address) -> ObjectInfo: + """Get object metadata. 
+ + Args: + address: Object address to query + + Returns: + Object metadata + """ + command = GetObjectCommand(address) + response = await self.backend.send_command(command) + + return ObjectInfo( + name=response['name'], + version=response['version'], + method_count=response['method_count'], + subobject_count=response['subobject_count'], + address=address + ) + + async def get_method(self, address: Address, method_index: int) -> MethodInfo: + """Get method signature. + + Args: + address: Object address + method_index: Method index to query + + Returns: + Method signature + """ + command = GetMethodCommand(address, method_index) + response = await self.backend.send_command(command) + + return MethodInfo( + interface_id=response['interface_id'], + call_type=response['call_type'], + method_id=response['method_id'], + name=response['name'], + parameter_types=response.get('parameter_types', []), + parameter_labels=response.get('parameter_labels', []), + return_types=response.get('return_types', []), + return_labels=response.get('return_labels', []) + ) + + async def get_subobject_address(self, address: Address, subobject_index: int) -> Address: + """Get subobject address. + + Args: + address: Parent object address + subobject_index: Subobject index + + Returns: + Subobject address + """ + command = GetSubobjectAddressCommand(address, subobject_index) + response = await self.backend.send_command(command) + + # Type: ignore needed because response dict is typed as dict[str, Any] + # but we know 'address' key contains Address object + return response['address'] # type: ignore[no-any-return, return-value] + + async def get_interfaces(self, address: Address) -> List[InterfaceInfo]: + """Get available interfaces. 
+ + Args: + address: Object address + + Returns: + List of interface information + """ + command = GetInterfacesCommand(address) + response = await self.backend.send_command(command) + + return [ + InterfaceInfo( + interface_id=iface['interface_id'], + name=iface['name'], + version=iface['version'] + ) + for iface in response['interfaces'] + ] + + async def get_enums(self, address: Address, interface_id: int) -> List[EnumInfo]: + """Get enum definitions. + + Args: + address: Object address + interface_id: Interface ID + + Returns: + List of enum definitions + """ + command = GetEnumsCommand(address, interface_id) + response = await self.backend.send_command(command) + + return [ + EnumInfo( + enum_id=enum_def['enum_id'], + name=enum_def['name'], + values=enum_def['values'] + ) + for enum_def in response['enums'] + ] + + async def get_structs(self, address: Address, interface_id: int) -> List[StructInfo]: + """Get struct definitions. + + Args: + address: Object address + interface_id: Interface ID + + Returns: + List of struct definitions + """ + command = GetStructsCommand(address, interface_id) + response = await self.backend.send_command(command) + + return [ + StructInfo( + struct_id=struct_def['struct_id'], + name=struct_def['name'], + fields=struct_def['fields'] + ) + for struct_def in response['structs'] + ] + + async def get_all_methods(self, address: Address) -> List[MethodInfo]: + """Get all methods for an object. 
+ + Args: + address: Object address + + Returns: + List of all method signatures + """ + # First get object info to know how many methods there are + object_info = await self.get_object(address) + + methods = [] + for i in range(object_info.method_count): + try: + method = await self.get_method(address, i) + methods.append(method) + except Exception as e: + logger.warning(f"Failed to get method {i} for {address}: {e}") + + return methods + + async def discover_hierarchy(self, root_address: Address) -> Dict[str, Any]: + """Recursively discover object hierarchy. + + Args: + root_address: Root object address + + Returns: + Nested dictionary of discovered objects + """ + hierarchy = {} + + try: + # Get root object info + root_info = await self.get_object(root_address) + # Type: ignore needed because hierarchy is Dict[str, Any] for flexibility + hierarchy['info'] = root_info # type: ignore[assignment] + + # Discover subobjects + subobjects = {} + for i in range(root_info.subobject_count): + try: + subaddress = await self.get_subobject_address(root_address, i) + subobjects[f'subobject_{i}'] = await self.discover_hierarchy(subaddress) + except Exception as e: + logger.warning(f"Failed to discover subobject {i}: {e}") + + # Type: ignore needed because hierarchy is Dict[str, Any] for flexibility + hierarchy['subobjects'] = subobjects # type: ignore[assignment] + + # Discover methods + methods = await self.get_all_methods(root_address) + # Type: ignore needed because hierarchy is Dict[str, Any] for flexibility + hierarchy['methods'] = methods # type: ignore[assignment] + + except Exception as e: + logger.error(f"Failed to discover hierarchy for {root_address}: {e}") + # Type: ignore needed because hierarchy is Dict[str, Any] for flexibility + hierarchy['error'] = str(e) # type: ignore[assignment] + + return hierarchy + + async def discover_all_objects(self, root_addresses: List[Address]) -> Dict[str, Any]: + """Discover all objects starting from root addresses. 
+ + Args: + root_addresses: List of root addresses to start discovery from + + Returns: + Dictionary mapping address strings to discovered hierarchies + """ + all_objects = {} + + for root_address in root_addresses: + try: + hierarchy = await self.discover_hierarchy(root_address) + all_objects[str(root_address)] = hierarchy + except Exception as e: + logger.error(f"Failed to discover objects from {root_address}: {e}") + all_objects[str(root_address)] = {'error': str(e)} + + return all_objects + + def print_method_signatures(self, methods: List[MethodInfo]) -> None: + """Print method signatures in a readable format. + + Args: + methods: List of MethodInfo objects to print + """ + print("Method Signatures:") + print("=" * 50) + for method in methods: + print(f" {method.get_signature_string()}") + print(f" Interface: {method.interface_id}, Method ID: {method.method_id}") + print() + + def print_struct_definitions(self, structs: List[StructInfo]) -> None: + """Print struct definitions in a readable format. + + Args: + structs: List of StructInfo objects to print + """ + print("Struct Definitions:") + print("=" * 50) + for struct in structs: + print(struct.get_struct_string()) + print() + + def get_methods_by_name(self, methods: List[MethodInfo], name_pattern: str) -> List[MethodInfo]: + """Filter methods by name pattern. + + Args: + methods: List of MethodInfo objects to filter + name_pattern: Name pattern to search for (case-insensitive) + + Returns: + List of methods matching the name pattern + """ + return [method for method in methods if name_pattern.lower() in method.name.lower()] + + def get_methods_by_interface(self, methods: List[MethodInfo], interface_id: int) -> List[MethodInfo]: + """Filter methods by interface ID. 
+ + Args: + methods: List of MethodInfo objects to filter + interface_id: Interface ID to filter by + + Returns: + List of methods from the specified interface + """ + return [method for method in methods if method.interface_id == interface_id] diff --git a/pylabrobot/liquid_handling/backends/hamilton/wire.py b/pylabrobot/liquid_handling/backends/hamilton/wire.py new file mode 100644 index 00000000000..070fa161c47 --- /dev/null +++ b/pylabrobot/liquid_handling/backends/hamilton/wire.py @@ -0,0 +1,283 @@ +"""Hamilton TCP wire protocol - primitive byte serialization. + +This module provides low-level byte serialization/deserialization without any +protocol-specific wrapping. DataFragment headers, Registration options, and +Connection parameters are handled by higher-level modules. + +Example: + # Writing + data = Wire.write().u8(1).u16(100).string("test").finish() + + # Reading + reader = Wire.read(data) + val1 = reader.u8() + val2 = reader.u16() + val3 = reader.string() +""" + +from __future__ import annotations + +import struct +from io import BytesIO + + +class Writer: + """Raw byte writer for Hamilton protocol primitives. + + Provides fluent interface for building byte sequences. All integers use + little-endian encoding per Hamilton specification. 
+ """ + + def __init__(self): + self._buffer = BytesIO() + + def u8(self, value: int) -> 'Writer': + """Write unsigned 8-bit integer (0-255).""" + if not 0 <= value <= 255: + raise ValueError(f"u8 value must be 0-255, got {value}") + self._buffer.write(struct.pack(' 'Writer': + """Write unsigned 16-bit integer (little-endian).""" + if not 0 <= value <= 65535: + raise ValueError(f"u16 value must be 0-65535, got {value}") + self._buffer.write(struct.pack(' 'Writer': + """Write unsigned 32-bit integer (little-endian).""" + if not 0 <= value <= 4294967295: + raise ValueError(f"u32 value must be 0-4294967295, got {value}") + self._buffer.write(struct.pack(' 'Writer': + """Write unsigned 64-bit integer (little-endian).""" + if not 0 <= value <= 18446744073709551615: + raise ValueError("u64 value out of range") + self._buffer.write(struct.pack(' 'Writer': + """Write signed 8-bit integer (-128 to 127).""" + if not -128 <= value <= 127: + raise ValueError(f"i8 value must be -128 to 127, got {value}") + self._buffer.write(struct.pack(' 'Writer': + """Write signed 16-bit integer (little-endian).""" + if not -32768 <= value <= 32767: + raise ValueError(f"i16 value must be -32768 to 32767, got {value}") + self._buffer.write(struct.pack(' 'Writer': + """Write signed 32-bit integer (little-endian).""" + if not -2147483648 <= value <= 2147483647: + raise ValueError("i32 value out of range") + self._buffer.write(struct.pack(' 'Writer': + """Write signed 64-bit integer (little-endian).""" + if not -9223372036854775808 <= value <= 9223372036854775807: + raise ValueError("i64 value out of range") + self._buffer.write(struct.pack(' 'Writer': + """Write 32-bit float (little-endian).""" + self._buffer.write(struct.pack(' 'Writer': + """Write 64-bit double (little-endian).""" + self._buffer.write(struct.pack(' 'Writer': + """Write null-terminated UTF-8 string.""" + self._buffer.write(value.encode('utf-8')) + self._buffer.write(b'\x00') + return self + + def raw_bytes(self, value: bytes) 
-> 'Writer': + """Write raw bytes.""" + self._buffer.write(value) + return self + + def version_byte(self, major: int, minor: int) -> 'Writer': + """Write Hamilton version byte (two 4-bit fields packed into one byte). + + Args: + major: Major version (0-15, stored in upper 4 bits) + minor: Minor version (0-15, stored in lower 4 bits) + + Returns: + Self for method chaining + """ + if not 0 <= major <= 15: + raise ValueError(f"major version must be 0-15, got {major}") + if not 0 <= minor <= 15: + raise ValueError(f"minor version must be 0-15, got {minor}") + version_byte = (minor & 0xF) | ((major & 0xF) << 4) + return self.u8(version_byte) + + def finish(self) -> bytes: + """Return the built byte sequence.""" + return self._buffer.getvalue() + + +class Reader: + """Raw byte reader for Hamilton protocol primitives. + + Reads primitive values from byte sequences. All integers use little-endian + encoding per Hamilton specification. + """ + + def __init__(self, data: bytes): + self._data = data + self._offset = 0 + + def u8(self) -> int: + """Read unsigned 8-bit integer.""" + if self._offset + 1 > len(self._data): + raise ValueError(f"Not enough data for u8 at offset {self._offset}") + value: int = struct.unpack(' int: + """Read unsigned 16-bit integer (little-endian).""" + if self._offset + 2 > len(self._data): + raise ValueError(f"Not enough data for u16 at offset {self._offset}") + value: int = struct.unpack(' int: + """Read unsigned 32-bit integer (little-endian).""" + if self._offset + 4 > len(self._data): + raise ValueError(f"Not enough data for u32 at offset {self._offset}") + value: int = struct.unpack(' int: + """Read unsigned 64-bit integer (little-endian).""" + if self._offset + 8 > len(self._data): + raise ValueError(f"Not enough data for u64 at offset {self._offset}") + value: int = struct.unpack(' int: + """Read signed 8-bit integer.""" + if self._offset + 1 > len(self._data): + raise ValueError(f"Not enough data for i8 at offset {self._offset}") + value: 
int = struct.unpack(' int: + """Read signed 16-bit integer (little-endian).""" + if self._offset + 2 > len(self._data): + raise ValueError(f"Not enough data for i16 at offset {self._offset}") + value: int = struct.unpack(' int: + """Read signed 32-bit integer (little-endian).""" + if self._offset + 4 > len(self._data): + raise ValueError(f"Not enough data for i32 at offset {self._offset}") + value: int = struct.unpack(' int: + """Read signed 64-bit integer (little-endian).""" + if self._offset + 8 > len(self._data): + raise ValueError(f"Not enough data for i64 at offset {self._offset}") + value: int = struct.unpack(' float: + """Read 32-bit float (little-endian).""" + if self._offset + 4 > len(self._data): + raise ValueError(f"Not enough data for f32 at offset {self._offset}") + value: float = struct.unpack(' float: + """Read 64-bit double (little-endian).""" + if self._offset + 8 > len(self._data): + raise ValueError(f"Not enough data for f64 at offset {self._offset}") + value: float = struct.unpack(' str: + """Read null-terminated UTF-8 string.""" + # Find null terminator + null_pos = self._data.find(b'\x00', self._offset) + if null_pos == -1: + raise ValueError(f"No null terminator found for string at offset {self._offset}") + + # Extract string (excluding null terminator) + string_bytes = self._data[self._offset:null_pos] + self._offset = null_pos + 1 # Move past null terminator + + return string_bytes.decode('utf-8') + + def raw_bytes(self, n: int) -> bytes: + """Read n raw bytes.""" + if self._offset + n > len(self._data): + raise ValueError(f"Not enough data for {n} bytes at offset {self._offset}") + value = self._data[self._offset:self._offset+n] + self._offset += n + return value + + def version_byte(self) -> tuple[int, int]: + """Read Hamilton version byte and return (major, minor). 
+ + Returns: + Tuple of (major_version, minor_version), each 0-15 + """ + version_byte = self.u8() + minor = version_byte & 0xF + major = (version_byte >> 4) & 0xF + return (major, minor) + + def remaining(self) -> bytes: + """Return all remaining unread bytes.""" + remaining = self._data[self._offset:] + self._offset = len(self._data) + return remaining + + def has_remaining(self) -> bool: + """Check if there are unread bytes.""" + return self._offset < len(self._data) + + def offset(self) -> int: + """Get current read offset.""" + return self._offset + + +class Wire: + """Factory for creating Writer and Reader instances.""" + + @staticmethod + def write() -> Writer: + """Create a new Writer for building byte sequences.""" + return Writer() + + @staticmethod + def read(data: bytes) -> Reader: + """Create a new Reader for parsing byte sequences.""" + return Reader(data) + diff --git a/pylabrobot/resources/hamilton/__init__.py b/pylabrobot/resources/hamilton/__init__.py index 54000467468..e8c3df45b98 100644 --- a/pylabrobot/resources/hamilton/__init__.py +++ b/pylabrobot/resources/hamilton/__init__.py @@ -4,6 +4,7 @@ STARDeck, STARLetDeck, ) +from .nimbus_decks import NimbusDeck from .mfx_carriers import * from .mfx_modules import * from .plate_adapters import * diff --git a/pylabrobot/resources/hamilton/nimbus_decks.py b/pylabrobot/resources/hamilton/nimbus_decks.py new file mode 100644 index 00000000000..f28a8e605c3 --- /dev/null +++ b/pylabrobot/resources/hamilton/nimbus_decks.py @@ -0,0 +1,630 @@ +"""Nimbus deck class and utilities for Hamilton Nimbus instruments. + +This module provides the NimbusDeck class and factory function for creating +Nimbus deck instances with either explicit parameters or by parsing config files. 
+""" + +from __future__ import annotations + +import logging +import re +import warnings +from typing import Any, Dict, List, Literal, Optional + +from pylabrobot.resources.coordinate import Coordinate +from pylabrobot.resources.hamilton.hamilton_decks import HamiltonDeck +from pylabrobot.resources.resource import Resource +from pylabrobot.resources.trash import Trash +from pylabrobot.serializer import serialize + +logger = logging.getLogger("pylabrobot") + + +# ============================================================================ +# DECK CLASS +# ============================================================================ + + +class NimbusDeck(HamiltonDeck): + """Hamilton Nimbus deck. + + Supports track-based positioning (called "rails" in the API for consistency + with other Hamilton decks). The deck is defined in PyLabRobot coordinates, + but can convert to/from Hamilton coordinates when interfacing with hardware. + """ + + def __init__( + self, + num_rails: int = 30, + size_x: float = 831.85, + size_y: float = 424.18, + size_z: float = 300.0, + hamilton_origin: Coordinate = Coordinate(x=-151.51, y=-363.83, z=0.0), + y_min: float = -310.0, + y_max: float = 20.0, + z_max: float = 146.0, + rail_start_x: float = -125.7, + rail_width: float = 22.454, + rail_y: float = -360.487, + name: str = "deck", + category: str = "deck", + origin: Coordinate = Coordinate.zero(), + waste_type: Optional[Literal["default_long"]] = "default_long", + ) -> None: + """Create a new Nimbus deck. + + Default values are from Nimbus8.dck layout 8 and Nimbus8.cfg. 
+ + Args: + num_rails: Number of rails (maps to hardware tracks, default: 30) + size_x: Deck size in X dimension (mm, default: 831.85) + size_y: Deck size in Y dimension (mm, default: 424.18) + size_z: Deck size in Z dimension (mm, default: 300.0) + hamilton_origin: Hamilton origin coordinate for coordinate conversion + (default: Coordinate(x=-151.51, y=-363.83, z=0.0)) + y_min: Hamilton Y minimum coordinate bound (mm, default: -310.0) + y_max: Hamilton Y maximum coordinate bound (mm, default: 20.0) + z_max: Maximum Z height (mm, default: 146.0) + rail_start_x: Hamilton X coordinate of first rail start (mm, default: -125.7) + rail_width: Width between rails (mm, default: 22.454) + rail_y: Hamilton Y coordinate of all rails (mm, default: -360.487) + name: Deck name (default: "deck") + category: Deck category (default: "deck") + origin: PyLabRobot origin coordinate (default: Coordinate.zero()) + waste_type: Waste configuration type (default: "default_long"). If "default_long", + creates a waste block with 8 channel positions. If None, no waste is created. + """ + super().__init__( + num_rails=num_rails, + size_x=size_x, + size_y=size_y, + size_z=size_z, + name=name, + category=category, + origin=origin, + ) + + # Store Hamilton origin for coordinate conversion + self._hamilton_origin = hamilton_origin + + # Store coordinate bounds for validation + self._y_min = y_min + self._y_max = y_max + self._z_max = z_max + + # Store rail/track parameters for rails_to_location() + self._rail_start_x = rail_start_x + self._rail_width = rail_width + self._rail_y = rail_y + + # Store waste type for waste position lookup + self.waste_type = waste_type + + # Create waste resources if specified + if waste_type == "default_long": + self._create_default_long_waste() + + def _create_default_long_waste(self) -> None: + """Create default_long waste block with 8 channel positions. 
+ + Creates a waste block resource (ChannelLongWasteBlock) and 8 Container + resources for waste positions (DefaultLongWaste_0001). All coordinates + are converted from Hamilton to PyLabRobot coordinate system. + """ + # Waste block dimensions and location + # ChannelLongWasteBlock: X=546.225, Y=-313.64, Z=0, dX=19.05, dY=366.25, dZ=160 + waste_block_location_hamilton = Coordinate(x=546.225, y=-313.64, z=0.0) + + # Convert waste block location to PyLabRobot coordinates + waste_block_location_plr = self.from_hamilton_coordinate(waste_block_location_hamilton) + + # Create waste block resource + waste_block = Resource( + name="default_long_block", + size_x=19.05, + size_y=366.25, + size_z=160.0, + category="waste_block", + ) + + # Assign waste block to deck + self.assign_child_resource(waste_block, location=waste_block_location_plr) + + # Waste positions (from CSV sequences) + # DefaultLongWaste_0001 positions 1-8 + waste_positions_hamilton = [ + Coordinate(x=553.746, y=19.863, z=131.389), # Position 1 + Coordinate(x=553.746, y=1.880, z=131.389), # Position 2 + Coordinate(x=553.746, y=-76.149, z=131.389), # Position 3 + Coordinate(x=553.746, y=-94.132, z=131.389), # Position 4 + Coordinate(x=553.746, y=-152.349, z=131.389), # Position 5 + Coordinate(x=553.746, y=-170.332, z=131.389), # Position 6 + Coordinate(x=553.746, y=-219.549, z=131.389), # Position 7 + Coordinate(x=553.746, y=-237.532, z=131.389), # Position 8 + ] + + # Create container resources for each waste position + for i, pos_hamilton in enumerate(waste_positions_hamilton, start=1): + # Convert position to PyLabRobot coordinates (relative to deck origin) + pos_plr = self.from_hamilton_coordinate(pos_hamilton) + + # Position relative to waste block location + # Both waste_block_location_plr and pos_plr are relative to deck origin, + # so subtract to get position relative to waste block + pos_plr_rel = Coordinate( + x=pos_plr.x - waste_block_location_plr.x, + y=pos_plr.y - waste_block_location_plr.y, + 
z=pos_plr.z - waste_block_location_plr.z, + ) + + # Create Trash resource for this position (compatible with LiquidHandler.drop_tips) + waste_position = Trash( + name=f"default_long_{i}", + size_x=0.0, + size_y=0.0, + size_z=0.0, + category="waste_position", + ) + + # Assign waste position to waste block + waste_block.assign_child_resource(waste_position, location=pos_plr_rel) + + def rails_to_location(self, rails: int) -> Coordinate: + """Convert a rail identifier to an absolute (x, y, z) coordinate. + + Converts rail number (1-30) to PyLabRobot coordinates. Internally maps + hardware tracks to API rails for consistency with other Hamilton decks. + Uses instance attributes for rail positions, which can be set from config files. + + Args: + rails: Rail number (1-30, maps to hardware tracks) + + Returns: + PyLabRobot coordinate relative to deck origin + """ + # Calculate X position in Hamilton coordinates using instance attributes + x_hamilton = self._rail_start_x + (rails - 1) * self._rail_width + y_hamilton = self._rail_y + z_hamilton = 0.0 + + # Convert to PyLabRobot coordinates (absolute, relative to PLR world origin) + rail_coord_hamilton = Coordinate(x=x_hamilton, y=y_hamilton, z=z_hamilton) + + # X and Z remain the same relative to their origins + x_plr = rail_coord_hamilton.x - self._hamilton_origin.x + z_plr = rail_coord_hamilton.z - self._hamilton_origin.z + + # Y conversion: Hamilton Y is negative, with less negative = back, more negative = front + # PyLabRobot Y is positive, with larger = back, smaller = front + # Hamilton origin (Origin.Y) is at front-left (most negative Y), maps to PyLabRobot Y=0 + # Formula: y_plr = y_hamilton - hamilton_origin.y + y_plr = rail_coord_hamilton.y - self._hamilton_origin.y + + rail_coord_plr_abs = Coordinate(x=x_plr, y=y_plr, z=z_plr) + + # Return coordinates relative to deck origin + # Deck always sets location during initialization, so it's never None + assert self.location is not None + return Coordinate( + 
x=rail_coord_plr_abs.x - self.location.x, + y=rail_coord_plr_abs.y - self.location.y, + z=rail_coord_plr_abs.z - self.location.z, + ) + + def to_hamilton_coordinate(self, coord: Coordinate) -> Coordinate: + """Convert PyLabRobot coordinate to Hamilton coordinate. + + Useful when sending commands to hardware that expects Hamilton coordinates. + + Args: + coord: PyLabRobot coordinate (relative to deck origin) + + Returns: + Hamilton coordinate + """ + # Convert to absolute coordinate (relative to deck's PyLabRobot origin) + # Deck always sets location during initialization, so it's never None + assert self.location is not None + abs_coord = Coordinate( + x=coord.x + self.location.x, + y=coord.y + self.location.y, + z=coord.z + self.location.z, + ) + + # Convert to Hamilton coordinate system + # X and Z: add back the origin offset + x_hamilton = abs_coord.x + self._hamilton_origin.x + z_hamilton = abs_coord.z + self._hamilton_origin.z + + # Y conversion: inverse of from_hamilton_coordinate + # y_plr = y_hamilton - hamilton_origin.y + # Solving for y_hamilton: y_hamilton = y_plr + hamilton_origin.y + y_hamilton = abs_coord.y + self._hamilton_origin.y + + return Coordinate(x=x_hamilton, y=y_hamilton, z=z_hamilton) + + def from_hamilton_coordinate(self, coord: Coordinate) -> Coordinate: + """Convert Hamilton coordinate to PyLabRobot coordinate. + + Useful when reading config files or parsing hardware responses. 
+ + Args: + coord: Hamilton coordinate + + Returns: + PyLabRobot coordinate (relative to deck origin) + """ + # Convert to PyLabRobot coordinate system (absolute) + # X and Z remain the same relative to their origins + x_plr = coord.x - self._hamilton_origin.x + z_plr = coord.z - self._hamilton_origin.z + + # Y conversion: Hamilton Y is negative, with less negative = back, more negative = front + # PyLabRobot Y is positive, with larger = back, smaller = front + # Hamilton origin (Origin.Y) is at front-left (most negative Y), maps to PyLabRobot Y=0 + # Formula: y_plr = y_hamilton - hamilton_origin.y + y_plr = coord.y - self._hamilton_origin.y + + plr_coord_abs = Coordinate(x=x_plr, y=y_plr, z=z_plr) + + # Adjust to deck origin (make relative to deck origin) + # Deck always sets location during initialization, so it's never None + assert self.location is not None + return Coordinate( + x=plr_coord_abs.x - self.location.x, + y=plr_coord_abs.y - self.location.y, + z=plr_coord_abs.z - self.location.z, + ) +# TODO: There is probably a better way to not hardcode waste positions AND make serialization/de not terrible + def serialize(self) -> dict: + """Serialize this deck.""" + serialized = super().serialize() + # Remove with_trash and with_trash96 since NimbusDeck.__init__() doesn't accept them + # NimbusDeck uses waste_type instead to control waste block creation + serialized.pop("with_trash", None) + serialized.pop("with_trash96", None) + return { + **serialized, + "hamilton_origin": serialize(self._hamilton_origin), + "y_min": self._y_min, + "y_max": self._y_max, + "z_max": self._z_max, + "rail_start_x": self._rail_start_x, + "rail_width": self._rail_width, + "rail_y": self._rail_y, + "waste_type": self.waste_type, + } + + @classmethod + def deserialize(cls, data: dict, allow_marshal: bool = False) -> "NimbusDeck": + """Deserialize a NimbusDeck from a dictionary. + + Overrides parent deserialize to prevent waste block creation conflict. 
+ Sets waste_type=None before calling parent deserialize to prevent __init__() + from creating the waste block, then restores waste_type from serialized data. + + Args: + data: Serialized deck data dictionary + allow_marshal: If True, allow marshal module for function deserialization + + Returns: + Deserialized NimbusDeck instance + """ + data_copy = data.copy() + original_waste_type = data_copy.get("waste_type") + # Set waste_type=None to prevent __init__() from creating waste block + # The waste block will come from children data (already serialized) + data_copy["waste_type"] = None + + # Call parent deserialize (waste block won't be created in __init__) + deck = super().deserialize(data_copy, allow_marshal=allow_marshal) + + # Restore waste_type attribute from serialized data to keep instance consistent + deck.waste_type = original_waste_type + + return deck + + @classmethod + def from_files( + cls, + cfg_path: str, + dck_path: str, + origin: Coordinate = Coordinate.zero(), + num_rails: Optional[int] = None, + size_x: Optional[float] = None, + size_y: Optional[float] = None, + size_z: Optional[float] = None, + hamilton_origin: Optional[Coordinate] = None, + y_min: Optional[float] = None, + y_max: Optional[float] = None, + z_max: Optional[float] = None, + rail_start_x: Optional[float] = None, + rail_width: Optional[float] = None, + rail_y: Optional[float] = None, + waste_type: Optional[Literal["default_long"]] = None, + ) -> NimbusDeck: + """Create a Nimbus deck by parsing config files. + + Parses .cfg and .dck files to extract deck definition. The layout number + is extracted from the "Layout" field in the .cfg file. Explicit parameters + can be provided to override values parsed from the files. 
+ + Args: + cfg_path: Path to Nimbus .cfg file + dck_path: Path to Nimbus .dck file + origin: PyLabRobot origin coordinate (default: Coordinate.zero()) + num_rails: Override number of rails from parsed config + size_x: Override deck size in X dimension from parsed config + size_y: Override deck size in Y dimension from parsed config + size_z: Override deck size in Z dimension from parsed config + hamilton_origin: Override Hamilton origin coordinate from parsed config + y_min: Override Hamilton Y minimum coordinate bound from parsed config + y_max: Override Hamilton Y maximum coordinate bound from parsed config + z_max: Override maximum Z height from parsed config + rail_start_x: Override Hamilton X coordinate of first rail start from parsed config + rail_width: Override width between rails from parsed config + rail_y: Override Hamilton Y coordinate of all rails from parsed config + waste_type: Waste configuration type (default: None, auto-detected from config). + If "default_long", creates a waste block with 8 channel positions. + If None, attempts to detect from config (WasteLong ExSite), otherwise warns. + + Returns: + NimbusDeck instance with parsed or overridden dimensions + + Raises: + FileNotFoundError: If config files are not found + ValueError: If required values are not found in config files + """ + # Helper function to parse config files + def _parse_config_files(cfg_path: str, dck_path: str) -> Dict[str, Any]: + """Parse Nimbus config files to extract deck definition. + + The layout number is extracted from the "Layout" field in the .cfg file. + Returns a dictionary with float values for dimensions and a list of strings for exsite_ids. 
+ """ + # Read .cfg file + with open(cfg_path, "r", encoding="utf-8") as f: + cfg_content = f.read() + + # Read .dck file + with open(dck_path, "r", encoding="utf-8") as f: + dck_content = f.read() + + # Extract from .cfg file + def extract_cfg_value(key: str) -> Optional[float]: + """Extract a value from .cfg file.""" + pattern = rf'{key},\s*"([^"]+)"' + match = re.search(pattern, cfg_content) + if match: + try: + return float(match.group(1)) + except ValueError: + return None + return None + + def extract_cfg_string(key: str) -> Optional[str]: + """Extract a string value from .cfg file.""" + pattern = rf'{key},\s*"([^"]+)"' + match = re.search(pattern, cfg_content) + if match: + return match.group(1) + return None + + # Extract from .dck file (layout-specific section) + def extract_dck_value(key: str, layout_num: int) -> Optional[float]: + """Extract a value from .dck file for specific layout.""" + # Find the layout section: DataDef,DECK,2,{layout_num} + layout_pattern = rf"DataDef,DECK,2,{layout_num}," + layout_match = re.search(layout_pattern, dck_content) + if not layout_match: + return None + + # Find the section end + start_pos = layout_match.end() + # Look for next DataDef or end of file + next_datadef = re.search(r"DataDef,", dck_content[start_pos:]) + if next_datadef: + section_end = start_pos + next_datadef.start() + else: + section_end = len(dck_content) + + section_content = dck_content[start_pos:section_end] + + # Extract value from this section + pattern = rf'{key},\s*"([^"]+)"' + match = re.search(pattern, section_content) + if match: + try: + return float(match.group(1)) + except ValueError: + return None + return None + + def extract_dck_exsite_ids(layout_num: int) -> List[str]: + """Extract ExSite.Id values from .dck file for specific layout.""" + # Find the layout section: DataDef,DECK,2,{layout_num} + layout_pattern = rf"DataDef,DECK,2,{layout_num}," + layout_match = re.search(layout_pattern, dck_content) + if not layout_match: + return [] + + 
# Find the section end + start_pos = layout_match.end() + # Look for next DataDef or end of file + next_datadef = re.search(r"DataDef,", dck_content[start_pos:]) + if next_datadef: + section_end = start_pos + next_datadef.start() + else: + section_end = len(dck_content) + + section_content = dck_content[start_pos:section_end] + + # Extract all ExSite.Id values + pattern = r'ExSite\.\d+\.Id,\s*"([^"]+)"' + matches = re.findall(pattern, section_content) + return matches + + # Extract layout from .cfg file (required) + layout_str = extract_cfg_string("Layout") + if layout_str is None: + raise ValueError( + "Could not extract Layout from .cfg file. " + "The Layout field is required to determine which deck layout to use from the .dck file." + ) + try: + layout = int(layout_str) + except ValueError: + raise ValueError( + f"Could not parse Layout value '{layout_str}' from .cfg file as an integer." + ) + + # Extract ExSite.Id values to check for waste configuration + exsite_ids = extract_dck_exsite_ids(layout) + + # Extract from .cfg file + y_min_val = extract_cfg_value("Y_MIN") + y_max_val = extract_cfg_value("Y_MAX") + z_max_val = extract_cfg_value("probeMaxZPosition") + + # Extract from .dck file (layout-specific) + dim_dx = extract_dck_value("Dim\\.Dx", layout) + dim_dy = extract_dck_value("Dim\\.Dy", layout) + dim_dz = extract_dck_value("Dim\\.Dz", layout) + origin_x = extract_dck_value("Origin\\.X", layout) + origin_y = extract_dck_value("Origin\\.Y", layout) + origin_z = extract_dck_value("Origin\\.Z", layout) + track_count = extract_dck_value("Track\\.Cnt", layout) + track_width = extract_dck_value("Track\\.Dx", layout) + track_start_x_val = extract_dck_value("Track\\.Start\\.X", layout) + track_y_val = extract_dck_value("Track\\.Y", layout) + + # Validate required values + if dim_dx is None or dim_dy is None or dim_dz is None: + raise ValueError( + f"Could not extract deck dimensions from config files. 
" + f"Found: Dx={dim_dx}, Dy={dim_dy}, Dz={dim_dz}" + ) + + if origin_x is None or origin_y is None or origin_z is None: + raise ValueError( + f"Could not extract deck origin from config files. " + f"Found: Origin.X={origin_x}, Origin.Y={origin_y}, Origin.Z={origin_z}" + ) + + result: Dict[str, Any] = { + "size_x": dim_dx, + "size_y": dim_dy, + "size_z": dim_dz, + "origin_x": origin_x, + "origin_y": origin_y, + "origin_z": origin_z, + } + + # Add optional values if found + if y_min_val is not None: + result["y_min"] = y_min_val + if y_max_val is not None: + result["y_max"] = y_max_val + if z_max_val is not None: + result["z_max"] = z_max_val + if track_count is not None: + result["track_count"] = track_count + if track_width is not None: + result["track_width"] = track_width + if track_start_x_val is not None: + result["track_start_x"] = track_start_x_val + if track_y_val is not None: + result["track_y"] = track_y_val + + # Store ExSite.Id values for waste detection + result["exsite_ids"] = exsite_ids + + return result + + # Parse config files + parsed_config = _parse_config_files(cfg_path, dck_path) + + # Determine waste_type from config if not explicitly provided + if waste_type is None: + exsite_ids = parsed_config.get("exsite_ids", []) + if "WasteLong" in exsite_ids: + waste_type = "default_long" + else: + # Find waste-related ExSite names + waste_sites = [site_id for site_id in exsite_ids if "Waste" in site_id] + if waste_sites: + warnings.warn( + f"Waste site(s) found in config ({', '.join(waste_sites)}), but no PyLabRobot " + f"resource is defined for them. To add waste support, either:\n" + f" 1. Specify waste_type='default_long' when calling from_files() if using WasteLong, or\n" + f" 2. Add support for the waste site type(s) in PyLabRobot by implementing " + f"the corresponding resource definition.", + UserWarning, + stacklevel=2, + ) + else: + warnings.warn( + "No waste block found in config files. " + "Waste will not be created. 
To add waste, specify waste_type='default_long' " + "when calling from_files().", + UserWarning, + stacklevel=2, + ) + + # Extract Hamilton origin from parsed config if not overridden + if hamilton_origin is None: + hamilton_origin = Coordinate( + x=parsed_config["origin_x"], + y=parsed_config["origin_y"], + z=parsed_config["origin_z"], + ) + + # Use parsed values, but allow explicit parameters to override + num_rails_val = num_rails if num_rails is not None else int(parsed_config.get("track_count", 0)) + size_x_val = size_x if size_x is not None else parsed_config["size_x"] + size_y_val = size_y if size_y is not None else parsed_config["size_y"] + size_z_val = size_z if size_z is not None else parsed_config["size_z"] + y_min_val = y_min if y_min is not None else parsed_config.get("y_min") + y_max_val = y_max if y_max is not None else parsed_config.get("y_max") + z_max_val = z_max if z_max is not None else parsed_config.get("z_max") + rail_start_x_val = rail_start_x if rail_start_x is not None else parsed_config.get("track_start_x") + rail_width_val = rail_width if rail_width is not None else parsed_config.get("track_width") + rail_y_val = rail_y if rail_y is not None else parsed_config.get("track_y") + + # Validate that we have all required values + if num_rails_val is None: + raise ValueError("Could not extract track_count from config files and num_rails not provided") + if size_x_val is None: + raise ValueError("Could not extract size_x from config files and size_x not provided") + if size_y_val is None: + raise ValueError("Could not extract size_y from config files and size_y not provided") + if size_z_val is None: + raise ValueError("Could not extract size_z from config files and size_z not provided") + if y_min_val is None: + raise ValueError("Could not extract y_min from config files and y_min not provided") + if y_max_val is None: + raise ValueError("Could not extract y_max from config files and y_max not provided") + if z_max_val is None: + raise 
ValueError("Could not extract z_max from config files and z_max not provided") + if rail_start_x_val is None: + raise ValueError("Could not extract track_start_x from config files and rail_start_x not provided") + if rail_width_val is None: + raise ValueError("Could not extract track_width from config files and rail_width not provided") + if rail_y_val is None: + raise ValueError("Could not extract track_y from config files and rail_y not provided") + + return cls( + num_rails=num_rails_val, + size_x=size_x_val, + size_y=size_y_val, + size_z=size_z_val, + hamilton_origin=hamilton_origin, + y_min=y_min_val, + y_max=y_max_val, + z_max=z_max_val, + rail_start_x=rail_start_x_val, + rail_width=rail_width_val, + rail_y=rail_y_val, + origin=origin, + waste_type=waste_type, + )