diff --git a/components/site-workflows/sensors/sensor-ironic-node-update.yaml b/components/site-workflows/sensors/sensor-ironic-node-update.yaml
index 776b0a818..5dd9a5b0a 100644
--- a/components/site-workflows/sensors/sensor-ironic-node-update.yaml
+++ b/components/site-workflows/sensors/sensor-ironic-node-update.yaml
@@ -8,18 +8,15 @@ metadata:
     workflows.argoproj.io/description: |+
       Triggers on the following Ironic Events:
 
-      - baremetal.node.provision_set.end which happens after a state change on the node
-      - baremetal.node.update.end which happens when node fields are updated.
-
-      Currently parses out the following fields:
-
-      - provision_state
+      - baremetal.node.create.end which happens when a baremetal node is created
+      - baremetal.node.update.end which happens when node fields are updated
+      - baremetal.node.delete.end which happens when a node is deleted
 
       Resulting code should be very similar to:
       ```
-      argo -n argo-events submit --from workflowtemplate/sync-provision-state-to-nautobot \
-        -p device_uuid=00000000-0000-0000-0000-000000000000 -p provision_state=available
+      argo -n argo-events submit --from workflowtemplate/openstack-oslo-event \
+        -p event-json "JSON-payload"
       ```
 
       Defined in `workflows/argo-events/sensors/ironic-node-update.yaml`
 
@@ -30,16 +27,19 @@ spec:
       name: ironic-dep
       transform:
         # the event is a string-ified JSON so we need to decode it
-        jq: ".body[\"oslo.message\"] | fromjson"
+        # replace the whole event body
+        jq: |
+          .body = (.body["oslo.message"] | fromjson)
       filters:
         # applies each of the items in data with 'and' but there's only one
         dataLogicalOperator: "and"
         data:
-          - path: "event_type"
+          - path: "body.event_type"
            type: "string"
            value:
+              - "baremetal.node.create.end"
              - "baremetal.node.update.end"
-              - "baremetal.node.provision_set.end"
+              - "baremetal.node.delete.end"
  template:
    serviceAccountName: sensor-submit-workflow
  triggers:
@@ -49,25 +49,10 @@ spec:
        k8s:
          operation: create
          parameters:
-            # first parameter's value is replaced with the uuid
+            # first parameter is the parsed oslo.message
            - dest: spec.arguments.parameters.0.value
              src:
-                dataKey: payload.ironic_object\.data.uuid
-                dependencyName: ironic-dep
-            # second parameter's value is replaced with the provision_state
-            - dest: spec.arguments.parameters.1.value
-              src:
-                dataKey: payload.ironic_object\.data.provision_state
-                dependencyName: ironic-dep
-            # third parameter's value is replaced with the lessee
-            - dest: spec.arguments.parameters.2.value
-              src:
-                dataKey: payload.ironic_object\.data.lessee
-                dependencyName: ironic-dep
-            # fourth parameter's value is replaced with the resource_class
-            - dest: spec.arguments.parameters.3.value
-              src:
-                dataKey: payload.ironic_object\.data.resource_class
+                dataKey: body
                dependencyName: ironic-dep
          source:
            # create a workflow in argo-events prefixed with ironic-node-update-
@@ -81,10 +66,7 @@ spec:
                # defines the parameters being replaced above
                arguments:
                  parameters:
-                    - name: device_uuid
-                    - name: provision_state
-                    - name: lessee
-                    - name: resource_class
+                    - name: event-json
                # references the workflow
                workflowTemplateRef:
-                  name: sync-provision-state-to-nautobot
+                  name: openstack-oslo-event
diff --git a/components/site-workflows/sensors/sensor-ironic-oslo-event.yaml b/components/site-workflows/sensors/sensor-ironic-oslo-event.yaml
index 02d87af6d..43cb66728 100644
--- a/components/site-workflows/sensors/sensor-ironic-oslo-event.yaml
+++ b/components/site-workflows/sensors/sensor-ironic-oslo-event.yaml
@@ -10,14 +10,19 @@ metadata:
       - baremetal.node.provision_set.end which happens after a state change on the node
 
+      This sensor handles ALL provision state changes. The event handlers internally
+      filter for specific states:
+      - deploying: Sets up storage (volume connectors) for instances with storage enabled
+      - inspecting: Updates Nautobot device with inspection data (inventory, ports)
+
       Resulting code should be very similar to:
       ```
-      argo -n argo-events submit --from workflowtemplate/ironic-oslo-event \
-        -p event-json "JSON-payload" -p device_id= -p project_id=
+      argo -n argo-events submit --from workflowtemplate/openstack-oslo-event \
+        -p event-json "JSON-payload"
       ```
 
-      Defined in `workflows/argo-events/sensors/sensor-ironic-oslo-event.yaml`
+      Defined in `components/site-workflows/sensors/sensor-ironic-oslo-event.yaml`
 
 
 spec:
   dependencies:
     - eventName: openstack
@@ -39,10 +44,6 @@ spec:
            type: "string"
            value:
              - "baremetal.node.provision_set.end"
-          - path: "body.ironic_object.previous_provision_state"
-            type: "string"
-            value:
-              - "deploying"
  template:
    serviceAccountName: sensor-submit-workflow
  triggers:
@@ -56,9 +57,9 @@ spec:
            src:
              dataKey: body
              dependencyName: ironic-dep
-          - dest: spec.arguments.parameters.1.value # device_id
+          - dest: spec.arguments.parameters.1.value # previous_provision_state
            src:
-              dataKey: body.ironic_object.uuid
+              dataKey: body.ironic_object.previous_provision_state
              dependencyName: ironic-dep
          - dest: spec.arguments.parameters.2.value # project_id
            src:
@@ -79,7 +80,7 @@ spec:
                arguments:
                  parameters:
                    - name: event-json
-                    - name: device_id
+                    - name: previous_provision_state
                    - name: project_id
                templates:
                  - name: main
@@ -93,6 +94,7 @@ spec:
                          - name: event-json
                            value: "{{workflow.parameters.event-json}}"
                      - name: convert-project-id
+                        when: "\"{{workflow.parameters.previous_provision_state}}\" == deploying"
                        inline:
                          script:
                            image: python:alpine
@@ -102,7 +104,7 @@ spec:
                              project_id_without_dashes = "{{workflow.parameters.project_id}}"
                              print(str(uuid.UUID(project_id_without_dashes)))
                      - name: ansible-storage-update
-                        when: "\"{{steps.oslo-events.outputs.parameters.storage}}\" == wanted"
+                        when: "\"{{steps.oslo-events.outputs.parameters.storage}}\" == wanted && \"{{workflow.parameters.previous_provision_state}}\" == deploying"
                        templateRef:
                          name: ansible-workflow-template
                          template: ansible-run
diff --git a/python/understack-workflows/pyproject.toml b/python/understack-workflows/pyproject.toml
index 248372b84..b2eb0cda2 100644
--- a/python/understack-workflows/pyproject.toml
+++ b/python/understack-workflows/pyproject.toml
@@ -30,7 +30,6 @@ dependencies = [
 
 [project.scripts]
 sync-keystone = "understack_workflows.main.sync_keystone:main"
-sync-provision-state = "understack_workflows.main.sync_provision_state:main"
 undersync-switch = "understack_workflows.main.undersync_switch:main"
 enroll-server = "understack_workflows.main.enroll_server:main"
 bmc-password = "understack_workflows.main.print_bmc_password:main"
diff --git a/python/understack-workflows/tests/test_nautobot_device_interface_sync.py b/python/understack-workflows/tests/test_nautobot_device_interface_sync.py
new file mode 100644
index 000000000..16a2c694d
--- /dev/null
+++ b/python/understack-workflows/tests/test_nautobot_device_interface_sync.py
@@ -0,0 +1,788 @@
+"""Tests for nautobot_device_interface_sync module."""
+
+import uuid
+from unittest.mock import MagicMock
+from unittest.mock import patch
+
+import pytest
+
+from understack_workflows.oslo_event.nautobot_device_interface_sync import (
+    EXIT_STATUS_FAILURE,
+)
+from
understack_workflows.oslo_event.nautobot_device_interface_sync import ( + EXIT_STATUS_SUCCESS, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import InterfaceInfo +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _assign_ip_to_interface, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _build_interface_map_from_inventory, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _build_interfaces_from_ports, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _cleanup_stale_interfaces, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _create_nautobot_interface, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _delete_nautobot_interface, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _extract_node_uuid_from_event, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _get_interface_description, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _get_interface_type, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _handle_cable_management, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + _update_nautobot_interface, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + handle_interface_sync_event, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + sync_idrac_interface, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + sync_interfaces_from_data, +) +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + sync_interfaces_to_nautobot, +) + + +class TestGetInterfaceType: + """Test cases for _get_interface_type function.""" + + def test_slot_interface(self): + assert _get_interface_type("NIC.Slot.1-1") == "25gbase-x-sfp28" + + def test_embedded_interface(self): + assert _get_interface_type("NIC.Embedded.1-1-1") == "25gbase-x-sfp28" + + def test_integrated_interface(self): + assert _get_interface_type("NIC.Integrated.1-1") == "25gbase-x-sfp28" + + def test_unknown_interface(self): + assert _get_interface_type("eth0") == "unknown" + + +class TestGetInterfaceDescription: + """Test cases for _get_interface_description function.""" + + def test_embedded_nic_description(self): + result = _get_interface_description("NIC.Embedded.1-1-1") + assert result == "Embedded NIC 1 Port 1 Partition 1" + + def test_embedded_nic_two_parts(self): + result = _get_interface_description("NIC.Embedded.2-1") + assert result == "Embedded NIC 2 Port 1" + + def test_integrated_nic_description(self): + result = _get_interface_description("NIC.Integrated.1-2") + assert result == "Integrated NIC 1 Port 2" + + def test_slot_nic_description(self): + result = _get_interface_description("NIC.Slot.1-1") + assert result == "NIC in Slot 1 Port 1" + + def test_idrac_description(self): + result = _get_interface_description("iDRAC") + assert result == "Dedicated iDRAC interface" + + def test_short_name_returns_empty(self): + result = _get_interface_description("eth0") + assert result == "" + + +class TestBuildInterfaceMapFromInventory: + """Test cases for _build_interface_map_from_inventory function.""" + + def test_build_map_with_interfaces(self): + inventory = { + "inventory": { + "interfaces": [ + {"mac_address": "AA:BB:CC:DD:EE:01", "name": 
"NIC.Slot.1-1"}, + {"mac_address": "AA:BB:CC:DD:EE:02", "name": "NIC.Slot.1-2"}, + ] + } + } + + result = _build_interface_map_from_inventory(inventory) + + assert len(result) == 2 + assert result["aa:bb:cc:dd:ee:01"] == "NIC.Slot.1-1" + assert result["aa:bb:cc:dd:ee:02"] == "NIC.Slot.1-2" + + def test_build_map_empty_inventory(self): + result = _build_interface_map_from_inventory({}) + + assert result == {} + + def test_build_map_skips_missing_mac(self): + inventory = { + "inventory": { + "interfaces": [ + {"name": "NIC.Slot.1-1"}, # Missing mac_address key + {"mac_address": "AA:BB:CC:DD:EE:02", "name": "NIC.Slot.1-2"}, + ] + } + } + + result = _build_interface_map_from_inventory(inventory) + + assert len(result) == 1 + + +class TestBuildInterfacesFromPorts: + """Test cases for _build_interfaces_from_ports function.""" + + def test_build_interfaces_with_bios_name(self): + node_uuid = str(uuid.uuid4()) + port_uuid = str(uuid.uuid4()) + + port = MagicMock() + port.uuid = port_uuid + port.address = "aa:bb:cc:dd:ee:ff" + port.extra = {"bios_name": "NIC.Slot.1-1"} + port.local_link_connection = { + "port_id": "Eth1/1", + "switch_info": "switch1", + "switch_id": "11:22:33:44:55:66", + } + port.pxe_enabled = True + port.physical_network = "provisioning" + port.name = None + + inventory_map = {} + + result = _build_interfaces_from_ports(node_uuid, [port], inventory_map) + + assert len(result) == 1 + iface = result[0] + assert iface.uuid == port_uuid + assert iface.name == "NIC.Slot.1-1" + assert iface.mac_address == "AA:BB:CC:DD:EE:FF" + assert iface.device_uuid == node_uuid + assert iface.pxe_enabled is True + assert iface.switch_port_id == "Eth1/1" + assert iface.switch_info == "switch1" + + def test_build_interfaces_fallback_to_inventory_name(self): + node_uuid = str(uuid.uuid4()) + port_uuid = str(uuid.uuid4()) + + port = MagicMock() + port.uuid = port_uuid + port.address = "aa:bb:cc:dd:ee:ff" + port.extra = {} + port.local_link_connection = {} + port.pxe_enabled = False + port.physical_network = None + port.name = None + + inventory_map = {"aa:bb:cc:dd:ee:ff": "NIC.Embedded.1-1-1"} + + result = _build_interfaces_from_ports(node_uuid, [port], inventory_map) + + assert result[0].name == "NIC.Embedded.1-1-1" + + def test_build_interfaces_fallback_to_port_uuid(self): + node_uuid = str(uuid.uuid4()) + port_uuid = str(uuid.uuid4()) + + port = MagicMock() + port.uuid = port_uuid + port.address = "aa:bb:cc:dd:ee:ff" + port.extra = {} + port.local_link_connection = {} + port.pxe_enabled = False + port.physical_network = None + port.name = None + + result = _build_interfaces_from_ports(node_uuid, [port], {}) + + assert result[0].name == port_uuid + + +class TestInterfaceInfo: + """Test cases for InterfaceInfo dataclass.""" + + def test_interface_info_defaults(self): + iface = InterfaceInfo( + uuid="test-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + ) + + assert iface.enabled is True + assert iface.mgmt_only is False + assert iface.pxe_enabled is False + assert iface.interface_type == "unknown" + + +class TestCreateNautobotInterface: + """Test cases for _create_nautobot_interface function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_create_interface_success(self, mock_nautobot): + interface = InterfaceInfo( + uuid="intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + description="NIC in Slot 1 Port 1", + interface_type="25gbase-x-sfp28", + ) + + 
mock_nautobot.dcim.interfaces.create.return_value = MagicMock(id="intf-uuid") + + _create_nautobot_interface(interface, mock_nautobot) + + mock_nautobot.dcim.interfaces.create.assert_called_once() + call_kwargs = mock_nautobot.dcim.interfaces.create.call_args.kwargs + assert call_kwargs["id"] == "intf-uuid" + assert call_kwargs["name"] == "NIC.Slot.1-1" + assert call_kwargs["mac_address"] == "AA:BB:CC:DD:EE:FF" + assert call_kwargs["device"] == "device-uuid" + + def test_create_interface_handles_unique_constraint(self, mock_nautobot): + interface = InterfaceInfo( + uuid="intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + ) + + mock_nautobot.dcim.interfaces.create.side_effect = Exception( + "unique constraint violation" + ) + mock_nautobot.dcim.interfaces.get.return_value = MagicMock(id="intf-uuid") + + _create_nautobot_interface(interface, mock_nautobot) + + mock_nautobot.dcim.interfaces.get.assert_called_once_with(id="intf-uuid") + + +class TestUpdateNautobotInterface: + """Test cases for _update_nautobot_interface function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + @pytest.fixture + def mock_nautobot_intf(self): + intf = MagicMock() + intf.name = "old-name" + intf.mac_address = "00:00:00:00:00:00" + intf.type = MagicMock(value="1000base-t") + intf.description = "" + intf.mgmt_only = False + return intf + + def test_update_name(self, mock_nautobot, mock_nautobot_intf): + interface = InterfaceInfo( + uuid="intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + ) + mock_nautobot.dcim.interfaces.get.return_value = None + + _update_nautobot_interface(interface, mock_nautobot_intf, mock_nautobot) + + assert mock_nautobot_intf.name == "NIC.Slot.1-1" + mock_nautobot_intf.save.assert_called_once() + + def test_update_handles_name_conflict(self, mock_nautobot, mock_nautobot_intf): + interface = InterfaceInfo( + uuid="intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + ) + + conflicting_intf = MagicMock() + conflicting_intf.id = "other-uuid" + mock_nautobot.dcim.interfaces.get.return_value = conflicting_intf + + _update_nautobot_interface(interface, mock_nautobot_intf, mock_nautobot) + + conflicting_intf.delete.assert_called_once() + + def test_no_update_when_unchanged(self, mock_nautobot): + interface = InterfaceInfo( + uuid="intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + interface_type="25gbase-x-sfp28", + ) + + nautobot_intf = MagicMock() + nautobot_intf.name = "NIC.Slot.1-1" + nautobot_intf.mac_address = "AA:BB:CC:DD:EE:FF" + nautobot_intf.type = MagicMock(value="25gbase-x-sfp28") + nautobot_intf.description = "" + nautobot_intf.mgmt_only = False + mock_nautobot.dcim.interfaces.get.return_value = None + + _update_nautobot_interface(interface, nautobot_intf, mock_nautobot) + + nautobot_intf.save.assert_not_called() + + +class TestSyncIdracInterface: + """Test cases for sync_idrac_interface function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_create_idrac_interface(self, mock_nautobot): + device_uuid = str(uuid.uuid4()) + bmc_mac = "aa:bb:cc:dd:ee:ff" + + mock_nautobot.dcim.interfaces.get.return_value = None + mock_nautobot.dcim.interfaces.create.return_value = MagicMock(id="idrac-uuid") + + sync_idrac_interface(device_uuid, bmc_mac, mock_nautobot) + + mock_nautobot.dcim.interfaces.create.assert_called_once() + call_kwargs = 
mock_nautobot.dcim.interfaces.create.call_args.kwargs + assert call_kwargs["device"] == device_uuid + assert call_kwargs["name"] == "iDRAC" + assert call_kwargs["type"] == "1000base-t" + assert call_kwargs["mac_address"] == "AA:BB:CC:DD:EE:FF" + assert call_kwargs["mgmt_only"] is True + + def test_update_existing_idrac_interface(self, mock_nautobot): + device_uuid = str(uuid.uuid4()) + bmc_mac = "aa:bb:cc:dd:ee:ff" + + existing_intf = MagicMock() + existing_intf.mac_address = "00:00:00:00:00:00" + mock_nautobot.dcim.interfaces.get.return_value = existing_intf + + sync_idrac_interface(device_uuid, bmc_mac, mock_nautobot) + + assert existing_intf.mac_address == "AA:BB:CC:DD:EE:FF" + existing_intf.save.assert_called_once() + + def test_skip_when_no_bmc_mac(self, mock_nautobot): + device_uuid = str(uuid.uuid4()) + + sync_idrac_interface(device_uuid, "", mock_nautobot) + + mock_nautobot.dcim.interfaces.get.assert_not_called() + + def test_idrac_with_bmc_ip(self, mock_nautobot): + device_uuid = str(uuid.uuid4()) + bmc_mac = "aa:bb:cc:dd:ee:ff" + bmc_ip = "10.0.0.100" + + mock_nautobot.dcim.interfaces.get.return_value = None + mock_intf = MagicMock() + mock_intf.id = "idrac-uuid" + mock_nautobot.dcim.interfaces.create.return_value = mock_intf + mock_nautobot.ipam.ip_addresses.get.return_value = None + mock_nautobot.ipam.ip_addresses.create.return_value = MagicMock(id="ip-uuid") + mock_nautobot.ipam.ip_address_to_interface.get.return_value = None + + sync_idrac_interface(device_uuid, bmc_mac, mock_nautobot, bmc_ip) + + mock_nautobot.ipam.ip_addresses.create.assert_called_once() + + +class TestAssignIpToInterface: + """Test cases for _assign_ip_to_interface function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_create_and_assign_ip(self, mock_nautobot): + interface_id = "intf-uuid" + ip_address = "10.0.0.100" + + mock_nautobot.ipam.ip_addresses.get.return_value = None + mock_nautobot.ipam.ip_addresses.create.return_value = MagicMock(id="ip-uuid") + mock_nautobot.ipam.ip_address_to_interface.get.return_value = None + + _assign_ip_to_interface(mock_nautobot, interface_id, ip_address) + + mock_nautobot.ipam.ip_addresses.create.assert_called_once_with( + address=ip_address, + status="Active", + ) + mock_nautobot.ipam.ip_address_to_interface.create.assert_called_once() + + def test_use_existing_ip(self, mock_nautobot): + interface_id = "intf-uuid" + ip_address = "10.0.0.100" + + existing_ip = MagicMock() + existing_ip.id = "existing-ip-uuid" + mock_nautobot.ipam.ip_addresses.get.return_value = existing_ip + mock_nautobot.ipam.ip_address_to_interface.get.return_value = None + + _assign_ip_to_interface(mock_nautobot, interface_id, ip_address) + + mock_nautobot.ipam.ip_addresses.create.assert_not_called() + mock_nautobot.ipam.ip_address_to_interface.create.assert_called_once() + + def test_skip_if_already_associated(self, mock_nautobot): + interface_id = "intf-uuid" + ip_address = "10.0.0.100" + + existing_ip = MagicMock() + existing_ip.id = "ip-uuid" + mock_nautobot.ipam.ip_addresses.get.return_value = existing_ip + + existing_assoc = MagicMock() + existing_assoc.interface.id = interface_id + mock_nautobot.ipam.ip_address_to_interface.get.return_value = existing_assoc + + _assign_ip_to_interface(mock_nautobot, interface_id, ip_address) + + mock_nautobot.ipam.ip_address_to_interface.create.assert_not_called() + + def test_skip_empty_ip(self, mock_nautobot): + _assign_ip_to_interface(mock_nautobot, "intf-uuid", "") + + 
mock_nautobot.ipam.ip_addresses.get.assert_not_called() + + +class TestHandleCableManagement: + """Test cases for _handle_cable_management function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_create_cable(self, mock_nautobot): + interface = InterfaceInfo( + uuid="server-intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + switch_info="switch1", + switch_port_id="Eth1/1", + ) + + nautobot_intf = MagicMock() + nautobot_intf.cable = None + + switch_intf = MagicMock() + switch_intf.id = "switch-intf-uuid" + mock_nautobot.dcim.interfaces.get.return_value = switch_intf + + _handle_cable_management(interface, nautobot_intf, mock_nautobot) + + mock_nautobot.dcim.cables.create.assert_called_once() + call_kwargs = mock_nautobot.dcim.cables.create.call_args.kwargs + assert call_kwargs["termination_a_id"] == "server-intf-uuid" + assert call_kwargs["termination_b_id"] == "switch-intf-uuid" + assert call_kwargs["status"] == "Connected" + + def test_skip_without_switch_info(self, mock_nautobot): + interface = InterfaceInfo( + uuid="server-intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + ) + + nautobot_intf = MagicMock() + + _handle_cable_management(interface, nautobot_intf, mock_nautobot) + + mock_nautobot.dcim.interfaces.get.assert_not_called() + + def test_skip_when_switch_interface_not_found(self, mock_nautobot): + interface = InterfaceInfo( + uuid="server-intf-uuid", + name="NIC.Slot.1-1", + mac_address="AA:BB:CC:DD:EE:FF", + device_uuid="device-uuid", + switch_info="switch1", + switch_port_id="Eth1/1", + ) + + nautobot_intf = MagicMock() + nautobot_intf.cable = None + mock_nautobot.dcim.interfaces.get.return_value = None + + _handle_cable_management(interface, nautobot_intf, mock_nautobot) + + mock_nautobot.dcim.cables.create.assert_not_called() + + +class TestSyncInterfacesFromData: + """Test cases for sync_interfaces_from_data function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_sync_interfaces_success(self, mock_nautobot): + node_uuid = str(uuid.uuid4()) + port_uuid = str(uuid.uuid4()) + + inventory = { + "inventory": { + "interfaces": [ + {"mac_address": "aa:bb:cc:dd:ee:ff", "name": "NIC.Slot.1-1"} + ], + "bmc_mac": "11:22:33:44:55:66", + } + } + + port = MagicMock() + port.uuid = port_uuid + port.address = "aa:bb:cc:dd:ee:ff" + port.extra = {} + port.local_link_connection = {} + port.pxe_enabled = False + port.physical_network = None + port.name = None + + mock_nautobot.dcim.interfaces.get.return_value = None + mock_nautobot.dcim.interfaces.create.return_value = MagicMock() + mock_nautobot.dcim.interfaces.filter.return_value = [] + + result = sync_interfaces_from_data(node_uuid, inventory, [port], mock_nautobot) + + assert result == EXIT_STATUS_SUCCESS + # Verify cleanup was called + mock_nautobot.dcim.interfaces.filter.assert_called_with(device_id=node_uuid) + + def test_sync_interfaces_empty_uuid(self, mock_nautobot): + result = sync_interfaces_from_data("", {}, [], mock_nautobot) + + assert result == EXIT_STATUS_FAILURE + + +class TestSyncInterfacesToNautobot: + """Test cases for sync_interfaces_to_nautobot function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + @patch( + "understack_workflows.oslo_event.nautobot_device_interface_sync.IronicClient" + ) + @patch( + "understack_workflows.oslo_event.nautobot_device_interface_sync.sync_interfaces_from_data" + ) + def 
test_sync_interfaces_success( + self, mock_sync_from_data, mock_ironic_class, mock_nautobot + ): + node_uuid = str(uuid.uuid4()) + mock_ironic = MagicMock() + mock_ironic_class.return_value = mock_ironic + mock_ironic.get_node_inventory.return_value = {} + mock_ironic.list_ports.return_value = [] + mock_sync_from_data.return_value = EXIT_STATUS_SUCCESS + + result = sync_interfaces_to_nautobot(node_uuid, mock_nautobot) + + assert result == EXIT_STATUS_SUCCESS + mock_sync_from_data.assert_called_once() + + def test_sync_interfaces_empty_uuid(self, mock_nautobot): + result = sync_interfaces_to_nautobot("", mock_nautobot) + + assert result == EXIT_STATUS_FAILURE + + +class TestExtractNodeUuidFromEvent: + """Test cases for _extract_node_uuid_from_event function.""" + + def test_extract_node_uuid_from_port_event(self): + event_data = { + "payload": { + "ironic_object.data": { + "node_uuid": "12345678-1234-5678-9abc-123456789abc" + } + } + } + + result = _extract_node_uuid_from_event(event_data) + + assert result == "12345678-1234-5678-9abc-123456789abc" + + def test_extract_uuid_from_node_event(self): + event_data = { + "payload": { + "ironic_object.data": {"uuid": "12345678-1234-5678-9abc-123456789abc"} + } + } + + result = _extract_node_uuid_from_event(event_data) + + assert result == "12345678-1234-5678-9abc-123456789abc" + + def test_extract_returns_none_for_missing(self): + event_data = {"payload": {"ironic_object.data": {}}} + + result = _extract_node_uuid_from_event(event_data) + + assert result is None + + +class TestHandleInterfaceSyncEvent: + """Test cases for handle_interface_sync_event function.""" + + @pytest.fixture + def mock_conn(self): + return MagicMock() + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + @patch( + "understack_workflows.oslo_event.nautobot_device_interface_sync.sync_interfaces_to_nautobot" + ) + def test_handle_event_success(self, mock_sync, mock_conn, mock_nautobot): + node_uuid = str(uuid.uuid4()) + event_data = { + "event_type": "baremetal.node.inspect.end", + "payload": { + "ironic_object.data": { + "uuid": node_uuid, + } + }, + } + mock_sync.return_value = EXIT_STATUS_SUCCESS + + result = handle_interface_sync_event(mock_conn, mock_nautobot, event_data) + + assert result == EXIT_STATUS_SUCCESS + mock_sync.assert_called_once_with(node_uuid, mock_nautobot) + + def test_handle_event_no_uuid(self, mock_conn, mock_nautobot): + event_data = {"payload": {"ironic_object.data": {}}} + + result = handle_interface_sync_event(mock_conn, mock_nautobot, event_data) + + assert result == EXIT_STATUS_FAILURE + + +class TestDeleteNautobotInterface: + """Test cases for _delete_nautobot_interface function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_delete_interface_with_cable(self, mock_nautobot): + nautobot_intf = MagicMock() + nautobot_intf.id = "intf-uuid" + nautobot_intf.cable = MagicMock() + + _delete_nautobot_interface(nautobot_intf, mock_nautobot) + + nautobot_intf.cable.delete.assert_called_once() + nautobot_intf.delete.assert_called_once() + + def test_delete_interface_without_cable(self, mock_nautobot): + nautobot_intf = MagicMock() + nautobot_intf.id = "intf-uuid" + nautobot_intf.cable = None + + _delete_nautobot_interface(nautobot_intf, mock_nautobot) + + nautobot_intf.delete.assert_called_once() + + +class TestCleanupStaleInterfaces: + """Test cases for _cleanup_stale_interfaces function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_delete_stale_interface(self, 
mock_nautobot): + node_uuid = str(uuid.uuid4()) + valid_ids = {"valid-intf-1", "valid-intf-2"} + + stale_intf = MagicMock() + stale_intf.id = "stale-intf" + stale_intf.name = "NIC.Slot.1-1" + stale_intf.cable = None + + valid_intf = MagicMock() + valid_intf.id = "valid-intf-1" + valid_intf.name = "NIC.Slot.1-2" + + mock_nautobot.dcim.interfaces.filter.return_value = [stale_intf, valid_intf] + + _cleanup_stale_interfaces(node_uuid, valid_ids, mock_nautobot) + + stale_intf.delete.assert_called_once() + valid_intf.delete.assert_not_called() + + def test_skip_idrac_interface(self, mock_nautobot): + node_uuid = str(uuid.uuid4()) + valid_ids = set() + + idrac_intf = MagicMock() + idrac_intf.id = "idrac-intf" + idrac_intf.name = "iDRAC" + + mock_nautobot.dcim.interfaces.filter.return_value = [idrac_intf] + + _cleanup_stale_interfaces(node_uuid, valid_ids, mock_nautobot) + + idrac_intf.delete.assert_not_called() + + def test_no_stale_interfaces(self, mock_nautobot): + node_uuid = str(uuid.uuid4()) + valid_ids = {"intf-1", "intf-2"} + + intf1 = MagicMock() + intf1.id = "intf-1" + intf1.name = "NIC.Slot.1-1" + + intf2 = MagicMock() + intf2.id = "intf-2" + intf2.name = "NIC.Slot.1-2" + + mock_nautobot.dcim.interfaces.filter.return_value = [intf1, intf2] + + _cleanup_stale_interfaces(node_uuid, valid_ids, mock_nautobot) + + intf1.delete.assert_not_called() + intf2.delete.assert_not_called() + + def test_handles_delete_failure(self, mock_nautobot): + node_uuid = str(uuid.uuid4()) + valid_ids = set() + + stale_intf = MagicMock() + stale_intf.id = "stale-intf" + stale_intf.name = "NIC.Slot.1-1" + stale_intf.cable = None + stale_intf.delete.side_effect = Exception("Delete failed") + + mock_nautobot.dcim.interfaces.filter.return_value = [stale_intf] + + # Should not raise, just log warning + _cleanup_stale_interfaces(node_uuid, valid_ids, mock_nautobot) diff --git a/python/understack-workflows/tests/test_nautobot_device_sync.py b/python/understack-workflows/tests/test_nautobot_device_sync.py new file mode 100644 index 000000000..9c0925858 --- /dev/null +++ b/python/understack-workflows/tests/test_nautobot_device_sync.py @@ -0,0 +1,622 @@ +"""Tests for nautobot_device_sync module.""" + +import uuid +from unittest.mock import MagicMock +from unittest.mock import patch + +import pytest + +from understack_workflows.oslo_event.nautobot_device_sync import EXIT_STATUS_FAILURE +from understack_workflows.oslo_event.nautobot_device_sync import EXIT_STATUS_SUCCESS +from understack_workflows.oslo_event.nautobot_device_sync import DeviceInfo +from understack_workflows.oslo_event.nautobot_device_sync import _create_nautobot_device +from understack_workflows.oslo_event.nautobot_device_sync import ( + _extract_node_uuid_from_event, +) +from understack_workflows.oslo_event.nautobot_device_sync import _generate_device_name +from understack_workflows.oslo_event.nautobot_device_sync import _get_record_value +from understack_workflows.oslo_event.nautobot_device_sync import _normalise_manufacturer +from understack_workflows.oslo_event.nautobot_device_sync import ( + _populate_from_inventory, +) +from understack_workflows.oslo_event.nautobot_device_sync import _populate_from_node +from understack_workflows.oslo_event.nautobot_device_sync import ( + _set_location_from_switches, +) +from understack_workflows.oslo_event.nautobot_device_sync import _update_nautobot_device +from understack_workflows.oslo_event.nautobot_device_sync import ( + delete_device_from_nautobot, +) +from understack_workflows.oslo_event.nautobot_device_sync 
import ( + handle_node_delete_event, +) +from understack_workflows.oslo_event.nautobot_device_sync import handle_node_event +from understack_workflows.oslo_event.nautobot_device_sync import sync_device_to_nautobot + + +class TestNormaliseManufacturer: + """Test cases for _normalise_manufacturer function.""" + + def test_normalise_dell_uppercase(self): + assert _normalise_manufacturer("DELL INC.") == "Dell" + + def test_normalise_dell_lowercase(self): + assert _normalise_manufacturer("dell") == "Dell" + + def test_normalise_dell_mixed_case(self): + assert _normalise_manufacturer("Dell Inc.") == "Dell" + + def test_normalise_hp(self): + assert _normalise_manufacturer("HP") == "HP" + + def test_unsupported_manufacturer_raises(self): + with pytest.raises(ValueError, match="not supported"): + _normalise_manufacturer("Lenovo") + + +class TestPopulateFromNode: + """Test cases for _populate_from_node function.""" + + @pytest.fixture + def device_info(self): + return DeviceInfo(uuid="test-uuid") + + @pytest.fixture + def mock_node(self): + node = MagicMock() + node.properties = { + "memory_mb": 65536, + "cpus": 32, + "cpu_arch": "x86_64", + "local_gb": 500, + } + node.traits = ["CUSTOM_TRAIT1", "CUSTOM_TRAIT2"] + node.provision_state = "active" + node.lessee = "12345678-1234-5678-9abc-123456789abc" + return node + + def test_populate_all_fields(self, device_info, mock_node): + _populate_from_node(device_info, mock_node) + + assert device_info.memory_mb == 65536 + assert device_info.cpus == 32 + assert device_info.cpu_arch == "x86_64" + assert device_info.local_gb == 500 + assert device_info.traits == ["CUSTOM_TRAIT1", "CUSTOM_TRAIT2"] + assert device_info.status == "Active" + assert device_info.tenant_id == "12345678-1234-5678-9abc-123456789abc" + + def test_populate_with_empty_properties(self, device_info): + node = MagicMock() + node.properties = {} + node.traits = None + node.provision_state = "enroll" + node.lessee = None + + _populate_from_node(device_info, node) + + assert device_info.manufacturer is None + assert device_info.memory_mb is None + assert device_info.cpus is None + assert device_info.traits == [] + assert device_info.tenant_id is None + + def test_populate_with_invalid_lessee(self, device_info): + node = MagicMock() + node.properties = {} + node.traits = None + node.provision_state = "active" + node.lessee = "invalid-uuid" + + _populate_from_node(device_info, node) + + assert device_info.tenant_id is None + + +class TestPopulateFromInventory: + """Test cases for _populate_from_inventory function.""" + + @pytest.fixture + def device_info(self): + return DeviceInfo(uuid="test-uuid") + + def test_populate_from_inventory_full(self, device_info): + inventory = { + "inventory": { + "system_vendor": { + "manufacturer": "Dell Inc.", + "product_name": "PowerEdge R7615 (SKU=0AF7)", + "sku": "ABC1234", + "serial_number": "SN123456", + } + } + } + + _populate_from_inventory(device_info, inventory) + + assert device_info.manufacturer == "Dell" + assert device_info.model == "PowerEdge R7615" + assert device_info.service_tag == "ABC1234" + assert device_info.serial_number == "SN123456" + + def test_populate_from_inventory_agent_format(self, device_info): + """Test AGENT inspection format (no sku, serial_number as service tag).""" + inventory = { + "inventory": { + "system_vendor": { + "manufacturer": "Dell Inc.", + "product_name": "PowerEdge R640", + "serial_number": "SERVICETAG123", + } + } + } + + _populate_from_inventory(device_info, inventory) + + assert device_info.service_tag == 
"SERVICETAG123" + assert device_info.serial_number is None # Only set when sku exists + + def test_populate_from_inventory_empty(self, device_info): + _populate_from_inventory(device_info, None) + + assert device_info.model is None + assert device_info.service_tag is None + + def test_populate_from_inventory_system_product_name(self, device_info): + """Test that 'System' product name is ignored.""" + inventory = { + "inventory": { + "system_vendor": { + "product_name": "System", + } + } + } + + _populate_from_inventory(device_info, inventory) + + assert device_info.model is None + + def test_manufacturer_fallback(self, device_info): + """Test manufacturer is set from inventory.""" + device_info.manufacturer = "Dell" # Already set + inventory = { + "inventory": { + "system_vendor": { + "manufacturer": "HP", # Different + } + } + } + + _populate_from_inventory(device_info, inventory) + + # Inventory always sets manufacturer + assert device_info.manufacturer == "HP" + + +class TestGenerateDeviceName: + """Test cases for _generate_device_name function.""" + + def test_generate_name_with_both_fields(self): + device_info = DeviceInfo( + uuid="test-uuid", + manufacturer="Dell", + service_tag="ABC1234", + ) + + _generate_device_name(device_info) + + assert device_info.name == "Dell-ABC1234" + + def test_generate_name_missing_manufacturer(self): + device_info = DeviceInfo( + uuid="test-uuid", + service_tag="ABC1234", + ) + + _generate_device_name(device_info) + + assert device_info.name is None + + def test_generate_name_missing_service_tag(self): + device_info = DeviceInfo( + uuid="test-uuid", + manufacturer="Dell", + ) + + _generate_device_name(device_info) + + assert device_info.name is None + + +class TestSetLocationFromSwitches: + """Test cases for _set_location_from_switches function.""" + + @pytest.fixture + def device_info(self): + return DeviceInfo(uuid="test-uuid") + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_set_location_from_switch_info(self, device_info, mock_nautobot): + ports = [ + MagicMock( + local_link_connection={ + "switch_info": "switch1.example.com", + "switch_id": "aa:bb:cc:dd:ee:ff", + } + ) + ] + + mock_device = MagicMock() + mock_device.location.id = "location-uuid" + mock_device.rack.id = "rack-uuid" + mock_nautobot.dcim.devices.get.return_value = mock_device + + _set_location_from_switches(device_info, ports, mock_nautobot) + + assert device_info.location_id == "location-uuid" + assert device_info.rack_id == "rack-uuid" + + def test_set_location_no_switch_info(self, device_info, mock_nautobot): + ports = [MagicMock(local_link_connection={})] + + _set_location_from_switches(device_info, ports, mock_nautobot) + + assert device_info.location_id is None + assert device_info.rack_id is None + + def test_set_location_switch_not_found(self, device_info, mock_nautobot): + ports = [ + MagicMock( + local_link_connection={ + "switch_info": "unknown-switch", + } + ) + ] + mock_nautobot.dcim.devices.get.return_value = None + mock_nautobot.dcim.interfaces.filter.return_value = [] + + _set_location_from_switches(device_info, ports, mock_nautobot) + + assert device_info.location_id is None + + +class TestGetRecordValue: + """Test cases for _get_record_value function.""" + + def test_get_value_from_record(self): + record = MagicMock() + record.value = "test-value" + + assert _get_record_value(record) == "test-value" + + def test_get_id_from_record(self): + record = MagicMock() + record.id = "test-id" + + assert _get_record_value(record, "id") == 
"test-id" + + def test_get_value_from_none(self): + assert _get_record_value(None) is None + + def test_get_value_from_primitive(self): + assert _get_record_value("simple-string") == "simple-string" + + +class TestCreateNautobotDevice: + """Test cases for _create_nautobot_device function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + def test_create_device_success(self, mock_nautobot): + device_info = DeviceInfo( + uuid="test-uuid", + name="Dell-ABC123", + manufacturer="Dell", + model="PowerEdge R640", + location_id="location-uuid", + role="server", + ) + + mock_nautobot.dcim.devices.create.return_value = MagicMock(id="test-uuid") + + _create_nautobot_device(device_info, mock_nautobot) + + mock_nautobot.dcim.devices.create.assert_called_once() + call_kwargs = mock_nautobot.dcim.devices.create.call_args.kwargs + assert call_kwargs["id"] == "test-uuid" + assert call_kwargs["name"] == "Dell-ABC123" + assert call_kwargs["location"] == "location-uuid" + + def test_create_device_without_location_raises(self, mock_nautobot): + device_info = DeviceInfo( + uuid="test-uuid", + name="Dell-ABC123", + ) + + with pytest.raises(ValueError, match="without location"): + _create_nautobot_device(device_info, mock_nautobot) + + def test_create_device_fallback_name_to_uuid(self, mock_nautobot): + device_info = DeviceInfo( + uuid="test-uuid", + manufacturer="Dell", + model="PowerEdge R640", + location_id="location-uuid", + ) + + _create_nautobot_device(device_info, mock_nautobot) + + call_kwargs = mock_nautobot.dcim.devices.create.call_args.kwargs + assert call_kwargs["name"] == "test-uuid" + + +class TestUpdateNautobotDevice: + """Test cases for _update_nautobot_device function.""" + + @pytest.fixture + def mock_nautobot_device(self): + device = MagicMock() + device.status = MagicMock(name="Planned") + device.name = "Old-Name" + device.serial = None + device.location = MagicMock(id="old-location") + device.rack = MagicMock(id="old-rack") + device.tenant = None + device.custom_fields = {} + return device + + def test_update_status(self, mock_nautobot_device): + device_info = DeviceInfo(uuid="test-uuid", status="Active") + + result = _update_nautobot_device(device_info, mock_nautobot_device) + + assert result is True + mock_nautobot_device.save.assert_called_once() + + def test_update_name(self, mock_nautobot_device): + device_info = DeviceInfo(uuid="test-uuid", name="New-Name") + + result = _update_nautobot_device(device_info, mock_nautobot_device) + + assert result is True + assert mock_nautobot_device.name == "New-Name" + + def test_update_tenant(self, mock_nautobot_device): + device_info = DeviceInfo( + uuid="test-uuid", + tenant_id="12345678-1234-5678-9abc-123456789abc", + ) + + result = _update_nautobot_device(device_info, mock_nautobot_device) + + assert result is True + assert mock_nautobot_device.tenant == "12345678-1234-5678-9abc-123456789abc" + + def test_no_changes(self, mock_nautobot_device): + device_info = DeviceInfo(uuid="test-uuid") + + result = _update_nautobot_device(device_info, mock_nautobot_device) + + assert result is False + mock_nautobot_device.save.assert_not_called() + + +class TestExtractNodeUuidFromEvent: + """Test cases for _extract_node_uuid_from_event function.""" + + def test_extract_from_payload(self): + event_data = { + "payload": { + "ironic_object.data": {"uuid": "12345678-1234-5678-9abc-123456789abc"} + } + } + + result = _extract_node_uuid_from_event(event_data) + + assert result == "12345678-1234-5678-9abc-123456789abc" + + def 
test_extract_from_ironic_object(self): + event_data = {"ironic_object": {"uuid": "12345678-1234-5678-9abc-123456789abc"}} + + result = _extract_node_uuid_from_event(event_data) + + assert result == "12345678-1234-5678-9abc-123456789abc" + + def test_extract_returns_none_for_missing_uuid(self): + event_data = {"payload": {"ironic_object.data": {}}} + + result = _extract_node_uuid_from_event(event_data) + + assert result is None + + +class TestSyncDeviceToNautobot: + """Test cases for sync_device_to_nautobot function.""" + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + @patch("understack_workflows.oslo_event.nautobot_device_sync.IronicClient") + @patch("understack_workflows.oslo_event.nautobot_device_sync.fetch_device_info") + @patch( + "understack_workflows.oslo_event.nautobot_device_sync.sync_interfaces_from_data" + ) + def test_sync_creates_new_device( + self, mock_sync_interfaces, mock_fetch, mock_ironic_class, mock_nautobot + ): + node_uuid = str(uuid.uuid4()) + device_info = DeviceInfo( + uuid=node_uuid, + name="Dell-ABC123", + manufacturer="Dell", + model="PowerEdge R640", + location_id="location-uuid", + status="Active", + ) + mock_fetch.return_value = (device_info, {}, []) + mock_nautobot.dcim.devices.get.return_value = None + mock_nautobot.dcim.devices.create.return_value = MagicMock() + mock_sync_interfaces.return_value = EXIT_STATUS_SUCCESS + + result = sync_device_to_nautobot(node_uuid, mock_nautobot) + + assert result == EXIT_STATUS_SUCCESS + mock_nautobot.dcim.devices.create.assert_called_once() + + @patch("understack_workflows.oslo_event.nautobot_device_sync.IronicClient") + @patch("understack_workflows.oslo_event.nautobot_device_sync.fetch_device_info") + @patch( + "understack_workflows.oslo_event.nautobot_device_sync.sync_interfaces_from_data" + ) + def test_sync_updates_existing_device( + self, mock_sync_interfaces, mock_fetch, mock_ironic_class, mock_nautobot + ): + node_uuid = str(uuid.uuid4()) + device_info = DeviceInfo( + uuid=node_uuid, + name="Dell-ABC123", + status="Active", + ) + mock_fetch.return_value = (device_info, {}, []) + + existing_device = MagicMock() + existing_device.status = MagicMock(name="Planned") + existing_device.name = "Dell-ABC123" + existing_device.serial = None + existing_device.location = None + existing_device.rack = None + existing_device.tenant = None + existing_device.custom_fields = {} + mock_nautobot.dcim.devices.get.return_value = existing_device + mock_sync_interfaces.return_value = EXIT_STATUS_SUCCESS + + result = sync_device_to_nautobot(node_uuid, mock_nautobot) + + assert result == EXIT_STATUS_SUCCESS + mock_nautobot.dcim.devices.create.assert_not_called() + + def test_sync_with_empty_uuid_returns_error(self, mock_nautobot): + result = sync_device_to_nautobot("", mock_nautobot) + + assert result == EXIT_STATUS_FAILURE + + @patch("understack_workflows.oslo_event.nautobot_device_sync.IronicClient") + @patch("understack_workflows.oslo_event.nautobot_device_sync.fetch_device_info") + def test_sync_without_location_returns_error( + self, mock_fetch, mock_ironic_class, mock_nautobot + ): + node_uuid = str(uuid.uuid4()) + device_info = DeviceInfo(uuid=node_uuid) # No location + mock_fetch.return_value = (device_info, {}, []) + mock_nautobot.dcim.devices.get.return_value = None + + result = sync_device_to_nautobot(node_uuid, mock_nautobot) + + assert result == EXIT_STATUS_FAILURE + + +class TestDeleteDeviceFromNautobot: + """Test cases for delete_device_from_nautobot function.""" + + @pytest.fixture + def 
mock_nautobot(self): + return MagicMock() + + def test_delete_existing_device(self, mock_nautobot): + node_uuid = str(uuid.uuid4()) + mock_device = MagicMock() + mock_nautobot.dcim.devices.get.return_value = mock_device + + result = delete_device_from_nautobot(node_uuid, mock_nautobot) + + assert result == EXIT_STATUS_SUCCESS + mock_device.delete.assert_called_once() + + def test_delete_nonexistent_device(self, mock_nautobot): + node_uuid = str(uuid.uuid4()) + mock_nautobot.dcim.devices.get.return_value = None + + result = delete_device_from_nautobot(node_uuid, mock_nautobot) + + assert result == EXIT_STATUS_SUCCESS + + def test_delete_with_empty_uuid(self, mock_nautobot): + result = delete_device_from_nautobot("", mock_nautobot) + + assert result == EXIT_STATUS_FAILURE + + +class TestHandleNodeEvent: + """Test cases for handle_node_event function.""" + + @pytest.fixture + def mock_conn(self): + return MagicMock() + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + @patch( + "understack_workflows.oslo_event.nautobot_device_sync.sync_device_to_nautobot" + ) + def test_handle_node_event_success(self, mock_sync, mock_conn, mock_nautobot): + node_uuid = str(uuid.uuid4()) + event_data = { + "event_type": "baremetal.node.provision_set.end", + "payload": { + "ironic_object.data": { + "uuid": node_uuid, + } + }, + } + mock_sync.return_value = EXIT_STATUS_SUCCESS + + result = handle_node_event(mock_conn, mock_nautobot, event_data) + + assert result == EXIT_STATUS_SUCCESS + mock_sync.assert_called_once_with(node_uuid, mock_nautobot) + + def test_handle_node_event_no_uuid(self, mock_conn, mock_nautobot): + event_data = {"payload": {"ironic_object.data": {}}} + + result = handle_node_event(mock_conn, mock_nautobot, event_data) + + assert result == EXIT_STATUS_FAILURE + + +class TestHandleNodeDeleteEvent: + """Test cases for handle_node_delete_event function.""" + + @pytest.fixture + def mock_conn(self): + return MagicMock() + + @pytest.fixture + def mock_nautobot(self): + return MagicMock() + + @patch( + "understack_workflows.oslo_event.nautobot_device_sync.delete_device_from_nautobot" + ) + def test_handle_delete_event_success(self, mock_delete, mock_conn, mock_nautobot): + node_uuid = str(uuid.uuid4()) + event_data = { + "payload": { + "ironic_object.data": { + "uuid": node_uuid, + } + }, + } + mock_delete.return_value = EXIT_STATUS_SUCCESS + + result = handle_node_delete_event(mock_conn, mock_nautobot, event_data) + + assert result == EXIT_STATUS_SUCCESS + mock_delete.assert_called_once_with(node_uuid, mock_nautobot) diff --git a/python/understack-workflows/tests/test_oslo_event_ironic_node.py b/python/understack-workflows/tests/test_oslo_event_ironic_node.py index 8948f2299..a9ff73da4 100644 --- a/python/understack-workflows/tests/test_oslo_event_ironic_node.py +++ b/python/understack-workflows/tests/test_oslo_event_ironic_node.py @@ -27,33 +27,31 @@ def test_from_event_dict_success(self): oslo_message = json.loads(sample_data["oslomessage"]) # Create event data in the format expected by from_event_dict - ironic_data = oslo_message["payload"]["ironic_object.data"] event_data = { - "instance_uuid": ironic_data["instance_uuid"], "payload": oslo_message["payload"], } event = IronicProvisionSetEvent.from_event_dict(event_data) - # Verify the parsed values match the real data - # UUIDs are formatted with hyphens when converted to UUID objects - assert str(event.owner) == "32e02632-f4f0-4415-bab5-895d1e7247b7" - assert str(event.lessee) == "5f5955bc-89e1-48e5-9a12-110a3945e4d7" - 
assert str(event.instance_uuid) == "5027885e-52a8-48f9-adf4-14d8f5f4ccb8" - assert str(event.node_uuid) == "461737c4-037c-41bf-9c17-f4f33ff20dd7" + # Verify the parsed values match the real data (strings as stored in JSON) + # owner and lessee are undashed UUIDs in the sample + assert event.owner == "32e02632f4f04415bab5895d1e7247b7" + assert event.lessee == "5f5955bc89e148e59a12110a3945e4d7" + # instance_uuid and node_uuid have dashes in the sample + assert event.instance_uuid == "5027885e-52a8-48f9-adf4-14d8f5f4ccb8" + assert event.node_uuid == "461737c4-037c-41bf-9c17-f4f33ff20dd7" assert event.event == "done" def test_from_event_dict_no_payload(self): """Test event parsing with missing payload.""" - event_data = {"instance_uuid": uuid.uuid4()} + event_data = {"instance_uuid": str(uuid.uuid4())} - with pytest.raises(ValueError, match="invalid event"): + with pytest.raises(ValueError, match="Invalid event. No 'payload'"): IronicProvisionSetEvent.from_event_dict(event_data) def test_from_event_dict_no_ironic_object_data(self): """Test event parsing with missing ironic_object.data.""" event_data = { - "instance_uuid": uuid.uuid4(), "payload": {"other_field": "value"}, } @@ -65,10 +63,9 @@ def test_from_event_dict_no_ironic_object_data(self): def test_from_event_dict_missing_required_fields(self): """Test event parsing with missing required fields in ironic_object.data.""" event_data = { - "instance_uuid": uuid.uuid4(), "payload": { "ironic_object.data": { - "owner": uuid.uuid4(), + "owner": str(uuid.uuid4()), # Missing lessee, event, uuid } }, @@ -79,10 +76,10 @@ def test_from_event_dict_missing_required_fields(self): def test_direct_initialization(self): """Test direct initialization of IronicProvisionSetEvent.""" - owner_uuid = uuid.uuid4() - lessee_uuid = uuid.uuid4() - instance_uuid = uuid.uuid4() - node_uuid = uuid.uuid4() + owner_uuid = str(uuid.uuid4()) + lessee_uuid = str(uuid.uuid4()) + instance_uuid = str(uuid.uuid4()) + node_uuid = str(uuid.uuid4()) event_type = "provision_end" event = IronicProvisionSetEvent( @@ -119,15 +116,59 @@ def valid_event_data(self): return { "payload": { "ironic_object.data": { - "instance_uuid": uuid.uuid4(), - "owner": uuid.uuid4(), - "lessee": uuid.uuid4(), + "instance_uuid": str(uuid.uuid4()), + "owner": str(uuid.uuid4()), + "lessee": str(uuid.uuid4()), "event": "provision_end", - "uuid": uuid.uuid4(), + "uuid": str(uuid.uuid4()), } }, } + def test_handle_provision_end_no_payload_data(self, mock_conn, mock_nautobot): + """Test handling when payload data cannot be extracted.""" + invalid_event_data = {"payload": None} + + result = handle_provision_end(mock_conn, mock_nautobot, invalid_event_data) + + assert result == 1 + + def test_handle_provision_end_no_lessee(self, mock_conn, mock_nautobot): + """Test handling when lessee is missing (not an instance deployment).""" + event_data = { + "payload": { + "ironic_object.data": { + "instance_uuid": str(uuid.uuid4()), + "owner": str(uuid.uuid4()), + "lessee": None, + "event": "done", + "uuid": str(uuid.uuid4()), + } + }, + } + + result = handle_provision_end(mock_conn, mock_nautobot, event_data) + + assert result == 0 + + def test_handle_provision_end_no_instance_uuid(self, mock_conn, mock_nautobot): + """Test handling when instance_uuid is missing (not an instance deployment).""" + event_data = { + "payload": { + "ironic_object.data": { + "instance_uuid": None, + "owner": str(uuid.uuid4()), + "lessee": str(uuid.uuid4()), + "event": "done", + "uuid": str(uuid.uuid4()), + } + }, + } + + result = 
handle_provision_end(mock_conn, mock_nautobot, event_data) + + assert result == 0 + @patch("understack_workflows.oslo_event.ironic_node.is_project_svm_enabled") def test_handle_provision_end_project_not_svm_enabled( self, mock_is_svm_enabled, mock_conn, mock_nautobot, valid_event_data @@ -138,8 +179,9 @@ def test_handle_provision_end_project_not_svm_enabled( result = handle_provision_end(mock_conn, mock_nautobot, valid_event_data) assert result == 0 - lessee_uuid = valid_event_data["payload"]["ironic_object.data"]["lessee"] - mock_is_svm_enabled.assert_called_once_with(mock_conn, str(lessee_uuid.hex)) + lessee = valid_event_data["payload"]["ironic_object.data"]["lessee"] + lessee_undashed = uuid.UUID(lessee).hex + mock_is_svm_enabled.assert_called_once_with(mock_conn, lessee_undashed) @patch("understack_workflows.oslo_event.ironic_node.create_volume_connector") @patch("understack_workflows.oslo_event.ironic_node.save_output") @@ -198,8 +240,8 @@ def test_handle_provision_end_storage_wanted( # Check save_output calls expected_calls = [ ("storage", "wanted"), - ("node_uuid", str(node_uuid)), - ("instance_uuid", str(instance_uuid)), + ("node_uuid", node_uuid), + ("instance_uuid", instance_uuid), ] actual_calls = [call.args for call in mock_save_output.call_args_list] assert actual_calls == expected_calls @@ -237,8 +279,8 @@ def test_handle_provision_end_storage_not_wanted( # Check save_output calls expected_calls = [ ("storage", "not-set"), - ("node_uuid", str(node_uuid)), - ("instance_uuid", str(instance_uuid)), + ("node_uuid", node_uuid), + ("instance_uuid", instance_uuid), ] actual_calls = [call.args for call in mock_save_output.call_args_list] assert actual_calls == expected_calls @@ -264,23 +306,21 @@ def test_handle_provision_end_storage_metadata_missing( mock_server.metadata = {"other_key": "value"} mock_conn.get_server_by_id.return_value = mock_server - # This should raise a KeyError when accessing metadata["storage"] - with pytest.raises(KeyError): - handle_provision_end(mock_conn, mock_nautobot, valid_event_data) - - @patch("understack_workflows.oslo_event.ironic_node.is_project_svm_enabled") - def test_handle_provision_end_invalid_event_data( - self, mock_is_svm_enabled, mock_conn, mock_nautobot - ): - """Test handling with invalid event data.""" - invalid_event_data = {"instance_uuid": uuid.uuid4(), "payload": {}} + result = handle_provision_end(mock_conn, mock_nautobot, valid_event_data) - with pytest.raises( - ValueError, match="Invalid event. 
No 'ironic_object.data' in payload" - ): - handle_provision_end(mock_conn, mock_nautobot, invalid_event_data) + # Should return 0 and set storage to "not-set" (uses .get()) + assert result == 0 + ironic_data = valid_event_data["payload"]["ironic_object.data"] + instance_uuid = ironic_data["instance_uuid"] + node_uuid = ironic_data["uuid"] - mock_is_svm_enabled.assert_not_called() + expected_calls = [ + ("storage", "not-set"), + ("node_uuid", node_uuid), + ("instance_uuid", instance_uuid), + ] + actual_calls = [call.args for call in mock_save_output.call_args_list] + assert actual_calls == expected_calls class TestCreateVolumeConnector: @@ -295,10 +335,10 @@ def mock_conn(self): def sample_event(self): """Create a sample IronicProvisionSetEvent.""" return IronicProvisionSetEvent( - owner=uuid.uuid4(), - lessee=uuid.uuid4(), - instance_uuid=uuid.uuid4(), - node_uuid=uuid.uuid4(), + owner=str(uuid.uuid4()), + lessee=str(uuid.uuid4()), + instance_uuid=str(uuid.uuid4()), + node_uuid=str(uuid.uuid4()), event="provision_end", ) @@ -321,7 +361,7 @@ def test_create_volume_connector_with_different_instance_uuid( self, mock_conn, sample_event ): """Test volume connector creation with different instance UUID.""" - different_instance_uuid = uuid.uuid4() + different_instance_uuid = str(uuid.uuid4()) sample_event.instance_uuid = different_instance_uuid mock_connector = MagicMock() @@ -354,7 +394,7 @@ class TestInstanceNqn: def test_instance_nqn_format(self): """Test NQN format generation.""" - test_uuid = uuid.uuid4() + test_uuid = str(uuid.uuid4()) expected_nqn = f"nqn.2014-08.org.nvmexpress:uuid:{test_uuid}" result = instance_nqn(test_uuid) @@ -363,8 +403,8 @@ def test_instance_nqn_format(self): def test_instance_nqn_different_uuids(self): """Test NQN generation with different UUIDs.""" - uuid1 = uuid.uuid4() - uuid2 = uuid.uuid4() + uuid1 = str(uuid.uuid4()) + uuid2 = str(uuid.uuid4()) nqn1 = instance_nqn(uuid1) nqn2 = instance_nqn(uuid2) @@ -375,18 +415,17 @@ def test_instance_nqn_different_uuids(self): def test_instance_nqn_prefix_constant(self): """Test that NQN prefix is consistent.""" - test_uuid = uuid.uuid4() + test_uuid = str(uuid.uuid4()) result = instance_nqn(test_uuid) assert result.startswith("nqn.2014-08.org.nvmexpress:uuid:") - assert str(test_uuid) in result + assert test_uuid in result def test_instance_nqn_with_known_uuid(self): """Test NQN generation with a known UUID string.""" known_uuid_str = "12345678-1234-5678-9abc-123456789abc" - known_uuid = uuid.UUID(known_uuid_str) expected_nqn = f"nqn.2014-08.org.nvmexpress:uuid:{known_uuid_str}" - result = instance_nqn(known_uuid) + result = instance_nqn(known_uuid_str) assert result == expected_nqn diff --git a/python/understack-workflows/tests/test_sync_provision_state.py b/python/understack-workflows/tests/test_sync_provision_state.py deleted file mode 100644 index cf91572d8..000000000 --- a/python/understack-workflows/tests/test_sync_provision_state.py +++ /dev/null @@ -1,53 +0,0 @@ -import pytest - -from understack_workflows.main.sync_provision_state import argument_parser -from understack_workflows.main.sync_provision_state import do_action - - -@pytest.fixture -def fakebot(mocker): - return mocker.patch("understack_workflows.nautobot.Nautobot", autospec=True) - - -def test_parse_device_name(): - parser = argument_parser() - with pytest.raises(SystemExit): - parser.parse_args(["--device-id", "FOO"]) - - -def test_parse_device_id(device_id): - parser = argument_parser() - args = parser.parse_args( - ["--device-id", str(device_id), 
"--provision-state", "active"] - ) - - assert args.device_id == device_id - - -def test_calls_update_cf(fakebot, device_id): - do_action(fakebot, device_id, "active") - fakebot.update_cf.assert_called_once_with( - device_id=device_id, - tenant_id=None, - fields={ - "ironic_provision_state": "active", - "resource_class": None, - }, - ) - - -def test_updates_device_status(fakebot, device_id): - do_action(fakebot, device_id, "error") - - fakebot.update_device_status.assert_called_once_with(device_id, "Quarantine") - - -def test_no_change_irrelevant_state(fakebot, device_id): - do_action(fakebot, device_id, "servicing") - - fakebot.update_device_status.assert_not_called() - - -def test_no_change_on_wrong_state(fakebot, device_id): - with pytest.raises(ValueError): - do_action(fakebot, device_id, "this-is-made-up") diff --git a/python/understack-workflows/understack_workflows/ironic/client.py b/python/understack-workflows/understack_workflows/ironic/client.py index 227587378..c07aee9e4 100644 --- a/python/understack-workflows/understack_workflows/ironic/client.py +++ b/python/understack-workflows/understack_workflows/ironic/client.py @@ -1,68 +1,102 @@ +from typing import cast + +from ironicclient.common.apiclient import exceptions as ironic_exceptions +from ironicclient.v1.client import Client as IronicV1Client +from ironicclient.v1.node import Node +from ironicclient.v1.port import Port + +from understack_workflows.helpers import setup_logger from understack_workflows.openstack.client import get_ironic_client +logger = setup_logger(__name__) + class IronicClient: - def __init__( - self, - ) -> None: + def __init__(self, cloud: str | None = None) -> None: """Initialize our ironicclient wrapper.""" - self.logged_in = False - - def login(self): - self.client = get_ironic_client() + self.client: IronicV1Client = get_ironic_client(cloud=cloud) self.logged_in = True - def create_node(self, node_data: dict): - self._ensure_logged_in() - - return self.client.node.create(**node_data) + def create_node(self, node_data: dict) -> Node: + return cast(Node, self.client.node.create(**node_data)) def list_nodes(self): - self._ensure_logged_in() - return self.client.node.list() - def get_node(self, node_ident: str, fields: list[str] | None = None): - self._ensure_logged_in() - - return self.client.node.get( - node_ident, - fields, - ) + def get_node(self, node_ident: str, fields: list[str] | None = None) -> Node: + return cast(Node, self.client.node.get(node_ident, fields)) def update_node(self, node_id, patch): - self._ensure_logged_in() - - return self.client.node.update( - node_id, - patch, - ) - - def create_port(self, port_data: dict): - self._ensure_logged_in() - - return self.client.port.create(**port_data) + return self.client.node.update(node_id, patch) + + def get_node_inventory(self, node_ident: str) -> dict: + """Fetch node inventory data from Ironic API. 
+ + Args: + node_ident: Node UUID, name, or other identifier + + Returns: + Dict containing node inventory data + + Raises: + ironic_exceptions.NotFound: If node doesn't exist + ironic_exceptions.ClientException: For other API errors + """ + try: + logger.info("Fetching inventory for node: %s", node_ident) + + # Call the inventory API endpoint + inventory = self.client.node.get_inventory(node_ident) + + logger.info("Successfully retrieved inventory for node %s", node_ident) + return inventory + + except ironic_exceptions.NotFound: + logger.error("Node not found: %s", node_ident) + raise + except ironic_exceptions.ClientException as e: + logger.error("Ironic API error for node %s: %s", node_ident, e) + raise + except Exception as e: + logger.error( + "Unexpected error fetching inventory for %s: %s", node_ident, e + ) + raise + + def create_port(self, port_data: dict) -> Port: + return cast(Port, self.client.port.create(**port_data)) + + def get_port(self, port_ident: str, fields: list[str] | None = None) -> Port: + """Get a specific port by UUID or address. + + Args: + port_ident: Port UUID or MAC address + fields: Optional list of fields to return + + Returns: + Port object + + Raises: + ironic_exceptions.NotFound: If port doesn't exist + ironic_exceptions.ClientException: For other API errors + """ + try: + logger.debug("Fetching port: %s", port_ident) + port = self.client.port.get(port_ident, fields) + logger.debug("Successfully retrieved port %s", port_ident) + return cast(Port, port) + except ironic_exceptions.NotFound: + logger.error("Port not found: %s", port_ident) + raise + except ironic_exceptions.ClientException as e: + logger.error("Ironic API error for port %s: %s", port_ident, e) + raise def update_port(self, port_id: str, patch: list): - self._ensure_logged_in() - - return self.client.port.update( - port_id, - patch, - ) + return self.client.port.update(port_id, patch) def delete_port(self, port_id: str): - self._ensure_logged_in() - - return self.client.port.delete( - port_id, - ) + return self.client.port.delete(port_id) def list_ports(self, node_id: str): - self._ensure_logged_in() - return self.client.port.list(node=node_id, detail=True) - - def _ensure_logged_in(self): - if not self.logged_in: - self.login() diff --git a/python/understack-workflows/understack_workflows/ironic_node.py b/python/understack-workflows/understack_workflows/ironic_node.py index 869075eb0..5332ca253 100644 --- a/python/understack-workflows/understack_workflows/ironic_node.py +++ b/python/understack-workflows/understack_workflows/ironic_node.py @@ -22,7 +22,8 @@ def create_or_update(bmc: Bmc, name: str, manufacturer: str) -> IronicNodeConfig "Using existing baremetal node %s with name %s", ironic_node.uuid, name ) update_ironic_node(client, bmc, ironic_node, name, driver) - return ironic_node + # Return node as IronicNodeConfiguration (duck typing - Node has same attrs) + return ironic_node # type: ignore[return-value] except ironicclient.common.apiclient.exceptions.NotFound: logger.debug("Baremetal Node with name %s not found in Ironic, creating.", name) return create_ironic_node(client, bmc, name, driver) @@ -61,7 +62,8 @@ def create_ironic_node( name: str, driver: str, ) -> IronicNodeConfiguration: - return client.create_node( + # Return node as IronicNodeConfiguration (duck typing - Node has same attrs) + return client.create_node( # type: ignore[return-value] { "name": name, "driver": driver, diff --git a/python/understack-workflows/understack_workflows/main/openstack_oslo_event.py 
b/python/understack-workflows/understack_workflows/main/openstack_oslo_event.py index 16e0e5893..ef7dd90e6 100644 --- a/python/understack-workflows/understack_workflows/main/openstack_oslo_event.py +++ b/python/understack-workflows/understack_workflows/main/openstack_oslo_event.py @@ -17,6 +17,7 @@ from understack_workflows.oslo_event import ironic_port from understack_workflows.oslo_event import ironic_portgroup from understack_workflows.oslo_event import keystone_project +from understack_workflows.oslo_event import nautobot_device_sync from understack_workflows.oslo_event import neutron_network from understack_workflows.oslo_event import neutron_subnet @@ -64,14 +65,20 @@ class NoEventHandlerError(Exception): EventHandler = Callable[[Connection, NautobotApi, dict[str, Any]], int] # add the event_type here and the function that should be called -_event_handlers: dict[str, EventHandler] = { +_event_handlers: dict[str, EventHandler | list[EventHandler]] = { "baremetal.port.create.end": ironic_port.handle_port_create_update, "baremetal.port.update.end": ironic_port.handle_port_create_update, "baremetal.port.delete.end": ironic_port.handle_port_delete, "baremetal.portgroup.create.end": ironic_portgroup.handle_portgroup_create_update, "baremetal.portgroup.update.end": ironic_portgroup.handle_portgroup_create_update, "baremetal.portgroup.delete.end": ironic_portgroup.handle_portgroup_delete, - "baremetal.node.provision_set.end": ironic_node.handle_provision_end, + "baremetal.node.create.end": nautobot_device_sync.handle_node_event, + "baremetal.node.update.end": nautobot_device_sync.handle_node_event, + "baremetal.node.delete.end": nautobot_device_sync.handle_node_delete_event, + "baremetal.node.provision_set.end": [ + ironic_node.handle_provision_end, + nautobot_device_sync.handle_node_event, + ], "identity.project.created": keystone_project.handle_project_created, "identity.project.updated": keystone_project.handle_project_updated, "identity.project.deleted": keystone_project.handle_project_deleted, @@ -183,14 +190,20 @@ def main() -> int: logger.info("Processing event type: %s", event_type) - # look up the event handler - event_handler = _event_handlers.get(event_type) - if event_handler is None: + # look up the event handler(s) + event_handlers = _event_handlers.get(event_type) + if event_handlers is None: logger.error("No event handler for event type: %s", event_type) logger.debug("Available event handlers: %s", list(_event_handlers.keys())) sys.exit(_EXIT_NO_EVENT_HANDLER) - logger.debug("Found event handler for event type: %s", event_type) + # normalize to list for consistent processing + if not isinstance(event_handlers, list): + event_handlers = [event_handlers] + + logger.debug( + "Found %d handler(s) for event type: %s", len(event_handlers), event_type + ) # get a connection to OpenStack and to Nautobot try: @@ -199,17 +212,21 @@ def main() -> int: logger.exception("Client initialization failed") sys.exit(_EXIT_CLIENT_ERROR) - # execute the event handler - logger.info("Executing event handler for event type: %s", event_type) - try: - ret = event_handler(conn, nautobot, event) - except Exception: - logger.exception("Event handler failed") - sys.exit(_EXIT_HANDLER_ERROR) - - logger.info("Event handler completed successfully with return code: %s", ret) - - # exit if the event handler provided a return code or just with success - if isinstance(ret, int): - return ret - return _EXIT_SUCCESS + # execute all event handlers + last_ret = _EXIT_SUCCESS + for idx, event_handler in 
enumerate(event_handlers, 1): + handler_name = getattr(event_handler, "__name__", f"handler_{idx}") + logger.info( + "Executing handler %d/%d: %s", idx, len(event_handlers), handler_name + ) + try: + ret = event_handler(conn, nautobot, event) + if isinstance(ret, int): + last_ret = ret + logger.info("Handler %s completed with return code: %s", handler_name, ret) + except Exception: + logger.exception("Handler %s failed", handler_name) + sys.exit(_EXIT_HANDLER_ERROR) + + logger.info("All handlers completed successfully") + return last_ret diff --git a/python/understack-workflows/understack_workflows/main/sync_provision_state.py b/python/understack-workflows/understack_workflows/main/sync_provision_state.py deleted file mode 100644 index 9cc29477c..000000000 --- a/python/understack-workflows/understack_workflows/main/sync_provision_state.py +++ /dev/null @@ -1,75 +0,0 @@ -import argparse -import os -from uuid import UUID - -from understack_workflows.helpers import credential -from understack_workflows.helpers import parser_nautobot_args -from understack_workflows.helpers import setup_logger -from understack_workflows.ironic.provision_state_mapper import ProvisionStateMapper -from understack_workflows.nautobot import Nautobot - - -def argument_parser(): - parser = argparse.ArgumentParser( - prog=os.path.basename(__file__), - description="Synchronize Ironic provision_state to Nautobot", - ) - parser.add_argument( - "--device-id", required=True, type=UUID, help="Nautobot device UUID" - ) - parser.add_argument("--provision-state", required=True) - parser.add_argument("--lessee") - parser.add_argument("--resource-class") - parser = parser_nautobot_args(parser) - - return parser - - -logger = setup_logger(__name__) - - -def do_action( - nautobot, - device_uuid, - provision_state, - tenant_id=None, - resource_class=None, -): - new_status = ProvisionStateMapper.translate_to_nautobot(provision_state) - - custom_fields_to_update = { - "ironic_provision_state": provision_state, - "resource_class": resource_class, - } - - if new_status: - nautobot.update_cf( - device_id=device_uuid, tenant_id=tenant_id, fields=custom_fields_to_update - ) - nautobot.update_device_status(device_uuid, new_status) - - -def main(): - args = argument_parser().parse_args() - - device_uuid = args.device_id - tenant_id = None - if args.lessee: - try: - tenant_id = UUID(args.lessee) - except ValueError: - logger.warning("Invalid UUID format for lessee: %s", args.lessee) - nb_token = args.nautobot_token or credential("nb-token", "token") - - nautobot = Nautobot(args.nautobot_url, nb_token, logger=logger) - do_action( - nautobot, - device_uuid=device_uuid, - provision_state=args.provision_state, - tenant_id=tenant_id, - resource_class=args.resource_class, - ) - - -if __name__ == "__main__": - main() diff --git a/python/understack-workflows/understack_workflows/oslo_event/ironic_node.py b/python/understack-workflows/understack_workflows/oslo_event/ironic_node.py index 525dd7290..b436134f3 100644 --- a/python/understack-workflows/understack_workflows/oslo_event/ironic_node.py +++ b/python/understack-workflows/understack_workflows/oslo_event/ironic_node.py @@ -1,10 +1,9 @@ -from typing import Self +from dataclasses import dataclass +from typing import Any from uuid import UUID from openstack.connection import Connection from openstack.exceptions import ConflictException -from pydantic import BaseModel -from pydantic import computed_field from pynautobot.core.api import Api as Nautobot from understack_workflows.helpers import 
save_output @@ -14,42 +13,70 @@ logger = setup_logger(__name__) -class IronicProvisionSetEvent(BaseModel): - owner: UUID - lessee: UUID - instance_uuid: UUID - node_uuid: UUID +@dataclass +class IronicProvisionSetEvent: + node_uuid: str event: str + owner: str + lessee: str + instance_uuid: str @classmethod - def from_event_dict(cls, data: dict) -> Self: + def from_event_dict(cls, data: dict[str, Any]) -> "IronicProvisionSetEvent": payload = data.get("payload") if payload is None: - raise ValueError("invalid event") + raise ValueError("Invalid event. No 'payload'") payload_data = payload.get("ironic_object.data") if payload_data is None: raise ValueError("Invalid event. No 'ironic_object.data' in payload") return cls( + node_uuid=payload_data["uuid"], + event=payload_data["event"], owner=payload_data["owner"], lessee=payload_data["lessee"], instance_uuid=payload_data["instance_uuid"], - event=payload_data["event"], - node_uuid=payload_data["uuid"], ) - @computed_field @property def lessee_undashed(self) -> str: """Returns lessee without dashes.""" - return self.lessee.hex + return UUID(self.lessee).hex -def handle_provision_end(conn: Connection, _: Nautobot, event_data: dict) -> int: +def _extract_payload_data(event_data: dict[str, Any]) -> dict[str, Any] | None: + """Extract ironic_object.data from event payload.""" + payload = event_data.get("payload", {}) + if isinstance(payload, dict): + return payload.get("ironic_object.data") + return None + + +def handle_provision_end( + conn: Connection, _: Nautobot, event_data: dict[str, Any] +) -> int: """Operates on an Ironic Node provisioning END event.""" - # Check if the project is configured with tags. + payload_data = _extract_payload_data(event_data) + if not payload_data: + logger.error("Could not extract payload data from event") + return 1 + + node_uuid = payload_data.get("uuid") + + # Skip if no lessee (not an instance deployment) + if not payload_data.get("lessee"): + logger.info("No lessee on node %s, skipping SVM check", node_uuid) + return 0 + + # Skip if no instance_uuid (not an instance deployment) + if not payload_data.get("instance_uuid"): + logger.info("No instance_uuid on node %s, skipping SVM check", node_uuid) + return 0 + + # Now safe to create the event object with all required fields event = IronicProvisionSetEvent.from_event_dict(event_data) + logger.info("Checking if project %s is tagged with UNDERSTACK_SVM", event.lessee) if not is_project_svm_enabled(conn, event.lessee_undashed): return 0 @@ -63,14 +90,14 @@ def handle_provision_end(conn: Connection, _: Nautobot, event_data: dict) -> int save_output("storage", "not-found") return 1 - if server.metadata["storage"] == "wanted": + if server.metadata.get("storage") == "wanted": save_output("storage", "wanted") else: logger.info("Server %s did not want storage enabled.", server.id) save_output("storage", "not-set") - save_output("node_uuid", str(event.node_uuid)) - save_output("instance_uuid", str(event.instance_uuid)) + save_output("node_uuid", event.node_uuid) + save_output("instance_uuid", event.instance_uuid) create_volume_connector(conn, event) return 0 @@ -90,5 +117,5 @@ def create_volume_connector(conn: Connection, event: IronicProvisionSetEvent): logger.info("Connector already exists.") -def instance_nqn(instance_id: UUID): +def instance_nqn(instance_id: str | None) -> str: return f"nqn.2014-08.org.nvmexpress:uuid:{instance_id}" diff --git a/python/understack-workflows/understack_workflows/oslo_event/nautobot_device_interface_sync.py 
b/python/understack-workflows/understack_workflows/oslo_event/nautobot_device_interface_sync.py new file mode 100644 index 000000000..6f2738d54 --- /dev/null +++ b/python/understack-workflows/understack_workflows/oslo_event/nautobot_device_interface_sync.py @@ -0,0 +1,704 @@ +"""Nautobot device interface synchronization from Ironic. + +This module syncs interfaces from Ironic node inventory to Nautobot. +It: +1. Fetches node inventory from Ironic (contains interface list with MACs) +2. Fetches ports from Ironic (contains port UUIDs and local_link_connection) +3. Creates/updates interfaces in Nautobot with matching UUIDs +4. Creates/updates iDRAC management interface from inventory bmc_mac + +The Ironic port UUID is used as the Nautobot interface ID to maintain +consistency between the two systems. For iDRAC interfaces, a deterministic +UUID is generated from the device UUID and MAC address. +""" + +from dataclasses import dataclass +from dataclasses import field +from typing import Any + +from openstack.connection import Connection +from pynautobot.core.api import Api as Nautobot + +from understack_workflows.helpers import setup_logger +from understack_workflows.ironic.client import IronicClient + +logger = setup_logger(__name__) + +EXIT_STATUS_SUCCESS = 0 +EXIT_STATUS_FAILURE = 1 + +# Interface type mapping based on NIC naming conventions +INTERFACE_TYPE_MAP = { + "slot": "25gbase-x-sfp28", # PCIe slot NICs typically 25GbE + "embedded": "25gbase-x-sfp28", # Embedded NICs + "integrated": "25gbase-x-sfp28", # Integrated NICs +} +DEFAULT_INTERFACE_TYPE = "unknown" + + +@dataclass +class InterfaceInfo: + """Interface information to sync to Nautobot.""" + + uuid: str # Ironic port UUID, used as Nautobot interface ID + name: str # Interface name (e.g., NIC.Slot.1-1) + mac_address: str + device_uuid: str # Node UUID + description: str = "" + interface_type: str = DEFAULT_INTERFACE_TYPE + enabled: bool = True + mgmt_only: bool = False + pxe_enabled: bool = False + + # Local link connection info (for cable management) + switch_port_id: str | None = None + switch_info: str | None = None + switch_id: str | None = None + physical_network: str | None = None + + +@dataclass +class DeviceInterfacesInfo: + """All interfaces for a device.""" + + device_uuid: str + interfaces: list[InterfaceInfo] = field(default_factory=list) + + +def _get_interface_type(name: str) -> str: + """Determine interface type based on name.""" + name_lower = name.lower() + for key, iface_type in INTERFACE_TYPE_MAP.items(): + if key in name_lower: + return iface_type + return DEFAULT_INTERFACE_TYPE + + +def _get_interface_description(name: str) -> str: + """Generate human-readable description from interface name. 
+ + Examples: + NIC.Embedded.1-1 -> "Embedded NIC 1 Port 1" + NIC.Embedded.1-1-1 -> "Embedded NIC 1 Port 1 Partition 1" + NIC.Integrated.1-1 -> "Integrated NIC 1 Port 1" + NIC.Integrated.1-1-1 -> "Integrated NIC 1 Port 1 Partition 1" + NIC.Slot.1-1 -> "NIC in Slot 1 Port 1" + NIC.Slot.1-2 -> "NIC in Slot 1 Port 2" + """ + if "idrac" in name.lower(): + return "Dedicated iDRAC interface" + + parts = name.rsplit(".", 1) + if len(parts) != 2: + return "" + + [prefix, suffix] = parts + prefix = { + "nic.integrated": "Integrated NIC", + "nic.embedded": "Embedded NIC", + "nic.slot": "NIC in Slot", + }.get(prefix.lower()) + + if prefix is None: + return "" + + match suffix.split("-"): + case [nic, port]: + return f"{prefix} {nic} Port {port}" + case [nic, port, partition]: + return f"{prefix} {nic} Port {port} Partition {partition}" + case _: + return "" + + +def _build_interface_map_from_inventory(inventory: dict) -> dict[str, str]: + """Build a map of MAC address -> interface name from inventory. + + Args: + inventory: Ironic node inventory dict + + Returns: + Dict mapping MAC address (lowercase) to interface name + """ + interfaces = inventory.get("inventory", {}).get("interfaces", []) + return { + interface["mac_address"].lower(): interface["name"] + for interface in interfaces + if "mac_address" in interface and "name" in interface + } + + +def _assign_ip_to_interface( + nautobot_client: Nautobot, + interface_id: str, + ip_address: str, +) -> None: + """Assign an IP address to an interface in Nautobot. + + Creates the IP address if it doesn't exist, then associates it with + the interface. + + Args: + nautobot_client: Nautobot API client + interface_id: Nautobot interface ID + ip_address: IP address string (e.g., "10.46.96.157") + """ + if not ip_address: + return + + # Check if IP already exists + existing_ip = nautobot_client.ipam.ip_addresses.get(address=ip_address) + + if existing_ip and not isinstance(existing_ip, list) and hasattr(existing_ip, "id"): + ip_id = existing_ip.id # type: ignore[union-attr] + logger.debug("IP address %s already exists: %s", ip_address, ip_id) + else: + # Create new IP address + # Note: We don't specify parent prefix - Nautobot will auto-assign + # based on existing prefixes if configured + try: + new_ip = nautobot_client.ipam.ip_addresses.create( + address=ip_address, + status="Active", + ) + ip_id = getattr(new_ip, "id", None) + if not ip_id: + logger.warning("Failed to get ID for created IP address %s", ip_address) + return + logger.info("Created IP address %s: %s", ip_address, ip_id) + except Exception as e: + logger.warning("Failed to create IP address %s: %s", ip_address, e) + return + + # Check if IP is already associated with this interface + existing_assoc = nautobot_client.ipam.ip_address_to_interface.get(ip_address=ip_id) + + if existing_assoc and not isinstance(existing_assoc, list): + assoc_interface = getattr(existing_assoc, "interface", None) + assoc_interface_id = ( + getattr(assoc_interface, "id", None) if assoc_interface else None + ) + if assoc_interface_id == interface_id: + logger.debug( + "IP %s already associated with interface %s", ip_address, interface_id + ) + return + else: + # IP is associated with a different interface + logger.warning( + "IP %s is already associated with interface %s, not %s", + ip_address, + assoc_interface_id, + interface_id, + ) + return + + # Associate IP with interface + try: + nautobot_client.ipam.ip_address_to_interface.create( + ip_address=ip_id, + interface=interface_id, + is_primary=True, + ) + 
logger.info("Associated IP %s with interface %s", ip_address, interface_id) + except Exception as e: + logger.warning("Failed to associate IP %s with interface: %s", ip_address, e) + + +def sync_idrac_interface( + device_uuid: str, + bmc_mac: str, + nautobot_client: Nautobot, + bmc_ip: str | None = None, +) -> None: + """Sync iDRAC interface to Nautobot. + + Creates or updates the iDRAC management interface for a device. + Looks up existing interface by name + device_id. + Optionally assigns the BMC IP address to the interface. + + TODO: Add cable management for iDRAC. Currently not implemented because + LLDP data for iDRAC switch connection is not available in Ironic inventory. + Would require querying the BMC directly via Redfish (see bmc_chassis_info.py). + + Args: + device_uuid: Nautobot device UUID + bmc_mac: BMC MAC address from inventory + nautobot_client: Nautobot API client + bmc_ip: Optional BMC IP address from inventory (bmc_address) + """ + if not bmc_mac: + logger.debug("No bmc_mac provided for device %s", device_uuid) + return + + mac_address = bmc_mac.upper() + idrac_interface = None + + # Check if iDRAC interface already exists + existing = nautobot_client.dcim.interfaces.get( + device_id=device_uuid, + name="iDRAC", + ) + + # pynautobot.get() can return Record, list, or None - we expect a single Record + if existing and not isinstance(existing, list): + # Update if MAC changed + current_mac = getattr(existing, "mac_address", None) + if current_mac and hasattr(current_mac, "upper"): + current_mac = current_mac.upper() + + if current_mac != mac_address: + existing.mac_address = mac_address # type: ignore[attr-defined] + existing.save() # type: ignore[union-attr] + logger.info("Updated iDRAC MAC for device %s: %s", device_uuid, mac_address) + else: + logger.debug( + "iDRAC interface already up to date for device %s", device_uuid + ) + idrac_interface = existing + else: + # Create new iDRAC interface + idrac_interface = nautobot_client.dcim.interfaces.create( + device=device_uuid, + name="iDRAC", + type="1000base-t", + mac_address=mac_address, + description="Dedicated iDRAC interface", + mgmt_only=True, + enabled=True, + status="Active", + ) + logger.info( + "Created iDRAC interface for device %s: %s", device_uuid, mac_address + ) + + # Assign BMC IP address to iDRAC interface + if idrac_interface and bmc_ip: + idrac_id = getattr(idrac_interface, "id", None) + if idrac_id: + _assign_ip_to_interface(nautobot_client, idrac_id, bmc_ip) + + +def _build_interfaces_from_ports( + node_uuid: str, + ports: list, + inventory_map: dict[str, str], +) -> list[InterfaceInfo]: + """Build InterfaceInfo list from Ironic ports and inventory. 
+ + Args: + node_uuid: Ironic node UUID + ports: List of Ironic port objects + inventory_map: MAC -> interface info from inventory + + Returns: + List of InterfaceInfo objects + """ + interfaces = [] + + for port in ports: + mac = port.address.lower() if port.address else "" + extra = port.extra or {} + llc = port.local_link_connection or {} + + # Get interface name: prefer bios_name from extra, then inventory, + # then port name + bios_name = extra.get("bios_name") + inv_name = inventory_map.get(mac, "") + + # Priority: bios_name > inventory name > port name > port UUID + name = bios_name or inv_name or port.name or port.uuid + + interface = InterfaceInfo( + uuid=port.uuid, + name=name, + mac_address=mac.upper(), # Nautobot expects uppercase MACs + device_uuid=node_uuid, + description=_get_interface_description(name), + interface_type=_get_interface_type(name), + pxe_enabled=port.pxe_enabled or False, + switch_port_id=llc.get("port_id"), + switch_info=llc.get("switch_info"), + switch_id=llc.get("switch_id"), + physical_network=port.physical_network, + ) + interfaces.append(interface) + + return interfaces + + +def _create_nautobot_interface( + interface: InterfaceInfo, + nautobot_client: Nautobot, +): + """Create a new interface in Nautobot.""" + attrs = { + "id": interface.uuid, + "name": interface.name, + "type": interface.interface_type, + "status": "Active", + "mac_address": interface.mac_address, + "device": interface.device_uuid, + "enabled": interface.enabled, + "mgmt_only": interface.mgmt_only, + } + + if interface.description: + attrs["description"] = interface.description + + try: + nautobot_intf = nautobot_client.dcim.interfaces.create(**attrs) + logger.info( + "Created interface %s (%s) in Nautobot", interface.name, interface.uuid + ) + return nautobot_intf + except Exception as e: + # Handle race condition - interface may already exist + if "unique" in str(e).lower(): + logger.info("Interface %s already exists, fetching", interface.uuid) + return nautobot_client.dcim.interfaces.get(id=interface.uuid) + raise + + +def _delete_nautobot_interface(nautobot_intf, nautobot_client: Nautobot) -> None: + """Delete an interface and its associated cable from Nautobot.""" + intf_id = nautobot_intf.id + + # Delete cable first if exists + if nautobot_intf.cable: + try: + nautobot_intf.cable.delete() + logger.debug("Deleted cable for interface %s", intf_id) + except Exception as e: + logger.warning("Failed to delete cable for interface %s: %s", intf_id, e) + + # Delete the interface + nautobot_intf.delete() + logger.info("Deleted interface %s from Nautobot", intf_id) + + +def _cleanup_stale_interfaces( + node_uuid: str, + valid_interface_ids: set[str], + nautobot_client: Nautobot, +) -> None: + """Remove interfaces from Nautobot that no longer exist in Ironic. 
+ + Args: + node_uuid: Device UUID + valid_interface_ids: Set of interface UUIDs that should exist + nautobot_client: Nautobot API client + """ + existing_interfaces = nautobot_client.dcim.interfaces.filter(device_id=node_uuid) + + for intf in existing_interfaces: + intf_name = getattr(intf, "name", None) + intf_id = getattr(intf, "id", None) + + # Skip iDRAC - it's managed separately and not in Ironic ports + if intf_name == "iDRAC": + continue + + if intf_id not in valid_interface_ids: + try: + _delete_nautobot_interface(intf, nautobot_client) + except Exception as e: + logger.warning("Failed to delete stale interface %s: %s", intf_id, e) + + +def _update_nautobot_interface( + interface: InterfaceInfo, + nautobot_intf, + nautobot_client: Nautobot, +): + """Update existing Nautobot interface. + + If there's a name conflict with another interface on the same device, + deletes the conflicting interface first, then updates this one. + + Returns the updated interface object. + """ + updated = False + + # Name - if different, check for conflicts + if interface.name and nautobot_intf.name != interface.name: + # Check if another interface with this name already exists on the device + existing = nautobot_client.dcim.interfaces.get( + device_id=interface.device_uuid, + name=interface.name, + ) + if ( + existing + and not isinstance(existing, list) + and existing.id != interface.uuid + ): + # Delete the conflicting interface and recreate with fresh data + logger.info( + "Name conflict: deleting interface %s ('%s') to update %s", + existing.id, + interface.name, + interface.uuid, + ) + _delete_nautobot_interface(existing, nautobot_client) + + nautobot_intf.name = interface.name + updated = True + logger.debug("Updating interface name: %s", interface.name) + + # MAC address + if interface.mac_address and nautobot_intf.mac_address != interface.mac_address: + nautobot_intf.mac_address = interface.mac_address + updated = True + logger.debug("Updating interface MAC: %s", interface.mac_address) + + # Type + current_type = getattr(nautobot_intf.type, "value", None) + if interface.interface_type and current_type != interface.interface_type: + nautobot_intf.type = interface.interface_type + updated = True + logger.debug("Updating interface type: %s", interface.interface_type) + + # Description + if interface.description and nautobot_intf.description != interface.description: + nautobot_intf.description = interface.description + updated = True + logger.debug("Updating interface description: %s", interface.description) + + # Management only flag (important for iDRAC interfaces) + if nautobot_intf.mgmt_only != interface.mgmt_only: + nautobot_intf.mgmt_only = interface.mgmt_only + updated = True + logger.debug("Updating interface mgmt_only: %s", interface.mgmt_only) + + if updated: + nautobot_intf.save() + logger.info("Updated interface %s in Nautobot", interface.uuid) + + return nautobot_intf + + +def _handle_cable_management( + interface: InterfaceInfo, + nautobot_intf, + nautobot_client: Nautobot, +) -> None: + """Handle cable creation/update for interface with switch connection info.""" + if not interface.switch_info or not interface.switch_port_id: + return + + logger.debug( + "Handling cable for interface %s -> %s:%s", + interface.uuid, + interface.switch_info, + interface.switch_port_id, + ) + + # Find the switch interface + switch_intf = nautobot_client.dcim.interfaces.get( + device=interface.switch_info, + name=interface.switch_port_id, + ) + if not switch_intf or isinstance(switch_intf, list): + 
logger.warning( + "Switch interface %s not found on device %s", + interface.switch_port_id, + interface.switch_info, + ) + return + + switch_intf_id = switch_intf.id + + # Check if cable already exists + if nautobot_intf.cable: + cable = nautobot_intf.cable + # Verify cable connects to correct switch port + actual_terminations = {cable.termination_a_id, cable.termination_b_id} + required_terminations = {interface.uuid, switch_intf_id} + if actual_terminations == required_terminations: + logger.debug( + "Cable already exists correctly for interface %s", interface.uuid + ) + return + + # Update cable to correct endpoints + cable.termination_a_id = interface.uuid + cable.termination_a_type = "dcim.interface" + cable.termination_b_id = switch_intf_id + cable.termination_b_type = "dcim.interface" + cable.status = "Connected" + cable.save() + logger.info("Updated cable for interface %s", interface.uuid) + else: + # Create new cable + try: + nautobot_client.dcim.cables.create( + termination_a_type="dcim.interface", + termination_a_id=interface.uuid, + termination_b_type="dcim.interface", + termination_b_id=switch_intf_id, + status="Connected", + ) + logger.info( + "Created cable connecting %s to %s:%s", + interface.uuid, + interface.switch_info, + interface.switch_port_id, + ) + except Exception as e: + logger.warning( + "Failed to create cable for interface %s: %s", interface.uuid, e + ) + + +def sync_interfaces_from_data( + node_uuid: str, + inventory: dict, + ports: list, + nautobot_client: Nautobot, +) -> int: + """Sync interfaces to Nautobot using pre-fetched inventory and ports. + + Use this when you already have inventory and ports data (e.g., from + nautobot_device_sync) to avoid duplicate API calls. + + Args: + node_uuid: Ironic node UUID + inventory: Ironic node inventory dict (from get_node_inventory) + ports: List of Ironic port objects (from list_ports) + nautobot_client: Nautobot API client + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + if not node_uuid: + logger.error("Missing node UUID") + return EXIT_STATUS_FAILURE + + try: + # Build MAC -> interface info map from inventory + inventory_map = _build_interface_map_from_inventory(inventory) + + # Build interface list from ports and inventory + interfaces = _build_interfaces_from_ports(node_uuid, ports, inventory_map) + + # Sync each interface + for interface in interfaces: + nautobot_intf = nautobot_client.dcim.interfaces.get(id=interface.uuid) + + if not nautobot_intf: + nautobot_intf = _create_nautobot_interface(interface, nautobot_client) + else: + _update_nautobot_interface(interface, nautobot_intf, nautobot_client) + + # Handle cable management + if nautobot_intf: + _handle_cable_management(interface, nautobot_intf, nautobot_client) + + # Sync iDRAC interface separately (not part of Ironic ports) + inv = inventory.get("inventory", {}) + bmc_mac = inv.get("bmc_mac") + bmc_ip = inv.get("bmc_address") + if bmc_mac: + sync_idrac_interface(node_uuid, bmc_mac, nautobot_client, bmc_ip) + + # Cleanup stale interfaces no longer in Ironic + valid_ids = {intf.uuid for intf in interfaces} + _cleanup_stale_interfaces(node_uuid, valid_ids, nautobot_client) + + logger.info( + "Synced %d interfaces for node %s to Nautobot", + len(interfaces), + node_uuid, + ) + return EXIT_STATUS_SUCCESS + + except Exception: + logger.exception("Failed to sync interfaces for node %s to Nautobot", node_uuid) + return EXIT_STATUS_FAILURE + + +def sync_interfaces_to_nautobot( + node_uuid: str, + nautobot_client: Nautobot, + 
ironic_client: IronicClient | None = None, +) -> int: + """Sync all interfaces for an Ironic node to Nautobot. + + This fetches inventory and ports from Ironic, then syncs to Nautobot. + If you already have inventory and ports data, use sync_interfaces_from_data() + instead to avoid duplicate API calls. + + Args: + node_uuid: Ironic node UUID + nautobot_client: Nautobot API client + ironic_client: Optional Ironic client (created if not provided) + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + try: + if ironic_client is None: + ironic_client = IronicClient() + + # Fetch inventory + try: + inventory = ironic_client.get_node_inventory(node_ident=node_uuid) + except Exception as e: + logger.warning("Could not fetch inventory for node %s: %s", node_uuid, e) + inventory = {} + + # Fetch ports + ports = ironic_client.list_ports(node_id=node_uuid) + + # Delegate to the data-based sync + return sync_interfaces_from_data(node_uuid, inventory, ports, nautobot_client) + + except Exception: + logger.exception("Failed to sync interfaces for node %s to Nautobot", node_uuid) + return EXIT_STATUS_FAILURE + + +def _extract_node_uuid_from_event(event_data: dict[str, Any]) -> str | None: + """Extract node UUID from Ironic event payload.""" + payload = event_data.get("payload", {}) + if isinstance(payload, dict): + ironic_data = payload.get("ironic_object.data", {}) + if isinstance(ironic_data, dict): + # For port events, get node_uuid + if ironic_data.get("node_uuid"): + return ironic_data["node_uuid"] + # For node events, get uuid + if ironic_data.get("uuid"): + return ironic_data["uuid"] + return None + + +def handle_interface_sync_event( + _conn: Connection, + nautobot_client: Nautobot, + event_data: dict[str, Any], +) -> int: + """Handle events that should trigger interface sync. + + Works with: + - baremetal.node.inspect.end (inspection completed, inventory available) + - baremetal.port.create.end + - baremetal.port.update.end + + Args: + _conn: OpenStack connection (unused, kept for handler signature) + nautobot_client: Nautobot API client + event_data: Raw event data dict + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + node_uuid = _extract_node_uuid_from_event(event_data) + if not node_uuid: + logger.error("Could not extract node UUID from event: %s", event_data) + return EXIT_STATUS_FAILURE + + event_type = event_data.get("event_type", "unknown") + logger.info("Handling %s - syncing interfaces for node %s", event_type, node_uuid) + + return sync_interfaces_to_nautobot(node_uuid, nautobot_client) diff --git a/python/understack-workflows/understack_workflows/oslo_event/nautobot_device_sync.py b/python/understack-workflows/understack_workflows/oslo_event/nautobot_device_sync.py new file mode 100644 index 000000000..5538ae087 --- /dev/null +++ b/python/understack-workflows/understack_workflows/oslo_event/nautobot_device_sync.py @@ -0,0 +1,555 @@ +"""Nautobot device synchronization from Ironic. + +This module provides a simple, robust sync function that: +1. Takes just a node_uuid +2. Fetches current state from Ironic (node API + inventory API) +3. Syncs everything to Nautobot (create or update) + +Can be called from any event handler - provision, inspect, CRUD, etc. 
+""" + +import re +from dataclasses import dataclass +from dataclasses import field +from typing import Any +from uuid import UUID + +from openstack.connection import Connection +from pynautobot.core.api import Api as Nautobot + +from understack_workflows.helpers import setup_logger +from understack_workflows.ironic.client import IronicClient +from understack_workflows.ironic.provision_state_mapper import ProvisionStateMapper +from understack_workflows.oslo_event.nautobot_device_interface_sync import ( + sync_interfaces_from_data, +) + +logger = setup_logger(__name__) + +EXIT_STATUS_SUCCESS = 0 +EXIT_STATUS_FAILURE = 1 + + +@dataclass +class DeviceInfo: + """Complete device information synced to Nautobot. + + Populated from Ironic node API and inventory API. + """ + + uuid: str + + # Identity + name: str | None = None + serial_number: str | None = None + service_tag: str | None = None + + # Hardware + manufacturer: str | None = None + model: str | None = None + + # Specs (from properties) + memory_mb: int | None = None + cpus: int | None = None + cpu_arch: str | None = None + local_gb: int | None = None + + # Classification + traits: list[str] = field(default_factory=list) + + # Location + location_id: str | None = None + rack_id: str | None = None + + # Status + status: str | None = None + + # Role + role: str = "server" + + tenant_id: str | None = None + + # Custom fields for Nautobot + custom_fields: dict[str, str] = field(default_factory=dict) + + +class RackLocationError(Exception): + """Raised when node rack location cannot be determined.""" + + pass + + +def _normalise_manufacturer(name: str) -> str: + """Return a standard name for Manufacturer.""" + if "DELL" in name.upper(): + return "Dell" + elif "HP" in name.upper(): + return "HP" + raise ValueError(f"Server manufacturer {name} not supported") + + +def _populate_from_node(device_info: DeviceInfo, node) -> None: + """Populate device info from Ironic node object.""" + props = node.properties or {} + + # Hardware specs + if props.get("memory_mb"): + device_info.memory_mb = int(props["memory_mb"]) + if props.get("cpus"): + device_info.cpus = int(props["cpus"]) + device_info.cpu_arch = props.get("cpu_arch") + if props.get("local_gb"): + device_info.local_gb = int(props["local_gb"]) + + # Traits + if hasattr(node, "traits") and node.traits: + device_info.traits = list(node.traits) + + # Provision state -> Nautobot status + device_info.status = ProvisionStateMapper.translate_to_nautobot( + node.provision_state + ) + + lessee = node.lessee + # Convert lessee to string UUID if present + if lessee: + try: + device_info.tenant_id = ( + str(UUID(lessee)) if isinstance(lessee, str) else str(lessee) + ) + except (ValueError, TypeError) as e: + logger.warning("Invalid lessee UUID %s: %s", lessee, e) + + +def _populate_from_inventory(device_info: DeviceInfo, inventory: dict | None) -> None: + """Populate device info from Ironic inventory.""" + if not inventory: + return + + inv = inventory.get("inventory", {}) + system_vendor = inv.get("system_vendor", {}) + + # Manufacturer from inventory + vendor = system_vendor.get("manufacturer") + if vendor: + device_info.manufacturer = _normalise_manufacturer(vendor) + + # Model - extract base model name, strip SKU/extra info in parentheses + # e.g., "PowerEdge R7615 (SKU=0AF7;ModelName=PowerEdge R7615)" -> "PowerEdge R7615" + # Uses same regex as ironic_understack/inspect_hook_chassis_model.py + product_name = system_vendor.get("product_name") + if product_name and product_name != "System": + 
device_info.model = re.sub(r" \(.*\)", "", str(product_name)) + + # Service tag: sku (REDFISH) or serial_number (AGENT) + service_tag = system_vendor.get("sku") or system_vendor.get("serial_number") + if service_tag: + device_info.service_tag = service_tag + + # Serial number: only if sku exists (REDFISH has both) + if system_vendor.get("sku"): + device_info.serial_number = system_vendor.get("serial_number") + + +def _generate_device_name(device_info: DeviceInfo) -> None: + """Generate device name from manufacturer and service tag.""" + if device_info.manufacturer and device_info.service_tag: + device_info.name = f"{device_info.manufacturer}-{device_info.service_tag}" + + +def _set_location_from_switches( + device_info: DeviceInfo, + ports: list, + nautobot_client: Nautobot, +) -> None: + """Determine device location from connected switches. + + Args: + device_info: DeviceInfo to update with location + ports: Pre-fetched list of Ironic port objects + nautobot_client: Nautobot API client + """ + try: + locations = set() + + for port in ports: + llc = port.local_link_connection or {} + switch_info = llc.get("switch_info") + + if not switch_info: + continue + + # Find switch in Nautobot by name + device = nautobot_client.dcim.devices.get(name=switch_info) + + if ( + device + and not isinstance(device, list) + and device.location + and device.rack + ): + locations.add((device.location.id, device.rack.id)) + + if not locations: + logger.warning("No switch locations found for node %s", device_info.uuid) + return + + if len(locations) > 1: + logger.warning( + "Node %s connected to switches in multiple racks: %s", + device_info.uuid, + locations, + ) + + # Use first location found + location_id, rack_id = next(iter(locations)) + device_info.location_id = location_id + device_info.rack_id = rack_id + + except Exception as e: + logger.error( + "Failed to determine location for node %s: %s", device_info.uuid, e + ) + + +def fetch_device_info( + node_uuid: str, + ironic_client: IronicClient, + nautobot_client: Nautobot, +) -> tuple[DeviceInfo, dict, list]: + """Fetch complete device info from Ironic. + + Args: + node_uuid: Ironic node UUID + ironic_client: Ironic API client + nautobot_client: Nautobot API client (for switch location lookup) + + Returns: + Tuple of (DeviceInfo, inventory dict, ports list) + """ + device_info = DeviceInfo(uuid=node_uuid) + + node = ironic_client.get_node(node_uuid) + inventory = ironic_client.get_node_inventory(node_ident=node_uuid) + ports = ironic_client.list_ports(node_id=node_uuid) + + # Populate in order + _populate_from_node(device_info, node) + _populate_from_inventory(device_info, inventory) + _generate_device_name(device_info) + _set_location_from_switches(device_info, ports, nautobot_client) + + return device_info, inventory, ports + + +def _create_nautobot_device(device_info: DeviceInfo, nautobot_client: Nautobot): + """Create a new device in Nautobot with minimal required fields. + + Returns the created device object for subsequent updates. 
+ """ + if not device_info.location_id: + raise ValueError(f"Cannot create device {device_info.uuid} without location") + + # Only mandatory fields for creation + device_attrs = { + "id": device_info.uuid, + "name": device_info.name or device_info.uuid, # Fallback to UUID if no name + "status": "Planned", # Default status, will be updated + "role": {"name": device_info.role}, + "device_type": { + "manufacturer": {"name": device_info.manufacturer}, + "model": device_info.model, + }, + "location": device_info.location_id, + } + + nautobot_device = nautobot_client.dcim.devices.create(**device_attrs) + logger.info("Created device %s in Nautobot", device_info.uuid) + return nautobot_device + + +def _get_record_value(record, attr: str = "value") -> str | None: + """Extract value from pynautobot Record object. + + pynautobot returns Record objects for choice fields (status, etc.) + and related objects (location, rack, tenant). This helper extracts + the comparable value. + + Args: + record: pynautobot Record object or primitive value + attr: Attribute to extract ("value" for choices, "id" for relations) + + Returns: + String value or None + """ + if record is None: + return None + if hasattr(record, attr): + return getattr(record, attr) + # Already a primitive value + return str(record) if record else None + + +def _update_nautobot_device( + device_info: DeviceInfo, + nautobot_device, +) -> bool: + """Update existing Nautobot device with current info. + + Returns True if any changes were made. + """ + updated = False + + # Status (Record with .name for display name e.g., "Staged", "Active") + # ProvisionStateMapper returns display names like "Staged", "Active" + if device_info.status: + current_status = _get_record_value(nautobot_device.status, "name") + if current_status != device_info.status: + nautobot_device.status = device_info.status + updated = True + logger.debug( + "Updating status: %s -> %s", current_status, device_info.status + ) + + # Name (can change on chassis swap) + if device_info.name and nautobot_device.name != device_info.name: + nautobot_device.name = device_info.name + updated = True + logger.debug("Updating name: %s", device_info.name) + + # Serial number (can change on chassis swap) + if ( + device_info.serial_number + and nautobot_device.serial != device_info.serial_number + ): + nautobot_device.serial = device_info.serial_number + updated = True + logger.debug("Updating serial: %s", device_info.serial_number) + + # Location (Record with .id attribute) + if device_info.location_id: + current_location = _get_record_value(nautobot_device.location, "id") + if current_location != device_info.location_id: + nautobot_device.location = device_info.location_id + updated = True + logger.debug("Updating location: %s", device_info.location_id) + + # Rack (Record with .id attribute) + if device_info.rack_id: + current_rack = _get_record_value(nautobot_device.rack, "id") + if current_rack != device_info.rack_id: + nautobot_device.rack = device_info.rack_id + updated = True + logger.debug("Updating rack: %s", device_info.rack_id) + + # Tenant (Record with .id attribute, from Ironic lessee) + if device_info.tenant_id: + current_tenant = _get_record_value(nautobot_device.tenant, "id") + if current_tenant != str(device_info.tenant_id): + nautobot_device.tenant = str(device_info.tenant_id) + updated = True + logger.debug("Updating tenant: %s", device_info.tenant_id) + + # Custom fields (merge, don't replace) + # pynautobot tracks custom_fields specially - we need to modify in place + 
cf_updated = False + if device_info.custom_fields: + current_cf = ( + dict(nautobot_device.custom_fields) if nautobot_device.custom_fields else {} + ) + for key, value in device_info.custom_fields.items(): + if current_cf.get(key) != value: + current_cf[key] = value + cf_updated = True + logger.debug("Updating custom field %s: %s", key, value) + if cf_updated: + nautobot_device.custom_fields = current_cf + updated = True + + if updated: + result = nautobot_device.save() + logger.info( + "Updated device %s in Nautobot, save result: %s", device_info.uuid, result + ) + else: + logger.debug("No changes for device %s", device_info.uuid) + + return updated + + +def sync_device_to_nautobot( + node_uuid: str, + nautobot_client: Nautobot, + sync_interfaces: bool = True, +) -> int: + """Sync an Ironic node to Nautobot. + + This is the main entry point. It: + 1. Fetches current state from Ironic (node + inventory + ports) + 2. Creates or updates the device in Nautobot + 3. Optionally syncs interfaces (ports) to Nautobot + + Can be called from any event handler. + + Args: + node_uuid: Ironic node UUID + nautobot_client: Nautobot API client + sync_interfaces: Whether to also sync interfaces (default: True) + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + if not node_uuid: + logger.error("Missing node UUID") + return EXIT_STATUS_FAILURE + + try: + ironic_client = IronicClient() + + # Fetch all device info from Ironic (returns inventory and ports too) + device_info, inventory, ports = fetch_device_info( + node_uuid, ironic_client, nautobot_client + ) + + # Check if device exists in Nautobot + nautobot_device = nautobot_client.dcim.devices.get(id=device_info.uuid) + + if not nautobot_device: + # Create new device with minimal fields + if not device_info.location_id: + logger.error("Cannot create device %s: no location found", node_uuid) + return EXIT_STATUS_FAILURE + nautobot_device = _create_nautobot_device(device_info, nautobot_client) + + # Update device with all fields (works for both new and existing) + _update_nautobot_device(device_info, nautobot_device) + + # Sync interfaces using already-fetched inventory and ports + if sync_interfaces: + interface_result = sync_interfaces_from_data( + node_uuid, inventory, ports, nautobot_client + ) + if interface_result != EXIT_STATUS_SUCCESS: + logger.warning( + "Interface sync failed for node %s, device sync succeeded", + node_uuid, + ) + # Don't fail the whole operation if interface sync fails + # Device is already synced successfully + + return EXIT_STATUS_SUCCESS + + except Exception: + logger.exception("Failed to sync device %s to Nautobot", node_uuid) + return EXIT_STATUS_FAILURE + + +def _extract_node_uuid_from_event(event_data: dict[str, Any]) -> str | None: + """Extract node UUID from any Ironic node event payload. + + Supports: + - NodeSetProvisionStatePayload (provision_set events) + - NodeCRUDPayload (create/update/delete events) + - NodeSetPowerStatePayload (power_set events) + - NodeCorrectedPowerStatePayload (power_state_corrected events) + - NodePayload (maintenance events) + + All these payloads have 'uuid' in ironic_object.data or at top level. 
+ """ + # Try payload.ironic_object.data.uuid first (standard notification format) + payload = event_data.get("payload", {}) + if isinstance(payload, dict): + ironic_data = payload.get("ironic_object.data", {}) + if isinstance(ironic_data, dict) and ironic_data.get("uuid"): + return ironic_data["uuid"] + + # Try ironic_object.uuid at top level (some event formats) + ironic_object = event_data.get("ironic_object", {}) + if isinstance(ironic_object, dict) and ironic_object.get("uuid"): + return ironic_object["uuid"] + + return None + + +def handle_node_event( + _conn: Connection, nautobot_client: Nautobot, event_data: dict[str, Any] +) -> int: + """Handle any Ironic node event and sync to Nautobot. + + This is a generic handler that works with: + - baremetal.node.provision_set.end + - baremetal.node.create.end + - baremetal.node.update.end + - baremetal.node.power_set.end + - baremetal.node.power_state_corrected.success + - baremetal.node.maintenance_set.end + + Args: + _conn: OpenStack connection (unused, kept for handler signature) + nautobot_client: Nautobot API client + event_data: Raw event data dict + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + node_uuid = _extract_node_uuid_from_event(event_data) + if not node_uuid: + logger.error("Could not extract node UUID from event: %s", event_data) + return EXIT_STATUS_FAILURE + + event_type = event_data.get("event_type", "unknown") + logger.info("Handling %s for node %s", event_type, node_uuid) + + return sync_device_to_nautobot(node_uuid, nautobot_client) + + +def delete_device_from_nautobot(node_uuid: str, nautobot_client: Nautobot) -> int: + """Delete a device from Nautobot. + + Args: + node_uuid: Ironic node UUID (used as device ID in Nautobot) + nautobot_client: Nautobot API client + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + if not node_uuid: + logger.error("Missing node UUID for delete") + return EXIT_STATUS_FAILURE + + try: + nautobot_device = nautobot_client.dcim.devices.get(id=node_uuid) + + if not nautobot_device or isinstance(nautobot_device, list): + logger.info("Device %s not found in Nautobot, nothing to delete", node_uuid) + return EXIT_STATUS_SUCCESS + + nautobot_device.delete() + logger.info("Deleted device %s from Nautobot", node_uuid) + return EXIT_STATUS_SUCCESS + + except Exception: + logger.exception("Failed to delete device %s from Nautobot", node_uuid) + return EXIT_STATUS_FAILURE + + +def handle_node_delete_event( + _conn: Connection, nautobot_client: Nautobot, event_data: dict[str, Any] +) -> int: + """Handle Ironic node delete event and remove from Nautobot. + + Args: + _conn: OpenStack connection (unused, kept for handler signature) + nautobot_client: Nautobot API client + event_data: Raw event data dict + + Returns: + EXIT_STATUS_SUCCESS on success, EXIT_STATUS_FAILURE on failure + """ + node_uuid = _extract_node_uuid_from_event(event_data) + if not node_uuid: + logger.error("Could not extract node UUID from delete event: %s", event_data) + return EXIT_STATUS_FAILURE + + logger.info("Handling node delete for %s", node_uuid) + return delete_device_from_nautobot(node_uuid, nautobot_client)