From dee2433c3d150d0138c6febb79d59142b3b2a8b7 Mon Sep 17 00:00:00 2001
From: aviruthen <91846056+aviruthen@users.noreply.github.com>
Date: Wed, 26 Nov 2025 06:55:32 -0800
Subject: [PATCH] Fix TuningStep validation error by adding V3 HyperparameterTuner duck typing support

---
 .../src/sagemaker/core/workflow/pipeline_context.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/sagemaker-core/src/sagemaker/core/workflow/pipeline_context.py b/sagemaker-core/src/sagemaker/core/workflow/pipeline_context.py
index a70b7fc1aa..e6fc0067d6 100644
--- a/sagemaker-core/src/sagemaker/core/workflow/pipeline_context.py
+++ b/sagemaker-core/src/sagemaker/core/workflow/pipeline_context.py
@@ -382,11 +382,11 @@ def retrieve_caller_name(job_instance):
 
     if isinstance(job_instance, Transformer):
         return "transform"
 
-    # Duck typing for HyperparameterTuner: has 'fit' method and 'best_estimator' method
-    # This avoids importing from sagemaker.train which would violate architecture
-    if hasattr(job_instance, 'fit') and hasattr(job_instance, 'best_estimator'):
+    # Duck typing for HyperparameterTuner (avoids importing from sagemaker.train,
+    # which would violate architecture): V2 exposes fit/best_estimator, V3 exposes tune/model_trainer
+    if (hasattr(job_instance, 'fit') and hasattr(job_instance, 'best_estimator')) or \
+            (hasattr(job_instance, 'tune') and hasattr(job_instance, 'model_trainer')):
         return "tune"
-
     # if isinstance(job_instance, AutoML):
     #     return "auto_ml"