@@ -714,8 +714,9 @@ def fit(
         else:
             if train is not None:
                 warnings.warn(
-                    "train data is provided but datamodule is provided."
-                    " Ignoring the train data and using the datamodule"
+                    "train data and datamodule are provided."
+                    " Ignoring the train data and using the datamodule."
+                    " Set either one of them to None to avoid this warning."
                 )
         model = self.prepare_model(
             datamodule,
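For context, a minimal sketch of the call pattern this clearer message targets, assuming the `prepare_dataloader`/`fit` low-level API visible in the hunks; `tabular_model` and `train_df` are hypothetical names:

    import warnings

    datamodule = tabular_model.prepare_dataloader(train=train_df)
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        # Passing both: `train` is ignored and the datamodule wins.
        tabular_model.fit(train=train_df, datamodule=datamodule)
    assert "Ignoring the train data" in str(caught[-1].message)

Setting either `train` or `datamodule` to None silences the warning, per the new message.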
@@ -791,8 +792,9 @@ def pretrain(
         else:
             if train is not None:
                 warnings.warn(
-                    "train data is provided but datamodule is provided."
-                    " Ignoring the train data and using the datamodule"
+                    "train data and datamodule are provided."
+                    " Ignoring the train data and using the datamodule."
+                    " Set either one of them to None to avoid this warning."
                 )
         model = self.prepare_model(
             datamodule,
@@ -1050,8 +1052,9 @@ def finetune(
         else:
             if train is not None:
                 warnings.warn(
-                    "train data is provided but datamodule is provided."
-                    " Ignoring the train data and using the datamodule"
+                    "train data and datamodule are provided."
+                    " Ignoring the train data and using the datamodule."
+                    " Set either one of them to None to avoid this warning."
                 )
         if freeze_backbone:
             for param in self.model.backbone.parameters():
@@ -1197,7 +1200,9 @@ def predict(
             If classification, it returns probabilities and final prediction
         """
         warnings.warn(
-            "`include_input_features` will be deprecated in the next release.",
+            "`include_input_features` will be deprecated in the next release."
+            " Please add index columns to the test dataframe if you want to"
+            " retain some features like the key or id",
             DeprecationWarning,
         )
         assert all(q <= 1 and q >= 0 for q in quantiles), "Quantiles should be a decimal between 0 and 1"
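A hedged migration sketch for the deprecation above: instead of `include_input_features`, keep the key in the dataframe index so it survives into the prediction output, as the new message suggests. `test_df`, `tabular_model`, and the `"id"` column are hypothetical:

    # Move the id out of the feature columns and into the index.
    test_df = test_df.set_index("id")
    pred_df = tabular_model.predict(test_df)
    # The output dataframe is aligned with the input, so the id can be
    # recovered from the index and re-attached as a column.
    pred_df = pred_df.reset_index()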
@@ -1286,6 +1291,11 @@ def predict(
             pred_df["prediction"] = self.datamodule.label_encoder.inverse_transform(
                 np.argmax(point_predictions, axis=1)
             )
+            warnings.warn(
+                "Classification prediction column will be renamed to `{target_col}_prediction` "
+                "in the next release to maintain consistency with regression.",
+                DeprecationWarning,
+            )
             if ret_logits:
                 for k, v in logits_predictions.items():
                     v = torch.cat(v, dim=0).numpy()
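Since the `prediction` column is slated to become `{target_col}_prediction`, downstream code can tolerate both names during the transition. A small forward-compatibility sketch; `pred_df` comes from this `predict` flow, and the single target name is an assumption:

    target_col = "target"  # assumption: your configured target column
    new_name = f"{target_col}_prediction"
    col = new_name if new_name in pred_df.columns else "prediction"
    labels = pred_df[col]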
@@ -1558,6 +1568,7 @@ def explain(
                 Defaults to None.

             **kwargs: Additional keyword arguments to be passed to the Captum method `attribute` function.
+
         Returns:
             DataFrame: The dataframe with the feature importance
         """
@@ -1587,7 +1598,7 @@ def explain(
         if len(data) <= 100:
             warnings.warn(
                 f"{method} gives better results when the number of samples is"
-                " large. For better results, try usingmore samples or some other"
+                " large. For better results, try using more samples or some other"
                 " methods like GradientShap which works well on single examples."
             )
         is_full_baselines = method in ["GradientShap", "DeepLiftShap"]
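The corrected warning points small inputs toward GradientShap. A minimal usage sketch, assuming the `explain(data, method=...)` signature shown above; `tabular_model` and `test_df` are hypothetical:

    # Sample-hungry attribution methods want more than 100 rows;
    # GradientShap and DeepLiftShap (the full-baselines methods in this
    # code) cope with single examples.
    one_row = test_df.head(1)
    importance_df = tabular_model.explain(one_row, method="GradientShap")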
@@ -1742,6 +1753,7 @@ def cross_validate(
                 fold, they will be valid for all the other folds. Defaults to True.

             **kwargs: Additional keyword arguments to be passed to the `fit` method of the model.
+
         Returns:
             DataFrame: The dataframe with the cross validation results
         """
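A minimal `cross_validate` usage sketch based on the docstring above, which says it returns a dataframe of cross-validation results; argument names beyond `cv` and `train` are assumptions:

    cv_results_df = tabular_model.cross_validate(
        cv=5,               # number of folds
        train=train_df,     # training data, re-split per fold
        metric="accuracy",  # assumption: a metric can be named here
    )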
@@ -1900,6 +1912,7 @@ def bagging_predict(
                 Defaults to None.

             **kwargs: Additional keyword arguments to be passed to the `fit` method of the model.
+
         Returns:
             DataFrame: The dataframe with the bagged predictions.
         """
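And a matching `bagging_predict` sketch per its docstring, which says it returns a dataframe of bagged predictions; parameter names other than `cv`, `train`, and `test` are assumptions:

    bagged_df = tabular_model.bagging_predict(
        cv=5,
        train=train_df,
        test=test_df,
        aggregate="mean",  # assumption: how per-fold predictions combine
    )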