
Commit d0c01d3

gustavocidornelas authored and whoseoyster committed
Make inference pipeline name and description optional. Defaults to 'Production'
1 parent 97aeeef commit d0c01d3

File tree: 1 file changed (+17 -10)


openlayer/__init__.py

Lines changed: 17 additions & 10 deletions
@@ -1390,8 +1390,8 @@ def load_project_version(self, version_id: str) -> Project:
     def create_inference_pipeline(
         self,
         project_id: str,
-        name: str,
         task_type: TaskType,
+        name: Optional[str] = None,
         description: Optional[str] = None,
     ) -> InferencePipeline:
         """Creates an inference pipeline in an Openlayer project.
@@ -1400,14 +1400,16 @@ def create_inference_pipeline(
 
         Parameters
         ----------
-        name : str
-            Name of your inference pipeline.
+        name : str, optional
+            Name of your inference pipeline. If not specified, the name will be
+            set to ``"Production"``.
 
         .. important::
             The inference pipeline name must be unique within a project.
 
-        description : str
-            Inference pipeline description.
+        description : str, optional
+            Inference pipeline description. If not specified, the description will be
+            set to ``"Monitoring production data."``.
 
         Returns
         -------
@@ -1441,8 +1443,8 @@ def create_inference_pipeline(
         """
         # Validate inference pipeline
         inference_pipeline_config = {
-            "name": name,
-            "description": description,
+            "name": name or "Production",
+            "description": description or "Monitoring production data.",
         }
         inference_pipeline_validator = (
             inference_pipeline_validators.InferencePipelineValidator(
@@ -1474,16 +1476,20 @@ def create_inference_pipeline(
         return inference_pipeline
 
     def load_inference_pipeline(
-        self, project_id: str, name: str, task_type: TaskType
+        self,
+        project_id: str,
+        task_type: TaskType,
+        name: Optional[str] = None,
     ) -> InferencePipeline:
         """Loads an existing inference pipeline from an Openlayer project.
 
         Parameters
         ----------
-        name : str
+        name : str, optional
             Name of the inference pipeline to be loaded.
             The name of the inference piepline is the one displayed on the
-            Openlayer platform.
+            Openlayer platform. If not specified, will try to load the
+            inference pipeline named ``"Production"``.
 
         .. note::
             If you haven't created the inference pipeline yet, you should use the
@@ -1515,6 +1521,7 @@ def load_inference_pipeline(
             platform. Refer to :obj:`upload_reference_dataset` and
             :obj:`publish_batch_data` for detailed examples.
         """
+        name = name or "Production"
         endpoint = f"/projects/{project_id}/inference-pipelines?name={name}"
         inference_pipeline_data = self.api.get_request(endpoint)
         if len(inference_pipeline_data["items"]) == 0:
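
After this commit, both methods can be called without a name or description. The sketch below shows the new call pattern; the method signatures and defaults come from the diff above, while the OpenlayerClient setup, the API key and project ID placeholders, the TaskType import path, and TaskType.TabularClassification are assumptions for illustration only.

import openlayer
from openlayer.tasks import TaskType

# Assumed client setup; replace the placeholders with real values.
client = openlayer.OpenlayerClient("YOUR_API_KEY_HERE")

# Name and description omitted: per this commit, the pipeline is created
# as "Production" with the description "Monitoring production data."
inference_pipeline = client.create_inference_pipeline(
    project_id="YOUR_PROJECT_ID",
    task_type=TaskType.TabularClassification,
)

# Name omitted: loads the inference pipeline named "Production".
production_pipeline = client.load_inference_pipeline(
    project_id="YOUR_PROJECT_ID",
    task_type=TaskType.TabularClassification,
)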

0 commit comments
