From ebaaa5ca22577bfff3785396df16a7af43abd4f7 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 08:23:06 +0100 Subject: [PATCH 1/8] Removing _discover_U_and_V from convert.nemo As it is not needed, now that we require users to provide field names explicitly --- src/parcels/convert.py | 12 +----------- 1 file changed, 1 insertion(+), 11 deletions(-) diff --git a/src/parcels/convert.py b/src/parcels/convert.py index 554ef86298..6bf4b35731 100644 --- a/src/parcels/convert.py +++ b/src/parcels/convert.py @@ -23,16 +23,6 @@ if typing.TYPE_CHECKING: import uxarray as ux -_NEMO_CF_STANDARD_NAME_FALLBACKS = { - "UV": [ - ( - "sea_water_x_velocity", - "sea_water_y_velocity", - ), - ], - "W": ["upward_sea_water_velocity", "vertical_sea_water_velocity"], -} - _NEMO_DIMENSION_COORD_NAMES = ["x", "y", "time", "x", "x_center", "y", "y_center", "depth", "glamf", "gphif"] _NEMO_AXIS_VARNAMES = { @@ -131,6 +121,7 @@ def _ds_rename_using_standard_names(ds: xr.Dataset | ux.UxDataset, name_dict: di return ds +# TODO is this function still needed, now that we require users to provide field names explicitly? 
def _discover_U_and_V(ds: xr.Dataset, cf_standard_names_fallbacks) -> xr.Dataset: # Assumes that the dataset has U and V data @@ -219,7 +210,6 @@ def nemo_to_sgrid(*, fields: dict[str, xr.Dataset | xr.DataArray], coords: xr.Da ds = xr.merge(list(fields.values()) + [coords]) ds = _maybe_rename_variables(ds, _NEMO_VARNAMES_MAPPING) - ds = _discover_U_and_V(ds, _NEMO_CF_STANDARD_NAME_FALLBACKS) ds = _maybe_create_depth_dim(ds) ds = _maybe_bring_UV_depths_to_depth(ds) ds = _drop_unused_dimensions_and_coords(ds, _NEMO_DIMENSION_COORD_NAMES) From e07b143792c6a6c8d3bb5a3aa85ea11273395ae7 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 11:57:30 +0100 Subject: [PATCH 2/8] Removing units from repr Since units has been removed as part of #2459 --- src/parcels/_reprs.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/parcels/_reprs.py b/src/parcels/_reprs.py index 07f58b1532..dc0ef6948e 100644 --- a/src/parcels/_reprs.py +++ b/src/parcels/_reprs.py @@ -39,7 +39,6 @@ def field_repr(field: Field, level: int = 0) -> str: name : {field.name!r} interp_method : {field.interp_method!r} time_interval : {field.time_interval!r} - units : {field.units!r} igrid : {field.igrid!r} DataArray: {textwrap.indent(repr(field.data), 8 * " ")} From 622cf938853c559143c513270b6cc735eff576b4 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 11:58:59 +0100 Subject: [PATCH 3/8] Creating convert.copernicusmarine_to_sgrid --- src/parcels/convert.py | 71 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 71 insertions(+) diff --git a/src/parcels/convert.py b/src/parcels/convert.py index 6bf4b35731..f66bc972bf 100644 --- a/src/parcels/convert.py +++ b/src/parcels/convert.py @@ -42,6 +42,13 @@ "wo": "W", } +_COPERNICUS_MARINE_AXIS_VARNAMES = { + "X": "lon", + "Y": "lat", + "Z": "depth", + "T": "time", +} + def _maybe_bring_UV_depths_to_depth(ds): if "U" in ds.variables and "depthu" in ds.U.coords and "depth" in ds.coords: @@ -257,3 +264,67 @@ def 
nemo_to_sgrid(*, fields: dict[str, xr.Dataset | xr.DataArray], coords: xr.Da # Update to use lon and lat for internal naming ds = sgrid.rename(ds, {"gphif": "lat", "glamf": "lon"}) # TODO: Logging message about rename return ds + + +def copernicusmarine_to_sgrid( + *, fields: dict[str, xr.Dataset | xr.DataArray], coords: xr.Dataset | None = None +) -> xr.Dataset: + """Create an sgrid-compliant xarray.Dataset from a dataset of Copernicus Marine netcdf files. + + Parameters + ---------- + fields : dict[str, xr.Dataset | xr.DataArray] + Dictionary of xarray.Dataset or xarray.DataArray objects as obtained from a set of Copernicus Marine netcdf files. + coords : xarray.Dataset, optional + xarray.Dataset containing coordinate variables. By default these are time, depth, latitude, longitude + + Returns + ------- + xarray.Dataset + Dataset object following SGRID conventions to be (optionally) modified and passed to a FieldSet constructor. + + Notes + ----- + See https://help.marine.copernicus.eu/en/collections/9080063-copernicus-marine-toolbox for more information on the copernicusmarine toolbox. + The toolbox can be used to ingest data from most of the products on the Copernicus Marine Service (https://data.marine.copernicus.eu/products) into an xarray.Dataset. + You can use indexing and slicing to select a subset of the data before passing it to this function.
+ + """ + fields = fields.copy() + + for name, field_da in fields.items(): + if isinstance(field_da, xr.Dataset): + field_da = field_da[name] + # TODO: logging message, warn if multiple fields are in this dataset + else: + field_da = field_da.rename(name) + fields[name] = field_da + + ds = xr.merge(list(fields.values()) + ([coords] if coords is not None else [])) + ds.attrs.clear() # Clear global attributes from the merging + + ds = _maybe_rename_coords(ds, _COPERNICUS_MARINE_AXIS_VARNAMES) + if "W" in ds.data_vars: + # Negate W to convert from up positive to down positive (as that's the direction of positive z) + ds["W"].data *= -1 + + if "grid" in ds.cf.cf_roles: + raise ValueError( + "Dataset already has a 'grid' variable (according to cf_roles). Didn't expect there to be grid metadata on copernicusmarine datasets - please open an issue with more information about your dataset." + ) + ds["grid"] = xr.DataArray( + 0, + attrs=sgrid.Grid2DMetadata( # use dummy *_center dimensions - this is A grid data (all defined on nodes) + cf_role="grid_topology", + topology_dimension=2, + node_dimensions=("lon", "lat"), + node_coordinates=("lon", "lat"), + face_dimensions=( + sgrid.DimDimPadding("x_center", "lon", sgrid.Padding.LOW), + sgrid.DimDimPadding("y_center", "lat", sgrid.Padding.LOW), + ), + vertical_dimensions=(sgrid.DimDimPadding("z_center", "depth", sgrid.Padding.LOW),), + ).to_attrs(), + ) + + return ds From f269cd94de08a6e16b49ddef5bb0112b431aaa9a Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 11:59:54 +0100 Subject: [PATCH 4/8] Generalising _is_coordinate_in_degrees By looking at whether a version of "degrees" is present in units. 
Before this, "degrees_east" was for example not recognised --- src/parcels/_core/fieldset.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/parcels/_core/fieldset.py b/src/parcels/_core/fieldset.py index 289d41c025..ac72fe4d7a 100644 --- a/src/parcels/_core/fieldset.py +++ b/src/parcels/_core/fieldset.py @@ -556,12 +556,11 @@ def _is_agrid(ds: xr.Dataset) -> bool: def _is_coordinate_in_degrees(da: xr.DataArray) -> bool: - match da.attrs.get("units"): - case None: - raise ValueError( - f"Coordinate {da.name!r} of your dataset has no 'units' attribute - we don't know what the spatial units are." - ) - case "degrees": - return True - case _: - return False + units = da.attrs.get("units") + if units is None: + raise ValueError( + f"Coordinate {da.name!r} of your dataset has no 'units' attribute - we don't know what the spatial units are." + ) + if isinstance(units, str) and "degree" in units.lower(): + return True + return False From 2f4194f19225f23ea1a98f6dd9d743ac85a25ca8 Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 12:58:03 +0100 Subject: [PATCH 5/8] Moving copernicusmarine unit tests to test_convert --- tests/test_convert.py | 49 +++++++++++++++++++++++++++++ tests/test_fieldset.py | 71 ------------------------------------------ 2 files changed, 49 insertions(+), 71 deletions(-) diff --git a/tests/test_convert.py b/tests/test_convert.py index eaf39d8100..64d0e33a91 100644 --- a/tests/test_convert.py +++ b/tests/test_convert.py @@ -1,8 +1,11 @@ +import pytest import xarray as xr import parcels import parcels.convert as convert +from parcels import FieldSet from parcels._core.utils import sgrid +from parcels._datasets.structured.circulation_models import datasets as datasets_circulation_models def test_nemo_to_sgrid(): @@ -34,3 +37,49 @@ def test_nemo_to_sgrid(): meta.get_value_by_id("face_dimension1"), # X center meta.get_value_by_id("node_dimension2"), # Y edge }.issubset(set(ds["V"].dims)) + + 
+_COPERNICUS_DATASETS = [ + datasets_circulation_models["ds_copernicusmarine"], + datasets_circulation_models["ds_copernicusmarine_waves"], +] + + +@pytest.mark.parametrize("ds", _COPERNICUS_DATASETS) +def test_fieldset_from_copernicusmarine(ds, caplog): + if "uo" in ds: + fields = {"U": ds["uo"], "V": ds["vo"]} + elif "VSDX" in ds: + fields = {"U": ds["VSDX"], "V": ds["VSDY"]} + else: + raise ValueError("Test dataset does not contain recognized current variables.") + ds_fset = convert.copernicusmarine_to_sgrid(fields=fields) + fieldset = FieldSet.from_sgrid_conventions(ds_fset) + assert "U" in fieldset.fields + assert "V" in fieldset.fields + assert "UV" in fieldset.fields + + +def test_fieldset_from_copernicusmarine_no_currents(caplog): + ds = datasets_circulation_models["ds_copernicusmarine"] + ds_fset = convert.copernicusmarine_to_sgrid(fields={"do": ds["uo"]}) + fieldset = FieldSet.from_sgrid_conventions(ds_fset) + assert "U" not in fieldset.fields + assert "V" not in fieldset.fields + assert "UV" not in fieldset.fields + assert caplog.text == "" + + +@pytest.mark.parametrize("ds", _COPERNICUS_DATASETS) +def test_fieldset_from_copernicusmarine_no_logs(ds, caplog): + ds = ds.copy() + zeros = xr.zeros_like(list(ds.data_vars.values())[0]) + ds["U"] = zeros + ds["V"] = zeros + + ds_fset = convert.copernicusmarine_to_sgrid(fields={"U": ds["U"], "V": ds["V"]}) + fieldset = FieldSet.from_sgrid_conventions(ds_fset) + assert "U" in fieldset.fields + assert "V" in fieldset.fields + assert "UV" in fieldset.fields + assert caplog.text == "" diff --git a/tests/test_fieldset.py b/tests/test_fieldset.py index 23b205cc93..070e331375 100644 --- a/tests/test_fieldset.py +++ b/tests/test_fieldset.py @@ -8,7 +8,6 @@ from parcels import Field, ParticleFile, ParticleSet, VectorField, XGrid from parcels._core.fieldset import CalendarError, FieldSet, _datetime_to_msg -from parcels._datasets.structured.circulation_models import datasets as datasets_circulation_models from 
parcels._datasets.structured.generic import T as T_structured from parcels._datasets.structured.generic import datasets as datasets_structured from parcels._datasets.structured.generic import datasets_sgrid @@ -243,76 +242,6 @@ def test_fieldset_add_field_after_pset(): ... -_COPERNICUS_DATASETS = [ - datasets_circulation_models["ds_copernicusmarine"], - datasets_circulation_models["ds_copernicusmarine_waves"], -] - - -@pytest.mark.parametrize("ds", _COPERNICUS_DATASETS) -def test_fieldset_from_copernicusmarine(ds, caplog): - fieldset = FieldSet.from_copernicusmarine(ds) - assert "U" in fieldset.fields - assert "V" in fieldset.fields - assert "UV" in fieldset.fields - assert "renamed it to 'U'" in caplog.text - assert "renamed it to 'V'" in caplog.text - - -@pytest.mark.parametrize("ds", [datasets_circulation_models["ds_copernicusmarine"].copy()]) -def test_fieldset_from_copernicusmarine_missing_axis(ds, caplog): - del ds["latitude"].attrs["axis"] - - with pytest.raises( - ValueError, - match="Dataset missing CF compliant metadata for axes " - ".*. Expected 'axis' attribute to be set " - "on all dimension axes .*. 
" - "HINT: Add xarray metadata attribute 'axis' to dimension .*", - ): - FieldSet.from_copernicusmarine(ds) - - -def test_fieldset_from_copernicusmarine_no_currents(caplog): - ds = datasets_circulation_models["ds_copernicusmarine"].cf.drop_vars( - ["eastward_sea_water_velocity", "northward_sea_water_velocity"] - ) - fieldset = FieldSet.from_copernicusmarine(ds) - assert "U" not in fieldset.fields - assert "V" not in fieldset.fields - assert "UV" not in fieldset.fields - assert caplog.text == "" - - -@pytest.mark.parametrize("ds", _COPERNICUS_DATASETS) -def test_fieldset_from_copernicusmarine_no_logs(ds, caplog): - ds = ds.copy() - zeros = xr.zeros_like(list(ds.data_vars.values())[0]) - ds["U"] = zeros - ds["V"] = zeros - - fieldset = FieldSet.from_copernicusmarine(ds) - assert "U" in fieldset.fields - assert "V" in fieldset.fields - assert "UV" in fieldset.fields - assert caplog.text == "" - - -def test_fieldset_from_copernicusmarine_with_W(caplog): - ds = datasets_circulation_models["ds_copernicusmarine"] - ds = ds.copy() - ds["wo"] = ds["uo"] - ds["wo"].attrs["standard_name"] = "vertical_sea_water_velocity" - - fieldset = FieldSet.from_copernicusmarine(ds) - assert "U" in fieldset.fields - assert "V" in fieldset.fields - assert "W" in fieldset.fields - assert "UV" in fieldset.fields - assert "UVW" in fieldset.fields - assert "renamed it to 'W'" in caplog.text - - def test_fieldset_from_fesom2(): ds = datasets_unstructured["stommel_gyre_delaunay"] fieldset = FieldSet.from_fesom2(ds) From 2eab36b52463f288c152f733798c6511dcc77bdd Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 13:12:22 +0100 Subject: [PATCH 6/8] Updating tutorials and tests to use convert.copernicusmarine --- docs/getting_started/explanation_concepts.md | 4 +++- docs/getting_started/tutorial_output.ipynb | 9 ++++++--- docs/getting_started/tutorial_quickstart.md | 11 ++++++++--- .../user_guide/examples/explanation_kernelloop.md | 9 ++++++++- 
.../user_guide/examples/tutorial_Argofloats.ipynb | 11 ++++++++++- .../user_guide/examples/tutorial_delaystart.ipynb | 9 ++++++--- docs/user_guide/examples/tutorial_diffusion.ipynb | 4 +++- .../examples/tutorial_dt_integrators.ipynb | 5 ++++- .../examples/tutorial_gsw_density.ipynb | 14 +++++++++++--- docs/user_guide/examples/tutorial_sampling.ipynb | 9 ++++++--- .../examples_v3/tutorial_splitparticles.ipynb | 15 +++++++++------ tests/test_particlefile.py | 5 ++++- 12 files changed, 78 insertions(+), 27 deletions(-) diff --git a/docs/getting_started/explanation_concepts.md b/docs/getting_started/explanation_concepts.md index b58ddb61eb..8d4c6bf548 100644 --- a/docs/getting_started/explanation_concepts.md +++ b/docs/getting_started/explanation_concepts.md @@ -36,7 +36,9 @@ For several common input datasets, such as the Copernicus Marine Service analysi ```python dataset = xr.open_mfdataset("insert_copernicus_data_files.nc") -fieldset = parcels.FieldSet.from_copernicusmarine(dataset) +fields = {"U": ds_fields["uo"], "V": ds_fields["vo"]} +ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields) +fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset) ``` In some cases, we might want to combine `parcels.Field`s from different sources in the same `parcels.FieldSet`, such as ocean currents from one dataset and Stokes drift from another. 
This is possible in Parcels by adding each `parcels.Field` separately: diff --git a/docs/getting_started/tutorial_output.ipynb b/docs/getting_started/tutorial_output.ipynb index dfd1c44581..acf54cafac 100644 --- a/docs/getting_started/tutorial_output.ipynb +++ b/docs/getting_started/tutorial_output.ipynb @@ -59,7 +59,10 @@ "ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n", "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)" + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)" ] }, { @@ -557,7 +560,7 @@ "metadata": { "celltoolbar": "Metagegevens bewerken", "kernelspec": { - "display_name": "test-notebooks", + "display_name": "docs", "language": "python", "name": "python3" }, @@ -571,7 +574,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/getting_started/tutorial_quickstart.md b/docs/getting_started/tutorial_quickstart.md index b843ea9df5..348309eaf0 100644 --- a/docs/getting_started/tutorial_quickstart.md +++ b/docs/getting_started/tutorial_quickstart.md @@ -42,11 +42,16 @@ As we can see, the reanalysis dataset contains eastward velocity `uo`, northward (`thetao`) and salinity (`so`) fields. These hydrodynamic fields need to be stored in a {py:obj}`parcels.FieldSet` object. Parcels provides tooling to parse many types -of models or observations into such a `parcels.FieldSet` object. Here, we use {py:func}`parcels.FieldSet.from_copernicusmarine()`, which -recognizes the standard names of a velocity field: +of models or observations into such a `parcels.FieldSet` object. This is done in a two-step approach. 
+ +First, we convert the dataset into an SGRID-compliant dataset, for example by using one of the `parcels.convert.*_to_sgrid()` functions. Then, we create the `parcels.FieldSet` from the SGRID-compliant dataset using `parcels.FieldSet.from_sgrid_conventions()`. + +Below, we use a combination of {py:func}`parcels.convert.copernicusmarine_to_sgrid()` and {py:func}`parcels.FieldSet.from_sgrid_conventions()`, providing the names of the velocity fields in the dataset in the dictionary `fields`: ```{code-cell} -fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields) +fields = {"U": ds_fields["uo"], "V": ds_fields["vo"]} +ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields) +fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset) ``` The subset contains a region of the Agulhas current along the southeastern coast of Africa: diff --git a/docs/user_guide/examples/explanation_kernelloop.md b/docs/user_guide/examples/explanation_kernelloop.md index 025305ce32..4cd2adb0f0 100644 --- a/docs/user_guide/examples/explanation_kernelloop.md +++ b/docs/user_guide/examples/explanation_kernelloop.md @@ -68,7 +68,14 @@ ds_fields["VWind"] = xr.DataArray( data=np.zeros((tdim, ydim, xdim)), coords=[ds_fields.time, ds_fields.latitude, ds_fields.longitude]) -fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields) +fields = { + "U": ds_fields["uo"], + "V": ds_fields["vo"], + "UWind": ds_fields["UWind"], + "VWind": ds_fields["VWind"], +} +ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields) +fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset) # Create a vecorfield for the wind windvector = parcels.VectorField( diff --git a/docs/user_guide/examples/tutorial_Argofloats.ipynb b/docs/user_guide/examples/tutorial_Argofloats.ipynb index 94cb9808ce..338541676d 100644 --- a/docs/user_guide/examples/tutorial_Argofloats.ipynb +++ b/docs/user_guide/examples/tutorial_Argofloats.ipynb @@ -121,7 +121,16 @@ "# TODO check how we can get good performance without
loading full dataset in memory\n", "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)\n", + "# Select fields\n", + "fields = {\n", + " \"U\": ds_fields[\"uo\"],\n", + " \"V\": ds_fields[\"vo\"],\n", + " \"thetao\": ds_fields[\"thetao\"],\n", + "}\n", + "\n", + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)\n", "fieldset.add_constant(\"mindepth\", 1.0)\n", "\n", "# Define a new Particle type including extra Variables\n", diff --git a/docs/user_guide/examples/tutorial_delaystart.ipynb b/docs/user_guide/examples/tutorial_delaystart.ipynb index 365a2ed28d..81ac797f4b 100644 --- a/docs/user_guide/examples/tutorial_delaystart.ipynb +++ b/docs/user_guide/examples/tutorial_delaystart.ipynb @@ -57,7 +57,10 @@ "ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n", "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)" + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)" ] }, { @@ -433,7 +436,7 @@ ], "metadata": { "kernelspec": { - "display_name": "test-notebooks", + "display_name": "docs", "language": "python", "name": "python3" }, @@ -447,7 +450,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/user_guide/examples/tutorial_diffusion.ipynb b/docs/user_guide/examples/tutorial_diffusion.ipynb index a0aeac6b87..eca4c35092 100644 --- a/docs/user_guide/examples/tutorial_diffusion.ipynb +++ 
b/docs/user_guide/examples/tutorial_diffusion.ipynb @@ -491,7 +491,9 @@ "metadata": {}, "outputs": [], "source": [ - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)\n", + "fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)\n", "\n", "\n", "def degree_lat_to_meter(d):\n", diff --git a/docs/user_guide/examples/tutorial_dt_integrators.ipynb b/docs/user_guide/examples/tutorial_dt_integrators.ipynb index f33892dde6..2ecf7f81af 100644 --- a/docs/user_guide/examples/tutorial_dt_integrators.ipynb +++ b/docs/user_guide/examples/tutorial_dt_integrators.ipynb @@ -68,7 +68,10 @@ "ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n", "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)\n", + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)\n", "\n", "# Check field resolution in time and space\n", "print(\n", diff --git a/docs/user_guide/examples/tutorial_gsw_density.ipynb b/docs/user_guide/examples/tutorial_gsw_density.ipynb index ab64272218..44b5928a7a 100644 --- a/docs/user_guide/examples/tutorial_gsw_density.ipynb +++ b/docs/user_guide/examples/tutorial_gsw_density.ipynb @@ -47,7 +47,15 @@ "# TODO check how we can get good performance without loading full dataset in memory\n", "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)" + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "fields = {\n", + " \"U\": ds_fields[\"uo\"],\n", + " \"V\": ds_fields[\"vo\"],\n", + " \"thetao\": ds_fields[\"thetao\"],\n", + 
" \"so\": ds_fields[\"so\"],\n", + "}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)" ] }, { @@ -137,7 +145,7 @@ ], "metadata": { "kernelspec": { - "display_name": "test-notebooks", + "display_name": "docs", "language": "python", "name": "python3" }, @@ -151,7 +159,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/docs/user_guide/examples/tutorial_sampling.ipynb b/docs/user_guide/examples/tutorial_sampling.ipynb index 4fc1ad7e16..28b9c3954f 100644 --- a/docs/user_guide/examples/tutorial_sampling.ipynb +++ b/docs/user_guide/examples/tutorial_sampling.ipynb @@ -66,7 +66,10 @@ "ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n", "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds_fields)" + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"], \"thetao\": ds_fields[\"thetao\"]}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)" ] }, { @@ -391,7 +394,7 @@ "metadata": { "celltoolbar": "Raw-celnotatie", "kernelspec": { - "display_name": "test-notebooks", + "display_name": "docs", "language": "python", "name": "python3" }, @@ -405,7 +408,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" }, "pycharm": { "stem_cell": { diff --git a/docs/user_guide/examples_v3/tutorial_splitparticles.ipynb b/docs/user_guide/examples_v3/tutorial_splitparticles.ipynb index 02a0d26c90..d954fbd3be 100644 --- a/docs/user_guide/examples_v3/tutorial_splitparticles.ipynb +++ b/docs/user_guide/examples_v3/tutorial_splitparticles.ipynb @@ -48,10 +48,13 @@ " 
\"CopernicusMarine_data_for_Argo_tutorial\"\n", ")\n", "\n", - "ds = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n", - "ds.load() # load the dataset into memory\n", + "ds_fields = xr.open_mfdataset(f\"{example_dataset_folder}/*.nc\", combine=\"by_coords\")\n", + "ds_fields.load() # load the dataset into memory\n", "\n", - "fieldset = parcels.FieldSet.from_copernicusmarine(ds)" + "# Convert to SGRID-compliant dataset and create FieldSet\n", + "fields = {\"U\": ds_fields[\"uo\"], \"V\": ds_fields[\"vo\"]}\n", + "ds_fset = parcels.convert.copernicusmarine_to_sgrid(fields=fields)\n", + "fieldset = parcels.FieldSet.from_sgrid_conventions(ds_fset)" ] }, { @@ -117,7 +120,7 @@ "source": [ "ds_out = xr.open_zarr(\"growingparticles.zarr\")\n", "plt.plot(\n", - " (ds_out.time.values[:].T - ds.time.values[0]).astype(\"timedelta64[h]\"),\n", + " (ds_out.time.values[:].T - ds_fields.time.values[0]).astype(\"timedelta64[h]\"),\n", " ds_out.mass.T,\n", ")\n", "plt.grid()\n", @@ -136,7 +139,7 @@ ], "metadata": { "kernelspec": { - "display_name": "test-notebooks", + "display_name": "docs", "language": "python", "name": "python3" }, @@ -150,7 +153,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.0" + "version": "3.14.2" } }, "nbformat": 4, diff --git a/tests/test_particlefile.py b/tests/test_particlefile.py index 5b433c3e0a..2dbb812c49 100755 --- a/tests/test_particlefile.py +++ b/tests/test_particlefile.py @@ -25,6 +25,7 @@ from parcels._core.utils.time import TimeInterval, timedelta_to_float from parcels._datasets.structured.generated import peninsula_dataset from parcels._datasets.structured.generic import datasets +from parcels.convert import copernicusmarine_to_sgrid from parcels.interpolators import XLinear, XLinear_Velocity from parcels.kernels import AdvectionRK4 from tests.common_kernels import DoNothing @@ -444,7 +445,9 @@ def test_pset_execute_outputdt_backwards_fieldset_timevarying(): 
# TODO: Not ideal using the `download_example_dataset` here, but I'm struggling to recreate this error using the test suite fieldsets we have example_dataset_folder = download_example_dataset("CopernicusMarine_data_for_Argo_tutorial") ds_in = xr.open_mfdataset(f"{example_dataset_folder}/*.nc", combine="by_coords") - fieldset = FieldSet.from_copernicusmarine(ds_in) + fields = {"U": ds_in["uo"], "V": ds_in["vo"]} + ds_fset = copernicusmarine_to_sgrid(fields=fields) + fieldset = FieldSet.from_sgrid_conventions(ds_fset) ds = setup_pset_execute(outputdt=outputdt, execute_kwargs=dict(runtime=runtime, dt=dt), fieldset=fieldset) file_outputdt = ds.isel(trajectory=0).time.diff(dim="obs").values From fdceaedf0475675ddf1ae06aa006119c3e19495b Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 13:13:17 +0100 Subject: [PATCH 7/8] Removing fieldset.from_copernicusmarine --- src/parcels/_core/fieldset.py | 60 +---------------------------------- 1 file changed, 1 insertion(+), 59 deletions(-) diff --git a/src/parcels/_core/fieldset.py b/src/parcels/_core/fieldset.py index ac72fe4d7a..f7ae6bd0fc 100644 --- a/src/parcels/_core/fieldset.py +++ b/src/parcels/_core/fieldset.py @@ -20,7 +20,7 @@ from parcels._logger import logger from parcels._reprs import fieldset_repr from parcels._typing import Mesh -from parcels.convert import _discover_U_and_V, _ds_rename_using_standard_names, _maybe_rename_coords +from parcels.convert import _ds_rename_using_standard_names from parcels.interpolators import ( CGrid_Velocity, Ux_Velocity, @@ -187,64 +187,6 @@ def gridset(self) -> list[BaseGrid]: grids.append(field.grid) return grids - @classmethod - def from_copernicusmarine(cls, ds: xr.Dataset): - """Create a FieldSet from a Copernicus Marine Service xarray.Dataset. - - Parameters - ---------- - ds : xarray.Dataset - xarray.Dataset as obtained from the copernicusmarine toolbox. 
- - Returns - ------- - FieldSet - FieldSet object containing the fields from the dataset that can be used for a Parcels simulation. - - Notes - ----- - See https://help.marine.copernicus.eu/en/collections/9080063-copernicus-marine-toolbox for more information on the copernicusmarine toolbox. - The toolbox to ingest data from most of the products on the Copernicus Marine Service (https://data.marine.copernicus.eu/products) into an xarray.Dataset. - You can use indexing and slicing to select a subset of the data before passing it to this function. - Note that most Parcels uses will require both U and V fields to be present in the dataset. This function will try to find out which variables in the dataset correspond to U and V. - To override the automatic detection, rename the appropriate variables in your dataset to 'U' and 'V' before passing it to this function. - - """ - ds = ds.copy() - ds = _discover_U_and_V(ds, _COPERNICUS_MARINE_CF_STANDARD_NAME_FALLBACKS) - expected_axes = set("XYZT") # TODO: Update after we have support for 2D spatial fields - if missing_axes := (expected_axes - set(ds.cf.axes)): - raise ValueError( - f"Dataset missing CF compliant metadata for axes " - f"{missing_axes}. Expected 'axis' attribute to be set " - f"on all dimension axes {expected_axes}. " - "HINT: Add xarray metadata attribute 'axis' to dimension - e.g., ds['lat'].attrs['axis'] = 'Y'" - ) - - ds = _maybe_rename_coords(ds, _COPERNICUS_MARINE_AXIS_VARNAMES) - if "W" in ds.data_vars: - # Negate W to convert from up positive to down positive (as that's the direction of positive z) - ds["W"].data *= -1 - - if "grid" in ds.cf.cf_roles: - raise ValueError( - "Dataset already has a 'grid' variable (according to cf_roles). Didn't expect there to be grid metadata on copernicusmarine datasets - please open an issue with more information about your dataset." 
- ) - ds["grid"] = xr.DataArray( - 0, - attrs=sgrid.Grid2DMetadata( # use dummy *_center dimensions - this is A grid data (all defined on nodes) - cf_role="grid_topology", - topology_dimension=2, - node_dimensions=("lon", "lat"), - face_dimensions=( - sgrid.DimDimPadding("x_center", "lon", sgrid.Padding.LOW), - sgrid.DimDimPadding("y_center", "lat", sgrid.Padding.LOW), - ), - vertical_dimensions=(sgrid.DimDimPadding("z_center", "depth", sgrid.Padding.LOW),), - ).to_attrs(), - ) - return cls.from_sgrid_conventions(ds, mesh="spherical") - @classmethod def from_fesom2(cls, ds: ux.UxDataset): """Create a FieldSet from a FESOM2 uxarray.UxDataset. From 3234672a66b9c7dcf79dd8ceee3997f8594c7ace Mon Sep 17 00:00:00 2001 From: Erik van Sebille Date: Thu, 15 Jan 2026 13:38:34 +0100 Subject: [PATCH 8/8] Renaming test names to highlight they test convert --- tests/test_convert.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/test_convert.py b/tests/test_convert.py index 64d0e33a91..b80271a40a 100644 --- a/tests/test_convert.py +++ b/tests/test_convert.py @@ -46,7 +46,7 @@ def test_nemo_to_sgrid(): @pytest.mark.parametrize("ds", _COPERNICUS_DATASETS) -def test_fieldset_from_copernicusmarine(ds, caplog): +def test_convert_copernicusmarine(ds, caplog): if "uo" in ds: fields = {"U": ds["uo"], "V": ds["vo"]} elif "VSDX" in ds: @@ -60,7 +60,7 @@ def test_fieldset_from_copernicusmarine(ds, caplog): assert "UV" in fieldset.fields -def test_fieldset_from_copernicusmarine_no_currents(caplog): +def test_convert_copernicusmarine_no_currents(caplog): ds = datasets_circulation_models["ds_copernicusmarine"] ds_fset = convert.copernicusmarine_to_sgrid(fields={"do": ds["uo"]}) fieldset = FieldSet.from_sgrid_conventions(ds_fset) @@ -71,7 +71,7 @@ def test_fieldset_from_copernicusmarine_no_currents(caplog): @pytest.mark.parametrize("ds", _COPERNICUS_DATASETS) -def test_fieldset_from_copernicusmarine_no_logs(ds, caplog): +def 
test_convert_copernicusmarine_no_logs(ds, caplog): ds = ds.copy() zeros = xr.zeros_like(list(ds.data_vars.values())[0]) ds["U"] = zeros