diff --git a/src/project/common/utils/file/base.py b/src/project/common/utils/file/base.py
new file mode 100644
index 0000000..1712998
--- /dev/null
+++ b/src/project/common/utils/file/base.py
@@ -0,0 +1,73 @@
+from pathlib import Path
+from typing import Any, Protocol, TypeVar
+
+T = TypeVar('T')
+
+JsonLikeValue = dict[str, Any] | list[Any] | str | int | float | bool | None
+
+
+class FileLoader(Protocol):
+ """Protocol for loading data from files.
+
+ Implementations should handle file format-specific deserialization.
+ """
+
+ def load(self, path: str | Path) -> Any: # noqa: ANN401
+ """Load data from the specified file path.
+
+ Args:
+ path: Path to the file to load
+
+ Returns:
+ Deserialized data from the file
+
+ """
+ ...
+
+
+class FileSaver(Protocol):
+ """Protocol for saving data to files.
+
+ Implementations should handle file format-specific serialization.
+ """
+
+ def save(
+ self,
+ data: Any, # noqa: ANN401
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+ ) -> None:
+ """Save data to the specified file path.
+
+ Args:
+ data: Data to serialize and save
+ path: Path where the file should be saved
+ parents: If True, create parent directories as needed
+ exist_ok: If True, don't raise error if directory exists
+
+ """
+ ...
+
+
+class FileHandler(Protocol):
+ """Combined protocol for both loading and saving files.
+
+ Provides a complete interface for file I/O operations.
+ """
+
+ def load(self, path: str | Path) -> Any: # noqa: ANN401
+ """Load data from the specified file path."""
+ ...
+
+ def save(
+ self,
+ data: Any, # noqa: ANN401
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+ ) -> None:
+ """Save data to the specified file path."""
+ ...
diff --git a/src/project/common/utils/file/config.py b/src/project/common/utils/file/config.py
index 31f96a4..47f83ab 100644
--- a/src/project/common/utils/file/config.py
+++ b/src/project/common/utils/file/config.py
@@ -1,23 +1,24 @@
from pathlib import Path
from typing import Any
-from project.common.utils.file.json import load_json
-from project.common.utils.file.toml import load_toml
-from project.common.utils.file.yaml import load_yaml
+from project.common.utils.file.io import load_file
def load_config(path: str | Path) -> dict[str, Any]:
- """Load configuration from a file (JSON, YAML, or TOML)."""
- ext = Path(path).suffix.lower()
-
- if ext == '.json':
- data = load_json(path)
- elif ext in ('.yaml', '.yml'):
- data = load_yaml(path)
- elif ext == '.toml':
- data = load_toml(path)
- else:
- raise ValueError(f'Unsupported config file format: {ext}')
+ """Load configuration from a file (JSON, YAML, TOML, XML).
+
+ Args:
+ path: Path to the configuration file. Format is detected from extension.
+
+ Returns:
+ Configuration data as a dictionary.
+
+ Raises:
+ ValueError: If file format is not supported.
+ TypeError: If the loaded data is not a dictionary.
+
+ """
+ data = load_file(path)
if not isinstance(data, dict):
raise TypeError(f'Config file {path!r} did not return a dict, got {type(data).__name__}')
diff --git a/src/project/common/utils/file/factory.py b/src/project/common/utils/file/factory.py
new file mode 100644
index 0000000..443fa80
--- /dev/null
+++ b/src/project/common/utils/file/factory.py
@@ -0,0 +1,91 @@
+from pathlib import Path
+from typing import ClassVar, Literal
+
+from project.common.utils.file.base import FileHandler
+from project.common.utils.file.json import JsonFileHandler
+from project.common.utils.file.toml import TomlFileHandler
+from project.common.utils.file.xml import XmlFileHandler
+from project.common.utils.file.yaml import YamlFileHandler
+
+FileFormat = Literal['json', 'yaml', 'toml', 'xml']
+
+
+class FileHandlerFactory:
+ """Factory for creating file handlers based on file format."""
+
+ _handlers: ClassVar[dict[FileFormat, type[FileHandler]]] = {
+ 'json': JsonFileHandler,
+ 'yaml': YamlFileHandler,
+ 'toml': TomlFileHandler,
+ 'xml': XmlFileHandler,
+ }
+
+ @classmethod
+ def create(cls, format_type: FileFormat) -> FileHandler:
+ """Create a file handler for the specified format.
+
+ Args:
+            format_type: File format ('json', 'yaml', 'toml', or 'xml')
+
+ Returns:
+ File handler instance for the specified format
+
+ Raises:
+ ValueError: If format_type is not supported
+
+ """
+ handler_class = cls._handlers.get(format_type)
+ if handler_class is None:
+ supported = ', '.join(cls._handlers.keys())
+ msg = f'Unsupported file format: {format_type}. Supported formats: {supported}'
+ raise ValueError(msg)
+ return handler_class()
+
+ @classmethod
+ def from_path(cls, path: str | Path) -> FileHandler:
+ """Create a file handler by detecting format from file extension.
+
+ Args:
+ path: File path with extension
+
+ Returns:
+ File handler instance for the detected format
+
+ Raises:
+ ValueError: If file extension is not recognized or missing
+
+ """
+ suffix = Path(path).suffix.lstrip('.')
+ if not suffix:
+ msg = f'Cannot detect file format: no extension in {path}'
+ raise ValueError(msg)
+
+ # Map common extensions to format types
+ extension_map: dict[str, FileFormat] = {
+ 'json': 'json',
+ 'yaml': 'yaml',
+ 'yml': 'yaml',
+ 'toml': 'toml',
+ 'xml': 'xml',
+ }
+
+ format_type = extension_map.get(suffix.lower())
+ if format_type is None:
+ supported = ', '.join(extension_map.keys())
+ msg = f'Unsupported file extension: .{suffix}. Supported extensions: {supported}'
+ raise ValueError(msg)
+
+ return cls.create(format_type)
+
+
+def get_file_handler(path: str | Path) -> FileHandler:
+ """Get a file handler from a file path.
+
+ Args:
+ path: File path with extension
+
+ Returns:
+ File handler instance for the detected format
+
+ """
+ return FileHandlerFactory.from_path(path)
diff --git a/src/project/common/utils/file/io.py b/src/project/common/utils/file/io.py
new file mode 100644
index 0000000..f8374f3
--- /dev/null
+++ b/src/project/common/utils/file/io.py
@@ -0,0 +1,60 @@
+"""Generic file I/O operations using FileHandler abstraction.
+
+This module provides format-agnostic file operations that automatically
+detect and handle different file formats (JSON, YAML, TOML, XML).
+"""
+
+from pathlib import Path
+from typing import Any
+
+from project.common.utils.file.factory import get_file_handler
+
+
+def load_file(path: str | Path) -> Any: # noqa: ANN401
+ """Load data from a file, automatically detecting format from extension.
+
+ Args:
+ path: Path to the file (extension determines format)
+
+ Returns:
+ Deserialized data from the file
+
+ Raises:
+ ValueError: If file format cannot be detected or is unsupported
+
+ Example:
+ >>> data = load_file('config.json')
+ >>> data = load_file('settings.yaml')
+ >>> data = load_file('pyproject.toml')
+
+ """
+ handler = get_file_handler(path)
+ return handler.load(path)
+
+
+def save_file(
+ data: Any, # noqa: ANN401
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+) -> None:
+ """Save data to a file, automatically detecting format from extension.
+
+ Args:
+ data: Data to save
+ path: Path where the file should be saved (extension determines format)
+ parents: If True, create parent directories as needed
+ exist_ok: If True, don't raise error if directory exists
+
+ Raises:
+ ValueError: If file format cannot be detected or is unsupported
+
+ Example:
+ >>> save_file({'key': 'value'}, 'output.json')
+ >>> save_file(['item1', 'item2'], 'output.yaml')
+ >>> save_file({'tool': {'poetry': {}}}, 'pyproject.toml')
+
+ """
+ handler = get_file_handler(path)
+ handler.save(data, path, parents=parents, exist_ok=exist_ok)
diff --git a/src/project/common/utils/file/json.py b/src/project/common/utils/file/json.py
index 8f814a9..73f273c 100644
--- a/src/project/common/utils/file/json.py
+++ b/src/project/common/utils/file/json.py
@@ -20,3 +20,22 @@ def save_as_indented_json(
target.parent.mkdir(parents=parents, exist_ok=exist_ok)
with target.open(mode='w', encoding='utf-8') as fout:
json.dump(data, fout, ensure_ascii=False, indent=4, separators=(',', ': '))
+
+
+class JsonFileHandler:
+ """JSON file handler implementing FileHandler protocol."""
+
+ def load(self, path: str | Path) -> JsonValue:
+ """Load JSON data from file."""
+ return load_json(path)
+
+ def save(
+ self,
+ data: JsonValue,
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+ ) -> None:
+ """Save data as indented JSON to file."""
+ save_as_indented_json(data, path, parents=parents, exist_ok=exist_ok)
diff --git a/src/project/common/utils/file/toml.py b/src/project/common/utils/file/toml.py
index f17944e..0e87278 100644
--- a/src/project/common/utils/file/toml.py
+++ b/src/project/common/utils/file/toml.py
@@ -19,3 +19,22 @@ def save_as_toml(
target.parent.mkdir(parents=parents, exist_ok=exist_ok)
with target.open(mode='w', encoding='utf-8') as fout:
toml.dump(data, fout)
+
+
+class TomlFileHandler:
+ """TOML file handler implementing FileHandler protocol."""
+
+ def load(self, path: str | Path) -> dict[str, Any]:
+ """Load TOML data from file."""
+ return load_toml(path)
+
+ def save(
+ self,
+ data: dict[str, Any],
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+ ) -> None:
+ """Save data as TOML to file."""
+ save_as_toml(data, path, parents=parents, exist_ok=exist_ok)
diff --git a/src/project/common/utils/file/xml.py b/src/project/common/utils/file/xml.py
new file mode 100644
index 0000000..4eb6ad4
--- /dev/null
+++ b/src/project/common/utils/file/xml.py
@@ -0,0 +1,145 @@
+import xml.etree.ElementTree as ET
+from pathlib import Path
+from typing import Any
+
+
+def _dict_to_xml(tag: str, data: dict[str, Any]) -> ET.Element:
+ """Convert a dictionary to an XML Element recursively."""
+ element = ET.Element(tag)
+
+ for key, value in data.items():
+ child = ET.SubElement(element, str(key))
+ if isinstance(value, dict):
+ for sub_key, sub_value in value.items():
+ sub_child = ET.SubElement(child, str(sub_key))
+ sub_child.text = str(sub_value)
+ elif isinstance(value, list):
+ for item in value:
+ item_element = ET.SubElement(child, 'item')
+ if isinstance(item, dict):
+ for sub_key, sub_value in item.items():
+ sub_child = ET.SubElement(item_element, str(sub_key))
+ sub_child.text = str(sub_value)
+ else:
+ item_element.text = str(item)
+ else:
+ child.text = str(value)
+
+ return element
+
+
+def _xml_to_dict(element: ET.Element) -> dict[str, Any] | list[Any] | str:
+ """Convert an XML Element to a dictionary recursively."""
+ # If element has no children, return its text
+ if len(element) == 0:
+ return element.text or ''
+
+ result: dict[str, Any] = {}
+ for child in element:
+ child_data = _xml_to_dict(child)
+
+ # Handle list items
+ if child.tag == 'item':
+ if element.tag not in result:
+ result[element.tag] = []
+ if isinstance(result[element.tag], list):
+ result[element.tag].append(child_data)
+ # Handle regular elements
+ elif child.tag in result:
+ # Convert to list if duplicate tags
+ if not isinstance(result[child.tag], list):
+ result[child.tag] = [result[child.tag]]
+ result[child.tag].append(child_data)
+ else:
+ result[child.tag] = child_data
+
+ return result
+
+
+def load_xml(path: str | Path) -> dict[str, Any]:
+ """Load XML data from file and convert to dictionary.
+
+ Args:
+ path: Path to the XML file
+
+ Returns:
+ Dictionary representation of the XML data
+
+ Note:
+ This uses xml.etree.ElementTree which is not secure against maliciously
+ constructed data. For untrusted data, consider using defusedxml.
+
+ """
+ tree = ET.parse(str(path)) # noqa: S314
+ root = tree.getroot()
+ result = _xml_to_dict(root)
+
+ # Wrap in root tag name if result is not already wrapped
+ if isinstance(result, dict) and root.tag not in result:
+ return {root.tag: result}
+ return result if isinstance(result, dict) else {root.tag: result}
+
+
+def save_as_xml(
+ data: dict[str, Any],
+ path: str | Path,
+ root_tag: str = 'root',
+ parents: bool = True,
+ exist_ok: bool = True,
+) -> None:
+ """Save dictionary data as XML to file.
+
+ Args:
+ data: Dictionary data to save
+ path: Path where the XML file should be saved
+ root_tag: Tag name for the root element (default: 'root')
+ parents: If True, create parent directories as needed
+ exist_ok: If True, don't raise error if directory exists
+
+ """
+ target = Path(path)
+ target.parent.mkdir(parents=parents, exist_ok=exist_ok)
+
+ # If data has single key, use it as root tag
+ if len(data) == 1:
+ root_tag = next(iter(data.keys()))
+ root_data = data[root_tag]
+ if isinstance(root_data, dict):
+ root = _dict_to_xml(root_tag, root_data)
+ else:
+ root = ET.Element(root_tag)
+ root.text = str(root_data)
+ else:
+ root = _dict_to_xml(root_tag, data)
+
+ tree = ET.ElementTree(root)
+ ET.indent(tree, space=' ')
+ tree.write(str(target), encoding='utf-8', xml_declaration=True)
+
+
+class XmlFileHandler:
+ """XML file handler implementing FileHandler protocol."""
+
+ def __init__(self, root_tag: str = 'root') -> None:
+ """Initialize XML handler.
+
+ Args:
+ root_tag: Default root tag name for saving (default: 'root')
+
+ """
+ self.root_tag = root_tag
+
+ def load(self, path: str | Path) -> dict[str, Any]:
+ """Load XML data from file."""
+ return load_xml(path)
+
+ def save(
+ self,
+ data: dict[str, Any],
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+ ) -> None:
+ """Save data as XML to file."""
+ save_as_xml(data, path, root_tag=self.root_tag, parents=parents, exist_ok=exist_ok)
diff --git a/src/project/common/utils/file/yaml.py b/src/project/common/utils/file/yaml.py
index e2db90c..5832909 100644
--- a/src/project/common/utils/file/yaml.py
+++ b/src/project/common/utils/file/yaml.py
@@ -21,3 +21,22 @@ def save_as_indented_yaml(
target.parent.mkdir(parents=parents, exist_ok=exist_ok)
with target.open(mode='w', encoding='utf-8') as fout:
yaml.dump(data, fout, allow_unicode=True, indent=4, default_flow_style=False)
+
+
+class YamlFileHandler:
+ """YAML file handler implementing FileHandler protocol."""
+
+ def load(self, path: str | Path) -> YamlValue:
+ """Load YAML data from file."""
+ return load_yaml(path)
+
+ def save(
+ self,
+ data: YamlValue,
+ path: str | Path,
+ *,
+ parents: bool = True,
+ exist_ok: bool = True,
+ ) -> None:
+ """Save data as indented YAML to file."""
+ save_as_indented_yaml(data, path, parents=parents, exist_ok=exist_ok)
diff --git a/tests/project/common/utils/file/test_config.py b/tests/project/common/utils/file/test_config.py
new file mode 100644
index 0000000..cd81edd
--- /dev/null
+++ b/tests/project/common/utils/file/test_config.py
@@ -0,0 +1,67 @@
+from pathlib import Path
+
+import pytest
+
+from project.common.utils.file.config import load_config
+
+
+def test_load_config_json(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.json'
+ config_file.write_text('{"key": "value", "number": 42}')
+
+ result = load_config(config_file)
+ assert result == {'key': 'value', 'number': 42}
+
+
+def test_load_config_yaml(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.yaml'
+ config_file.write_text('key: value\nnumber: 42')
+
+ result = load_config(config_file)
+ assert result == {'key': 'value', 'number': 42}
+
+
+def test_load_config_toml(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.toml'
+ config_file.write_text('key = "value"\nnumber = 42')
+
+ result = load_config(config_file)
+ assert result == {'key': 'value', 'number': 42}
+
+
+def test_load_config_xml(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.xml'
+    config_file.write_text("""
+<config>
+    <key>value</key>
+    <number>42</number>
+</config>""")
+
+ result = load_config(config_file)
+ assert 'config' in result
+ assert result['config']['key'] == 'value'
+ assert result['config']['number'] == '42'
+
+
+def test_load_config_unsupported_format(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.txt'
+ config_file.write_text('some text content')
+
+ with pytest.raises(ValueError, match='Unsupported file extension'):
+ load_config(config_file)
+
+
+def test_load_config_not_dict(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.json'
+ config_file.write_text('["not", "a", "dict"]')
+
+ with pytest.raises(TypeError, match='did not return a dict'):
+ load_config(config_file)
+
+
+def test_load_config_accepts_string_path(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.json'
+ config_file.write_text('{"key": "value"}')
+
+ result = load_config(str(config_file))
+ assert result == {'key': 'value'}
diff --git a/tests/project/common/utils/file/test_factory.py b/tests/project/common/utils/file/test_factory.py
new file mode 100644
index 0000000..3b0a749
--- /dev/null
+++ b/tests/project/common/utils/file/test_factory.py
@@ -0,0 +1,72 @@
+import pytest
+
+from project.common.utils.file.factory import FileHandlerFactory, get_file_handler
+from project.common.utils.file.json import JsonFileHandler
+from project.common.utils.file.toml import TomlFileHandler
+from project.common.utils.file.xml import XmlFileHandler
+from project.common.utils.file.yaml import YamlFileHandler
+
+
+def test_create_json_handler() -> None:
+ handler = FileHandlerFactory.create('json')
+ assert isinstance(handler, JsonFileHandler)
+
+
+def test_create_yaml_handler() -> None:
+ handler = FileHandlerFactory.create('yaml')
+ assert isinstance(handler, YamlFileHandler)
+
+
+def test_create_toml_handler() -> None:
+ handler = FileHandlerFactory.create('toml')
+ assert isinstance(handler, TomlFileHandler)
+
+
+def test_create_xml_handler() -> None:
+ handler = FileHandlerFactory.create('xml')
+ assert isinstance(handler, XmlFileHandler)
+
+
+def test_create_unsupported_format() -> None:
+ with pytest.raises(ValueError, match='Unsupported file format'):
+ FileHandlerFactory.create('txt') # type: ignore[arg-type]
+
+
+def test_from_path_json() -> None:
+ handler = FileHandlerFactory.from_path('config.json')
+ assert isinstance(handler, JsonFileHandler)
+
+
+def test_from_path_yaml() -> None:
+ handler = FileHandlerFactory.from_path('config.yaml')
+ assert isinstance(handler, YamlFileHandler)
+
+
+def test_from_path_yml() -> None:
+ handler = FileHandlerFactory.from_path('config.yml')
+ assert isinstance(handler, YamlFileHandler)
+
+
+def test_from_path_toml() -> None:
+ handler = FileHandlerFactory.from_path('pyproject.toml')
+ assert isinstance(handler, TomlFileHandler)
+
+
+def test_from_path_xml() -> None:
+ handler = FileHandlerFactory.from_path('data.xml')
+ assert isinstance(handler, XmlFileHandler)
+
+
+def test_from_path_no_extension() -> None:
+ with pytest.raises(ValueError, match='no extension'):
+ FileHandlerFactory.from_path('config')
+
+
+def test_from_path_unsupported_extension() -> None:
+ with pytest.raises(ValueError, match='Unsupported file extension'):
+ FileHandlerFactory.from_path('data.txt')
+
+
+def test_get_file_handler() -> None:
+ handler = get_file_handler('settings.json')
+ assert isinstance(handler, JsonFileHandler)
diff --git a/tests/project/common/utils/file/test_io.py b/tests/project/common/utils/file/test_io.py
new file mode 100644
index 0000000..ba0221a
--- /dev/null
+++ b/tests/project/common/utils/file/test_io.py
@@ -0,0 +1,137 @@
+from pathlib import Path
+from typing import Any
+
+import pytest
+
+from project.common.utils.file.io import load_file, save_file
+
+
+@pytest.fixture
+def sample_data() -> dict[str, Any]:
+ return {'key': 'value', 'number': 42, 'list': [1, 2, 3]}
+
+
+def test_save_and_load_json(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.json'
+ save_file(sample_data, file_path)
+
+ assert file_path.exists()
+ loaded = load_file(file_path)
+ assert loaded == sample_data
+
+
+def test_save_and_load_yaml(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.yaml'
+ save_file(sample_data, file_path)
+
+ assert file_path.exists()
+ loaded = load_file(file_path)
+ assert loaded == sample_data
+
+
+def test_save_and_load_yml(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.yml'
+ save_file(sample_data, file_path)
+
+ assert file_path.exists()
+ loaded = load_file(file_path)
+ assert loaded == sample_data
+
+
+def test_save_and_load_toml(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.toml'
+ save_file(sample_data, file_path)
+
+ assert file_path.exists()
+ loaded = load_file(file_path)
+ assert loaded == sample_data
+
+
+def test_save_and_load_xml(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.xml'
+ save_file(sample_data, file_path)
+
+ assert file_path.exists()
+ loaded = load_file(file_path)
+ # XML preserves structure but converts values to strings
+ assert 'key' in loaded or 'root' in loaded
+
+
+def test_save_creates_parent_directories(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'nested' / 'dir' / 'test.json'
+ save_file(sample_data, file_path)
+
+ assert file_path.exists()
+ loaded = load_file(file_path)
+ assert loaded == sample_data
+
+
+def test_save_respects_parents_flag(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ """Test that parents=False prevents creation of nested directories."""
+ deeply_nested_dir = tmp_path / 'level1' / 'level2'
+ file_path = deeply_nested_dir / 'test.json'
+
+ # Multiple levels of directories don't exist, expect FileNotFoundError
+ with pytest.raises(FileNotFoundError):
+ save_file(sample_data, file_path, parents=False)
+
+ # With parents=True, nested directories are created
+ save_file(sample_data, file_path, parents=True)
+ assert file_path.exists()
+
+
+def test_load_unsupported_format(tmp_path: Path) -> None:
+ file_path = tmp_path / 'test.txt'
+ file_path.write_text('plain text content')
+
+ with pytest.raises(ValueError, match='Unsupported file extension'):
+ load_file(file_path)
+
+
+def test_save_unsupported_format(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.txt'
+
+ with pytest.raises(ValueError, match='Unsupported file extension'):
+ save_file(sample_data, file_path)
+
+
+def test_json_indentation(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.json'
+ save_file(sample_data, file_path)
+
+ content = file_path.read_text()
+ assert ' ' in content # 4-space indentation
+
+
+def test_yaml_formatting(tmp_path: Path, sample_data: dict[str, Any]) -> None:
+ file_path = tmp_path / 'test.yaml'
+ save_file(sample_data, file_path)
+
+ content = file_path.read_text()
+ assert 'key: value' in content
+
+
+def test_roundtrip_preserves_data_types(tmp_path: Path) -> None:
+ """Test that data types are preserved across save/load cycles."""
+ common_data = {
+ 'string': 'text',
+ 'int': 42,
+ 'float': 3.14,
+ 'bool': True,
+ 'list': [1, 2, 3],
+ 'nested': {'inner': 'value'},
+ }
+
+ # Test JSON and YAML with null values (TOML doesn't support null)
+ data_with_null = {**common_data, 'null': None}
+ for ext in ['json', 'yaml']:
+ file_path = tmp_path / f'test.{ext}'
+ save_file(data_with_null, file_path)
+ loaded = load_file(file_path)
+ assert loaded == data_with_null
+
+ # Test TOML without null values
+ toml_path = tmp_path / 'test.toml'
+ save_file(common_data, toml_path)
+ loaded = load_file(toml_path)
+ assert loaded == common_data
diff --git a/tests/project/common/utils/file/test_json.py b/tests/project/common/utils/file/test_json.py
index dff314f..39f8883 100644
--- a/tests/project/common/utils/file/test_json.py
+++ b/tests/project/common/utils/file/test_json.py
@@ -1,88 +1,37 @@
import json
from pathlib import Path
-from unittest.mock import mock_open, patch
import pytest
from project.common.utils.file.json import JsonValue, load_json, save_as_indented_json
-@pytest.mark.parametrize(
- ('input_data', 'expected_result'),
- [
- ('{"key": "value"}', {'key': 'value'}),
- ('{"nested": {"key": "value"}}', {'nested': {'key': 'value'}}),
- ('["item1", "item2"]', ['item1', 'item2']),
- ('{}', {}),
- ('[]', []),
- ],
-)
-def test_load_json(input_data: str, expected_result: JsonValue) -> None:
- """Test that load_json correctly loads and parses JSON data."""
- # Mock the open function to return our test data
- with patch('pathlib.Path.open', mock_open(read_data=input_data)):
- # Test with string path
- result_str = load_json('dummy/path.json')
- assert result_str == expected_result
+def test_load_json_from_file(tmp_path: Path) -> None:
+ payload = {'name': 'tester', 'values': [1, 2, 3]}
+ json_file = tmp_path / 'config.json'
+ json_file.write_text(json.dumps(payload), encoding='utf-8')
- # Test with Path object
- result_path = load_json(Path('dummy/path.json'))
- assert result_path == expected_result
+ assert load_json(str(json_file)) == payload
+ assert load_json(json_file) == payload
-@pytest.mark.parametrize(
- 'input_data_tuple',
- [
- ({'key': 'value'},),
- ({'nested': {'key': 'value'}},),
- (['item1', 'item2'],),
- ({},),
- ([],),
- ],
-)
-def test_save_as_indented_json(input_data_tuple: tuple[JsonValue, ...]) -> None:
- """Test that save_as_indented_json correctly writes JSON data to a file."""
- input_data = input_data_tuple[0]
- mock_file = mock_open()
+def test_save_and_load_json(tmp_path: Path) -> None:
+ data: JsonValue = {'flag': True, 'count': 5}
+ json_file = tmp_path / 'nested' / 'config.json'
- # Create a patch for both the open function and mkdir
- with (
- patch('pathlib.Path.open', mock_file),
- patch('pathlib.Path.mkdir') as mock_mkdir,
- ):
- # Test with string path
- save_as_indented_json(input_data, 'dummy/path.json')
+ save_as_indented_json(data, json_file)
+ assert json_file.exists()
- # Verify mkdir was called with the expected parameters
- mock_mkdir.assert_called_once_with(parents=True, exist_ok=True)
+ loaded = load_json(json_file)
+ assert loaded == data
- # Verify that the file was opened in write mode
- mock_file.assert_called_once_with(mode='w', encoding='utf-8')
+ content = json_file.read_text(encoding='utf-8')
+ assert content.startswith('{\n')
- # Get the handle to the mock file
- handle = mock_file()
- # Verify that json.dump was called with the correct parameters
- written_data = ''.join(call.args[0] for call in handle.write.call_args_list)
- assert json.loads(written_data) == input_data
+def test_save_json_without_parents_raises(tmp_path: Path) -> None:
+ data: JsonValue = {'value': 'test'}
+ json_file = tmp_path / 'level1' / 'level2' / 'config.json'
-
-def test_save_as_indented_json_path_object() -> None:
- """Test save_as_indented_json with a Path object."""
- mock_file = mock_open()
- test_data = {'key': 'value'}
-
- with (
- patch('pathlib.Path.open', mock_file),
- patch('pathlib.Path.mkdir') as mock_mkdir,
- ):
- # Test with Path object
- save_as_indented_json(test_data, Path('dummy/path.json'))
-
- # Verify mkdir and open were called
- mock_mkdir.assert_called_once()
- mock_file.assert_called_once()
-
- # Verify content was written
- handle = mock_file()
- assert handle.write.called
+ with pytest.raises(FileNotFoundError):
+ save_as_indented_json(data, json_file, parents=False)
diff --git a/tests/project/common/utils/file/test_toml.py b/tests/project/common/utils/file/test_toml.py
index b0b25fc..dda42b9 100644
--- a/tests/project/common/utils/file/test_toml.py
+++ b/tests/project/common/utils/file/test_toml.py
@@ -1,88 +1,42 @@
from pathlib import Path
-from unittest.mock import mock_open, patch
import pytest
+import toml
from project.common.utils.file.toml import load_toml, save_as_toml
-@pytest.mark.parametrize(
- ('input_data', 'expected_result'),
- [
- ('key = "value"', {'key': 'value'}),
- ('[nested]\nkey = "value"', {'nested': {'key': 'value'}}),
- (
- '[array]\nvalues = ["item1", "item2"]',
- {'array': {'values': ['item1', 'item2']}},
- ),
- ('', {}),
- ],
-)
-def test_load_toml(input_data: str, expected_result: object) -> None:
- """Test that load_toml correctly loads and parses TOML data."""
- # Mock the open function to return our test data
- with patch('pathlib.Path.open', mock_open(read_data=input_data)):
- # Test with string path
- result_str = load_toml('dummy/path.toml')
- assert result_str == expected_result
+def test_load_toml_from_file(tmp_path: Path) -> None:
+ content = """\
+title = "Example"
+[nested]
+value = 1
+"""
+ toml_file = tmp_path / 'config.toml'
+ toml_file.write_text(content, encoding='utf-8')
- # Test with Path object
- result_path = load_toml(Path('dummy/path.toml'))
- assert result_path == expected_result
+ expected = {'title': 'Example', 'nested': {'value': 1}}
+ assert load_toml(str(toml_file)) == expected
+ assert load_toml(toml_file) == expected
-@pytest.mark.parametrize(
- 'input_data_tuple',
- [
- ({'key': 'value'},),
- ({'nested': {'key': 'value'}},),
- ({'array': {'values': ['item1', 'item2']}},),
- ({},),
- ],
-)
-def test_save_as_toml(input_data_tuple: tuple[dict[str, object], ...]) -> None:
- """Test that save_as_toml correctly writes TOML data to a file."""
- input_data = input_data_tuple[0]
- mock_file = mock_open()
+def test_save_and_load_toml(tmp_path: Path) -> None:
+ data = {'service': {'host': 'localhost', 'port': 8080}, 'flag': True}
+ toml_file = tmp_path / 'nested' / 'config.toml'
- # Create a patch for both the open function, mkdir, and toml.dump
- with (
- patch('pathlib.Path.open', mock_file),
- patch('pathlib.Path.mkdir') as mock_mkdir,
- patch('toml.dump') as mock_dump,
- ):
- # Test with string path
- save_as_toml(input_data, 'dummy/path.toml')
+ save_as_toml(data, toml_file)
+ assert toml_file.exists()
- # Verify mkdir was called with the expected parameters
- mock_mkdir.assert_called_once_with(parents=True, exist_ok=True)
+ loaded = load_toml(toml_file)
+ assert loaded == data
- # Verify that the file was opened in write mode
- mock_file.assert_called_once_with(mode='w', encoding='utf-8')
+ parsed = toml.load(toml_file)
+ assert parsed == data
- # Get the handle to the mock file
- handle = mock_file()
- # Verify toml.dump was called with the correct arguments
- mock_dump.assert_called_once_with(input_data, handle)
+def test_save_toml_without_parents_raises(tmp_path: Path) -> None:
+ data = {'key': 'value'}
+ toml_file = tmp_path / 'level1' / 'level2' / 'config.toml'
-
-def test_save_as_toml_path_object() -> None:
- """Test save_as_toml with a Path object."""
- mock_file = mock_open()
- test_data = {'key': 'value'}
-
- with (
- patch('pathlib.Path.open', mock_file),
- patch('pathlib.Path.mkdir') as mock_mkdir,
- ):
- # Test with Path object
- save_as_toml(test_data, Path('dummy/path.toml'))
-
- # Verify mkdir and open were called
- mock_mkdir.assert_called_once()
- mock_file.assert_called_once()
-
- # Verify content was written
- handle = mock_file()
- assert handle.write.called
+ with pytest.raises(FileNotFoundError):
+ save_as_toml(data, toml_file, parents=False)
diff --git a/tests/project/common/utils/file/test_xml.py b/tests/project/common/utils/file/test_xml.py
new file mode 100644
index 0000000..a63588a
--- /dev/null
+++ b/tests/project/common/utils/file/test_xml.py
@@ -0,0 +1,132 @@
+from pathlib import Path
+from typing import Any
+
+import pytest
+
+from project.common.utils.file.xml import XmlFileHandler, load_xml, save_as_xml
+
+
+@pytest.fixture
+def sample_xml_data() -> dict[str, Any]:
+ return {
+ 'root': {
+ 'name': 'John Doe',
+ 'age': '30',
+ 'city': 'Tokyo',
+ }
+ }
+
+
+@pytest.fixture
+def nested_xml_data() -> dict[str, Any]:
+ return {
+ 'config': {
+ 'database': {
+ 'host': 'localhost',
+ 'port': '5432',
+ },
+ 'cache': {
+ 'enabled': 'true',
+ 'ttl': '3600',
+ },
+ }
+ }
+
+
+def test_load_xml_simple(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ xml_file = tmp_path / 'data.xml'
+    xml_content = """
+<root>
+    <name>John Doe</name>
+    <age>30</age>
+    <city>Tokyo</city>
+</root>"""
+ xml_file.write_text(xml_content)
+
+ result = load_xml(xml_file)
+ assert result == sample_xml_data
+
+
+def test_save_as_xml_simple(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ xml_file = tmp_path / 'output.xml'
+ save_as_xml(sample_xml_data, xml_file)
+
+ assert xml_file.exists()
+ content = xml_file.read_text()
+    assert '<root>' in content
+    assert '<name>John Doe</name>' in content
+    assert '<age>30</age>' in content
+    assert '<city>Tokyo</city>' in content
+
+
+def test_save_and_load_xml_roundtrip(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ xml_file = tmp_path / 'roundtrip.xml'
+ save_as_xml(sample_xml_data, xml_file)
+ loaded = load_xml(xml_file)
+
+ assert loaded == sample_xml_data
+
+
+def test_save_and_load_nested_xml(tmp_path: Path, nested_xml_data: dict[str, Any]) -> None:
+ xml_file = tmp_path / 'nested.xml'
+ save_as_xml(nested_xml_data, xml_file)
+ loaded = load_xml(xml_file)
+
+ assert loaded == nested_xml_data
+
+
+def test_xml_file_handler(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ handler = XmlFileHandler()
+ xml_file = tmp_path / 'handler_test.xml'
+
+ handler.save(sample_xml_data, xml_file)
+ assert xml_file.exists()
+
+ loaded = handler.load(xml_file)
+ assert loaded == sample_xml_data
+
+
+def test_xml_file_handler_custom_root_tag(tmp_path: Path) -> None:
+ handler = XmlFileHandler(root_tag='config')
+ data = {'setting1': 'value1', 'setting2': 'value2'}
+ xml_file = tmp_path / 'custom_root.xml'
+
+ handler.save(data, xml_file)
+ content = xml_file.read_text()
+    assert '<config>' in content
+
+
+def test_save_xml_creates_parent_directories(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ xml_file = tmp_path / 'nested' / 'dir' / 'data.xml'
+ save_as_xml(sample_xml_data, xml_file)
+
+ assert xml_file.exists()
+
+
+def test_save_xml_respects_parents_flag(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ deeply_nested_dir = tmp_path / 'level1' / 'level2'
+ xml_file = deeply_nested_dir / 'data.xml'
+
+ with pytest.raises(FileNotFoundError):
+ save_as_xml(sample_xml_data, xml_file, parents=False)
+
+
+def test_xml_with_list_data(tmp_path: Path) -> None:
+ data = {'items': {'item': ['apple', 'banana', 'cherry']}}
+ xml_file = tmp_path / 'list_data.xml'
+
+ save_as_xml(data, xml_file)
+ assert xml_file.exists()
+
+ loaded = load_xml(xml_file)
+ # XML conversion may change structure slightly for lists
+ assert 'items' in loaded
+
+
+def test_load_xml_accepts_string_path(tmp_path: Path, sample_xml_data: dict[str, Any]) -> None:
+ xml_file = tmp_path / 'string_path.xml'
+ save_as_xml(sample_xml_data, xml_file)
+
+ result = load_xml(str(xml_file))
+ assert result == sample_xml_data
diff --git a/tests/project/common/utils/file/test_yaml.py b/tests/project/common/utils/file/test_yaml.py
index 742ddef..9dcc118 100644
--- a/tests/project/common/utils/file/test_yaml.py
+++ b/tests/project/common/utils/file/test_yaml.py
@@ -1,91 +1,42 @@
from pathlib import Path
-from unittest.mock import mock_open, patch
import pytest
from project.common.utils.file.yaml import YamlValue, load_yaml, save_as_indented_yaml
-@pytest.mark.parametrize(
- ('input_data', 'expected_result'),
- [
- ('key: value', {'key': 'value'}),
- ('nested:\n key: value', {'nested': {'key': 'value'}}),
- ('- item1\n- item2', ['item1', 'item2']),
- ('{}', {}),
- ('[]', []),
- ],
-)
-def test_load_yaml(input_data: str, expected_result: YamlValue) -> None:
- """Test that load_yaml correctly loads and parses YAML data."""
- # Mock the open function to return our test data
- with patch('pathlib.Path.open', mock_open(read_data=input_data)):
- # Test with string path
- result_str = load_yaml('dummy/path.yaml')
- assert result_str == expected_result
+def test_load_yaml_from_file(tmp_path: Path) -> None:
+ yaml_content = """\
+key: value
+nested:
+ number: 42
+"""
+ yaml_file = tmp_path / 'config.yaml'
+ yaml_file.write_text(yaml_content, encoding='utf-8')
- # Test with Path object
- result_path = load_yaml(Path('dummy/path.yaml'))
- assert result_path == expected_result
+ expected: YamlValue = {'key': 'value', 'nested': {'number': 42}}
+ assert load_yaml(str(yaml_file)) == expected
+ assert load_yaml(yaml_file) == expected
-@pytest.mark.parametrize(
- 'input_data_tuple',
- [
- ({'key': 'value'},),
- ({'nested': {'key': 'value'}},),
- (['item1', 'item2'],),
- ({},),
- ([],),
- ],
-)
-def test_save_as_indented_yaml(input_data_tuple: tuple[YamlValue, ...]) -> None:
- """Test that save_as_indented_yaml correctly writes YAML data to a file."""
- input_data = input_data_tuple[0]
- mock_file = mock_open()
+def test_save_and_load_yaml(tmp_path: Path) -> None:
+ data: YamlValue = {'message': 'hello', 'count': 3}
+ yaml_file = tmp_path / 'nested' / 'config.yaml'
- # Create a patch for both the open function and mkdir
- with (
- patch('pathlib.Path.open', mock_file),
- patch('pathlib.Path.mkdir') as mock_mkdir,
- ):
- # Test with string path
- save_as_indented_yaml(input_data, 'dummy/path.yaml')
+ save_as_indented_yaml(data, yaml_file)
+ assert yaml_file.exists()
- # Verify mkdir was called with the expected parameters
- mock_mkdir.assert_called_once_with(parents=True, exist_ok=True)
+ loaded = load_yaml(yaml_file)
+ assert loaded == data
- # Verify that the file was opened in write mode
- mock_file.assert_called_once_with(mode='w', encoding='utf-8')
+ content = yaml_file.read_text(encoding='utf-8')
+ assert 'message:' in content
+ assert 'count:' in content
- # Get the handle to the mock file
- handle = mock_file()
- # Verify content was written
- assert handle.write.called
+def test_save_yaml_without_parents_raises(tmp_path: Path) -> None:
+ data: YamlValue = {'flag': True}
+ yaml_file = tmp_path / 'level1' / 'level2' / 'config.yaml'
- # We can't easily verify the exact YAML output due to formatting differences,
- # but we can check that it was called with something
- written_data = ''.join(call.args[0] for call in handle.write.call_args_list)
- assert written_data # Assert that something was written
-
-
-def test_save_as_indented_yaml_path_object() -> None:
- """Test save_as_indented_yaml with a Path object."""
- mock_file = mock_open()
- test_data = {'key': 'value'}
-
- with (
- patch('pathlib.Path.open', mock_file),
- patch('pathlib.Path.mkdir') as mock_mkdir,
- ):
- # Test with Path object
- save_as_indented_yaml(test_data, Path('dummy/path.yaml'))
-
- # Verify mkdir and open were called
- mock_mkdir.assert_called_once()
- mock_file.assert_called_once()
-
- # Verify content was written
- handle = mock_file()
- assert handle.write.called
+ with pytest.raises(FileNotFoundError):
+ save_as_indented_yaml(data, yaml_file, parents=False)
diff --git a/tests/project/common/utils/test_cli_utils.py b/tests/project/common/utils/test_cli_utils.py
index d7506cf..02cefe0 100644
--- a/tests/project/common/utils/test_cli_utils.py
+++ b/tests/project/common/utils/test_cli_utils.py
@@ -1,76 +1,32 @@
+import json
from pathlib import Path
-from unittest.mock import patch
-
-import pytest
from project.common.utils.cli_utils import load_cli_config
-@pytest.mark.parametrize(
- ('config_file_exists', 'config_content', 'kwargs', 'expected'),
- [
- # Case 1: Config file exists with content, no kwargs
- (True, {'file_key': 'file_value'}, {}, {'file_key': 'file_value'}),
- # Case 2: Config file exists with content, with kwargs
- (
- True,
- {'file_key': 'file_value', 'override_key': 'file_value'},
- {'override_key': 'cli_value', 'new_key': 'cli_value'},
- {
- 'file_key': 'file_value',
- 'override_key': 'cli_value',
- 'new_key': 'cli_value',
- },
- ),
- # Case 3: No config file, with kwargs
- (False, None, {'cli_key': 'cli_value'}, {'cli_key': 'cli_value'}),
- # Case 4: No config file, no kwargs
- (False, None, {}, {}),
- ],
-)
-def test_load_cli_config(
- config_file_exists: bool,
- config_content: dict[str, object] | None,
- kwargs: dict[str, object],
- expected: dict[str, object],
-) -> None:
- mock_path = 'path/to/config.json' if config_file_exists else None
-
- # Mock the load_config function directly within the cli_utils module
- with patch('project.common.utils.cli_utils.load_config') as mock_load_config:
- mock_load_config.return_value = config_content
+def test_load_cli_config_merges_file_and_kwargs(tmp_path: Path) -> None:
+ config_file = tmp_path / 'config.json'
+ config_payload = {'file_key': 'file_value', 'override_key': 'file_value'}
+ config_file.write_text(json.dumps(config_payload), encoding='utf-8')
- # Call the function under test
- result = load_cli_config(mock_path, **kwargs)
+ result = load_cli_config(config_file, override_key='cli_value', new_key='cli_value')
- # Verify the results
- assert result == expected
+ assert result == {
+ 'file_key': 'file_value',
+ 'override_key': 'cli_value',
+ 'new_key': 'cli_value',
+ }
- # Verify that load_config was called appropriately
- if config_file_exists:
- mock_load_config.assert_called_once_with(mock_path)
- else:
- mock_load_config.assert_not_called()
+def test_load_cli_config_without_file_returns_kwargs() -> None:
+ result = load_cli_config(None, cli_key='cli_value')
+ assert result == {'cli_key': 'cli_value'}
-@pytest.mark.parametrize(
- ('config_file_path', 'expected_path_type'),
- [
- # Test with string path
- ('path/to/config.json', str),
- # Test with Path object
- (Path('path/to/config.json'), Path),
- ],
-)
-def test_load_cli_config_path_types(config_file_path: str | Path, expected_path_type: type[object]) -> None:
- # Create a comprehensive mock to prevent file system access by mocking where the function is used
- with patch('project.common.utils.cli_utils.load_config') as mock_load_config:
- mock_load_config.return_value = {}
- # Ensure we don't actually try to open a file
- load_cli_config(config_file_path)
+def test_load_cli_config_accepts_path_types(tmp_path: Path) -> None:
+ payload = {'setting': 'value'}
+ config_file = tmp_path / 'config.json'
+ config_file.write_text(json.dumps(payload), encoding='utf-8')
- # Verify that load_config was called with the correct path type
- mock_load_config.assert_called_once()
- args, _ = mock_load_config.call_args
- assert isinstance(args[0], expected_path_type)
+ assert load_cli_config(str(config_file)) == payload
+ assert load_cli_config(config_file) == payload
diff --git a/tests/project/common/utils/test_import_utils.py b/tests/project/common/utils/test_import_utils.py
index c12ac0f..497ad1e 100644
--- a/tests/project/common/utils/test_import_utils.py
+++ b/tests/project/common/utils/test_import_utils.py
@@ -1,80 +1,37 @@
-import os
-import sys
-from collections.abc import Generator
+import importlib
from pathlib import Path
-from unittest.mock import patch
import pytest
from project.common.utils.import_utils import get_imported_function_path, import_function
-@pytest.fixture
-def test_module_file(tmp_path: Path) -> Generator[str]:
- """Create a temporary Python module for testing import functions."""
- module_dir = tmp_path / 'test_module'
- module_dir.mkdir()
- (module_dir / '__init__.py').write_text('')
-
- module_file = module_dir / 'test_func.py'
- module_file.write_text(
- 'def test_function():\n'
- " return 'Hello from test_function'\n"
- '\n'
- 'def another_function():\n'
- " return 'Hello from another_function'\n"
- )
-
- # Add the tmp_path to sys.path temporarily
- sys.path.insert(0, str(tmp_path.parent))
- yield str(module_file)
-
- # Clean up
- sys.path.remove(str(tmp_path.parent))
+def test_import_function_returns_target_loader() -> None:
+ target_path = Path('src/project/common/utils/file/json.py')
+ function = import_function(str(target_path), 'load_json')
+ module = importlib.import_module('src.project.common.utils.file.json')
+ assert function is module.load_json
-@pytest.mark.parametrize(
- ('function_name', 'expected_result'),
- [
- ('test_function', 'Hello from test_function'),
- ('another_function', 'Hello from another_function'),
- ],
-)
-def test_import_function(test_module_file: str, function_name: str, expected_result: str) -> None:
- # Handle the default case where function_name is None
- if function_name is None:
- # Mock the stem attribute to return "test_func"
- with patch('pathlib.Path') as mock_path:
- mock_path_instance = mock_path.return_value
- mock_path_instance.resolve.return_value = Path(test_module_file)
- mock_path_instance.stem = 'test_function'
- # Mock the current working directory
- with patch('pathlib.Path.cwd') as mock_cwd:
- mock_cwd.return_value = Path(test_module_file).parent.parent
-
- # Add mock for relative_to
- mock_path_instance.relative_to.return_value = Path('test_module/test_func.py')
- mock_path_instance.with_suffix.return_value = Path('test_module/test_func')
+def test_import_function_default_name(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+ project_dir = tmp_path / 'project'
+ project_dir.mkdir()
+ module_file = project_dir / 'dynamic_module.py'
+ module_file.write_text(
+ "def dynamic_module():\n return 'dynamic result'\n",
+ encoding='utf-8',
+ )
- function = import_function(test_module_file, function_name)
- else:
- # For normal cases where function_name is provided
- with patch('pathlib.Path.cwd') as mock_cwd:
- mock_cwd.return_value = Path(test_module_file).parent.parent
- function = import_function(test_module_file, function_name)
+ monkeypatch.chdir(project_dir)
- # Assert that the imported function returns the expected result
- assert function() == expected_result
+ function = import_function(str(module_file))
+ assert function() == 'dynamic result'
def test_get_imported_function_path() -> None:
- # Create a dummy function for testing
def dummy_function() -> None:
pass
- # Get the file path of the dummy function
file_path = get_imported_function_path(dummy_function)
-
- # Assert that the returned path is the path of this test file
- assert os.path.realpath(file_path) == os.path.realpath(__file__)
+ assert Path(file_path).resolve() == Path(__file__).resolve()