Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion sdk/ai/azure-ai-projects/cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,8 @@
"Ministral",
"cogsvc",
"evals",
"FineTuning"
"FineTuning",
"closefd"
],
"ignorePaths": [
"*.csv",
Expand Down
13 changes: 0 additions & 13 deletions sdk/ai/azure-ai-projects/samples/agents/assets/.gitattributes

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def test_agent_file_search(self, **kwargs):
assert vector_store.id

# Upload file to vector store
with open(asset_file_path, "rb") as f:
with self.open_with_lf(asset_file_path, "rb") as f:
file = openai_client.vector_stores.files.upload_and_poll(
vector_store_id=vector_store.id,
file=f,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ async def test_agent_file_search_async(self, **kwargs):
assert vector_store.id

# Upload file to vector store
with open(asset_file_path, "rb") as f:
with self.open_with_lf(asset_file_path, "rb") as f:
file = await openai_client.vector_stores.files.upload_and_poll(
vector_store_id=vector_store.id,
file=f,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def test_agent_file_search_stream(self, **kwargs):
assert vector_store.id

# Upload file to vector store
with open(asset_file_path, "rb") as f:
with self.open_with_lf(asset_file_path, "rb") as f:
file = openai_client.vector_stores.files.upload_and_poll(
vector_store_id=vector_store.id,
file=f,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ async def test_agent_file_search_stream_async(self, **kwargs):
assert vector_store.id

# Upload file to vector store
with open(asset_file_path, "rb") as f:
with self.open_with_lf(asset_file_path, "rb") as f:
file = await openai_client.vector_stores.files.upload_and_poll(
vector_store_id=vector_store.id,
file=f,
Expand Down
10 changes: 4 additions & 6 deletions sdk/ai/azure-ai-projects/tests/samples/test_samples.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from azure.core.exceptions import HttpResponseError
from devtools_testutils.aio import recorded_by_proxy_async
from devtools_testutils import AzureRecordedTestCase, recorded_by_proxy, RecordedTransport
from test_base import servicePreparer
from test_base import servicePreparer, patched_open_crlf_to_lf
from pytest import MonkeyPatch
from azure.ai.projects import AIProjectClient

Expand Down Expand Up @@ -54,9 +54,11 @@ def _capture_print(self, *args, **kwargs):

def execute(self):
"""Execute a synchronous sample with proper mocking and environment setup."""

with (
MonkeyPatch.context() as mp,
mock.patch("builtins.print", side_effect=self._capture_print),
mock.patch("builtins.open", side_effect=patched_open_crlf_to_lf),
mock.patch("azure.identity.DefaultAzureCredential") as mock_credential,
):
for var_name, var_value in self.env_vars.items():
Expand All @@ -77,6 +79,7 @@ async def execute_async(self):
with (
MonkeyPatch.context() as mp,
mock.patch("builtins.print", side_effect=self._capture_print),
mock.patch("builtins.open", side_effect=patched_open_crlf_to_lf),
mock.patch("azure.identity.aio.DefaultAzureCredential") as mock_credential,
):
for var_name, var_value in self.env_vars.items():
Expand Down Expand Up @@ -168,9 +171,6 @@ def _get_tools_sample_paths():
"sample_agent_memory_search.py",
"sample_agent_openapi_with_project_connection.py",
"sample_agent_to_agent.py",
"sample_agent_code_interpreter.py",
"sample_agent_file_search.py",
"sample_agent_file_search_in_stream.py",
]
samples = []

Expand All @@ -193,8 +193,6 @@ def _get_tools_sample_paths_async():
tools_samples_to_skip = [
"sample_agent_mcp_with_project_connection_async.py",
"sample_agent_memory_search_async.py",
"sample_agent_code_interpreter_async.py",
"sample_agent_file_search_in_stream_async.py",
]
samples = []

Expand Down
129 changes: 128 additions & 1 deletion sdk/ai/azure-ai-projects/tests/test_base.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
import re
import functools
import json
from typing import Optional, Any, Dict, Final
import os
import tempfile
from typing import Optional, Any, Dict, Final, IO, Union, overload, Literal, TextIO, BinaryIO
from azure.ai.projects.models import (
Connection,
ConnectionType,
Expand All @@ -33,6 +35,9 @@
from azure.ai.projects import AIProjectClient as AIProjectClient
from azure.ai.projects.aio import AIProjectClient as AsyncAIProjectClient

# Store reference to built-in open before any mocking occurs
_BUILTIN_OPEN = open


# Load secrets from environment variables
servicePreparer = functools.partial(
Expand Down Expand Up @@ -63,6 +68,70 @@
DEVELOPER_TIER_TRAINING_TYPE: Final[str] = "developerTier"


def patched_open_crlf_to_lf(*args, **kwargs):
    """
    Patched open function that converts CRLF to LF for text files.

    This function should be used with mock.patch("builtins.open", side_effect=patched_open_crlf_to_lf)
    to ensure consistent line endings in test files during recording and playback.

    Note: CRLF to LF conversion is only performed when opening text-like files (.txt, .json, .jsonl, .csv,
    .md, .yaml, .yml, .xml) in binary read mode ("rb"). The path may be a str or an os.PathLike
    (e.g. pathlib.Path). For all other modes, path types (bytes paths, integer file descriptors),
    or file extensions, the call is forwarded to the real built-in open function as is.
    """
    # io.open is documented as an alias of the built-in open(), so calling it here
    # keeps working while mock.patch("builtins.open", ...) is active and does not
    # depend on a module-level capture happening before the mock is installed.
    import io

    real_open = io.open

    # Extract file path - first positional arg or 'file' keyword arg.
    if args:
        file_path = args[0]
    elif "file" in kwargs:
        file_path = kwargs["file"]
    else:
        # No file argument at all; let the real open raise the usual TypeError.
        return real_open(*args, **kwargs)

    # Extract mode - second positional arg or 'mode' keyword arg (default "r").
    mode = str(args[1]) if len(args) > 1 else str(kwargs.get("mode", "r"))

    # Normalize str / os.PathLike paths for the extension check. Bytes paths and
    # integer file descriptors are passed through unmodified (path_str = None).
    if isinstance(file_path, str):
        path_str = file_path
    elif isinstance(file_path, os.PathLike):
        path_str = os.fspath(file_path)
        if not isinstance(path_str, str):  # os.fspath may also yield bytes
            path_str = None
    else:
        path_str = None

    # Only binary read mode on text-like files gets line-ending conversion.
    if path_str and "r" in mode and "b" in mode:
        text_extensions = {".txt", ".json", ".jsonl", ".csv", ".md", ".yaml", ".yml", ".xml"}
        ext = os.path.splitext(path_str)[1].lower()
        if ext in text_extensions:
            # Read the original file with the real (unmocked) open.
            with real_open(path_str, "rb") as f:
                content = f.read()

            # Convert CRLF to LF
            converted_content = content.replace(b"\r\n", b"\n")

            # Only create a temp copy when a conversion actually happened.
            if converted_content != content:
                # Keep the original filename inside a fresh temp dir so any code
                # that inspects the file name still sees the expected value.
                # NOTE(review): the temp dir is deliberately not removed here —
                # the returned handle must stay readable; acceptable in test runs.
                temp_dir = tempfile.mkdtemp()
                original_filename = os.path.basename(path_str)
                temp_path = os.path.join(temp_dir, original_filename)

                # Write the converted content to the temp file
                print(f"Converting CRLF to LF for {file_path} and saving to {temp_path}")
                with real_open(temp_path, "wb") as temp_file:
                    temp_file.write(converted_content)

                # Re-issue the open against the converted temp copy, preserving
                # however the caller passed the remaining arguments.
                if args:
                    # File path was passed as positional arg
                    return real_open(temp_path, *args[1:], **kwargs)
                # File path was passed as keyword arg
                return real_open(**dict(kwargs, file=temp_path))

    return real_open(*args, **kwargs)


class TestBase(AzureRecordedTestCase):

test_redteams_params = {
Expand Down Expand Up @@ -149,6 +218,64 @@ class TestBase(AzureRecordedTestCase):
r"^InstrumentationKey=[0-9a-fA-F-]{36};IngestionEndpoint=https://.+.applicationinsights.azure.com/;LiveEndpoint=https://.+.monitor.azure.com/;ApplicationId=[0-9a-fA-F-]{36}$"
)

# Typed overloads for open_with_lf(), mirroring typeshed's stubs for the
# built-in open(): a text mode returns TextIO, a binary mode returns BinaryIO,
# and any other mode string falls back to IO[Any].

@overload
def open_with_lf(
    self,
    file: Union[str, bytes, os.PathLike, int],
    # Text modes (no "b" suffix) produce a text stream.
    mode: Literal["r", "w", "a", "x", "r+", "w+", "a+", "x+"] = "r",
    buffering: int = -1,
    encoding: Optional[str] = None,
    errors: Optional[str] = None,
    newline: Optional[str] = None,
    closefd: bool = True,
    opener: Optional[Any] = None,
) -> TextIO: ...

@overload
def open_with_lf(
    self,
    file: Union[str, bytes, os.PathLike, int],
    # Binary modes ("b" suffix) produce a byte stream.
    mode: Literal["rb", "wb", "ab", "xb", "r+b", "w+b", "a+b", "x+b"],
    buffering: int = -1,
    encoding: Optional[str] = None,
    errors: Optional[str] = None,
    newline: Optional[str] = None,
    closefd: bool = True,
    opener: Optional[Any] = None,
) -> BinaryIO: ...

@overload
def open_with_lf(
    self,
    file: Union[str, bytes, os.PathLike, int],
    # Catch-all for mode strings not matched by the literal overloads above.
    mode: str,
    buffering: int = -1,
    encoding: Optional[str] = None,
    errors: Optional[str] = None,
    newline: Optional[str] = None,
    closefd: bool = True,
    opener: Optional[Any] = None,
) -> IO[Any]: ...

def open_with_lf(
    self,
    file: Union[str, bytes, os.PathLike, int],
    mode: str = "r",
    buffering: int = -1,
    encoding: Optional[str] = None,
    errors: Optional[str] = None,
    newline: Optional[str] = None,
    closefd: bool = True,
    opener: Optional[Any] = None,
) -> IO[Any]:
    """
    Drop-in replacement for the built-in open() that converts CRLF to LF
    when reading text-like files, for consistent behavior during test
    recording and playback.

    Every argument mirrors the built-in open() signature and is forwarded
    unchanged to patched_open_crlf_to_lf, which decides whether any
    line-ending conversion applies.
    """
    return patched_open_crlf_to_lf(
        file=file,
        mode=mode,
        buffering=buffering,
        encoding=encoding,
        errors=errors,
        newline=newline,
        closefd=closefd,
        opener=opener,
    )

# helper function: create projects client using environment variables
def create_client(self, *, operation_group: Optional[str] = None, **kwargs) -> AIProjectClient:
# fetch environment variables
Expand Down