diff --git a/tests/api_resources/audio/test_speech.py b/tests/api_resources/audio/test_speech.py index a689c0d220..b1c7f79b1e 100644 --- a/tests/api_resources/audio/test_speech.py +++ b/tests/api_resources/audio/test_speech.py @@ -12,18 +12,14 @@ import openai._legacy_response as _legacy_response from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI # pyright: reportDeprecated=false base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestSpeech: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize @pytest.mark.respx(base_url=base_url) @@ -86,15 +82,13 @@ def test_streaming_response_create(self, client: OpenAI, respx_mock: MockRouter) class TestAsyncSpeech: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize @pytest.mark.respx(base_url=base_url) - async def test_method_create(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def test_method_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - speech = await client.audio.speech.create( + speech = await async_client.audio.speech.create( input="string", model="string", voice="alloy", @@ -104,9 +98,9 @@ async def test_method_create(self, client: AsyncOpenAI, respx_mock: MockRouter) @parametrize @pytest.mark.respx(base_url=base_url) - async def test_method_create_with_all_params(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - speech = await client.audio.speech.create( + speech = await async_client.audio.speech.create( input="string", model="string", voice="alloy", @@ -118,10 +112,10 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI, respx_mo @parametrize @pytest.mark.respx(base_url=base_url) - async def test_raw_response_create(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def test_raw_response_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = await client.audio.speech.with_raw_response.create( + response = await async_client.audio.speech.with_raw_response.create( input="string", model="string", voice="alloy", @@ -134,9 +128,9 @@ async def test_raw_response_create(self, client: AsyncOpenAI, respx_mock: MockRo @parametrize @pytest.mark.respx(base_url=base_url) - async def test_streaming_response_create(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def 
test_streaming_response_create(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.post("/audio/speech").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - async with client.audio.speech.with_streaming_response.create( + async with async_client.audio.speech.with_streaming_response.create( input="string", model="string", voice="alloy", diff --git a/tests/api_resources/audio/test_transcriptions.py b/tests/api_resources/audio/test_transcriptions.py index 992adbabd9..d957871abc 100644 --- a/tests/api_resources/audio/test_transcriptions.py +++ b/tests/api_resources/audio/test_transcriptions.py @@ -9,17 +9,13 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.types.audio import Transcription base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestTranscriptions: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -69,21 +65,19 @@ def test_streaming_response_create(self, client: OpenAI) -> None: class TestAsyncTranscriptions: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - transcription = await client.audio.transcriptions.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + transcription = await async_client.audio.transcriptions.create( file=b"raw file contents", model="whisper-1", ) assert_matches_type(Transcription, transcription, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - transcription = await client.audio.transcriptions.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + transcription = await async_client.audio.transcriptions.create( file=b"raw file contents", model="whisper-1", language="string", @@ -94,8 +88,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Transcription, transcription, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.audio.transcriptions.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.audio.transcriptions.with_raw_response.create( file=b"raw file contents", model="whisper-1", ) @@ -106,8 +100,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(Transcription, transcription, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with 
client.audio.transcriptions.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.audio.transcriptions.with_streaming_response.create( file=b"raw file contents", model="whisper-1", ) as response: diff --git a/tests/api_resources/audio/test_translations.py b/tests/api_resources/audio/test_translations.py index 913c443a79..72960c3249 100644 --- a/tests/api_resources/audio/test_translations.py +++ b/tests/api_resources/audio/test_translations.py @@ -9,17 +9,13 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.types.audio import Translation base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestTranslations: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -68,21 +64,19 @@ def test_streaming_response_create(self, client: OpenAI) -> None: class TestAsyncTranslations: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - translation = await client.audio.translations.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + translation = await async_client.audio.translations.create( file=b"raw file contents", model="whisper-1", ) assert_matches_type(Translation, translation, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - translation = await client.audio.translations.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + translation = await async_client.audio.translations.create( file=b"raw file contents", model="whisper-1", prompt="string", @@ -92,8 +86,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Translation, translation, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.audio.translations.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.audio.translations.with_raw_response.create( file=b"raw file contents", model="whisper-1", ) @@ -104,8 +98,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(Translation, translation, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.audio.translations.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with 
async_client.audio.translations.with_streaming_response.create( file=b"raw file contents", model="whisper-1", ) as response: diff --git a/tests/api_resources/beta/assistants/test_files.py b/tests/api_resources/beta/assistants/test_files.py index 7db1368ccb..66e3e2efe6 100644 --- a/tests/api_resources/beta/assistants/test_files.py +++ b/tests/api_resources/beta/assistants/test_files.py @@ -9,18 +9,14 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.beta.assistants import AssistantFile, FileDeleteResponse base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestFiles: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -211,21 +207,19 @@ def test_path_params_delete(self, client: OpenAI) -> None: class TestAsyncFiles: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - file = await client.beta.assistants.files.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.assistants.files.create( "file-abc123", file_id="string", ) assert_matches_type(AssistantFile, file, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.files.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.files.with_raw_response.create( "file-abc123", file_id="string", ) @@ -236,8 +230,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(AssistantFile, file, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.files.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.files.with_streaming_response.create( "file-abc123", file_id="string", ) as response: @@ -250,24 +244,24 @@ async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_create(self, client: AsyncOpenAI) -> None: + async def test_path_params_create(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.files.with_raw_response.create( + await 
async_client.beta.assistants.files.with_raw_response.create( "", file_id="string", ) @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - file = await client.beta.assistants.files.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.assistants.files.retrieve( "string", assistant_id="string", ) assert_matches_type(AssistantFile, file, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.files.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.files.with_raw_response.retrieve( "string", assistant_id="string", ) @@ -278,8 +272,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(AssistantFile, file, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.files.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.files.with_streaming_response.retrieve( "string", assistant_id="string", ) as response: @@ -292,29 +286,29 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.files.with_raw_response.retrieve( + await async_client.beta.assistants.files.with_raw_response.retrieve( "string", assistant_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.beta.assistants.files.with_raw_response.retrieve( + await async_client.beta.assistants.files.with_raw_response.retrieve( "", assistant_id="string", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - file = await client.beta.assistants.files.list( + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.assistants.files.list( "string", ) assert_matches_type(AsyncCursorPage[AssistantFile], file, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - file = await client.beta.assistants.files.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.assistants.files.list( "string", after="string", before="string", @@ -324,8 +318,8 @@ async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[AssistantFile], file, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.files.with_raw_response.list( + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.files.with_raw_response.list( "string", ) @@ -335,8 +329,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[AssistantFile], file, path=["response"]) @parametrize - 
async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.files.with_streaming_response.list( + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.files.with_streaming_response.list( "string", ) as response: assert not response.is_closed @@ -348,23 +342,23 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_list(self, client: AsyncOpenAI) -> None: + async def test_path_params_list(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.files.with_raw_response.list( + await async_client.beta.assistants.files.with_raw_response.list( "", ) @parametrize - async def test_method_delete(self, client: AsyncOpenAI) -> None: - file = await client.beta.assistants.files.delete( + async def test_method_delete(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.assistants.files.delete( "string", assistant_id="string", ) assert_matches_type(FileDeleteResponse, file, path=["response"]) @parametrize - async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.files.with_raw_response.delete( + async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.files.with_raw_response.delete( "string", assistant_id="string", ) @@ -375,8 +369,8 @@ async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: assert_matches_type(FileDeleteResponse, file, path=["response"]) @parametrize - async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.files.with_streaming_response.delete( + async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.files.with_streaming_response.delete( "string", assistant_id="string", ) as response: @@ -389,15 +383,15 @@ async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_delete(self, client: AsyncOpenAI) -> None: + async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.files.with_raw_response.delete( + await async_client.beta.assistants.files.with_raw_response.delete( "string", assistant_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.beta.assistants.files.with_raw_response.delete( + await async_client.beta.assistants.files.with_raw_response.delete( "", assistant_id="string", ) diff --git a/tests/api_resources/beta/test_assistants.py b/tests/api_resources/beta/test_assistants.py index fa09769622..8db40bde93 100644 --- a/tests/api_resources/beta/test_assistants.py +++ b/tests/api_resources/beta/test_assistants.py @@ -9,7 +9,6 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.beta import ( Assistant, @@ -17,13 +16,10 @@ ) base_url = os.environ.get("TEST_API_BASE_URL", 
"http://127.0.0.1:4010") -api_key = "My API Key" class TestAssistants: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -234,20 +230,18 @@ def test_path_params_delete(self, client: OpenAI) -> None: class TestAsyncAssistants: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.create( model="string", ) assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.create( model="string", description="string", file_ids=["string", "string", "string"], @@ -259,8 +253,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.with_raw_response.create( model="string", ) @@ -270,8 +264,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.with_streaming_response.create( model="string", ) as response: assert not response.is_closed @@ -283,15 +277,15 @@ async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.retrieve( "string", ) assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await 
async_client.beta.assistants.with_raw_response.retrieve( "string", ) @@ -301,8 +295,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.with_streaming_response.retrieve( "string", ) as response: assert not response.is_closed @@ -314,22 +308,22 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.with_raw_response.retrieve( + await async_client.beta.assistants.with_raw_response.retrieve( "", ) @parametrize - async def test_method_update(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.update( + async def test_method_update(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.update( "string", ) assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.update( + async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.update( "string", description="string", file_ids=["string", "string", "string"], @@ -342,8 +336,8 @@ async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_raw_response_update(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.with_raw_response.update( + async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.with_raw_response.update( "string", ) @@ -353,8 +347,8 @@ async def test_raw_response_update(self, client: AsyncOpenAI) -> None: assert_matches_type(Assistant, assistant, path=["response"]) @parametrize - async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.with_streaming_response.update( + async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.with_streaming_response.update( "string", ) as response: assert not response.is_closed @@ -366,20 +360,20 @@ async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_update(self, client: AsyncOpenAI) -> None: + async def test_path_params_update(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.with_raw_response.update( + await async_client.beta.assistants.with_raw_response.update( "", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.list() + async def 
test_method_list(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.list() assert_matches_type(AsyncCursorPage[Assistant], assistant, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.list( after="string", before="string", limit=0, @@ -388,8 +382,8 @@ async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[Assistant], assistant, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.with_raw_response.list() + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -397,8 +391,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[Assistant], assistant, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.with_streaming_response.list() as response: + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -408,15 +402,15 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_method_delete(self, client: AsyncOpenAI) -> None: - assistant = await client.beta.assistants.delete( + async def test_method_delete(self, async_client: AsyncOpenAI) -> None: + assistant = await async_client.beta.assistants.delete( "string", ) assert_matches_type(AssistantDeleted, assistant, path=["response"]) @parametrize - async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: - response = await client.beta.assistants.with_raw_response.delete( + async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.assistants.with_raw_response.delete( "string", ) @@ -426,8 +420,8 @@ async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: assert_matches_type(AssistantDeleted, assistant, path=["response"]) @parametrize - async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: - async with client.beta.assistants.with_streaming_response.delete( + async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.assistants.with_streaming_response.delete( "string", ) as response: assert not response.is_closed @@ -439,8 +433,8 @@ async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_delete(self, client: AsyncOpenAI) -> None: + async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `assistant_id` but received ''"): - await client.beta.assistants.with_raw_response.delete( + await 
async_client.beta.assistants.with_raw_response.delete( "", ) diff --git a/tests/api_resources/beta/test_threads.py b/tests/api_resources/beta/test_threads.py index ba55cc85da..5b347de1f0 100644 --- a/tests/api_resources/beta/test_threads.py +++ b/tests/api_resources/beta/test_threads.py @@ -9,7 +9,6 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.types.beta import ( Thread, ThreadDeleted, @@ -17,13 +16,10 @@ from openai.types.beta.threads import Run base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestThreads: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -266,18 +262,16 @@ def test_streaming_response_create_and_run(self, client: OpenAI) -> None: class TestAsyncThreads: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.create() + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.create() assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.create( messages=[ { "role": "user", @@ -303,8 +297,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.with_raw_response.create() + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.with_raw_response.create() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -312,8 +306,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.with_streaming_response.create() as response: + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.with_streaming_response.create() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -323,15 +317,15 @@ async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: assert 
cast(Any, response.is_closed) is True @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.retrieve( "string", ) assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.with_raw_response.retrieve( "string", ) @@ -341,8 +335,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.with_streaming_response.retrieve( "string", ) as response: assert not response.is_closed @@ -354,30 +348,30 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.with_raw_response.retrieve( + await async_client.beta.threads.with_raw_response.retrieve( "", ) @parametrize - async def test_method_update(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.update( + async def test_method_update(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.update( "string", ) assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.update( + async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.update( "string", metadata={}, ) assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_raw_response_update(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.with_raw_response.update( + async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.with_raw_response.update( "string", ) @@ -387,8 +381,8 @@ async def test_raw_response_update(self, client: AsyncOpenAI) -> None: assert_matches_type(Thread, thread, path=["response"]) @parametrize - async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.with_streaming_response.update( + async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.with_streaming_response.update( "string", ) as response: assert not response.is_closed @@ -400,22 +394,22 @@ async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_update(self, client: AsyncOpenAI) -> None: + async def test_path_params_update(self, async_client: AsyncOpenAI) -> 
None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.with_raw_response.update( + await async_client.beta.threads.with_raw_response.update( "", ) @parametrize - async def test_method_delete(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.delete( + async def test_method_delete(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.delete( "string", ) assert_matches_type(ThreadDeleted, thread, path=["response"]) @parametrize - async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.with_raw_response.delete( + async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.with_raw_response.delete( "string", ) @@ -425,8 +419,8 @@ async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadDeleted, thread, path=["response"]) @parametrize - async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.with_streaming_response.delete( + async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.with_streaming_response.delete( "string", ) as response: assert not response.is_closed @@ -438,22 +432,22 @@ async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_delete(self, client: AsyncOpenAI) -> None: + async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.with_raw_response.delete( + await async_client.beta.threads.with_raw_response.delete( "", ) @parametrize - async def test_method_create_and_run(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.create_and_run( + async def test_method_create_and_run(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.create_and_run( assistant_id="string", ) assert_matches_type(Run, thread, path=["response"]) @parametrize - async def test_method_create_and_run_with_all_params(self, client: AsyncOpenAI) -> None: - thread = await client.beta.threads.create_and_run( + async def test_method_create_and_run_with_all_params(self, async_client: AsyncOpenAI) -> None: + thread = await async_client.beta.threads.create_and_run( assistant_id="string", instructions="string", metadata={}, @@ -486,8 +480,8 @@ async def test_method_create_and_run_with_all_params(self, client: AsyncOpenAI) assert_matches_type(Run, thread, path=["response"]) @parametrize - async def test_raw_response_create_and_run(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.with_raw_response.create_and_run( + async def test_raw_response_create_and_run(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.with_raw_response.create_and_run( assistant_id="string", ) @@ -497,8 +491,8 @@ async def test_raw_response_create_and_run(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, thread, path=["response"]) @parametrize - async def test_streaming_response_create_and_run(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.with_streaming_response.create_and_run( + async def test_streaming_response_create_and_run(self, async_client: 
AsyncOpenAI) -> None: + async with async_client.beta.threads.with_streaming_response.create_and_run( assistant_id="string", ) as response: assert not response.is_closed diff --git a/tests/api_resources/beta/threads/messages/test_files.py b/tests/api_resources/beta/threads/messages/test_files.py index 2d248642e9..4d0613fd2f 100644 --- a/tests/api_resources/beta/threads/messages/test_files.py +++ b/tests/api_resources/beta/threads/messages/test_files.py @@ -9,18 +9,14 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.beta.threads.messages import MessageFile base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestFiles: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_retrieve(self, client: OpenAI) -> None: @@ -144,13 +140,11 @@ def test_path_params_list(self, client: OpenAI) -> None: class TestAsyncFiles: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - file = await client.beta.threads.messages.files.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.threads.messages.files.retrieve( "file-abc123", thread_id="thread_abc123", message_id="msg_abc123", @@ -158,8 +152,8 @@ async def test_method_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(MessageFile, file, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.messages.files.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.messages.files.with_raw_response.retrieve( "file-abc123", thread_id="thread_abc123", message_id="msg_abc123", @@ -171,8 +165,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(MessageFile, file, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.messages.files.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.messages.files.with_streaming_response.retrieve( "file-abc123", thread_id="thread_abc123", message_id="msg_abc123", @@ -186,39 +180,39 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def 
test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.messages.files.with_raw_response.retrieve( + await async_client.beta.threads.messages.files.with_raw_response.retrieve( "file-abc123", thread_id="", message_id="msg_abc123", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): - await client.beta.threads.messages.files.with_raw_response.retrieve( + await async_client.beta.threads.messages.files.with_raw_response.retrieve( "file-abc123", thread_id="thread_abc123", message_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.beta.threads.messages.files.with_raw_response.retrieve( + await async_client.beta.threads.messages.files.with_raw_response.retrieve( "", thread_id="thread_abc123", message_id="msg_abc123", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - file = await client.beta.threads.messages.files.list( + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.threads.messages.files.list( "string", thread_id="string", ) assert_matches_type(AsyncCursorPage[MessageFile], file, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - file = await client.beta.threads.messages.files.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + file = await async_client.beta.threads.messages.files.list( "string", thread_id="string", after="string", @@ -229,8 +223,8 @@ async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[MessageFile], file, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.messages.files.with_raw_response.list( + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.messages.files.with_raw_response.list( "string", thread_id="string", ) @@ -241,8 +235,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[MessageFile], file, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.messages.files.with_streaming_response.list( + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.messages.files.with_streaming_response.list( "string", thread_id="string", ) as response: @@ -255,15 +249,15 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_list(self, client: AsyncOpenAI) -> None: + async def test_path_params_list(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.messages.files.with_raw_response.list( + await async_client.beta.threads.messages.files.with_raw_response.list( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): - await client.beta.threads.messages.files.with_raw_response.list( + await 
async_client.beta.threads.messages.files.with_raw_response.list( "", thread_id="string", ) diff --git a/tests/api_resources/beta/threads/runs/test_steps.py b/tests/api_resources/beta/threads/runs/test_steps.py index 2ec164a535..c15848cd70 100644 --- a/tests/api_resources/beta/threads/runs/test_steps.py +++ b/tests/api_resources/beta/threads/runs/test_steps.py @@ -9,18 +9,14 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.beta.threads.runs import RunStep base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestSteps: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_retrieve(self, client: OpenAI) -> None: @@ -144,13 +140,11 @@ def test_path_params_list(self, client: OpenAI) -> None: class TestAsyncSteps: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - step = await client.beta.threads.runs.steps.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + step = await async_client.beta.threads.runs.steps.retrieve( "string", thread_id="string", run_id="string", @@ -158,8 +152,8 @@ async def test_method_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(RunStep, step, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.steps.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.steps.with_raw_response.retrieve( "string", thread_id="string", run_id="string", @@ -171,8 +165,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(RunStep, step, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.steps.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.steps.with_streaming_response.retrieve( "string", thread_id="string", run_id="string", @@ -186,39 +180,39 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await 
client.beta.threads.runs.steps.with_raw_response.retrieve( + await async_client.beta.threads.runs.steps.with_raw_response.retrieve( "string", thread_id="", run_id="string", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await client.beta.threads.runs.steps.with_raw_response.retrieve( + await async_client.beta.threads.runs.steps.with_raw_response.retrieve( "string", thread_id="string", run_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `step_id` but received ''"): - await client.beta.threads.runs.steps.with_raw_response.retrieve( + await async_client.beta.threads.runs.steps.with_raw_response.retrieve( "", thread_id="string", run_id="string", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - step = await client.beta.threads.runs.steps.list( + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + step = await async_client.beta.threads.runs.steps.list( "string", thread_id="string", ) assert_matches_type(AsyncCursorPage[RunStep], step, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - step = await client.beta.threads.runs.steps.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + step = await async_client.beta.threads.runs.steps.list( "string", thread_id="string", after="string", @@ -229,8 +223,8 @@ async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[RunStep], step, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.steps.with_raw_response.list( + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.steps.with_raw_response.list( "string", thread_id="string", ) @@ -241,8 +235,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[RunStep], step, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.steps.with_streaming_response.list( + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.steps.with_streaming_response.list( "string", thread_id="string", ) as response: @@ -255,15 +249,15 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_list(self, client: AsyncOpenAI) -> None: + async def test_path_params_list(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.steps.with_raw_response.list( + await async_client.beta.threads.runs.steps.with_raw_response.list( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await client.beta.threads.runs.steps.with_raw_response.list( + await async_client.beta.threads.runs.steps.with_raw_response.list( "", thread_id="string", ) diff --git a/tests/api_resources/beta/threads/test_messages.py b/tests/api_resources/beta/threads/test_messages.py index 508e9b96c9..538d2f4c2a 100644 --- a/tests/api_resources/beta/threads/test_messages.py +++ 
b/tests/api_resources/beta/threads/test_messages.py @@ -9,18 +9,14 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.beta.threads import ThreadMessage base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestMessages: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -235,13 +231,11 @@ def test_path_params_list(self, client: OpenAI) -> None: class TestAsyncMessages: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.create( "string", content="x", role="user", @@ -249,8 +243,8 @@ async def test_method_create(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.create( "string", content="x", role="user", @@ -260,8 +254,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.messages.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.messages.with_raw_response.create( "string", content="x", role="user", @@ -273,8 +267,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.messages.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.messages.with_streaming_response.create( "string", content="x", role="user", @@ -288,25 +282,25 @@ async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_create(self, client: AsyncOpenAI) -> None: + async def test_path_params_create(self, async_client: AsyncOpenAI) -> 
None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.messages.with_raw_response.create( + await async_client.beta.threads.messages.with_raw_response.create( "", content="x", role="user", ) @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.retrieve( "string", thread_id="string", ) assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.messages.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.messages.with_raw_response.retrieve( "string", thread_id="string", ) @@ -317,8 +311,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.messages.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.messages.with_streaming_response.retrieve( "string", thread_id="string", ) as response: @@ -331,30 +325,30 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.messages.with_raw_response.retrieve( + await async_client.beta.threads.messages.with_raw_response.retrieve( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): - await client.beta.threads.messages.with_raw_response.retrieve( + await async_client.beta.threads.messages.with_raw_response.retrieve( "", thread_id="string", ) @parametrize - async def test_method_update(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.update( + async def test_method_update(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.update( "string", thread_id="string", ) assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.update( + async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.update( "string", thread_id="string", metadata={}, @@ -362,8 +356,8 @@ async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_raw_response_update(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.messages.with_raw_response.update( + async def test_raw_response_update(self, async_client: AsyncOpenAI) -> None: + response = await 
async_client.beta.threads.messages.with_raw_response.update( "string", thread_id="string", ) @@ -374,8 +368,8 @@ async def test_raw_response_update(self, client: AsyncOpenAI) -> None: assert_matches_type(ThreadMessage, message, path=["response"]) @parametrize - async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.messages.with_streaming_response.update( + async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.messages.with_streaming_response.update( "string", thread_id="string", ) as response: @@ -388,29 +382,29 @@ async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_update(self, client: AsyncOpenAI) -> None: + async def test_path_params_update(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.messages.with_raw_response.update( + await async_client.beta.threads.messages.with_raw_response.update( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_id` but received ''"): - await client.beta.threads.messages.with_raw_response.update( + await async_client.beta.threads.messages.with_raw_response.update( "", thread_id="string", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.list( + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.list( "string", ) assert_matches_type(AsyncCursorPage[ThreadMessage], message, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - message = await client.beta.threads.messages.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + message = await async_client.beta.threads.messages.list( "string", after="string", before="string", @@ -420,8 +414,8 @@ async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[ThreadMessage], message, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.messages.with_raw_response.list( + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.messages.with_raw_response.list( "string", ) @@ -431,8 +425,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[ThreadMessage], message, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.messages.with_streaming_response.list( + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.messages.with_streaming_response.list( "string", ) as response: assert not response.is_closed @@ -444,8 +438,8 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_list(self, client: AsyncOpenAI) -> None: + async def test_path_params_list(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value 
for `thread_id` but received ''"): - await client.beta.threads.messages.with_raw_response.list( + await async_client.beta.threads.messages.with_raw_response.list( "", ) diff --git a/tests/api_resources/beta/threads/test_runs.py b/tests/api_resources/beta/threads/test_runs.py index 66a9edd5c0..9e88d65eaf 100644 --- a/tests/api_resources/beta/threads/test_runs.py +++ b/tests/api_resources/beta/threads/test_runs.py @@ -9,20 +9,16 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.beta.threads import ( Run, ) base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestRuns: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -336,21 +332,19 @@ def test_path_params_submit_tool_outputs(self, client: OpenAI) -> None: class TestAsyncRuns: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.create( "string", assistant_id="string", ) assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.create( "string", assistant_id="string", additional_instructions="string", @@ -362,8 +356,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.with_raw_response.create( "string", assistant_id="string", ) @@ -374,8 +368,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.with_streaming_response.create( "string", assistant_id="string", ) as response: @@ -388,24 +382,24 @@ async def test_streaming_response_create(self, 
client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_create(self, client: AsyncOpenAI) -> None: + async def test_path_params_create(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.with_raw_response.create( + await async_client.beta.threads.runs.with_raw_response.create( "", assistant_id="string", ) @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.retrieve( "string", thread_id="string", ) assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.with_raw_response.retrieve( "string", thread_id="string", ) @@ -416,8 +410,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.with_streaming_response.retrieve( "string", thread_id="string", ) as response: @@ -430,30 +424,30 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.with_raw_response.retrieve( + await async_client.beta.threads.runs.with_raw_response.retrieve( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await client.beta.threads.runs.with_raw_response.retrieve( + await async_client.beta.threads.runs.with_raw_response.retrieve( "", thread_id="string", ) @parametrize - async def test_method_update(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.update( + async def test_method_update(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.update( "string", thread_id="string", ) assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.update( + async def test_method_update_with_all_params(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.update( "string", thread_id="string", metadata={}, @@ -461,8 +455,8 @@ async def test_method_update_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_raw_response_update(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.with_raw_response.update( + async def test_raw_response_update(self, async_client: 
AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.with_raw_response.update( "string", thread_id="string", ) @@ -473,8 +467,8 @@ async def test_raw_response_update(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.with_streaming_response.update( + async def test_streaming_response_update(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.with_streaming_response.update( "string", thread_id="string", ) as response: @@ -487,29 +481,29 @@ async def test_streaming_response_update(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_update(self, client: AsyncOpenAI) -> None: + async def test_path_params_update(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.with_raw_response.update( + await async_client.beta.threads.runs.with_raw_response.update( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await client.beta.threads.runs.with_raw_response.update( + await async_client.beta.threads.runs.with_raw_response.update( "", thread_id="string", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.list( + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.list( "string", ) assert_matches_type(AsyncCursorPage[Run], run, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.list( "string", after="string", before="string", @@ -519,8 +513,8 @@ async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[Run], run, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.with_raw_response.list( + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.with_raw_response.list( "string", ) @@ -530,8 +524,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[Run], run, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.with_streaming_response.list( + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.with_streaming_response.list( "string", ) as response: assert not response.is_closed @@ -543,23 +537,23 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_list(self, client: AsyncOpenAI) -> None: + async def test_path_params_list(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.with_raw_response.list( + 
await async_client.beta.threads.runs.with_raw_response.list( "", ) @parametrize - async def test_method_cancel(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.cancel( + async def test_method_cancel(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.cancel( "string", thread_id="string", ) assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_raw_response_cancel(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.with_raw_response.cancel( + async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.with_raw_response.cancel( "string", thread_id="string", ) @@ -570,8 +564,8 @@ async def test_raw_response_cancel(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_streaming_response_cancel(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.with_streaming_response.cancel( + async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.with_streaming_response.cancel( "string", thread_id="string", ) as response: @@ -584,22 +578,22 @@ async def test_streaming_response_cancel(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_cancel(self, client: AsyncOpenAI) -> None: + async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.with_raw_response.cancel( + await async_client.beta.threads.runs.with_raw_response.cancel( "string", thread_id="", ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await client.beta.threads.runs.with_raw_response.cancel( + await async_client.beta.threads.runs.with_raw_response.cancel( "", thread_id="string", ) @parametrize - async def test_method_submit_tool_outputs(self, client: AsyncOpenAI) -> None: - run = await client.beta.threads.runs.submit_tool_outputs( + async def test_method_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None: + run = await async_client.beta.threads.runs.submit_tool_outputs( "string", thread_id="string", tool_outputs=[{}, {}, {}], @@ -607,8 +601,8 @@ async def test_method_submit_tool_outputs(self, client: AsyncOpenAI) -> None: assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_raw_response_submit_tool_outputs(self, client: AsyncOpenAI) -> None: - response = await client.beta.threads.runs.with_raw_response.submit_tool_outputs( + async def test_raw_response_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None: + response = await async_client.beta.threads.runs.with_raw_response.submit_tool_outputs( "string", thread_id="string", tool_outputs=[{}, {}, {}], @@ -620,8 +614,8 @@ async def test_raw_response_submit_tool_outputs(self, client: AsyncOpenAI) -> No assert_matches_type(Run, run, path=["response"]) @parametrize - async def test_streaming_response_submit_tool_outputs(self, client: AsyncOpenAI) -> None: - async with client.beta.threads.runs.with_streaming_response.submit_tool_outputs( + async def test_streaming_response_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None: + async with async_client.beta.threads.runs.with_streaming_response.submit_tool_outputs( "string", thread_id="string", 
tool_outputs=[{}, {}, {}], @@ -635,16 +629,16 @@ async def test_streaming_response_submit_tool_outputs(self, client: AsyncOpenAI) assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_submit_tool_outputs(self, client: AsyncOpenAI) -> None: + async def test_path_params_submit_tool_outputs(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `thread_id` but received ''"): - await client.beta.threads.runs.with_raw_response.submit_tool_outputs( + await async_client.beta.threads.runs.with_raw_response.submit_tool_outputs( "string", thread_id="", tool_outputs=[{}, {}, {}], ) with pytest.raises(ValueError, match=r"Expected a non-empty value for `run_id` but received ''"): - await client.beta.threads.runs.with_raw_response.submit_tool_outputs( + await async_client.beta.threads.runs.with_raw_response.submit_tool_outputs( "", thread_id="string", tool_outputs=[{}, {}, {}], diff --git a/tests/api_resources/chat/test_completions.py b/tests/api_resources/chat/test_completions.py index 860ec80f48..4fa069ba2e 100644 --- a/tests/api_resources/chat/test_completions.py +++ b/tests/api_resources/chat/test_completions.py @@ -9,17 +9,13 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.types.chat import ChatCompletion base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestCompletions: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create_overload_1(self, client: OpenAI) -> None: @@ -249,13 +245,11 @@ def test_streaming_response_create_overload_2(self, client: OpenAI) -> None: class TestAsyncCompletions: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create_overload_1(self, client: AsyncOpenAI) -> None: - completion = await client.chat.completions.create( + async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None: + completion = await async_client.chat.completions.create( messages=[ { "content": "string", @@ -267,8 +261,8 @@ async def test_method_create_overload_1(self, client: AsyncOpenAI) -> None: assert_matches_type(ChatCompletion, completion, path=["response"]) @parametrize - async def test_method_create_with_all_params_overload_1(self, client: AsyncOpenAI) -> None: - completion = await client.chat.completions.create( + async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None: + completion = await async_client.chat.completions.create( messages=[ { "content": "string", @@ -330,8 +324,8 @@ async def test_method_create_with_all_params_overload_1(self, client: AsyncOpenA assert_matches_type(ChatCompletion, completion, path=["response"]) 
@parametrize - async def test_raw_response_create_overload_1(self, client: AsyncOpenAI) -> None: - response = await client.chat.completions.with_raw_response.create( + async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None: + response = await async_client.chat.completions.with_raw_response.create( messages=[ { "content": "string", @@ -347,8 +341,8 @@ async def test_raw_response_create_overload_1(self, client: AsyncOpenAI) -> None assert_matches_type(ChatCompletion, completion, path=["response"]) @parametrize - async def test_streaming_response_create_overload_1(self, client: AsyncOpenAI) -> None: - async with client.chat.completions.with_streaming_response.create( + async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None: + async with async_client.chat.completions.with_streaming_response.create( messages=[ { "content": "string", @@ -366,8 +360,8 @@ async def test_streaming_response_create_overload_1(self, client: AsyncOpenAI) - assert cast(Any, response.is_closed) is True @parametrize - async def test_method_create_overload_2(self, client: AsyncOpenAI) -> None: - completion_stream = await client.chat.completions.create( + async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None: + completion_stream = await async_client.chat.completions.create( messages=[ { "content": "string", @@ -380,8 +374,8 @@ async def test_method_create_overload_2(self, client: AsyncOpenAI) -> None: await completion_stream.response.aclose() @parametrize - async def test_method_create_with_all_params_overload_2(self, client: AsyncOpenAI) -> None: - completion_stream = await client.chat.completions.create( + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None: + completion_stream = await async_client.chat.completions.create( messages=[ { "content": "string", @@ -443,8 +437,8 @@ async def test_method_create_with_all_params_overload_2(self, client: AsyncOpenA await completion_stream.response.aclose() @parametrize - async def test_raw_response_create_overload_2(self, client: AsyncOpenAI) -> None: - response = await client.chat.completions.with_raw_response.create( + async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None: + response = await async_client.chat.completions.with_raw_response.create( messages=[ { "content": "string", @@ -460,8 +454,8 @@ async def test_raw_response_create_overload_2(self, client: AsyncOpenAI) -> None await stream.close() @parametrize - async def test_streaming_response_create_overload_2(self, client: AsyncOpenAI) -> None: - async with client.chat.completions.with_streaming_response.create( + async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None: + async with async_client.chat.completions.with_streaming_response.create( messages=[ { "content": "string", diff --git a/tests/api_resources/fine_tuning/test_jobs.py b/tests/api_resources/fine_tuning/test_jobs.py index 50c7278855..204cc3b1f5 100644 --- a/tests/api_resources/fine_tuning/test_jobs.py +++ b/tests/api_resources/fine_tuning/test_jobs.py @@ -9,7 +9,6 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncCursorPage, AsyncCursorPage from openai.types.fine_tuning import ( FineTuningJob, @@ -17,13 +16,10 @@ ) base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestJobs: - 
strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -232,21 +228,19 @@ def test_path_params_list_events(self, client: OpenAI) -> None: class TestAsyncJobs: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.create( model="gpt-3.5-turbo", training_file="file-abc123", ) assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.create( model="gpt-3.5-turbo", training_file="file-abc123", hyperparameters={ @@ -260,8 +254,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.fine_tuning.jobs.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.fine_tuning.jobs.with_raw_response.create( model="gpt-3.5-turbo", training_file="file-abc123", ) @@ -272,8 +266,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.fine_tuning.jobs.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.fine_tuning.jobs.with_streaming_response.create( model="gpt-3.5-turbo", training_file="file-abc123", ) as response: @@ -286,15 +280,15 @@ async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.retrieve( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.fine_tuning.jobs.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await 
async_client.fine_tuning.jobs.with_raw_response.retrieve( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) @@ -304,8 +298,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.fine_tuning.jobs.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.fine_tuning.jobs.with_streaming_response.retrieve( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) as response: assert not response.is_closed @@ -317,28 +311,28 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `fine_tuning_job_id` but received ''"): - await client.fine_tuning.jobs.with_raw_response.retrieve( + await async_client.fine_tuning.jobs.with_raw_response.retrieve( "", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.list() + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.list() assert_matches_type(AsyncCursorPage[FineTuningJob], job, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.list( + async def test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.list( after="string", limit=0, ) assert_matches_type(AsyncCursorPage[FineTuningJob], job, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.fine_tuning.jobs.with_raw_response.list() + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.fine_tuning.jobs.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -346,8 +340,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[FineTuningJob], job, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.fine_tuning.jobs.with_streaming_response.list() as response: + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.fine_tuning.jobs.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -357,15 +351,15 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_method_cancel(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.cancel( + async def test_method_cancel(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.cancel( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_raw_response_cancel(self, client: AsyncOpenAI) -> None: - response = await 
client.fine_tuning.jobs.with_raw_response.cancel( + async def test_raw_response_cancel(self, async_client: AsyncOpenAI) -> None: + response = await async_client.fine_tuning.jobs.with_raw_response.cancel( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) @@ -375,8 +369,8 @@ async def test_raw_response_cancel(self, client: AsyncOpenAI) -> None: assert_matches_type(FineTuningJob, job, path=["response"]) @parametrize - async def test_streaming_response_cancel(self, client: AsyncOpenAI) -> None: - async with client.fine_tuning.jobs.with_streaming_response.cancel( + async def test_streaming_response_cancel(self, async_client: AsyncOpenAI) -> None: + async with async_client.fine_tuning.jobs.with_streaming_response.cancel( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) as response: assert not response.is_closed @@ -388,22 +382,22 @@ async def test_streaming_response_cancel(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_cancel(self, client: AsyncOpenAI) -> None: + async def test_path_params_cancel(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `fine_tuning_job_id` but received ''"): - await client.fine_tuning.jobs.with_raw_response.cancel( + await async_client.fine_tuning.jobs.with_raw_response.cancel( "", ) @parametrize - async def test_method_list_events(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.list_events( + async def test_method_list_events(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.list_events( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) assert_matches_type(AsyncCursorPage[FineTuningJobEvent], job, path=["response"]) @parametrize - async def test_method_list_events_with_all_params(self, client: AsyncOpenAI) -> None: - job = await client.fine_tuning.jobs.list_events( + async def test_method_list_events_with_all_params(self, async_client: AsyncOpenAI) -> None: + job = await async_client.fine_tuning.jobs.list_events( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", after="string", limit=0, @@ -411,8 +405,8 @@ async def test_method_list_events_with_all_params(self, client: AsyncOpenAI) -> assert_matches_type(AsyncCursorPage[FineTuningJobEvent], job, path=["response"]) @parametrize - async def test_raw_response_list_events(self, client: AsyncOpenAI) -> None: - response = await client.fine_tuning.jobs.with_raw_response.list_events( + async def test_raw_response_list_events(self, async_client: AsyncOpenAI) -> None: + response = await async_client.fine_tuning.jobs.with_raw_response.list_events( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) @@ -422,8 +416,8 @@ async def test_raw_response_list_events(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncCursorPage[FineTuningJobEvent], job, path=["response"]) @parametrize - async def test_streaming_response_list_events(self, client: AsyncOpenAI) -> None: - async with client.fine_tuning.jobs.with_streaming_response.list_events( + async def test_streaming_response_list_events(self, async_client: AsyncOpenAI) -> None: + async with async_client.fine_tuning.jobs.with_streaming_response.list_events( "ft-AF1WoRqd3aJAHsqc9NY7iL8F", ) as response: assert not response.is_closed @@ -435,8 +429,8 @@ async def test_streaming_response_list_events(self, client: AsyncOpenAI) -> None assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_list_events(self, client: AsyncOpenAI) -> None: + async def test_path_params_list_events(self, async_client: AsyncOpenAI) -> None: with 
pytest.raises(ValueError, match=r"Expected a non-empty value for `fine_tuning_job_id` but received ''"): - await client.fine_tuning.jobs.with_raw_response.list_events( + await async_client.fine_tuning.jobs.with_raw_response.list_events( "", ) diff --git a/tests/api_resources/test_completions.py b/tests/api_resources/test_completions.py index a5e8dc809a..916cdd3cb6 100644 --- a/tests/api_resources/test_completions.py +++ b/tests/api_resources/test_completions.py @@ -10,16 +10,12 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type from openai.types import Completion -from openai._client import OpenAI, AsyncOpenAI base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestCompletions: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create_overload_1(self, client: OpenAI) -> None: @@ -139,21 +135,19 @@ def test_streaming_response_create_overload_2(self, client: OpenAI) -> None: class TestAsyncCompletions: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create_overload_1(self, client: AsyncOpenAI) -> None: - completion = await client.completions.create( + async def test_method_create_overload_1(self, async_client: AsyncOpenAI) -> None: + completion = await async_client.completions.create( model="string", prompt="This is a test.", ) assert_matches_type(Completion, completion, path=["response"]) @parametrize - async def test_method_create_with_all_params_overload_1(self, client: AsyncOpenAI) -> None: - completion = await client.completions.create( + async def test_method_create_with_all_params_overload_1(self, async_client: AsyncOpenAI) -> None: + completion = await async_client.completions.create( model="string", prompt="This is a test.", best_of=0, @@ -175,8 +169,8 @@ async def test_method_create_with_all_params_overload_1(self, client: AsyncOpenA assert_matches_type(Completion, completion, path=["response"]) @parametrize - async def test_raw_response_create_overload_1(self, client: AsyncOpenAI) -> None: - response = await client.completions.with_raw_response.create( + async def test_raw_response_create_overload_1(self, async_client: AsyncOpenAI) -> None: + response = await async_client.completions.with_raw_response.create( model="string", prompt="This is a test.", ) @@ -187,8 +181,8 @@ async def test_raw_response_create_overload_1(self, client: AsyncOpenAI) -> None assert_matches_type(Completion, completion, path=["response"]) @parametrize - async def test_streaming_response_create_overload_1(self, client: AsyncOpenAI) -> None: - async with client.completions.with_streaming_response.create( + async def test_streaming_response_create_overload_1(self, async_client: AsyncOpenAI) -> None: + async with 
async_client.completions.with_streaming_response.create( model="string", prompt="This is a test.", ) as response: @@ -201,8 +195,8 @@ async def test_streaming_response_create_overload_1(self, client: AsyncOpenAI) - assert cast(Any, response.is_closed) is True @parametrize - async def test_method_create_overload_2(self, client: AsyncOpenAI) -> None: - completion_stream = await client.completions.create( + async def test_method_create_overload_2(self, async_client: AsyncOpenAI) -> None: + completion_stream = await async_client.completions.create( model="string", prompt="This is a test.", stream=True, @@ -210,8 +204,8 @@ async def test_method_create_overload_2(self, client: AsyncOpenAI) -> None: await completion_stream.response.aclose() @parametrize - async def test_method_create_with_all_params_overload_2(self, client: AsyncOpenAI) -> None: - completion_stream = await client.completions.create( + async def test_method_create_with_all_params_overload_2(self, async_client: AsyncOpenAI) -> None: + completion_stream = await async_client.completions.create( model="string", prompt="This is a test.", stream=True, @@ -233,8 +227,8 @@ async def test_method_create_with_all_params_overload_2(self, client: AsyncOpenA await completion_stream.response.aclose() @parametrize - async def test_raw_response_create_overload_2(self, client: AsyncOpenAI) -> None: - response = await client.completions.with_raw_response.create( + async def test_raw_response_create_overload_2(self, async_client: AsyncOpenAI) -> None: + response = await async_client.completions.with_raw_response.create( model="string", prompt="This is a test.", stream=True, @@ -245,8 +239,8 @@ async def test_raw_response_create_overload_2(self, client: AsyncOpenAI) -> None await stream.close() @parametrize - async def test_streaming_response_create_overload_2(self, client: AsyncOpenAI) -> None: - async with client.completions.with_streaming_response.create( + async def test_streaming_response_create_overload_2(self, async_client: AsyncOpenAI) -> None: + async with async_client.completions.with_streaming_response.create( model="string", prompt="This is a test.", stream=True, diff --git a/tests/api_resources/test_embeddings.py b/tests/api_resources/test_embeddings.py index 77875fc46f..cd4ff8e391 100644 --- a/tests/api_resources/test_embeddings.py +++ b/tests/api_resources/test_embeddings.py @@ -10,16 +10,12 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type from openai.types import CreateEmbeddingResponse -from openai._client import OpenAI, AsyncOpenAI base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestEmbeddings: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -67,21 +63,19 @@ def test_streaming_response_create(self, client: OpenAI) -> None: class TestAsyncEmbeddings: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], 
ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - embedding = await client.embeddings.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + embedding = await async_client.embeddings.create( input="The quick brown fox jumped over the lazy dog", model="text-embedding-ada-002", ) assert_matches_type(CreateEmbeddingResponse, embedding, path=["response"]) @parametrize - async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: - embedding = await client.embeddings.create( + async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None: + embedding = await async_client.embeddings.create( input="The quick brown fox jumped over the lazy dog", model="text-embedding-ada-002", encoding_format="float", @@ -90,8 +84,8 @@ async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None: assert_matches_type(CreateEmbeddingResponse, embedding, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.embeddings.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.embeddings.with_raw_response.create( input="The quick brown fox jumped over the lazy dog", model="text-embedding-ada-002", ) @@ -102,8 +96,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(CreateEmbeddingResponse, embedding, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.embeddings.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.embeddings.with_streaming_response.create( input="The quick brown fox jumped over the lazy dog", model="text-embedding-ada-002", ) as response: diff --git a/tests/api_resources/test_files.py b/tests/api_resources/test_files.py index 89ad9e222f..d1a17923a6 100644 --- a/tests/api_resources/test_files.py +++ b/tests/api_resources/test_files.py @@ -13,19 +13,15 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type from openai.types import FileObject, FileDeleted -from openai._client import OpenAI, AsyncOpenAI from openai.pagination import SyncPage, AsyncPage # pyright: reportDeprecated=false base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestFiles: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create(self, client: OpenAI) -> None: @@ -261,21 +257,19 @@ def test_path_params_retrieve_content(self, client: OpenAI) -> None: class TestAsyncFiles: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", 
"loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create(self, client: AsyncOpenAI) -> None: - file = await client.files.create( + async def test_method_create(self, async_client: AsyncOpenAI) -> None: + file = await async_client.files.create( file=b"raw file contents", purpose="fine-tune", ) assert_matches_type(FileObject, file, path=["response"]) @parametrize - async def test_raw_response_create(self, client: AsyncOpenAI) -> None: - response = await client.files.with_raw_response.create( + async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None: + response = await async_client.files.with_raw_response.create( file=b"raw file contents", purpose="fine-tune", ) @@ -286,8 +280,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None: assert_matches_type(FileObject, file, path=["response"]) @parametrize - async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: - async with client.files.with_streaming_response.create( + async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None: + async with async_client.files.with_streaming_response.create( file=b"raw file contents", purpose="fine-tune", ) as response: @@ -300,15 +294,15 @@ async def test_streaming_response_create(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_method_retrieve(self, client: AsyncOpenAI) -> None: - file = await client.files.retrieve( + async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None: + file = await async_client.files.retrieve( "string", ) assert_matches_type(FileObject, file, path=["response"]) @parametrize - async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: - response = await client.files.with_raw_response.retrieve( + async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None: + response = await async_client.files.with_raw_response.retrieve( "string", ) @@ -318,8 +312,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None: assert_matches_type(FileObject, file, path=["response"]) @parametrize - async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: - async with client.files.with_streaming_response.retrieve( + async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None: + async with async_client.files.with_streaming_response.retrieve( "string", ) as response: assert not response.is_closed @@ -331,27 +325,27 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.files.with_raw_response.retrieve( + await async_client.files.with_raw_response.retrieve( "", ) @parametrize - async def test_method_list(self, client: AsyncOpenAI) -> None: - file = await client.files.list() + async def test_method_list(self, async_client: AsyncOpenAI) -> None: + file = await async_client.files.list() assert_matches_type(AsyncPage[FileObject], file, path=["response"]) @parametrize - async def test_method_list_with_all_params(self, client: AsyncOpenAI) -> None: - file = await client.files.list( + async def 
test_method_list_with_all_params(self, async_client: AsyncOpenAI) -> None: + file = await async_client.files.list( purpose="string", ) assert_matches_type(AsyncPage[FileObject], file, path=["response"]) @parametrize - async def test_raw_response_list(self, client: AsyncOpenAI) -> None: - response = await client.files.with_raw_response.list() + async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None: + response = await async_client.files.with_raw_response.list() assert response.is_closed is True assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -359,8 +353,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None: assert_matches_type(AsyncPage[FileObject], file, path=["response"]) @parametrize - async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: - async with client.files.with_streaming_response.list() as response: + async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None: + async with async_client.files.with_streaming_response.list() as response: assert not response.is_closed assert response.http_request.headers.get("X-Stainless-Lang") == "python" @@ -370,15 +364,15 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_method_delete(self, client: AsyncOpenAI) -> None: - file = await client.files.delete( + async def test_method_delete(self, async_client: AsyncOpenAI) -> None: + file = await async_client.files.delete( "string", ) assert_matches_type(FileDeleted, file, path=["response"]) @parametrize - async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: - response = await client.files.with_raw_response.delete( + async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None: + response = await async_client.files.with_raw_response.delete( "string", ) @@ -388,8 +382,8 @@ async def test_raw_response_delete(self, client: AsyncOpenAI) -> None: assert_matches_type(FileDeleted, file, path=["response"]) @parametrize - async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: - async with client.files.with_streaming_response.delete( + async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None: + async with async_client.files.with_streaming_response.delete( "string", ) as response: assert not response.is_closed @@ -401,17 +395,17 @@ async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None: assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_delete(self, client: AsyncOpenAI) -> None: + async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.files.with_raw_response.delete( + await async_client.files.with_raw_response.delete( "", ) @parametrize @pytest.mark.respx(base_url=base_url) - async def test_method_content(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def test_method_content(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.get("/files/string/content").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - file = await client.files.content( + file = await async_client.files.content( "string", ) assert isinstance(file, _legacy_response.HttpxBinaryResponseContent) @@ -419,10 +413,10 @@ async def test_method_content(self, client: AsyncOpenAI, respx_mock: MockRouter) @parametrize 
@pytest.mark.respx(base_url=base_url) - async def test_raw_response_content(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def test_raw_response_content(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.get("/files/string/content").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - response = await client.files.with_raw_response.content( + response = await async_client.files.with_raw_response.content( "string", ) @@ -433,9 +427,9 @@ async def test_raw_response_content(self, client: AsyncOpenAI, respx_mock: MockR @parametrize @pytest.mark.respx(base_url=base_url) - async def test_streaming_response_content(self, client: AsyncOpenAI, respx_mock: MockRouter) -> None: + async def test_streaming_response_content(self, async_client: AsyncOpenAI, respx_mock: MockRouter) -> None: respx_mock.get("/files/string/content").mock(return_value=httpx.Response(200, json={"foo": "bar"})) - async with client.files.with_streaming_response.content( + async with async_client.files.with_streaming_response.content( "string", ) as response: assert not response.is_closed @@ -448,25 +442,25 @@ async def test_streaming_response_content(self, client: AsyncOpenAI, respx_mock: @parametrize @pytest.mark.respx(base_url=base_url) - async def test_path_params_content(self, client: AsyncOpenAI) -> None: + async def test_path_params_content(self, async_client: AsyncOpenAI) -> None: with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.files.with_raw_response.content( + await async_client.files.with_raw_response.content( "", ) @parametrize - async def test_method_retrieve_content(self, client: AsyncOpenAI) -> None: + async def test_method_retrieve_content(self, async_client: AsyncOpenAI) -> None: with pytest.warns(DeprecationWarning): - file = await client.files.retrieve_content( + file = await async_client.files.retrieve_content( "string", ) assert_matches_type(str, file, path=["response"]) @parametrize - async def test_raw_response_retrieve_content(self, client: AsyncOpenAI) -> None: + async def test_raw_response_retrieve_content(self, async_client: AsyncOpenAI) -> None: with pytest.warns(DeprecationWarning): - response = await client.files.with_raw_response.retrieve_content( + response = await async_client.files.with_raw_response.retrieve_content( "string", ) @@ -476,9 +470,9 @@ async def test_raw_response_retrieve_content(self, client: AsyncOpenAI) -> None: assert_matches_type(str, file, path=["response"]) @parametrize - async def test_streaming_response_retrieve_content(self, client: AsyncOpenAI) -> None: + async def test_streaming_response_retrieve_content(self, async_client: AsyncOpenAI) -> None: with pytest.warns(DeprecationWarning): - async with client.files.with_streaming_response.retrieve_content( + async with async_client.files.with_streaming_response.retrieve_content( "string", ) as response: assert not response.is_closed @@ -490,9 +484,9 @@ async def test_streaming_response_retrieve_content(self, client: AsyncOpenAI) -> assert cast(Any, response.is_closed) is True @parametrize - async def test_path_params_retrieve_content(self, client: AsyncOpenAI) -> None: + async def test_path_params_retrieve_content(self, async_client: AsyncOpenAI) -> None: with pytest.warns(DeprecationWarning): with pytest.raises(ValueError, match=r"Expected a non-empty value for `file_id` but received ''"): - await client.files.with_raw_response.retrieve_content( + await async_client.files.with_raw_response.retrieve_content( 
"", ) diff --git a/tests/api_resources/test_images.py b/tests/api_resources/test_images.py index 553bd018ee..b6cb2572ab 100644 --- a/tests/api_resources/test_images.py +++ b/tests/api_resources/test_images.py @@ -10,16 +10,12 @@ from openai import OpenAI, AsyncOpenAI from tests.utils import assert_matches_type from openai.types import ImagesResponse -from openai._client import OpenAI, AsyncOpenAI base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010") -api_key = "My API Key" class TestImages: - strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize def test_method_create_variation(self, client: OpenAI) -> None: @@ -159,20 +155,18 @@ def test_streaming_response_generate(self, client: OpenAI) -> None: class TestAsyncImages: - strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True) - loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False) - parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"]) + parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"]) @parametrize - async def test_method_create_variation(self, client: AsyncOpenAI) -> None: - image = await client.images.create_variation( + async def test_method_create_variation(self, async_client: AsyncOpenAI) -> None: + image = await async_client.images.create_variation( image=b"raw file contents", ) assert_matches_type(ImagesResponse, image, path=["response"]) @parametrize - async def test_method_create_variation_with_all_params(self, client: AsyncOpenAI) -> None: - image = await client.images.create_variation( + async def test_method_create_variation_with_all_params(self, async_client: AsyncOpenAI) -> None: + image = await async_client.images.create_variation( image=b"raw file contents", model="dall-e-2", n=1, @@ -183,8 +177,8 @@ async def test_method_create_variation_with_all_params(self, client: AsyncOpenAI assert_matches_type(ImagesResponse, image, path=["response"]) @parametrize - async def test_raw_response_create_variation(self, client: AsyncOpenAI) -> None: - response = await client.images.with_raw_response.create_variation( + async def test_raw_response_create_variation(self, async_client: AsyncOpenAI) -> None: + response = await async_client.images.with_raw_response.create_variation( image=b"raw file contents", ) @@ -194,8 +188,8 @@ async def test_raw_response_create_variation(self, client: AsyncOpenAI) -> None: assert_matches_type(ImagesResponse, image, path=["response"]) @parametrize - async def test_streaming_response_create_variation(self, client: AsyncOpenAI) -> None: - async with client.images.with_streaming_response.create_variation( + async def test_streaming_response_create_variation(self, async_client: AsyncOpenAI) -> None: + async with async_client.images.with_streaming_response.create_variation( image=b"raw file contents", ) as response: assert not response.is_closed @@ -207,16 +201,16 @@ async def test_streaming_response_create_variation(self, client: AsyncOpenAI) -> assert cast(Any, response.is_closed) is True @parametrize - async def test_method_edit(self, client: 
-        image = await client.images.edit(
+    async def test_method_edit(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.edit(
            image=b"raw file contents",
            prompt="A cute baby sea otter wearing a beret",
        )
        assert_matches_type(ImagesResponse, image, path=["response"])
    @parametrize
-    async def test_method_edit_with_all_params(self, client: AsyncOpenAI) -> None:
-        image = await client.images.edit(
+    async def test_method_edit_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.edit(
            image=b"raw file contents",
            prompt="A cute baby sea otter wearing a beret",
            mask=b"raw file contents",
@@ -229,8 +223,8 @@ async def test_method_edit_with_all_params(self, client: AsyncOpenAI) -> None:
        assert_matches_type(ImagesResponse, image, path=["response"])
    @parametrize
-    async def test_raw_response_edit(self, client: AsyncOpenAI) -> None:
-        response = await client.images.with_raw_response.edit(
+    async def test_raw_response_edit(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.images.with_raw_response.edit(
            image=b"raw file contents",
            prompt="A cute baby sea otter wearing a beret",
        )
@@ -241,8 +235,8 @@ async def test_raw_response_edit(self, client: AsyncOpenAI) -> None:
        assert_matches_type(ImagesResponse, image, path=["response"])
    @parametrize
-    async def test_streaming_response_edit(self, client: AsyncOpenAI) -> None:
-        async with client.images.with_streaming_response.edit(
+    async def test_streaming_response_edit(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.images.with_streaming_response.edit(
            image=b"raw file contents",
            prompt="A cute baby sea otter wearing a beret",
        ) as response:
@@ -255,15 +249,15 @@ async def test_streaming_response_edit(self, client: AsyncOpenAI) -> None:
        assert cast(Any, response.is_closed) is True
    @parametrize
-    async def test_method_generate(self, client: AsyncOpenAI) -> None:
-        image = await client.images.generate(
+    async def test_method_generate(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.generate(
            prompt="A cute baby sea otter",
        )
        assert_matches_type(ImagesResponse, image, path=["response"])
    @parametrize
-    async def test_method_generate_with_all_params(self, client: AsyncOpenAI) -> None:
-        image = await client.images.generate(
+    async def test_method_generate_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        image = await async_client.images.generate(
            prompt="A cute baby sea otter",
            model="dall-e-3",
            n=1,
@@ -276,8 +270,8 @@ async def test_method_generate_with_all_params(self, client: AsyncOpenAI) -> Non
        assert_matches_type(ImagesResponse, image, path=["response"])
    @parametrize
-    async def test_raw_response_generate(self, client: AsyncOpenAI) -> None:
-        response = await client.images.with_raw_response.generate(
+    async def test_raw_response_generate(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.images.with_raw_response.generate(
            prompt="A cute baby sea otter",
        )
@@ -287,8 +281,8 @@ async def test_raw_response_generate(self, client: AsyncOpenAI) -> None:
        assert_matches_type(ImagesResponse, image, path=["response"])
    @parametrize
-    async def test_streaming_response_generate(self, client: AsyncOpenAI) -> None:
-        async with client.images.with_streaming_response.generate(
+    async def test_streaming_response_generate(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.images.with_streaming_response.generate(
            prompt="A cute baby sea otter",
        ) as response:
            assert not response.is_closed
diff --git a/tests/api_resources/test_models.py b/tests/api_resources/test_models.py
index b41e50eb71..d031d54f6a 100644
--- a/tests/api_resources/test_models.py
+++ b/tests/api_resources/test_models.py
@@ -10,17 +10,13 @@
from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types import Model, ModelDeleted
-from openai._client import OpenAI, AsyncOpenAI
from openai.pagination import SyncPage, AsyncPage
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
class TestModels:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
    @parametrize
    def test_method_retrieve(self, client: OpenAI) -> None:
@@ -125,20 +121,18 @@ def test_path_params_delete(self, client: OpenAI) -> None:
class TestAsyncModels:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
    @parametrize
-    async def test_method_retrieve(self, client: AsyncOpenAI) -> None:
-        model = await client.models.retrieve(
+    async def test_method_retrieve(self, async_client: AsyncOpenAI) -> None:
+        model = await async_client.models.retrieve(
            "gpt-3.5-turbo",
        )
        assert_matches_type(Model, model, path=["response"])
    @parametrize
-    async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
-        response = await client.models.with_raw_response.retrieve(
+    async def test_raw_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.models.with_raw_response.retrieve(
            "gpt-3.5-turbo",
        )
@@ -148,8 +142,8 @@ async def test_raw_response_retrieve(self, client: AsyncOpenAI) -> None:
        assert_matches_type(Model, model, path=["response"])
    @parametrize
-    async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
-        async with client.models.with_streaming_response.retrieve(
+    async def test_streaming_response_retrieve(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.models.with_streaming_response.retrieve(
            "gpt-3.5-turbo",
        ) as response:
            assert not response.is_closed
@@ -161,20 +155,20 @@ async def test_streaming_response_retrieve(self, client: AsyncOpenAI) -> None:
        assert cast(Any, response.is_closed) is True
    @parametrize
-    async def test_path_params_retrieve(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_retrieve(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"):
-            await client.models.with_raw_response.retrieve(
+            await async_client.models.with_raw_response.retrieve(
                "",
            )
    @parametrize
-    async def test_method_list(self, client: AsyncOpenAI) -> None:
-        model = await client.models.list()
+    async def test_method_list(self, async_client: AsyncOpenAI) -> None:
+        model = await async_client.models.list()
        assert_matches_type(AsyncPage[Model], model, path=["response"])
    @parametrize
-    async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
-        response = await client.models.with_raw_response.list()
+    async def test_raw_response_list(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.models.with_raw_response.list()
        assert response.is_closed is True
        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -182,8 +176,8 @@ async def test_raw_response_list(self, client: AsyncOpenAI) -> None:
        assert_matches_type(AsyncPage[Model], model, path=["response"])
    @parametrize
-    async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
-        async with client.models.with_streaming_response.list() as response:
+    async def test_streaming_response_list(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.models.with_streaming_response.list() as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -193,15 +187,15 @@ async def test_streaming_response_list(self, client: AsyncOpenAI) -> None:
        assert cast(Any, response.is_closed) is True
    @parametrize
-    async def test_method_delete(self, client: AsyncOpenAI) -> None:
-        model = await client.models.delete(
+    async def test_method_delete(self, async_client: AsyncOpenAI) -> None:
+        model = await async_client.models.delete(
            "ft:gpt-3.5-turbo:acemeco:suffix:abc123",
        )
        assert_matches_type(ModelDeleted, model, path=["response"])
    @parametrize
-    async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
-        response = await client.models.with_raw_response.delete(
+    async def test_raw_response_delete(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.models.with_raw_response.delete(
            "ft:gpt-3.5-turbo:acemeco:suffix:abc123",
        )
@@ -211,8 +205,8 @@ async def test_raw_response_delete(self, client: AsyncOpenAI) -> None:
        assert_matches_type(ModelDeleted, model, path=["response"])
    @parametrize
-    async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
-        async with client.models.with_streaming_response.delete(
+    async def test_streaming_response_delete(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.models.with_streaming_response.delete(
            "ft:gpt-3.5-turbo:acemeco:suffix:abc123",
        ) as response:
            assert not response.is_closed
@@ -224,8 +218,8 @@ async def test_streaming_response_delete(self, client: AsyncOpenAI) -> None:
        assert cast(Any, response.is_closed) is True
    @parametrize
-    async def test_path_params_delete(self, client: AsyncOpenAI) -> None:
+    async def test_path_params_delete(self, async_client: AsyncOpenAI) -> None:
        with pytest.raises(ValueError, match=r"Expected a non-empty value for `model` but received ''"):
-            await client.models.with_raw_response.delete(
+            await async_client.models.with_raw_response.delete(
                "",
            )
diff --git a/tests/api_resources/test_moderations.py b/tests/api_resources/test_moderations.py
index 88d35f003d..285e738c0e 100644
--- a/tests/api_resources/test_moderations.py
+++ b/tests/api_resources/test_moderations.py
@@ -10,16 +10,12 @@
from openai import OpenAI, AsyncOpenAI
from tests.utils import assert_matches_type
from openai.types import ModerationCreateResponse
-from openai._client import OpenAI, AsyncOpenAI
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-api_key = "My API Key"
class TestModerations:
-    strict_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
    @parametrize
    def test_method_create(self, client: OpenAI) -> None:
@@ -62,28 +58,26 @@ def test_streaming_response_create(self, client: OpenAI) -> None:
class TestAsyncModerations:
-    strict_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=True)
-    loose_client = AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=False)
-    parametrize = pytest.mark.parametrize("client", [strict_client, loose_client], ids=["strict", "loose"])
+    parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
    @parametrize
-    async def test_method_create(self, client: AsyncOpenAI) -> None:
-        moderation = await client.moderations.create(
+    async def test_method_create(self, async_client: AsyncOpenAI) -> None:
+        moderation = await async_client.moderations.create(
            input="I want to kill them.",
        )
        assert_matches_type(ModerationCreateResponse, moderation, path=["response"])
    @parametrize
-    async def test_method_create_with_all_params(self, client: AsyncOpenAI) -> None:
-        moderation = await client.moderations.create(
+    async def test_method_create_with_all_params(self, async_client: AsyncOpenAI) -> None:
+        moderation = await async_client.moderations.create(
            input="I want to kill them.",
            model="text-moderation-stable",
        )
        assert_matches_type(ModerationCreateResponse, moderation, path=["response"])
    @parametrize
-    async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
-        response = await client.moderations.with_raw_response.create(
+    async def test_raw_response_create(self, async_client: AsyncOpenAI) -> None:
+        response = await async_client.moderations.with_raw_response.create(
            input="I want to kill them.",
        )
@@ -93,8 +87,8 @@ async def test_raw_response_create(self, client: AsyncOpenAI) -> None:
        assert_matches_type(ModerationCreateResponse, moderation, path=["response"])
    @parametrize
-    async def test_streaming_response_create(self, client: AsyncOpenAI) -> None:
-        async with client.moderations.with_streaming_response.create(
+    async def test_streaming_response_create(self, async_client: AsyncOpenAI) -> None:
+        async with async_client.moderations.with_streaming_response.create(
            input="I want to kill them.",
        ) as response:
            assert not response.is_closed
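The conftest.py change that follows is what makes the `[False, True], indirect=True` parametrization above work: with `indirect=True`, pytest delivers each parametrize value to the named fixture as `request.param` instead of passing it straight to the test, so the fixture itself decides how to turn the boolean into a strict or loose client. A minimal, self-contained sketch of that mechanism is below; the fixture and test names are illustrative and not part of this PR.

```python
# Sketch of pytest's indirect parametrization: the parametrize value goes to the
# fixture via request.param, and the test receives whatever the fixture returns.
import pytest


@pytest.fixture
def flag(request: pytest.FixtureRequest) -> str:
    # request.param is the value from the parametrize list below.
    return "strict" if request.param else "loose"


@pytest.mark.parametrize("flag", [False, True], indirect=True, ids=["loose", "strict"])
def test_flag(flag: str) -> None:
    # Runs twice, once per parametrize value, with ids "loose" and "strict".
    assert flag in ("loose", "strict")
```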
diff --git a/tests/conftest.py b/tests/conftest.py
index c3a1efe9df..15af57e770 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,9 +1,17 @@
+from __future__ import annotations
+
+import os
import asyncio
import logging
-from typing import Iterator
+from typing import TYPE_CHECKING, Iterator, AsyncIterator
import pytest
+from openai import OpenAI, AsyncOpenAI
+
+if TYPE_CHECKING:
+    from _pytest.fixtures import FixtureRequest
+
pytest.register_assert_rewrite("tests.utils")
logging.getLogger("openai").setLevel(logging.DEBUG)
@@ -14,3 +22,28 @@ def event_loop() -> Iterator[asyncio.AbstractEventLoop]:
    loop = asyncio.new_event_loop()
    yield loop
    loop.close()
+
+
+base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
+
+api_key = "My API Key"
+
+
+@pytest.fixture(scope="session")
+def client(request: FixtureRequest) -> Iterator[OpenAI]:
+    strict = getattr(request, "param", True)
+    if not isinstance(strict, bool):
+        raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
+
+    with OpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+        yield client
+
+
+@pytest.fixture(scope="session")
+async def async_client(request: FixtureRequest) -> AsyncIterator[AsyncOpenAI]:
+    strict = getattr(request, "param", True)
+    if not isinstance(strict, bool):
+        raise TypeError(f"Unexpected fixture parameter type {type(strict)}, expected {bool}")
+
+    async with AsyncOpenAI(base_url=base_url, api_key=api_key, _strict_response_validation=strict) as client:
+        yield client
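Taken together, the new fixtures replace the per-module `strict_client`/`loose_client` pairs with shared, session-scoped clients that every test file reuses. A hedged sketch of how a test module is expected to consume them once this lands; the resource calls and assertions here are illustrative only, and the repo's pytest-asyncio setup is assumed for the async test.

```python
# Illustrative consumer of the new conftest fixtures (not part of this PR).
import pytest

from openai import OpenAI, AsyncOpenAI

# Mirrors the parametrize line added to each test class: False -> loose client,
# True -> strict client, routed into the fixture via request.param.
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])


@parametrize
def test_list_models(client: OpenAI) -> None:
    # Runs twice, once against the loose client and once against the strict one.
    assert client.models.list() is not None


def test_defaults_to_strict(client: OpenAI) -> None:
    # An unparametrized test still works: getattr(request, "param", True) in the
    # fixture falls back to a strict client when no parameter is supplied.
    assert isinstance(client, OpenAI)


@pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
async def test_list_models_async(async_client: AsyncOpenAI) -> None:
    assert await async_client.models.list() is not None
```

Because the fixtures are session-scoped, at most four client instances (sync/async crossed with strict/loose) are created for an entire run instead of two per test module, which is presumably the motivation for the change.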