diff --git a/.stats.yml b/.stats.yml
index 239e17b..c809f63 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,2 +1,2 @@
configured_endpoints: 21
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-fd67aea6883f1ee9e46f31a42d3940f0acb1749e787055bd9b9f278b20fa53ec.yml
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic-75f0573c3d6d79650bcbd8b1b4fcf93ce146d567afeb1061cd4afccf8d1d6799.yml
diff --git a/api.md b/api.md
index c520f8b..86fbe0d 100644
--- a/api.md
+++ b/api.md
@@ -94,7 +94,7 @@ Methods:
- client.messages.batches.list(\*\*params) -> SyncPage[MessageBatch]
- client.messages.batches.delete(message_batch_id) -> DeletedMessageBatch
- client.messages.batches.cancel(message_batch_id) -> MessageBatch
-- client.messages.batches.results(message_batch_id) -> BinaryAPIResponse
+- client.messages.batches.results(message_batch_id) -> JSONLDecoder[MessageBatchIndividualResponse]
# Models
@@ -217,4 +217,4 @@ Methods:
- client.beta.messages.batches.list(\*\*params) -> SyncPage[BetaMessageBatch]
- client.beta.messages.batches.delete(message_batch_id) -> BetaDeletedMessageBatch
- client.beta.messages.batches.cancel(message_batch_id) -> BetaMessageBatch
-- client.beta.messages.batches.results(message_batch_id) -> BinaryAPIResponse
+- client.beta.messages.batches.results(message_batch_id) -> JSONLDecoder[BetaMessageBatchIndividualResponse]
diff --git a/src/anthropic/_decoders/jsonl.py b/src/anthropic/_decoders/jsonl.py
new file mode 100644
index 0000000..e9d29a1
--- /dev/null
+++ b/src/anthropic/_decoders/jsonl.py
@@ -0,0 +1,101 @@
+from __future__ import annotations
+
+import json
+from typing_extensions import Generic, TypeVar, Iterator, AsyncIterator
+
+import httpx
+
+from .._models import construct_type_unchecked
+
+_T = TypeVar("_T")
+
+
+class JSONLDecoder(Generic[_T]):
+    """A decoder for [JSON Lines](https://jsonlines.org) format.
+
+    This class provides an iterator over a byte-iterator that parses each JSON Line
+    into a given type; blank lines are skipped rather than parsed.
+    """
+
+    http_response: httpx.Response | None
+    """The HTTP response this decoder was constructed from"""
+
+    def __init__(
+        self, *, raw_iterator: Iterator[bytes], line_type: type[_T], http_response: httpx.Response | None
+    ) -> None:
+        super().__init__()
+        self.http_response = http_response
+        self._raw_iterator = raw_iterator
+        self._line_type = line_type
+        self._iterator = self.__decode__()
+
+    def __decode__(self) -> Iterator[_T]:
+        buf = b""
+        for chunk in self._raw_iterator:
+            for line in chunk.splitlines(keepends=True):
+                buf += line
+                if buf.endswith((b"\r", b"\n", b"\r\n")):
+                    if buf.strip():  # skip blank lines, e.g. a "\n" left over from a "\r\n" split across chunks
+                        yield construct_type_unchecked(
+                            value=json.loads(buf), type_=self._line_type
+                        )
+                    buf = b""
+
+        # flush any trailing, non-blank line that had no newline terminator
+        if buf.strip():
+            yield construct_type_unchecked(
+                value=json.loads(buf),
+                type_=self._line_type,
+            )
+
+    def __next__(self) -> _T:
+        return self._iterator.__next__()
+
+    def __iter__(self) -> Iterator[_T]:
+        for item in self._iterator:
+            yield item
+
+
+class AsyncJSONLDecoder(Generic[_T]):
+    """A decoder for [JSON Lines](https://jsonlines.org) format.
+
+    This class provides an async iterator over a byte-iterator that parses each JSON Line
+    into a given type; blank lines are skipped rather than parsed.
+    """
+
+    http_response: httpx.Response | None
+
+    def __init__(
+        self, *, raw_iterator: AsyncIterator[bytes], line_type: type[_T], http_response: httpx.Response | None
+    ) -> None:
+        super().__init__()
+        self.http_response = http_response
+        self._raw_iterator = raw_iterator
+        self._line_type = line_type
+        self._iterator = self.__decode__()
+
+    async def __decode__(self) -> AsyncIterator[_T]:
+        buf = b""
+        async for chunk in self._raw_iterator:
+            for line in chunk.splitlines(keepends=True):
+                buf += line
+                if buf.endswith((b"\r", b"\n", b"\r\n")):
+                    if buf.strip():  # skip blank lines, e.g. a "\n" left over from a "\r\n" split across chunks
+                        yield construct_type_unchecked(
+                            value=json.loads(buf), type_=self._line_type
+                        )
+                    buf = b""
+
+        # flush any trailing, non-blank line that had no newline terminator
+        if buf.strip():
+            yield construct_type_unchecked(
+                value=json.loads(buf),
+                type_=self._line_type,
+            )
+
+    async def __anext__(self) -> _T:
+        return await self._iterator.__anext__()
+
+    async def __aiter__(self) -> AsyncIterator[_T]:
+        async for item in self._iterator:
+            yield item
diff --git a/src/anthropic/_legacy_response.py b/src/anthropic/_legacy_response.py
index ccf6e56..c79a39a 100644
--- a/src/anthropic/_legacy_response.py
+++ b/src/anthropic/_legacy_response.py
@@ -29,6 +29,7 @@
from ._constants import RAW_RESPONSE_HEADER
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
from ._exceptions import APIResponseValidationError
+from ._decoders.jsonl import JSONLDecoder, AsyncJSONLDecoder
if TYPE_CHECKING:
from ._models import FinalRequestOptions
@@ -204,6 +205,27 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
origin = get_origin(cast_to) or cast_to
+ if inspect.isclass(origin):
+ if issubclass(cast(Any, origin), JSONLDecoder):
+ return cast(
+ R,
+ cast("type[JSONLDecoder[Any]]", cast_to)(
+ raw_iterator=self.http_response.iter_bytes(chunk_size=4096),
+ line_type=extract_type_arg(cast_to, 0),
+ http_response=self.http_response,
+ ),
+ )
+
+ if issubclass(cast(Any, origin), AsyncJSONLDecoder):
+ return cast(
+ R,
+ cast("type[AsyncJSONLDecoder[Any]]", cast_to)(
+ raw_iterator=self.http_response.aiter_bytes(chunk_size=4096),
+ line_type=extract_type_arg(cast_to, 0),
+ http_response=self.http_response,
+ ),
+ )
+
if self._stream:
if to:
if not is_stream_class_type(to):
diff --git a/src/anthropic/_response.py b/src/anthropic/_response.py
index 86793b8..c561e0c 100644
--- a/src/anthropic/_response.py
+++ b/src/anthropic/_response.py
@@ -30,6 +30,7 @@
from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER
from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type
from ._exceptions import AnthropicError, APIResponseValidationError
+from ._decoders.jsonl import JSONLDecoder, AsyncJSONLDecoder
if TYPE_CHECKING:
from ._models import FinalRequestOptions
@@ -138,6 +139,27 @@ def _parse(self, *, to: type[_T] | None = None) -> R | _T:
origin = get_origin(cast_to) or cast_to
+ if inspect.isclass(origin):
+ if issubclass(cast(Any, origin), JSONLDecoder):
+ return cast(
+ R,
+ cast("type[JSONLDecoder[Any]]", cast_to)(
+ raw_iterator=self.http_response.iter_bytes(chunk_size=4096),
+ line_type=extract_type_arg(cast_to, 0),
+ http_response=self.http_response,
+ ),
+ )
+
+ if issubclass(cast(Any, origin), AsyncJSONLDecoder):
+ return cast(
+ R,
+ cast("type[AsyncJSONLDecoder[Any]]", cast_to)(
+ raw_iterator=self.http_response.aiter_bytes(chunk_size=4096),
+ line_type=extract_type_arg(cast_to, 0),
+ http_response=self.http_response,
+ ),
+ )
+
if self._is_sse_stream:
if to:
if not is_stream_class_type(to):
diff --git a/src/anthropic/resources/beta/messages/batches.py b/src/anthropic/resources/beta/messages/batches.py
index fb60271..7b5c9e2 100644
--- a/src/anthropic/resources/beta/messages/batches.py
+++ b/src/anthropic/resources/beta/messages/batches.py
@@ -17,24 +17,15 @@
)
from ...._compat import cached_property
from ...._resource import SyncAPIResource, AsyncAPIResource
-from ...._response import (
- BinaryAPIResponse,
- AsyncBinaryAPIResponse,
- StreamedBinaryAPIResponse,
- AsyncStreamedBinaryAPIResponse,
- to_streamed_response_wrapper,
- to_custom_raw_response_wrapper,
- async_to_streamed_response_wrapper,
- to_custom_streamed_response_wrapper,
- async_to_custom_raw_response_wrapper,
- async_to_custom_streamed_response_wrapper,
-)
+from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from ....pagination import SyncPage, AsyncPage
from ...._base_client import AsyncPaginator, make_request_options
+from ...._decoders.jsonl import JSONLDecoder, AsyncJSONLDecoder
from ....types.beta.messages import batch_list_params, batch_create_params
from ....types.anthropic_beta_param import AnthropicBetaParam
from ....types.beta.messages.beta_message_batch import BetaMessageBatch
from ....types.beta.messages.beta_deleted_message_batch import BetaDeletedMessageBatch
+from ....types.beta.messages.beta_message_batch_individual_response import BetaMessageBatchIndividualResponse
__all__ = ["Batches", "AsyncBatches"]
@@ -356,7 +347,7 @@ def results(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> BinaryAPIResponse:
+ ) -> JSONLDecoder[BetaMessageBatchIndividualResponse]:
"""
Streams the results of a Message Batch as a `.jsonl` file.
@@ -379,7 +370,7 @@ def results(
"""
if not message_batch_id:
raise ValueError(f"Expected a non-empty value for `message_batch_id` but received {message_batch_id!r}")
- extra_headers = {"Accept": "application/binary", **(extra_headers or {})}
+ extra_headers = {"Accept": "application/x-jsonl", **(extra_headers or {})}
extra_headers = {
**strip_not_given(
{
@@ -396,7 +387,8 @@ def results(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=BinaryAPIResponse,
+ cast_to=JSONLDecoder[BetaMessageBatchIndividualResponse],
+ stream=True,
)
@@ -717,7 +709,7 @@ async def results(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AsyncBinaryAPIResponse:
+ ) -> AsyncJSONLDecoder[BetaMessageBatchIndividualResponse]:
"""
Streams the results of a Message Batch as a `.jsonl` file.
@@ -740,7 +732,7 @@ async def results(
"""
if not message_batch_id:
raise ValueError(f"Expected a non-empty value for `message_batch_id` but received {message_batch_id!r}")
- extra_headers = {"Accept": "application/binary", **(extra_headers or {})}
+ extra_headers = {"Accept": "application/x-jsonl", **(extra_headers or {})}
extra_headers = {
**strip_not_given(
{
@@ -757,7 +749,8 @@ async def results(
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AsyncBinaryAPIResponse,
+ cast_to=AsyncJSONLDecoder[BetaMessageBatchIndividualResponse],
+ stream=True,
)
@@ -780,9 +773,8 @@ def __init__(self, batches: Batches) -> None:
self.cancel = _legacy_response.to_raw_response_wrapper(
batches.cancel,
)
- self.results = to_custom_raw_response_wrapper(
+ self.results = _legacy_response.to_raw_response_wrapper(
batches.results,
- BinaryAPIResponse,
)
@@ -805,9 +797,8 @@ def __init__(self, batches: AsyncBatches) -> None:
self.cancel = _legacy_response.async_to_raw_response_wrapper(
batches.cancel,
)
- self.results = async_to_custom_raw_response_wrapper(
+ self.results = _legacy_response.async_to_raw_response_wrapper(
batches.results,
- AsyncBinaryAPIResponse,
)
@@ -830,9 +821,8 @@ def __init__(self, batches: Batches) -> None:
self.cancel = to_streamed_response_wrapper(
batches.cancel,
)
- self.results = to_custom_streamed_response_wrapper(
+ self.results = to_streamed_response_wrapper(
batches.results,
- StreamedBinaryAPIResponse,
)
@@ -855,7 +845,6 @@ def __init__(self, batches: AsyncBatches) -> None:
self.cancel = async_to_streamed_response_wrapper(
batches.cancel,
)
- self.results = async_to_custom_streamed_response_wrapper(
+ self.results = async_to_streamed_response_wrapper(
batches.results,
- AsyncStreamedBinaryAPIResponse,
)
diff --git a/src/anthropic/resources/messages/batches.py b/src/anthropic/resources/messages/batches.py
index 9dbcbfe..90a40d5 100644
--- a/src/anthropic/resources/messages/batches.py
+++ b/src/anthropic/resources/messages/batches.py
@@ -14,23 +14,14 @@
)
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
- BinaryAPIResponse,
- AsyncBinaryAPIResponse,
- StreamedBinaryAPIResponse,
- AsyncStreamedBinaryAPIResponse,
- to_streamed_response_wrapper,
- to_custom_raw_response_wrapper,
- async_to_streamed_response_wrapper,
- to_custom_streamed_response_wrapper,
- async_to_custom_raw_response_wrapper,
- async_to_custom_streamed_response_wrapper,
-)
+from ..._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper
from ...pagination import SyncPage, AsyncPage
from ..._base_client import AsyncPaginator, make_request_options
from ...types.messages import batch_list_params, batch_create_params
+from ..._decoders.jsonl import JSONLDecoder, AsyncJSONLDecoder
from ...types.messages.message_batch import MessageBatch
from ...types.messages.deleted_message_batch import DeletedMessageBatch
+from ...types.messages.message_batch_individual_response import MessageBatchIndividualResponse
__all__ = ["Batches", "AsyncBatches"]
@@ -281,7 +272,7 @@ def results(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> BinaryAPIResponse:
+ ) -> JSONLDecoder[MessageBatchIndividualResponse]:
"""
Streams the results of a Message Batch as a `.jsonl` file.
@@ -302,13 +293,14 @@ def results(
"""
if not message_batch_id:
raise ValueError(f"Expected a non-empty value for `message_batch_id` but received {message_batch_id!r}")
- extra_headers = {"Accept": "application/binary", **(extra_headers or {})}
+ extra_headers = {"Accept": "application/x-jsonl", **(extra_headers or {})}
return self._get(
f"/v1/messages/batches/{message_batch_id}/results",
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=BinaryAPIResponse,
+ cast_to=JSONLDecoder[MessageBatchIndividualResponse],
+ stream=True,
)
@@ -558,7 +550,7 @@ async def results(
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
- ) -> AsyncBinaryAPIResponse:
+ ) -> AsyncJSONLDecoder[MessageBatchIndividualResponse]:
"""
Streams the results of a Message Batch as a `.jsonl` file.
@@ -579,13 +571,14 @@ async def results(
"""
if not message_batch_id:
raise ValueError(f"Expected a non-empty value for `message_batch_id` but received {message_batch_id!r}")
- extra_headers = {"Accept": "application/binary", **(extra_headers or {})}
+ extra_headers = {"Accept": "application/x-jsonl", **(extra_headers or {})}
return await self._get(
f"/v1/messages/batches/{message_batch_id}/results",
options=make_request_options(
extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
),
- cast_to=AsyncBinaryAPIResponse,
+ cast_to=AsyncJSONLDecoder[MessageBatchIndividualResponse],
+ stream=True,
)
@@ -608,9 +601,8 @@ def __init__(self, batches: Batches) -> None:
self.cancel = _legacy_response.to_raw_response_wrapper(
batches.cancel,
)
- self.results = to_custom_raw_response_wrapper(
+ self.results = _legacy_response.to_raw_response_wrapper(
batches.results,
- BinaryAPIResponse,
)
@@ -633,9 +625,8 @@ def __init__(self, batches: AsyncBatches) -> None:
self.cancel = _legacy_response.async_to_raw_response_wrapper(
batches.cancel,
)
- self.results = async_to_custom_raw_response_wrapper(
+ self.results = _legacy_response.async_to_raw_response_wrapper(
batches.results,
- AsyncBinaryAPIResponse,
)
@@ -658,9 +649,8 @@ def __init__(self, batches: Batches) -> None:
self.cancel = to_streamed_response_wrapper(
batches.cancel,
)
- self.results = to_custom_streamed_response_wrapper(
+ self.results = to_streamed_response_wrapper(
batches.results,
- StreamedBinaryAPIResponse,
)
@@ -683,7 +673,6 @@ def __init__(self, batches: AsyncBatches) -> None:
self.cancel = async_to_streamed_response_wrapper(
batches.cancel,
)
- self.results = async_to_custom_streamed_response_wrapper(
+ self.results = async_to_streamed_response_wrapper(
batches.results,
- AsyncStreamedBinaryAPIResponse,
)
diff --git a/tests/api_resources/beta/messages/test_batches.py b/tests/api_resources/beta/messages/test_batches.py
index a286f9e..67cd66c 100644
--- a/tests/api_resources/beta/messages/test_batches.py
+++ b/tests/api_resources/beta/messages/test_batches.py
@@ -5,26 +5,18 @@
import os
from typing import Any, cast
-import httpx
import pytest
-from respx import MockRouter
from anthropic import Anthropic, AsyncAnthropic
from tests.utils import assert_matches_type
-from anthropic._response import (
- BinaryAPIResponse,
- AsyncBinaryAPIResponse,
- StreamedBinaryAPIResponse,
- AsyncStreamedBinaryAPIResponse,
-)
from anthropic.pagination import SyncPage, AsyncPage
+from anthropic._decoders.jsonl import JSONLDecoder, AsyncJSONLDecoder
from anthropic.types.beta.messages import (
BetaMessageBatch,
BetaDeletedMessageBatch,
+ BetaMessageBatchIndividualResponse,
)
-# pyright: reportDeprecated=false
-
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -337,71 +329,51 @@ def test_path_params_cancel(self, client: Anthropic) -> None:
message_batch_id="",
)
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_method_results(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ def test_method_results(self, client: Anthropic) -> None:
batch = client.beta.messages.batches.results(
message_batch_id="message_batch_id",
)
- assert batch.is_closed
- assert batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, BinaryAPIResponse)
+ assert_matches_type(JSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_method_results_with_all_params(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ def test_method_results_with_all_params(self, client: Anthropic) -> None:
batch = client.beta.messages.batches.results(
message_batch_id="message_batch_id",
betas=["string"],
)
- assert batch.is_closed
- assert batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, BinaryAPIResponse)
+ assert_matches_type(JSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_raw_response_results(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
-
- batch = client.beta.messages.batches.with_raw_response.results(
+ def test_raw_response_results(self, client: Anthropic) -> None:
+ response = client.beta.messages.batches.with_raw_response.results(
message_batch_id="message_batch_id",
)
- assert batch.is_closed is True
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
- assert batch.json() == {"foo": "bar"}
- assert isinstance(batch, BinaryAPIResponse)
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ batch = response.parse()
+ assert_matches_type(JSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_streaming_response_results(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ def test_streaming_response_results(self, client: Anthropic) -> None:
with client.beta.messages.batches.with_streaming_response.results(
message_batch_id="message_batch_id",
- ) as batch:
- assert not batch.is_closed
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- assert batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, StreamedBinaryAPIResponse)
+ batch = response.parse()
+ assert_matches_type(JSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
- assert cast(Any, batch.is_closed) is True
+ assert cast(Any, response.is_closed) is True
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
def test_path_params_results(self, client: Anthropic) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_batch_id` but received ''"):
client.beta.messages.batches.with_raw_response.results(
@@ -718,71 +690,51 @@ async def test_path_params_cancel(self, async_client: AsyncAnthropic) -> None:
message_batch_id="",
)
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_method_results(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ async def test_method_results(self, async_client: AsyncAnthropic) -> None:
batch = await async_client.beta.messages.batches.results(
message_batch_id="message_batch_id",
)
- assert batch.is_closed
- assert await batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, AsyncBinaryAPIResponse)
+ assert_matches_type(AsyncJSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_method_results_with_all_params(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ async def test_method_results_with_all_params(self, async_client: AsyncAnthropic) -> None:
batch = await async_client.beta.messages.batches.results(
message_batch_id="message_batch_id",
betas=["string"],
)
- assert batch.is_closed
- assert await batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, AsyncBinaryAPIResponse)
+ assert_matches_type(AsyncJSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_raw_response_results(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
-
- batch = await async_client.beta.messages.batches.with_raw_response.results(
+ async def test_raw_response_results(self, async_client: AsyncAnthropic) -> None:
+ response = await async_client.beta.messages.batches.with_raw_response.results(
message_batch_id="message_batch_id",
)
- assert batch.is_closed is True
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
- assert await batch.json() == {"foo": "bar"}
- assert isinstance(batch, AsyncBinaryAPIResponse)
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ batch = response.parse()
+ assert_matches_type(AsyncJSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_streaming_response_results(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results?beta=true").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ async def test_streaming_response_results(self, async_client: AsyncAnthropic) -> None:
async with async_client.beta.messages.batches.with_streaming_response.results(
message_batch_id="message_batch_id",
- ) as batch:
- assert not batch.is_closed
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- assert await batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, AsyncStreamedBinaryAPIResponse)
+ batch = await response.parse()
+ assert_matches_type(AsyncJSONLDecoder[BetaMessageBatchIndividualResponse], batch, path=["response"])
- assert cast(Any, batch.is_closed) is True
+ assert cast(Any, response.is_closed) is True
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
async def test_path_params_results(self, async_client: AsyncAnthropic) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_batch_id` but received ''"):
await async_client.beta.messages.batches.with_raw_response.results(
diff --git a/tests/api_resources/messages/test_batches.py b/tests/api_resources/messages/test_batches.py
index 93f45a6..079be16 100644
--- a/tests/api_resources/messages/test_batches.py
+++ b/tests/api_resources/messages/test_batches.py
@@ -5,22 +5,17 @@
import os
from typing import Any, cast
-import httpx
import pytest
-from respx import MockRouter
from anthropic import Anthropic, AsyncAnthropic
from tests.utils import assert_matches_type
-from anthropic._response import (
- BinaryAPIResponse,
- AsyncBinaryAPIResponse,
- StreamedBinaryAPIResponse,
- AsyncStreamedBinaryAPIResponse,
-)
from anthropic.pagination import SyncPage, AsyncPage
-from anthropic.types.messages import MessageBatch, DeletedMessageBatch
-
-# pyright: reportDeprecated=false
+from anthropic.types.messages import (
+ MessageBatch,
+ DeletedMessageBatch,
+ MessageBatchIndividualResponse,
+)
+from anthropic._decoders.jsonl import JSONLDecoder, AsyncJSONLDecoder
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -249,56 +244,42 @@ def test_path_params_cancel(self, client: Anthropic) -> None:
"",
)
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_method_results(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ def test_method_results(self, client: Anthropic) -> None:
batch = client.messages.batches.results(
"message_batch_id",
)
- assert batch.is_closed
- assert batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, BinaryAPIResponse)
+ assert_matches_type(JSONLDecoder[MessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_raw_response_results(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
-
- batch = client.messages.batches.with_raw_response.results(
+ def test_raw_response_results(self, client: Anthropic) -> None:
+ response = client.messages.batches.with_raw_response.results(
"message_batch_id",
)
- assert batch.is_closed is True
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
- assert batch.json() == {"foo": "bar"}
- assert isinstance(batch, BinaryAPIResponse)
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ batch = response.parse()
+ assert_matches_type(JSONLDecoder[MessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- def test_streaming_response_results(self, client: Anthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ def test_streaming_response_results(self, client: Anthropic) -> None:
with client.messages.batches.with_streaming_response.results(
"message_batch_id",
- ) as batch:
- assert not batch.is_closed
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- assert batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, StreamedBinaryAPIResponse)
+ batch = response.parse()
+ assert_matches_type(JSONLDecoder[MessageBatchIndividualResponse], batch, path=["response"])
- assert cast(Any, batch.is_closed) is True
+ assert cast(Any, response.is_closed) is True
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
def test_path_params_results(self, client: Anthropic) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_batch_id` but received ''"):
client.messages.batches.with_raw_response.results(
@@ -530,56 +511,42 @@ async def test_path_params_cancel(self, async_client: AsyncAnthropic) -> None:
"",
)
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_method_results(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ async def test_method_results(self, async_client: AsyncAnthropic) -> None:
batch = await async_client.messages.batches.results(
"message_batch_id",
)
- assert batch.is_closed
- assert await batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, AsyncBinaryAPIResponse)
+ assert_matches_type(AsyncJSONLDecoder[MessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_raw_response_results(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
-
- batch = await async_client.messages.batches.with_raw_response.results(
+ async def test_raw_response_results(self, async_client: AsyncAnthropic) -> None:
+ response = await async_client.messages.batches.with_raw_response.results(
"message_batch_id",
)
- assert batch.is_closed is True
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
- assert await batch.json() == {"foo": "bar"}
- assert isinstance(batch, AsyncBinaryAPIResponse)
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ batch = response.parse()
+ assert_matches_type(AsyncJSONLDecoder[MessageBatchIndividualResponse], batch, path=["response"])
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
- async def test_streaming_response_results(self, async_client: AsyncAnthropic, respx_mock: MockRouter) -> None:
- respx_mock.get("/v1/messages/batches/message_batch_id/results").mock(
- return_value=httpx.Response(200, json={"foo": "bar"})
- )
+ async def test_streaming_response_results(self, async_client: AsyncAnthropic) -> None:
async with async_client.messages.batches.with_streaming_response.results(
"message_batch_id",
- ) as batch:
- assert not batch.is_closed
- assert batch.http_request.headers.get("X-Stainless-Lang") == "python"
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
- assert await batch.json() == {"foo": "bar"}
- assert cast(Any, batch.is_closed) is True
- assert isinstance(batch, AsyncStreamedBinaryAPIResponse)
+ batch = await response.parse()
+ assert_matches_type(AsyncJSONLDecoder[MessageBatchIndividualResponse], batch, path=["response"])
- assert cast(Any, batch.is_closed) is True
+ assert cast(Any, response.is_closed) is True
+ @pytest.mark.skip(reason="Prism doesn't support JSONL responses yet")
@parametrize
- @pytest.mark.respx(base_url=base_url)
async def test_path_params_results(self, async_client: AsyncAnthropic) -> None:
with pytest.raises(ValueError, match=r"Expected a non-empty value for `message_batch_id` but received ''"):
await async_client.messages.batches.with_raw_response.results(