diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index c174a897..71a38f28 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "1.73.0"
+  ".": "1.74.0"
 }
\ No newline at end of file
diff --git a/.stats.yml b/.stats.yml
index f7f837b4..2999b0d9 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,4 +1,4 @@
-configured_endpoints: 29
-openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/pi-labs%2Fwithpi-faad46442edb6996db5c98a0f07f2164216854d0297dd2f4012ab070a0177eaa.yml
-openapi_spec_hash: 4a0019defdc5d1d5e52b625e599f6d36
+configured_endpoints: 28
+openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/pi-labs%2Fwithpi-7bbc18b8a1475905c0c210baabf634f248bd714721db886c257b1717e42746e3.yml
+openapi_spec_hash: 0dcc4e7471a1b19ee6ebe1342a171835
 config_hash: fd3e1149c629c2972c54a66a88fd4fab
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b7ff5187..90fadc01 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
 # Changelog
 
+## 1.74.0 (2025-09-18)
+
+Full Changelog: [v1.73.0...v1.74.0](https://github.com/withpi/sdk-python/compare/v1.73.0...v1.74.0)
+
+### Features
+
+* **api:** api update ([985599e](https://github.com/withpi/sdk-python/commit/985599ee7b0ae2a7a02819332f2d1f42194e5821))
+
+
+### Chores
+
+* **internal:** update pydantic dependency ([854b393](https://github.com/withpi/sdk-python/commit/854b3930cac89de3ba0c361e5d53fcd44f076758))
+
 ## 1.73.0 (2025-09-10)
 
 Full Changelog: [v1.72.0...v1.73.0](https://github.com/withpi/sdk-python/compare/v1.72.0...v1.73.0)
diff --git a/api.md b/api.md
index 89d2ea93..9b0f50de 100644
--- a/api.md
+++ b/api.md
@@ -5,7 +5,6 @@ from withpi.types import (
     DataGenerationStatus,
     Example,
     ExplorationMode,
-    QueryClassifierResult,
     Question,
     ScoringSpecCalibrationStatus,
     ScoringSystemMetrics,
@@ -128,18 +127,6 @@ Methods:
 - client.search.embed(\*\*params) -> SearchEmbedResponse
 - client.search.rank(\*\*params) -> SearchRankResponse
 
-## QueryClassifier
-
-Types:
-
-```python
-from withpi.types.search import QueryClassifierClassifyResponse
-```
-
-Methods:
-
-- client.search.query_classifier.classify(\*\*params) -> QueryClassifierClassifyResponse
-
 ## Groundedness
 
 Types:
diff --git a/pyproject.toml b/pyproject.toml
index e419fda1..faa47ee2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "withpi"
-version = "1.73.0"
+version = "1.74.0"
 description = "The official Python library for the Pi Client API"
 dynamic = ["readme"]
 license = "Apache-2.0"
diff --git a/requirements-dev.lock b/requirements-dev.lock
index 60e28563..81369f7a 100644
--- a/requirements-dev.lock
+++ b/requirements-dev.lock
@@ -88,9 +88,9 @@ pluggy==1.5.0
 propcache==0.3.1
     # via aiohttp
     # via yarl
-pydantic==2.10.3
+pydantic==2.11.9
     # via withpi
-pydantic-core==2.27.1
+pydantic-core==2.33.2
     # via pydantic
 pygments==2.18.0
     # via rich
@@ -125,7 +125,10 @@ typing-extensions==4.12.2
     # via pydantic
     # via pydantic-core
     # via pyright
+    # via typing-inspection
     # via withpi
+typing-inspection==0.4.1
+    # via pydantic
 virtualenv==20.24.5
     # via nox
 yarl==1.20.0
diff --git a/requirements.lock b/requirements.lock
index 461cc06e..353c8852 100644
--- a/requirements.lock
+++ b/requirements.lock
@@ -55,9 +55,9 @@ multidict==6.4.4
 propcache==0.3.1
     # via aiohttp
     # via yarl
-pydantic==2.10.3
+pydantic==2.11.9
     # via withpi
-pydantic-core==2.27.1
+pydantic-core==2.33.2
     # via pydantic
 sniffio==1.3.0
     # via anyio
@@ -67,6 +67,9 @@ typing-extensions==4.12.2
     # via multidict
     # via pydantic
     # via pydantic-core
+    # via typing-inspection
     # via withpi
+typing-inspection==0.4.1
+    # via pydantic
 yarl==1.20.0
     # via aiohttp
diff --git a/src/withpi/_models.py b/src/withpi/_models.py
index 3a6017ef..6a3cd1d2 100644
--- a/src/withpi/_models.py
+++ b/src/withpi/_models.py
@@ -256,7 +256,7 @@ def model_dump(
         mode: Literal["json", "python"] | str = "python",
         include: IncEx | None = None,
         exclude: IncEx | None = None,
-        by_alias: bool = False,
+        by_alias: bool | None = None,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
@@ -264,6 +264,7 @@ def model_dump(
         warnings: bool | Literal["none", "warn", "error"] = True,
         context: dict[str, Any] | None = None,
         serialize_as_any: bool = False,
+        fallback: Callable[[Any], Any] | None = None,
     ) -> dict[str, Any]:
         """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump
 
@@ -295,10 +296,12 @@ def model_dump(
             raise ValueError("context is only supported in Pydantic v2")
         if serialize_as_any != False:
             raise ValueError("serialize_as_any is only supported in Pydantic v2")
+        if fallback is not None:
+            raise ValueError("fallback is only supported in Pydantic v2")
         dumped = super().dict(  # pyright: ignore[reportDeprecated]
             include=include,
             exclude=exclude,
-            by_alias=by_alias,
+            by_alias=by_alias if by_alias is not None else False,
             exclude_unset=exclude_unset,
             exclude_defaults=exclude_defaults,
             exclude_none=exclude_none,
@@ -313,13 +316,14 @@ def model_dump_json(
         indent: int | None = None,
         include: IncEx | None = None,
         exclude: IncEx | None = None,
-        by_alias: bool = False,
+        by_alias: bool | None = None,
         exclude_unset: bool = False,
         exclude_defaults: bool = False,
         exclude_none: bool = False,
         round_trip: bool = False,
         warnings: bool | Literal["none", "warn", "error"] = True,
         context: dict[str, Any] | None = None,
+        fallback: Callable[[Any], Any] | None = None,
         serialize_as_any: bool = False,
     ) -> str:
         """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json
@@ -348,11 +352,13 @@ def model_dump_json(
             raise ValueError("context is only supported in Pydantic v2")
         if serialize_as_any != False:
             raise ValueError("serialize_as_any is only supported in Pydantic v2")
+        if fallback is not None:
+            raise ValueError("fallback is only supported in Pydantic v2")
         return super().json(  # type: ignore[reportDeprecated]
             indent=indent,
             include=include,
             exclude=exclude,
-            by_alias=by_alias,
+            by_alias=by_alias if by_alias is not None else False,
             exclude_unset=exclude_unset,
             exclude_defaults=exclude_defaults,
             exclude_none=exclude_none,
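Context for the `_models.py` hunk above: recent Pydantic v2 releases (the lockfiles in this PR pin pydantic==2.11.9) added a `fallback` serializer hook to `model_dump`/`model_dump_json` and relaxed `by_alias` to `bool | None`; the SDK override now forwards both on Pydantic v2 and rejects or normalizes them when running on v1. A minimal sketch of the upstream behavior being passed through — `Payload` and `Wrapper` are illustrative names, not withpi code:

```python
from pydantic import BaseModel


class Payload:
    """A plain class Pydantic does not know how to serialize."""

    def __str__(self) -> str:
        return "payload"


class Wrapper(BaseModel):
    model_config = {"arbitrary_types_allowed": True}

    value: Payload


w = Wrapper(value=Payload())

# Without `fallback`, JSON-mode dumping of the unknown `Payload` type fails;
# with it, the callable converts the value instead.
print(w.model_dump(mode="json", fallback=str))  # {'value': 'payload'}

# by_alias=None means "defer to Pydantic's own default"; the SDK's Pydantic v1
# shim maps None back to False before calling the deprecated .dict()/.json().
print(w.model_dump(by_alias=None))
```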
diff --git a/src/withpi/_version.py b/src/withpi/_version.py
index f0eff73f..a6fa7dad 100644
--- a/src/withpi/_version.py
+++ b/src/withpi/_version.py
@@ -1,4 +1,4 @@
 # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
 
 __title__ = "withpi"
-__version__ = "1.73.0"  # x-release-please-version
+__version__ = "1.74.0"  # x-release-please-version
diff --git a/src/withpi/resources/search/__init__.py b/src/withpi/resources/search/__init__.py
index c4073e70..74d0ae07 100644
--- a/src/withpi/resources/search/__init__.py
+++ b/src/withpi/resources/search/__init__.py
@@ -16,22 +16,8 @@
     GroundednessResourceWithStreamingResponse,
     AsyncGroundednessResourceWithStreamingResponse,
 )
-from .query_classifier import (
-    QueryClassifierResource,
-    AsyncQueryClassifierResource,
-    QueryClassifierResourceWithRawResponse,
-    AsyncQueryClassifierResourceWithRawResponse,
-    QueryClassifierResourceWithStreamingResponse,
-    AsyncQueryClassifierResourceWithStreamingResponse,
-)
 
 __all__ = [
-    "QueryClassifierResource",
-    "AsyncQueryClassifierResource",
-    "QueryClassifierResourceWithRawResponse",
-    "AsyncQueryClassifierResourceWithRawResponse",
-    "QueryClassifierResourceWithStreamingResponse",
-    "AsyncQueryClassifierResourceWithStreamingResponse",
     "GroundednessResource",
     "AsyncGroundednessResource",
    "GroundednessResourceWithRawResponse",
diff --git a/src/withpi/resources/search/query_classifier.py b/src/withpi/resources/search/query_classifier.py
deleted file mode 100644
index dc282d02..00000000
--- a/src/withpi/resources/search/query_classifier.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Iterable, Optional
-from typing_extensions import Literal
-
-import httpx
-
-from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, SequenceNotStr
-from ..._utils import maybe_transform, async_maybe_transform
-from ..._compat import cached_property
-from ..._resource import SyncAPIResource, AsyncAPIResource
-from ..._response import (
-    to_raw_response_wrapper,
-    to_streamed_response_wrapper,
-    async_to_raw_response_wrapper,
-    async_to_streamed_response_wrapper,
-)
-from ..._base_client import make_request_options
-from ...types.search import query_classifier_classify_params
-from ...types.search.query_classifier_classify_response import QueryClassifierClassifyResponse
-
-__all__ = ["QueryClassifierResource", "AsyncQueryClassifierResource"]
-
-
-class QueryClassifierResource(SyncAPIResource):
-    @cached_property
-    def with_raw_response(self) -> QueryClassifierResourceWithRawResponse:
-        """
-        This property can be used as a prefix for any HTTP method call to return
-        the raw response object instead of the parsed content.
-
-        For more information, see https://www.github.com/withpi/sdk-python#accessing-raw-response-data-eg-headers
-        """
-        return QueryClassifierResourceWithRawResponse(self)
-
-    @cached_property
-    def with_streaming_response(self) -> QueryClassifierResourceWithStreamingResponse:
-        """
-        An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
-        For more information, see https://www.github.com/withpi/sdk-python#with_streaming_response
-        """
-        return QueryClassifierResourceWithStreamingResponse(self)
-
-    def classify(
-        self,
-        *,
-        classes: Iterable[query_classifier_classify_params.Class],
-        queries: SequenceNotStr[str],
-        batch_size: int | NotGiven = NOT_GIVEN,
-        examples: Optional[Iterable[query_classifier_classify_params.Example]] | NotGiven = NOT_GIVEN,
-        mode: Literal["generative", "probabilistic"] | NotGiven = NOT_GIVEN,
-        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
-        # The extra values given here take precedence over values defined on the client or passed to this method.
-        extra_headers: Headers | None = None,
-        extra_query: Query | None = None,
-        extra_body: Body | None = None,
-        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> QueryClassifierClassifyResponse:
-        """
-        Classifies queries into provided classes based on a custom taxonomy.
-
-        Args:
-          classes: The list of class definitions to classify the queries into. Must be <= 20.
-
-          queries: The list of queries to classify. Must be <= 10.
-
-          batch_size: Number of inputs to generate in one LLM call. Must be <=10.
-
-          examples: Optional list of examples to provide as few-shot examples for the classifier.
-              Must be <= 20.
-
-          mode: The mode to use for the classification. The probabilistic mode returns
-              probabilities for each class.
-
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
-        return self._post(
-            "/search/query_classifier/classify",
-            body=maybe_transform(
-                {
-                    "classes": classes,
-                    "queries": queries,
-                    "batch_size": batch_size,
-                    "examples": examples,
-                    "mode": mode,
-                },
-                query_classifier_classify_params.QueryClassifierClassifyParams,
-            ),
-            options=make_request_options(
-                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
-            ),
-            cast_to=QueryClassifierClassifyResponse,
-        )
-
-
-class AsyncQueryClassifierResource(AsyncAPIResource):
-    @cached_property
-    def with_raw_response(self) -> AsyncQueryClassifierResourceWithRawResponse:
-        """
-        This property can be used as a prefix for any HTTP method call to return
-        the raw response object instead of the parsed content.
-
-        For more information, see https://www.github.com/withpi/sdk-python#accessing-raw-response-data-eg-headers
-        """
-        return AsyncQueryClassifierResourceWithRawResponse(self)
-
-    @cached_property
-    def with_streaming_response(self) -> AsyncQueryClassifierResourceWithStreamingResponse:
-        """
-        An alternative to `.with_raw_response` that doesn't eagerly read the response body.
-
-        For more information, see https://www.github.com/withpi/sdk-python#with_streaming_response
-        """
-        return AsyncQueryClassifierResourceWithStreamingResponse(self)
-
-    async def classify(
-        self,
-        *,
-        classes: Iterable[query_classifier_classify_params.Class],
-        queries: SequenceNotStr[str],
-        batch_size: int | NotGiven = NOT_GIVEN,
-        examples: Optional[Iterable[query_classifier_classify_params.Example]] | NotGiven = NOT_GIVEN,
-        mode: Literal["generative", "probabilistic"] | NotGiven = NOT_GIVEN,
-        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
-        # The extra values given here take precedence over values defined on the client or passed to this method.
-        extra_headers: Headers | None = None,
-        extra_query: Query | None = None,
-        extra_body: Body | None = None,
-        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> QueryClassifierClassifyResponse:
-        """
-        Classifies queries into provided classes based on a custom taxonomy.
-
-        Args:
-          classes: The list of class definitions to classify the queries into. Must be <= 20.
-
-          queries: The list of queries to classify. Must be <= 10.
-
-          batch_size: Number of inputs to generate in one LLM call. Must be <=10.
-
-          examples: Optional list of examples to provide as few-shot examples for the classifier.
-              Must be <= 20.
-
-          mode: The mode to use for the classification. The probabilistic mode returns
-              probabilities for each class.
-
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
-        return await self._post(
-            "/search/query_classifier/classify",
-            body=await async_maybe_transform(
-                {
-                    "classes": classes,
-                    "queries": queries,
-                    "batch_size": batch_size,
-                    "examples": examples,
-                    "mode": mode,
-                },
-                query_classifier_classify_params.QueryClassifierClassifyParams,
-            ),
-            options=make_request_options(
-                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
-            ),
-            cast_to=QueryClassifierClassifyResponse,
-        )
-
-
-class QueryClassifierResourceWithRawResponse:
-    def __init__(self, query_classifier: QueryClassifierResource) -> None:
-        self._query_classifier = query_classifier
-
-        self.classify = to_raw_response_wrapper(
-            query_classifier.classify,
-        )
-
-
-class AsyncQueryClassifierResourceWithRawResponse:
-    def __init__(self, query_classifier: AsyncQueryClassifierResource) -> None:
-        self._query_classifier = query_classifier
-
-        self.classify = async_to_raw_response_wrapper(
-            query_classifier.classify,
-        )
-
-
-class QueryClassifierResourceWithStreamingResponse:
-    def __init__(self, query_classifier: QueryClassifierResource) -> None:
-        self._query_classifier = query_classifier
-
-        self.classify = to_streamed_response_wrapper(
-            query_classifier.classify,
-        )
-
-
-class AsyncQueryClassifierResourceWithStreamingResponse:
-    def __init__(self, query_classifier: AsyncQueryClassifierResource) -> None:
-        self._query_classifier = query_classifier
-
-        self.classify = async_to_streamed_response_wrapper(
-            query_classifier.classify,
-        )
diff --git a/src/withpi/resources/search/search.py b/src/withpi/resources/search/search.py
index 86e534b0..2c7cbc35 100644
--- a/src/withpi/resources/search/search.py
+++ b/src/withpi/resources/search/search.py
@@ -24,14 +24,6 @@
     AsyncGroundednessResourceWithStreamingResponse,
 )
 from ..._base_client import make_request_options
-from .query_classifier import (
-    QueryClassifierResource,
-    AsyncQueryClassifierResource,
-    QueryClassifierResourceWithRawResponse,
-    AsyncQueryClassifierResourceWithRawResponse,
-    QueryClassifierResourceWithStreamingResponse,
-    AsyncQueryClassifierResourceWithStreamingResponse,
-)
 from ...types.search_rank_response import SearchRankResponse
 from ...types.search_embed_response import SearchEmbedResponse
 
@@ -39,10 +31,6 @@
 
 
 class SearchResource(SyncAPIResource):
-    @cached_property
-    def query_classifier(self) -> QueryClassifierResource:
-        return QueryClassifierResource(self._client)
-
     @cached_property
     def groundedness(self) -> GroundednessResource:
         return GroundednessResource(self._client)
@@ -160,10 +148,6 @@ def rank(
 
 
 class AsyncSearchResource(AsyncAPIResource):
-    @cached_property
-    def query_classifier(self) -> AsyncQueryClassifierResource:
-        return AsyncQueryClassifierResource(self._client)
-
     @cached_property
     def groundedness(self) -> AsyncGroundednessResource:
         return AsyncGroundednessResource(self._client)
@@ -291,10 +275,6 @@ def __init__(self, search: SearchResource) -> None:
             search.rank,
         )
 
-    @cached_property
-    def query_classifier(self) -> QueryClassifierResourceWithRawResponse:
-        return QueryClassifierResourceWithRawResponse(self._search.query_classifier)
-
     @cached_property
     def groundedness(self) -> GroundednessResourceWithRawResponse:
         return GroundednessResourceWithRawResponse(self._search.groundedness)
@@ -311,10 +291,6 @@ def __init__(self, search: AsyncSearchResource) -> None:
             search.rank,
         )
 
-    @cached_property
-    def query_classifier(self) -> AsyncQueryClassifierResourceWithRawResponse:
-        return AsyncQueryClassifierResourceWithRawResponse(self._search.query_classifier)
-
     @cached_property
     def groundedness(self) -> AsyncGroundednessResourceWithRawResponse:
         return AsyncGroundednessResourceWithRawResponse(self._search.groundedness)
@@ -331,10 +307,6 @@ def __init__(self, search: SearchResource) -> None:
             search.rank,
         )
 
-    @cached_property
-    def query_classifier(self) -> QueryClassifierResourceWithStreamingResponse:
-        return QueryClassifierResourceWithStreamingResponse(self._search.query_classifier)
-
     @cached_property
     def groundedness(self) -> GroundednessResourceWithStreamingResponse:
         return GroundednessResourceWithStreamingResponse(self._search.groundedness)
@@ -351,10 +323,6 @@ def __init__(self, search: AsyncSearchResource) -> None:
             search.rank,
         )
 
-    @cached_property
-    def query_classifier(self) -> AsyncQueryClassifierResourceWithStreamingResponse:
-        return AsyncQueryClassifierResourceWithStreamingResponse(self._search.query_classifier)
-
     @cached_property
     def groundedness(self) -> AsyncGroundednessResourceWithStreamingResponse:
         return AsyncGroundednessResourceWithStreamingResponse(self._search.groundedness)
diff --git a/src/withpi/types/__init__.py b/src/withpi/types/__init__.py
index 15d5a08e..53a89381 100644
--- a/src/withpi/types/__init__.py
+++ b/src/withpi/types/__init__.py
@@ -9,7 +9,6 @@
     SyntheticDataStatus as SyntheticDataStatus,
     DataGenerationStatus as DataGenerationStatus,
     ScoringSystemMetrics as ScoringSystemMetrics,
-    QueryClassifierResult as QueryClassifierResult,
     ScoringSpecCalibrationStatus as ScoringSpecCalibrationStatus,
 )
 from .search_rank_params import SearchRankParams as SearchRankParams
diff --git a/src/withpi/types/search/__init__.py b/src/withpi/types/search/__init__.py
index 3eae7bd3..5569f4ba 100644
--- a/src/withpi/types/search/__init__.py
+++ b/src/withpi/types/search/__init__.py
@@ -4,5 +4,3 @@
 
 from .groundedness_check_params import GroundednessCheckParams as GroundednessCheckParams
 from .groundedness_check_response import GroundednessCheckResponse as GroundednessCheckResponse
-from .query_classifier_classify_params import QueryClassifierClassifyParams as QueryClassifierClassifyParams
-from .query_classifier_classify_response import QueryClassifierClassifyResponse as QueryClassifierClassifyResponse
diff --git a/src/withpi/types/search/query_classifier_classify_params.py b/src/withpi/types/search/query_classifier_classify_params.py
deleted file mode 100644
index f2af59cf..00000000
--- a/src/withpi/types/search/query_classifier_classify_params.py
+++ /dev/null
@@ -1,45 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-from typing import Iterable, Optional
-from typing_extensions import Literal, Required, TypedDict
-
-from ..._types import SequenceNotStr
-
-__all__ = ["QueryClassifierClassifyParams", "Class", "Example"]
-
-
-class QueryClassifierClassifyParams(TypedDict, total=False):
-    classes: Required[Iterable[Class]]
-    """The list of class definitions to classify the queries into. Must be <= 20."""
-
-    queries: Required[SequenceNotStr[str]]
-    """The list of queries to classify. Must be <= 10."""
-
-    batch_size: int
-    """Number of inputs to generate in one LLM call. Must be <=10."""
-
-    examples: Optional[Iterable[Example]]
-    """Optional list of examples to provide as few-shot examples for the classifier.
-
-    Must be <= 20.
-    """
-
-    mode: Literal["generative", "probabilistic"]
-    """The mode to use for the classification.
-
-    The probabilistic mode returns probabilities for each class.
-    """
-
-
-class Class(TypedDict, total=False):
-    description: Required[str]
-
-    label: Required[str]
-
-
-class Example(TypedDict, total=False):
-    label: Required[str]
-
-    text: Required[str]
diff --git a/src/withpi/types/search/query_classifier_classify_response.py b/src/withpi/types/search/query_classifier_classify_response.py
deleted file mode 100644
index f1970bba..00000000
--- a/src/withpi/types/search/query_classifier_classify_response.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List
-
-from ..._models import BaseModel
-from ..shared.query_classifier_result import QueryClassifierResult
-
-__all__ = ["QueryClassifierClassifyResponse"]
-
-
-class QueryClassifierClassifyResponse(BaseModel):
-    results: List[QueryClassifierResult]
diff --git a/src/withpi/types/shared/__init__.py b/src/withpi/types/shared/__init__.py
index 7102d762..be229c08 100644
--- a/src/withpi/types/shared/__init__.py
+++ b/src/withpi/types/shared/__init__.py
@@ -6,5 +6,4 @@
 from .synthetic_data_status import SyntheticDataStatus as SyntheticDataStatus
 from .data_generation_status import DataGenerationStatus as DataGenerationStatus
 from .scoring_system_metrics import ScoringSystemMetrics as ScoringSystemMetrics
-from .query_classifier_result import QueryClassifierResult as QueryClassifierResult
 from .scoring_spec_calibration_status import ScoringSpecCalibrationStatus as ScoringSpecCalibrationStatus
diff --git a/src/withpi/types/shared/query_classifier_result.py b/src/withpi/types/shared/query_classifier_result.py
deleted file mode 100644
index 0cefcbfa..00000000
--- a/src/withpi/types/shared/query_classifier_result.py
+++ /dev/null
@@ -1,21 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from typing import List
-
-from ..._models import BaseModel
-
-__all__ = ["QueryClassifierResult", "Probability"]
-
-
-class Probability(BaseModel):
-    label: str
-
-    score: float
-
-
-class QueryClassifierResult(BaseModel):
-    prediction: str
-
-    probabilities: List[Probability]
-
-    query: str
diff --git a/tests/api_resources/search/test_query_classifier.py b/tests/api_resources/search/test_query_classifier.py
deleted file mode 100644
index acf9ab69..00000000
--- a/tests/api_resources/search/test_query_classifier.py
+++ /dev/null
@@ -1,284 +0,0 @@
-# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-from __future__ import annotations
-
-import os
-from typing import Any, cast
-
-import pytest
-
-from withpi import PiClient, AsyncPiClient
-from tests.utils import assert_matches_type
-from withpi.types.search import QueryClassifierClassifyResponse
-
-base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
-
-
-class TestQueryClassifier:
-    parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    def test_method_classify(self, client: PiClient) -> None:
-        query_classifier = client.search.query_classifier.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
-                    "description": "Questions asking for subjective judgments, preferences, or personal views",
-                    "label": "opinion",
-                },
-                {
-                    "description": "Questions about how to perform tasks or follow specific processes",
-                    "label": "procedural",
-                },
-            ],
-            queries=[
-                "What is the capital of France?",
-                "How do I feel about the current political climate?",
-                "What steps should I follow to bake a chocolate cake?",
-            ],
-        )
-        assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"])
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    def test_method_classify_with_all_params(self, client: PiClient) -> None:
-        query_classifier = client.search.query_classifier.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
-                    "description": "Questions asking for subjective judgments, preferences, or personal views",
-                    "label": "opinion",
-                },
-                {
-                    "description": "Questions about how to perform tasks or follow specific processes",
-                    "label": "procedural",
-                },
-            ],
-            queries=[
-                "What is the capital of France?",
-                "How do I feel about the current political climate?",
-                "What steps should I follow to bake a chocolate cake?",
-            ],
-            batch_size=10,
-            examples=[
-                {
-                    "label": "factual",
-                    "text": "When was the Eiffel Tower built?",
-                },
-                {
-                    "label": "opinion",
-                    "text": "Is jazz better than classical music?",
-                },
-                {
-                    "label": "procedural",
-                    "text": "How do I change a flat tire?",
-                },
-            ],
-            mode="generative",
-        )
-        assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"])
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    def test_raw_response_classify(self, client: PiClient) -> None:
-        response = client.search.query_classifier.with_raw_response.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
-                    "description": "Questions asking for subjective judgments, preferences, or personal views",
-                    "label": "opinion",
-                },
-                {
-                    "description": "Questions about how to perform tasks or follow specific processes",
-                    "label": "procedural",
-                },
-            ],
-            queries=[
-                "What is the capital of France?",
-                "How do I feel about the current political climate?",
-                "What steps should I follow to bake a chocolate cake?",
-            ],
-        )
-
-        assert response.is_closed is True
-        assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-        query_classifier = response.parse()
-        assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"])
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    def test_streaming_response_classify(self, client: PiClient) -> None:
-        with client.search.query_classifier.with_streaming_response.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
-                    "description": "Questions asking for subjective judgments, preferences, or personal views",
-                    "label": "opinion",
-                },
-                {
-                    "description": "Questions about how to perform tasks or follow specific processes",
-                    "label": "procedural",
-                },
-            ],
-            queries=[
-                "What is the capital of France?",
-                "How do I feel about the current political climate?",
-                "What steps should I follow to bake a chocolate cake?",
-            ],
-        ) as response:
-            assert not response.is_closed
-            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
-
-            query_classifier = response.parse()
-            assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"])
-
-        assert cast(Any, response.is_closed) is True
-
-
-class TestAsyncQueryClassifier:
-    parametrize = pytest.mark.parametrize(
-        "async_client", [False, True, {"http_client": "aiohttp"}], indirect=True, ids=["loose", "strict", "aiohttp"]
-    )
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    async def test_method_classify(self, async_client: AsyncPiClient) -> None:
-        query_classifier = await async_client.search.query_classifier.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
-                    "description": "Questions asking for subjective judgments, preferences, or personal views",
-                    "label": "opinion",
-                },
-                {
-                    "description": "Questions about how to perform tasks or follow specific processes",
-                    "label": "procedural",
-                },
-            ],
-            queries=[
-                "What is the capital of France?",
-                "How do I feel about the current political climate?",
-                "What steps should I follow to bake a chocolate cake?",
-            ],
-        )
-        assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"])
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    async def test_method_classify_with_all_params(self, async_client: AsyncPiClient) -> None:
-        query_classifier = await async_client.search.query_classifier.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
-                    "description": "Questions asking for subjective judgments, preferences, or personal views",
-                    "label": "opinion",
-                },
-                {
-                    "description": "Questions about how to perform tasks or follow specific processes",
-                    "label": "procedural",
-                },
-            ],
-            queries=[
-                "What is the capital of France?",
-                "How do I feel about the current political climate?",
-                "What steps should I follow to bake a chocolate cake?",
-            ],
-            batch_size=10,
-            examples=[
-                {
-                    "label": "factual",
-                    "text": "When was the Eiffel Tower built?",
-                },
-                {
-                    "label": "opinion",
-                    "text": "Is jazz better than classical music?",
-                },
-                {
-                    "label": "procedural",
-                    "text": "How do I change a flat tire?",
-                },
-            ],
-            mode="generative",
-        )
-        assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"])
-
-    @pytest.mark.skip(reason="Prism tests are disabled")
-    @parametrize
-    async def test_raw_response_classify(self, async_client: AsyncPiClient) -> None:
-        response = await async_client.search.query_classifier.with_raw_response.classify(
-            classes=[
-                {
-                    "description": "Questions seeking objective, verifiable information or facts",
-                    "label": "factual",
-                },
-                {
"Questions asking for subjective judgments, preferences, or personal views", - "label": "opinion", - }, - { - "description": "Questions about how to perform tasks or follow specific processes", - "label": "procedural", - }, - ], - queries=[ - "What is the capital of France?", - "How do I feel about the current political climate?", - "What steps should I follow to bake a chocolate cake?", - ], - ) - - assert response.is_closed is True - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - query_classifier = await response.parse() - assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"]) - - @pytest.mark.skip(reason="Prism tests are disabled") - @parametrize - async def test_streaming_response_classify(self, async_client: AsyncPiClient) -> None: - async with async_client.search.query_classifier.with_streaming_response.classify( - classes=[ - { - "description": "Questions seeking objective, verifiable information or facts", - "label": "factual", - }, - { - "description": "Questions asking for subjective judgments, preferences, or personal views", - "label": "opinion", - }, - { - "description": "Questions about how to perform tasks or follow specific processes", - "label": "procedural", - }, - ], - queries=[ - "What is the capital of France?", - "How do I feel about the current political climate?", - "What steps should I follow to bake a chocolate cake?", - ], - ) as response: - assert not response.is_closed - assert response.http_request.headers.get("X-Stainless-Lang") == "python" - - query_classifier = await response.parse() - assert_matches_type(QueryClassifierClassifyResponse, query_classifier, path=["response"]) - - assert cast(Any, response.is_closed) is True