From e67fd76883e33dd51464b4c4f52f19d095976229 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 16:50:04 +0000 Subject: [PATCH 1/2] feat(api): adds support for Claude Sonnet 4.5 and context management features --- ...48dadcd4f01870e9abedd2917b87c7d76e88c.json | 4 - ...94bc92b09016bbe4dd8f9de461fdf661c7666.json | 4 - .stats.yml | 6 +- README.md | 36 +- api.md | 17 + examples/bedrock.py | 4 +- examples/images.py | 2 +- examples/memory/basic.py | 392 ++++++++++++++++++ examples/messages.py | 4 +- examples/messages_stream.py | 2 +- examples/text_completions_demo_async.py | 2 +- examples/text_completions_demo_sync.py | 2 +- examples/text_completions_streaming.py | 4 +- examples/thinking.py | 2 +- examples/thinking_stream.py | 2 +- examples/tools.py | 4 +- examples/tools_stream.py | 2 +- examples/web_search.py | 2 +- examples/web_search_stream.py | 2 +- pyproject.toml | 2 +- src/anthropic/lib/streaming/_beta_messages.py | 1 + src/anthropic/lib/tools/__init__.py | 7 + .../lib/tools/_beta_builtin_memory_tool.py | 245 +++++++++++ src/anthropic/lib/tools/_beta_functions.py | 30 ++ src/anthropic/lib/tools/_beta_runner.py | 24 +- .../resources/beta/messages/messages.py | 66 ++- src/anthropic/types/beta/__init__.py | 35 ++ ...eta_clear_tool_uses_20250919_edit_param.py | 38 ++ ..._clear_tool_uses_20250919_edit_response.py | 18 + .../beta_context_management_config_param.py | 15 + .../beta/beta_context_management_response.py | 13 + ...ount_tokens_context_management_response.py | 10 + .../beta_input_tokens_clear_at_least_param.py | 13 + .../beta/beta_input_tokens_trigger_param.py | 13 + .../beta/beta_memory_tool_20250818_command.py | 26 ++ ...eta_memory_tool_20250818_create_command.py | 18 + ...eta_memory_tool_20250818_delete_command.py | 15 + ...eta_memory_tool_20250818_insert_command.py | 21 + .../beta/beta_memory_tool_20250818_param.py | 23 + 
...eta_memory_tool_20250818_rename_command.py | 18 + ...emory_tool_20250818_str_replace_command.py | 21 + .../beta_memory_tool_20250818_view_command.py | 19 + src/anthropic/types/beta/beta_message.py | 4 + .../types/beta/beta_message_tokens_count.py | 6 + .../beta/beta_raw_message_delta_event.py | 4 + src/anthropic/types/beta/beta_stop_reason.py | 4 +- .../types/beta/beta_tool_union_param.py | 2 + .../types/beta/beta_tool_uses_keep_param.py | 13 + .../beta/beta_tool_uses_trigger_param.py | 13 + .../types/beta/message_count_tokens_params.py | 8 +- .../types/beta/message_create_params.py | 4 + src/anthropic/types/model.py | 2 + src/anthropic/types/model_param.py | 2 + src/anthropic/types/stop_reason.py | 4 +- .../beta/messages/test_batches.py | 42 ++ tests/api_resources/beta/test_messages.py | 126 ++++++ .../d105e140-a30c-4d6b-91df-257247da3623.json | 4 - .../ef758469-6fa6-454c-b2e6-19d0b450a8c5.json | 4 + tests/lib/tools/test_runners.py | 49 +-- tools.md | 4 +- uv.lock | 2 +- 61 files changed, 1383 insertions(+), 98 deletions(-) delete mode 100644 .inline-snapshot/external/3a494e1680ee379c50e7ecae59148dadcd4f01870e9abedd2917b87c7d76e88c.json delete mode 100644 .inline-snapshot/external/77ebaa9dcceb3437c00fa25ed6794bc92b09016bbe4dd8f9de461fdf661c7666.json create mode 100644 examples/memory/basic.py create mode 100644 src/anthropic/lib/tools/_beta_builtin_memory_tool.py create mode 100644 src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_param.py create mode 100644 src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_response.py create mode 100644 src/anthropic/types/beta/beta_context_management_config_param.py create mode 100644 src/anthropic/types/beta/beta_context_management_response.py create mode 100644 src/anthropic/types/beta/beta_count_tokens_context_management_response.py create mode 100644 src/anthropic/types/beta/beta_input_tokens_clear_at_least_param.py create mode 100644 src/anthropic/types/beta/beta_input_tokens_trigger_param.py create 
mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_command.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_create_command.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_delete_command.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_insert_command.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_param.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_rename_command.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_str_replace_command.py create mode 100644 src/anthropic/types/beta/beta_memory_tool_20250818_view_command.py create mode 100644 src/anthropic/types/beta/beta_tool_uses_keep_param.py create mode 100644 src/anthropic/types/beta/beta_tool_uses_trigger_param.py delete mode 100644 tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/d105e140-a30c-4d6b-91df-257247da3623.json create mode 100644 tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/ef758469-6fa6-454c-b2e6-19d0b450a8c5.json diff --git a/.inline-snapshot/external/3a494e1680ee379c50e7ecae59148dadcd4f01870e9abedd2917b87c7d76e88c.json b/.inline-snapshot/external/3a494e1680ee379c50e7ecae59148dadcd4f01870e9abedd2917b87c7d76e88c.json deleted file mode 100644 index a446e6c..0000000 --- a/.inline-snapshot/external/3a494e1680ee379c50e7ecae59148dadcd4f01870e9abedd2917b87c7d76e88c.json +++ /dev/null @@ -1,4 +0,0 @@ -[ - "event: message_start\ndata: 
{\"type\":\"message_start\",\"message\":{\"id\":\"msg_01A2JSjTak1GruJX1LikXGDU\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-3-5-sonnet-20241022\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":473,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":2,\"service_tier\":\"standard\"}}}\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I'll\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" help you check the weather in San Francisco\"}}\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\". 
The function requires me to specify the\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" units (celsius or fahrenheit),\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" so I'll check it in both units\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" to give you a complete picture.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":1,\"content_block\":{\"type\":\"tool_use\",\"id\":\"toolu_017o8qjpdoyixa3MKzz9J9pn\",\"name\":\"get_weather\",\"input\":{}} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"{\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"loca\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"tion\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\": \\\"San Franc\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"isco\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\", CA\\\"\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\", \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"unit\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"s\\\": \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"f\\\"}\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":1 }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":2,\"content_block\":{\"type\":\"tool_use\",\"id\":\"toolu_01MTKoz2BN2tZ9tj5hP3kzST\",\"name\":\"get_weather\",\"input\":{}} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"{\\\"locati\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"on\\\": \\\"San \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"Fran\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"cisco\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\", \"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"CA\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\", \\\"unit\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"s\\\": \\\"c\\\"}\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":2 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"tool_use\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":473,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"output_tokens\":170} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n", - "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01UhvaGgFondNqXUWKx4PszP\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-3-5-sonnet-20241022\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":761,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":2,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"The\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" weather in San Francisco is currently\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" sunny with a temperature of 68\u00b0\"}}\n\nevent: 
content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"F (20\u00b0C).\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":761,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"output_tokens\":25} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n" -] \ No newline at end of file diff --git a/.inline-snapshot/external/77ebaa9dcceb3437c00fa25ed6794bc92b09016bbe4dd8f9de461fdf661c7666.json b/.inline-snapshot/external/77ebaa9dcceb3437c00fa25ed6794bc92b09016bbe4dd8f9de461fdf661c7666.json deleted file mode 100644 index 138efe8..0000000 --- a/.inline-snapshot/external/77ebaa9dcceb3437c00fa25ed6794bc92b09016bbe4dd8f9de461fdf661c7666.json +++ /dev/null @@ -1,4 +0,0 @@ -[ - "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01YRiDgGtAnVWtzGJqRSZjAn\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-3-5-sonnet-20241022\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":473,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":2,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"I'll\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" help you check the weather in\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" San Francisco. Since\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" the location parameter\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" requires city and state format\"}}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\", I'll use \\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"San Francisco, CA\\\".\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" I'll show\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" you the temperature\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" in both Celsius an\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"d Fahrenheit.\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":1,\"content_block\":{\"type\":\"tool_use\",\"id\":\"toolu_012hgxfzW99WoVYezL1WcYey\",\"name\":\"get_weather\",\"input\":{}} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"{\\\"l\"} }\n\nevent: content_block_delta\ndata: 
{\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"ocation\\\": \\\"S\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"an Franc\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"isco, C\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"A\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\", \\\"units\\\":\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":1,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\" \\\"c\\\"}\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":1 }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":2,\"content_block\":{\"type\":\"tool_use\",\"id\":\"toolu_01NuZ5a7BDewKGEjC5XMchFi\",\"name\":\"get_weather\",\"input\":{}} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"{\\\"location\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"\\\": \\\"San Fran\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"cisco, CA\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\", 
\\\"unit\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"s\\\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":2,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\": \\\"f\\\"}\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":2 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"tool_use\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":473,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"output_tokens\":175} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n", - "event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_01FXBHYjKkNTFc6PnCiarAoh\",\"type\":\"message\",\"role\":\"assistant\",\"model\":\"claude-3-5-sonnet-20241022\",\"content\":[],\"stop_reason\":null,\"stop_sequence\":null,\"usage\":{\"input_tokens\":766,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"cache_creation\":{\"ephemeral_5m_input_tokens\":0,\"ephemeral_1h_input_tokens\":0},\"output_tokens\":2,\"service_tier\":\"standard\"}} }\n\nevent: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"The\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" weather in San Francisco,\"} }\n\nevent: ping\ndata: {\"type\": \"ping\"}\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" CA is currently\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\" sunny with a temperature of 
\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"20\u00b0C (68\"} }\n\nevent: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"\u00b0F).\"} }\n\nevent: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0 }\n\nevent: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\",\"stop_sequence\":null},\"usage\":{\"input_tokens\":766,\"cache_creation_input_tokens\":0,\"cache_read_input_tokens\":0,\"output_tokens\":27} }\n\nevent: message_stop\ndata: {\"type\":\"message_stop\" }\n\n" -] \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 12d7911..b016494 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ configured_endpoints: 26 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-24b1aa2f2f5fb2a8f4093d3bf2d1fe77b71242ab86b6fd64d891c288cdb81f28.yml -openapi_spec_hash: 87710d3846b4968bd8df1df88d865dbf -config_hash: bfd3194db84787aefad40f2597c134d0 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/anthropic%2Fanthropic-9c7d1ea59095c76b24f14fe279825c2b0dc10f165a973a46b8a548af9aeda62e.yml +openapi_spec_hash: 592d90505082223899e0638ad56ba162 +config_hash: e0f97b085ca213c87341488cb257ed57 diff --git a/README.md b/README.md index 7233341..a94da6a 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ message = client.messages.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(message.content) ``` @@ -71,7 +71,7 @@ async def main() -> None: "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(message.content) @@ -113,7 +113,7 @@ async def main() -> None: "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + 
model="claude-sonnet-4-5-20250929", ) print(message.content) @@ -138,7 +138,7 @@ stream = client.messages.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", stream=True, ) for event in stream: @@ -160,7 +160,7 @@ stream = await client.messages.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", stream=True, ) async for event in stream: @@ -202,7 +202,7 @@ def get_weather(location: str) -> str: runner = client.beta.messages.tool_runner( max_tokens=1024, - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", tools=[get_weather], messages=[ {"role": "user", "content": "What is the weather in SF?"}, @@ -235,7 +235,7 @@ async def main() -> None: "content": "Say hello there!", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) as stream: async for text in stream.text_stream: print(text, end="", flush=True) @@ -257,7 +257,7 @@ To get the token count for a message without creating it you can use the `client ```py count = client.messages.count_tokens( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", messages=[ {"role": "user", "content": "Hello, world"} ] @@ -287,7 +287,7 @@ await client.messages.batches.create( { "custom_id": "my-first-request", "params": { - "model": "claude-sonnet-4-20250514", + "model": "claude-sonnet-4-5-20250929", "max_tokens": 1024, "messages": [{"role": "user", "content": "Hello, world"}], }, @@ -295,7 +295,7 @@ await client.messages.batches.create( { "custom_id": "my-second-request", "params": { - "model": "claude-sonnet-4-20250514", + "model": "claude-sonnet-4-5-20250929", "max_tokens": 1024, "messages": [{"role": "user", "content": "Hi again, friend"}], }, @@ -338,7 +338,7 @@ message = client.messages.create( "content": "Hello!", } ], - model="anthropic.claude-sonnet-4-20250514-v1:0", + model="anthropic.claude-sonnet-4-5-20250929-v1:0", ) 
print(message) ``` @@ -480,7 +480,7 @@ message = client.messages.create( "role": "user", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", metadata={}, ) print(message.metadata) @@ -527,7 +527,7 @@ try: "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) except anthropic.APIConnectionError as e: print("The server could not be reached") @@ -568,7 +568,7 @@ message = client.messages.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(message._request_id) # req_018EeWyXxfu5pfWkrYcMdjWG ``` @@ -603,7 +603,7 @@ client.with_options(max_retries=5).messages.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) ``` @@ -635,7 +635,7 @@ client.with_options(timeout=5.0).messages.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) ``` @@ -718,7 +718,7 @@ response = client.messages.with_raw_response.create( "role": "user", "content": "Hello, Claude", }], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(response.headers.get('X-My-Header')) @@ -752,7 +752,7 @@ with client.messages.with_streaming_response.create( "content": "Hello, Claude", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) as response: print(response.headers.get("X-My-Header")) diff --git a/api.md b/api.md index ffc0a2a..7693e3d 100644 --- a/api.md +++ b/api.md @@ -227,6 +227,8 @@ from anthropic.types.beta import ( BetaCitationsConfigParam, BetaCitationsDelta, BetaCitationsWebSearchResultLocation, + BetaClearToolUses20250919Edit, + BetaClearToolUses20250919EditResponse, BetaCodeExecutionOutputBlock, BetaCodeExecutionOutputBlockParam, BetaCodeExecutionResultBlock, @@ -247,14 +249,27 @@ from anthropic.types.beta import ( BetaContentBlockParam, BetaContentBlockSource, 
BetaContentBlockSourceContent, + BetaContextManagementConfig, + BetaContextManagementResponse, + BetaCountTokensContextManagementResponse, BetaDocumentBlock, BetaFileDocumentSource, BetaFileImageSource, BetaImageBlockParam, BetaInputJSONDelta, + BetaInputTokensClearAtLeast, + BetaInputTokensTrigger, BetaMCPToolResultBlock, BetaMCPToolUseBlock, BetaMCPToolUseBlockParam, + BetaMemoryTool20250818, + BetaMemoryTool20250818Command, + BetaMemoryTool20250818CreateCommand, + BetaMemoryTool20250818DeleteCommand, + BetaMemoryTool20250818InsertCommand, + BetaMemoryTool20250818RenameCommand, + BetaMemoryTool20250818StrReplaceCommand, + BetaMemoryTool20250818ViewCommand, BetaMessage, BetaMessageDeltaUsage, BetaMessageParam, @@ -320,6 +335,8 @@ from anthropic.types.beta import ( BetaToolUnion, BetaToolUseBlock, BetaToolUseBlockParam, + BetaToolUsesKeep, + BetaToolUsesTrigger, BetaURLImageSource, BetaURLPDFSource, BetaUsage, diff --git a/examples/bedrock.py b/examples/bedrock.py index cad7666..7b1d85f 100644 --- a/examples/bedrock.py +++ b/examples/bedrock.py @@ -19,7 +19,7 @@ "content": "Hello!", } ], - model="anthropic.claude-sonnet-4-20250514-v1:0", + model="anthropic.claude-sonnet-4-5-20250929-v1:0", ) print(message.model_dump_json(indent=2)) @@ -33,7 +33,7 @@ "content": "Say hello there!", } ], - model="anthropic.claude-sonnet-4-20250514-v1:0", + model="anthropic.claude-sonnet-4-5-20250929-v1:0", ) as stream: for text in stream.text_stream: print(text, end="", flush=True) diff --git a/examples/images.py b/examples/images.py index cdd2132..3a27afe 100644 --- a/examples/images.py +++ b/examples/images.py @@ -25,6 +25,6 @@ ], }, ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(response.model_dump_json(indent=2)) diff --git a/examples/memory/basic.py b/examples/memory/basic.py new file mode 100644 index 0000000..fd0a933 --- /dev/null +++ b/examples/memory/basic.py @@ -0,0 +1,392 @@ +import time +import shutil +import threading +from typing 
import List, Optional, cast
+from pathlib import Path
+from typing_extensions import override
+
+from pydantic import TypeAdapter
+
+from anthropic import Anthropic
+from anthropic.lib.tools import BetaAbstractMemoryTool
+from anthropic.types.beta import (
+    BetaMessageParam,
+    BetaContentBlockParam,
+    BetaMemoryTool20250818Command,
+    BetaContextManagementConfigParam,
+    BetaMemoryTool20250818ViewCommand,
+    BetaMemoryTool20250818CreateCommand,
+    BetaMemoryTool20250818DeleteCommand,
+    BetaMemoryTool20250818InsertCommand,
+    BetaMemoryTool20250818RenameCommand,
+    BetaMemoryTool20250818StrReplaceCommand,
+)
+
+MEMORY_SYSTEM_PROMPT = """- **DO NOT just store the conversation history**
+  - No need to mention your memory tool or what you are writing in it to the user, unless they ask
+  - Store facts about the user and their preferences
+  - Before responding, check memory to adjust technical depth and response style appropriately
+  - Keep memories up-to-date - remove outdated info, add new details as you learn them
+  - Use an xml format like John Doe"""
+
+
+# Context management automatically clears old tool results to stay within token limits
+# Triggers when input exceeds 30k tokens, keeping only the 3 most recent tool uses
+CONTEXT_MANAGEMENT = {
+    "edits": [
+        {
+            "type": "clear_tool_uses_20250919",
+            # The below parameters are OPTIONAL:
+            # Trigger clearing when threshold is exceeded
+            "trigger": {"type": "input_tokens", "value": 30000},
+            # Number of tool uses to keep after clearing
+            "keep": {"type": "tool_uses", "value": 3},
+            # Optional: Clear at least this many tokens
+            "clear_at_least": {"type": "input_tokens", "value": 5000},
+            # Exclude these tool uses from being cleared
+            "exclude_tools": ["web_search"],
+        }
+    ]
+}
+
+
+class LocalFilesystemMemoryTool(BetaAbstractMemoryTool):
+    """File-based memory storage implementation for Claude conversations"""
+
+    def __init__(self, base_path: str = "./memory"):
+        super().__init__()
+        self.base_path = Path(base_path)
+        
self.memory_root = self.base_path / "memories" + self.memory_root.mkdir(parents=True, exist_ok=True) + + def _validate_path(self, path: str) -> Path: + """Validate and resolve memory paths""" + if not path.startswith("/memories"): + raise ValueError(f"Path must start with /memories, got: {path}") + + relative_path = path[len("/memories") :].lstrip("/") + full_path = self.memory_root / relative_path if relative_path else self.memory_root + + try: + full_path.resolve().relative_to(self.memory_root.resolve()) + except ValueError as e: + raise ValueError(f"Path {path} would escape /memories directory") from e + + return full_path + + @override + def view(self, command: BetaMemoryTool20250818ViewCommand) -> str: + full_path = self._validate_path(command.path) + + if full_path.is_dir(): + items: List[str] = [] + try: + for item in sorted(full_path.iterdir()): + if item.name.startswith("."): + continue + items.append(f"{item.name}/" if item.is_dir() else item.name) + return f"Directory: {command.path}" + "\n".join([f"- {item}" for item in items]) + except Exception as e: + raise RuntimeError(f"Cannot read directory {command.path}: {e}") from e + + elif full_path.is_file(): + try: + content = full_path.read_text(encoding="utf-8") + lines = content.splitlines() + view_range = command.view_range + if view_range: + start_line = max(1, view_range[0]) - 1 + end_line = len(lines) if view_range[1] == -1 else view_range[1] + lines = lines[start_line:end_line] + start_num = start_line + 1 + else: + start_num = 1 + + numbered_lines = [f"{i + start_num:4d}: {line}" for i, line in enumerate(lines)] + return "\n".join(numbered_lines) + except Exception as e: + raise RuntimeError(f"Cannot read file {command.path}: {e}") from e + else: + raise RuntimeError(f"Path not found: {command.path}") + + @override + def create(self, command: BetaMemoryTool20250818CreateCommand) -> str: + full_path = self._validate_path(command.path) + full_path.parent.mkdir(parents=True, exist_ok=True) + 
full_path.write_text(command.file_text, encoding="utf-8") + return f"File created successfully at {command.path}" + + @override + def str_replace(self, command: BetaMemoryTool20250818StrReplaceCommand) -> str: + full_path = self._validate_path(command.path) + + if not full_path.is_file(): + raise FileNotFoundError(f"File not found: {command.path}") + + content = full_path.read_text(encoding="utf-8") + count = content.count(command.old_str) + if count == 0: + raise ValueError(f"Text not found in {command.path}") + elif count > 1: + raise ValueError(f"Text appears {count} times in {command.path}. Must be unique.") + + new_content = content.replace(command.old_str, command.new_str) + full_path.write_text(new_content, encoding="utf-8") + return f"File {command.path} has been edited" + + @override + def insert(self, command: BetaMemoryTool20250818InsertCommand) -> str: + full_path = self._validate_path(command.path) + insert_line = command.insert_line + insert_text = command.insert_text + + if not full_path.is_file(): + raise FileNotFoundError(f"File not found: {command.path}") + + lines = full_path.read_text(encoding="utf-8").splitlines() + if insert_line < 0 or insert_line > len(lines): + raise ValueError(f"Invalid insert_line {insert_line}. 
Must be 0-{len(lines)}") + + lines.insert(insert_line, insert_text.rstrip("\n")) + full_path.write_text("\n".join(lines) + "\n", encoding="utf-8") + return f"Text inserted at line {insert_line} in {command.path}" + + @override + def delete(self, command: BetaMemoryTool20250818DeleteCommand) -> str: + full_path = self._validate_path(command.path) + + if command.path == "/memories": + raise ValueError("Cannot delete the /memories directory itself") + + if full_path.is_file(): + full_path.unlink() + return f"File deleted: {command.path}" + elif full_path.is_dir(): + shutil.rmtree(full_path) + return f"Directory deleted: {command.path}" + else: + raise FileNotFoundError(f"Path not found: {command.path}") + + @override + def rename(self, command: BetaMemoryTool20250818RenameCommand) -> str: + old_full_path = self._validate_path(command.old_path) + new_full_path = self._validate_path(command.new_path) + + if not old_full_path.exists(): + raise FileNotFoundError(f"Source path not found: {command.old_path}") + if new_full_path.exists(): + raise ValueError(f"Destination already exists: {command.new_path}") + + new_full_path.parent.mkdir(parents=True, exist_ok=True) + old_full_path.rename(new_full_path) + return f"Renamed {command.old_path} to {command.new_path}" + + @override + def clear_all_memory(self) -> str: + """Override the base implementation to provide file system clearing.""" + if self.memory_root.exists(): + shutil.rmtree(self.memory_root) + self.memory_root.mkdir(parents=True, exist_ok=True) + return "All memory cleared" + + +class Spinner: + def __init__(self, message: str = "Thinking"): + self.message = message + self.spinning = False + self.thread = None + + def start(self): + self.spinning = True + self.thread = threading.Thread(target=self._spin) + self.thread.start() + + def stop(self): + self.spinning = False + if self.thread: + self.thread.join() + print("\r" + " " * (len(self.message) + 10) + "\r", end="", flush=True) + + def _spin(self): + chars = 
"⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏" + i = 0 + while self.spinning: + print(f"\r{self.message} {chars[i % len(chars)]}", end="", flush=True) + i += 1 + time.sleep(0.1) + + +def conversation_loop(): + client = Anthropic() + memory = LocalFilesystemMemoryTool() + + messages: list[BetaMessageParam] = [] + + # Initialize tracking for debug + last_response_id: Optional[str] = None + last_usage = None + + print("🧠 Claude with Memory & Web Search - Interactive Session") + print("Commands:") + print(" /quit or /exit - Exit the session") + print(" /clear - Start fresh conversation") + print(" /memory_view - See all memory files") + print(" /memory_clear - Delete all memory") + print(" /debug - View conversation history and token usage") + + # Display context management settings + print(f"\n🧹 Context Management") + print("=" * 60) + + while True: + try: + user_input = input("\nYou: ").strip() + except (EOFError, KeyboardInterrupt): + print("\nGoodbye!") + break + + if user_input.lower() in ["/quit", "/exit"]: + print("Goodbye!") + break + elif user_input.lower() == "/clear": + messages = [] + print("Conversation cleared!") + continue + elif user_input.lower() == "/memory_view": + result = memory.execute(BetaMemoryTool20250818ViewCommand(command="view", path="/memories")) + print("\n📁 Memory contents:") + print(result) + continue + elif user_input.lower() == "/memory_clear": + result = memory.clear_all_memory() + print(f"🗑️ {result}") + continue + elif user_input.lower() == "/debug": + print("\n🔍 Conversation history:") + + # Show last response ID if available + if last_response_id: + print(f"📌 Last response ID: {last_response_id}") + + # Show token usage if available + if last_usage: + usage = last_usage + input_tokens = usage.get("input_tokens", 0) + cached_tokens = usage.get("cache_read_input_tokens", 0) + uncached_tokens = input_tokens - cached_tokens + + print(f"📊 Last API call tokens:") + print(f" Total input: {input_tokens:,} tokens") + print(f" Cached: {cached_tokens:,} tokens") + print(f" 
Uncached: {uncached_tokens:,} tokens") + + threshold = CONTEXT_MANAGEMENT["edits"][0]["trigger"]["value"] # type: ignore + print(f" Context clearing threshold: {threshold:,} tokens") + if input_tokens >= threshold: + print(f" 🧹 Context clearing should trigger soon!") + elif input_tokens >= threshold * 0.8: # 80% of threshold #type: ignore + print(f" ⚠️ Approaching context clearing threshold!") + + print("=" * 80) + + for i, message in enumerate(messages): + role = message["role"].upper() + print(f"\n[{i + 1}] {role}:") + print("-" * 40) + + content = message["content"] + if isinstance(content, str): + print(content[:500] + "..." if len(content) > 500 else content) + elif isinstance(content, list): + for block in content: + if isinstance(block, dict): + if block.get("type") in ["tool_use", "server_tool_use"]: + print(f"Tool: {block.get('name', 'unknown')}") + elif block.get("type") == "tool_result": + print(f"Tool Result: [content]") + elif block.get("type") == "text": + text = block.get("text", "") + print(f"Text: {text[:200]}..." 
if len(text) > 200 else f"Text: {text}") + print("=" * 80) + continue + elif not user_input: + continue + + messages.append({"role": "user", "content": user_input}) + + print("\nClaude: ", end="", flush=True) + + spinner = Spinner("Thinking") + spinner.start() + + # Use tool_runner with memory tool + try: + runner = client.beta.messages.tool_runner( + betas=["context-management-2025-06-27"], + model="claude-sonnet-4-20250514", + max_tokens=2048, + system=MEMORY_SYSTEM_PROMPT, + messages=messages, + tools=[memory], + context_management=cast(BetaContextManagementConfigParam, CONTEXT_MANAGEMENT), + ) + except Exception: + spinner.stop() + raise + + # Process all messages from the runner + assistant_content: list[BetaContentBlockParam] = [] + for message in runner: + spinner.stop() + + # Store response ID and usage for debug display + last_response_id = message.id + + if hasattr(message, "usage") and message.usage: + last_usage = message.usage.model_dump() if hasattr(message.usage, "model_dump") else dict(message.usage) + + # Check for context management actions + if message.context_management: + for edit in message.context_management.applied_edits: + print(f"\n🧹 [Context Management: {edit.type} applied]") + + # Process content blocks + for content in message.content: + if content.type == "text": + print(content.text, end="", flush=True) + assistant_content.append({"type": "text", "text": content.text}) + + elif content.type == "tool_use" and content.name == "memory": + tool_input = TypeAdapter[BetaMemoryTool20250818Command]( + BetaMemoryTool20250818Command + ).validate_python(content.input) + + print(f"\n[Memory tool called: {tool_input.command}]") + assistant_content.append( + { + "type": "tool_use", + "id": content.id, + "name": content.name, + "input": content.input, + } + ) + + # Generate tool response automatically + tool_response = runner.generate_tool_call_response() + if tool_response and tool_response["content"]: + # Add tool results to messages + 
messages.append({"role": "user", "content": tool_response["content"]}) + + for result in tool_response["content"]: + if isinstance(result, dict) and result.get("type") == "tool_result": + print(f"[Tool result processed]") + + # Store assistant message + if assistant_content: + messages.append({"role": "assistant", "content": assistant_content}) + + print() + + +if __name__ == "__main__": + conversation_loop() diff --git a/examples/messages.py b/examples/messages.py index 9332777..aebfcd1 100644 --- a/examples/messages.py +++ b/examples/messages.py @@ -10,7 +10,7 @@ "content": "Hello!", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(response) @@ -30,6 +30,6 @@ "content": "How are you?", }, ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) print(response2) diff --git a/examples/messages_stream.py b/examples/messages_stream.py index 3612f8a..19260b0 100755 --- a/examples/messages_stream.py +++ b/examples/messages_stream.py @@ -16,7 +16,7 @@ async def main() -> None: "content": "Say hello there!", } ], - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", ) as stream: async for event in stream: if event.type == "text": diff --git a/examples/text_completions_demo_async.py b/examples/text_completions_demo_async.py index b039370..00bc593 100644 --- a/examples/text_completions_demo_async.py +++ b/examples/text_completions_demo_async.py @@ -10,7 +10,7 @@ async def main() -> None: client = AsyncAnthropic() res = await client.completions.create( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", prompt=f"{anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court? 
{anthropic.AI_PROMPT}", max_tokens_to_sample=1000, ) diff --git a/examples/text_completions_demo_sync.py b/examples/text_completions_demo_sync.py index 1b7578b..63ca8b5 100644 --- a/examples/text_completions_demo_sync.py +++ b/examples/text_completions_demo_sync.py @@ -8,7 +8,7 @@ def main() -> None: client = Anthropic() res = client.completions.create( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", prompt=f"{anthropic.HUMAN_PROMPT} how does a court case get to the Supreme Court? {anthropic.AI_PROMPT}", max_tokens_to_sample=1000, ) diff --git a/examples/text_completions_streaming.py b/examples/text_completions_streaming.py index 152f84e..7b897fc 100644 --- a/examples/text_completions_streaming.py +++ b/examples/text_completions_streaming.py @@ -15,7 +15,7 @@ def sync_stream() -> None: stream = client.completions.create( prompt=f"{HUMAN_PROMPT} {question}{AI_PROMPT}", - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", stream=True, max_tokens_to_sample=300, ) @@ -29,7 +29,7 @@ def sync_stream() -> None: async def async_stream() -> None: stream = await async_client.completions.create( prompt=f"{HUMAN_PROMPT} {question}{AI_PROMPT}", - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", stream=True, max_tokens_to_sample=300, ) diff --git a/examples/thinking.py b/examples/thinking.py index 2cfa60b..ee622e1 100644 --- a/examples/thinking.py +++ b/examples/thinking.py @@ -3,7 +3,7 @@ client = anthropic.Anthropic() response = client.messages.create( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", max_tokens=3200, thinking={"type": "enabled", "budget_tokens": 1600}, messages=[{"role": "user", "content": "Create a haiku about Anthropic."}], diff --git a/examples/thinking_stream.py b/examples/thinking_stream.py index a2728f2..3efb52b 100644 --- a/examples/thinking_stream.py +++ b/examples/thinking_stream.py @@ -3,7 +3,7 @@ client = anthropic.Anthropic() with client.messages.stream( 
- model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", max_tokens=3200, thinking={"type": "enabled", "budget_tokens": 1600}, messages=[{"role": "user", "content": "Create a haiku about Anthropic."}], diff --git a/examples/tools.py b/examples/tools.py index b6ae6a0..bd254c4 100644 --- a/examples/tools.py +++ b/examples/tools.py @@ -21,7 +21,7 @@ ] message = client.messages.create( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", max_tokens=1024, messages=[user_message], tools=tools, @@ -32,7 +32,7 @@ tool = next(c for c in message.content if c.type == "tool_use") response = client.messages.create( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", max_tokens=1024, messages=[ user_message, diff --git a/examples/tools_stream.py b/examples/tools_stream.py index 16cd456..a69837f 100644 --- a/examples/tools_stream.py +++ b/examples/tools_stream.py @@ -8,7 +8,7 @@ async def main() -> None: async with client.messages.stream( max_tokens=1024, - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", tools=[ { "name": "get_weather", diff --git a/examples/web_search.py b/examples/web_search.py index 0d5bfa4..3363c6d 100644 --- a/examples/web_search.py +++ b/examples/web_search.py @@ -6,7 +6,7 @@ # Create a message with web search enabled message = client.messages.create( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", max_tokens=1024, messages=[{"role": "user", "content": "What's the weather in New York?"}], tools=[ diff --git a/examples/web_search_stream.py b/examples/web_search_stream.py index a1a19a2..4d17e80 100644 --- a/examples/web_search_stream.py +++ b/examples/web_search_stream.py @@ -11,7 +11,7 @@ async def main() -> None: # Create an async stream with web search enabled async with client.beta.messages.stream( - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", max_tokens=1024, messages=[{"role": "user", "content": "What's the weather in 
New York?"}], tools=[ diff --git a/pyproject.toml b/pyproject.toml index e14c395..99ef901 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -161,7 +161,7 @@ show_error_codes = true # # We also exclude our `tests` as mypy doesn't always infer # types correctly and Pyright will still catch any type errors. -exclude = ['src/anthropic/_files.py', '_dev/.*.py', 'tests/.*', 'examples/mcp_server_weather.py', 'examples/tools_with_mcp.py'] +exclude = ['src/anthropic/_files.py', '_dev/.*.py', 'tests/.*', 'examples/mcp_server_weather.py', 'examples/tools_with_mcp.py', 'examples/memory/basic.py'] strict_equality = true implicit_reexport = true diff --git a/src/anthropic/lib/streaming/_beta_messages.py b/src/anthropic/lib/streaming/_beta_messages.py index b8f3675..0c371cc 100644 --- a/src/anthropic/lib/streaming/_beta_messages.py +++ b/src/anthropic/lib/streaming/_beta_messages.py @@ -486,6 +486,7 @@ def accumulate_event( current_snapshot.stop_reason = event.delta.stop_reason current_snapshot.stop_sequence = event.delta.stop_sequence current_snapshot.usage.output_tokens = event.usage.output_tokens + current_snapshot.context_management = event.context_management # Update other usage fields if they exist in the event if event.usage.input_tokens is not None: diff --git a/src/anthropic/lib/tools/__init__.py b/src/anthropic/lib/tools/__init__.py index e5aaeaf..6fc93cd 100644 --- a/src/anthropic/lib/tools/__init__.py +++ b/src/anthropic/lib/tools/__init__.py @@ -2,19 +2,26 @@ from ._beta_functions import ( BetaFunctionTool, BetaAsyncFunctionTool, + BetaBuiltinFunctionTool, BetaFunctionToolResultType, + BetaAsyncBuiltinFunctionTool, beta_tool, beta_async_tool, ) +from ._beta_builtin_memory_tool import BetaAbstractMemoryTool, BetaAsyncAbstractMemoryTool __all__ = [ "beta_tool", "beta_async_tool", "BetaFunctionTool", "BetaAsyncFunctionTool", + "BetaBuiltinFunctionTool", + "BetaAsyncBuiltinFunctionTool", "BetaToolRunner", "BetaAsyncStreamingToolRunner", "BetaStreamingToolRunner", 
"BetaAsyncToolRunner", "BetaFunctionToolResultType", + "BetaAbstractMemoryTool", + "BetaAsyncAbstractMemoryTool", ] diff --git a/src/anthropic/lib/tools/_beta_builtin_memory_tool.py b/src/anthropic/lib/tools/_beta_builtin_memory_tool.py new file mode 100644 index 0000000..3a61dc5 --- /dev/null +++ b/src/anthropic/lib/tools/_beta_builtin_memory_tool.py @@ -0,0 +1,245 @@ +from __future__ import annotations + +from abc import abstractmethod +from typing import TYPE_CHECKING, Any, cast +from typing_extensions import override, assert_never + +from ..._models import construct_type_unchecked +from ...types.beta import ( + BetaMemoryTool20250818Param, + BetaMemoryTool20250818Command, + BetaCacheControlEphemeralParam, + BetaMemoryTool20250818ViewCommand, + BetaMemoryTool20250818CreateCommand, + BetaMemoryTool20250818DeleteCommand, + BetaMemoryTool20250818InsertCommand, + BetaMemoryTool20250818RenameCommand, + BetaMemoryTool20250818StrReplaceCommand, +) +from ._beta_functions import BetaBuiltinFunctionTool, BetaFunctionToolResultType, BetaAsyncBuiltinFunctionTool + + +class BetaAbstractMemoryTool(BetaBuiltinFunctionTool): + """Abstract base class for memory tool implementations. + + This class provides the interface for implementing a custom memory backend for Claude. + + Subclass this to create your own memory storage solution (e.g., database, cloud storage, encrypted files, etc.). + + Example usage: + + ```py + class MyMemoryTool(BetaAbstractMemoryTool): + def view(self, command: BetaMemoryTool20250818ViewCommand) -> BetaFunctionToolResultType: + ... + return "view result" + + def create(self, command: BetaMemoryTool20250818CreateCommand) -> BetaFunctionToolResultType: + ... + return "created successfully" + + # ... 
implement other abstract methods + + + client = Anthropic() + memory_tool = MyMemoryTool() + message = client.beta.messages.run_tools( + model="claude-sonnet-4-5", + messages=[{"role": "user", "content": "Remember that I like coffee"}], + tools=[memory_tool], + ).until_done() + ``` + """ + + def __init__(self, *, cache_control: BetaCacheControlEphemeralParam | None = None) -> None: + super().__init__() + self._cache_control = cache_control + + @override + def to_dict(self) -> BetaMemoryTool20250818Param: + param: BetaMemoryTool20250818Param = {"type": "memory_20250818", "name": "memory"} + + if self._cache_control is not None: + param["cache_control"] = self._cache_control + + return param + + @override + def call(self, input: object) -> BetaFunctionToolResultType: + command = cast( + BetaMemoryTool20250818Command, + construct_type_unchecked(value=input, type_=cast(Any, BetaMemoryTool20250818Command)), + ) + return self.execute(command) + + def execute(self, command: BetaMemoryTool20250818Command) -> BetaFunctionToolResultType: + """Execute a memory command and return the result. + + This method dispatches to the appropriate handler method based on the + command type (view, create, str_replace, insert, delete, rename). + + You typically don't need to override this method. 
+ """ + if command.command == "view": + return self.view(command) + elif command.command == "create": + return self.create(command) + elif command.command == "str_replace": + return self.str_replace(command) + elif command.command == "insert": + return self.insert(command) + elif command.command == "delete": + return self.delete(command) + elif command.command == "rename": + return self.rename(command) + elif TYPE_CHECKING: # type: ignore[unreachable] + assert_never(command) + else: + raise NotImplementedError(f"Unknown command: {command.command}") + + @abstractmethod + def view(self, command: BetaMemoryTool20250818ViewCommand) -> BetaFunctionToolResultType: + """View the contents of a memory path.""" + pass + + @abstractmethod + def create(self, command: BetaMemoryTool20250818CreateCommand) -> BetaFunctionToolResultType: + """Create a new memory file with the specified content.""" + pass + + @abstractmethod + def str_replace(self, command: BetaMemoryTool20250818StrReplaceCommand) -> BetaFunctionToolResultType: + """Replace text in a memory file.""" + pass + + @abstractmethod + def insert(self, command: BetaMemoryTool20250818InsertCommand) -> BetaFunctionToolResultType: + """Insert text at a specific line number in a memory file.""" + pass + + @abstractmethod + def delete(self, command: BetaMemoryTool20250818DeleteCommand) -> BetaFunctionToolResultType: + """Delete a memory file or directory.""" + pass + + @abstractmethod + def rename(self, command: BetaMemoryTool20250818RenameCommand) -> BetaFunctionToolResultType: + """Rename or move a memory file or directory.""" + pass + + def clear_all_memory(self) -> BetaFunctionToolResultType: + """Clear all memory data.""" + raise NotImplementedError("clear_all_memory not implemented") + + +class BetaAsyncAbstractMemoryTool(BetaAsyncBuiltinFunctionTool): + """Abstract base class for memory tool implementations. + + This class provides the interface for implementing a custom memory backend for Claude. 
+
+    Subclass this to create your own memory storage solution (e.g., database, cloud storage, encrypted files, etc.).
+
+    Example usage:
+
+    ```py
+    class MyMemoryTool(BetaAsyncAbstractMemoryTool):
+        async def view(self, command: BetaMemoryTool20250818ViewCommand) -> BetaFunctionToolResultType:
+            ...
+            return "view result"
+
+        async def create(self, command: BetaMemoryTool20250818CreateCommand) -> BetaFunctionToolResultType:
+            ...
+            return "created successfully"
+
+        # ... implement other abstract methods
+
+
+    client = AsyncAnthropic()
+    memory_tool = MyMemoryTool()
+    message = await client.beta.messages.run_tools(
+        model="claude-sonnet-4-5",
+        messages=[{"role": "user", "content": "Remember that I like coffee"}],
+        tools=[memory_tool],
+    ).until_done()
+    ```
+    """
+
+    def __init__(self, *, cache_control: BetaCacheControlEphemeralParam | None = None) -> None:
+        super().__init__()
+        self._cache_control = cache_control
+
+    @override
+    def to_dict(self) -> BetaMemoryTool20250818Param:
+        param: BetaMemoryTool20250818Param = {"type": "memory_20250818", "name": "memory"}
+
+        if self._cache_control is not None:
+            param["cache_control"] = self._cache_control
+
+        return param
+
+    @override
+    async def call(self, input: object) -> BetaFunctionToolResultType:
+        command = cast(
+            BetaMemoryTool20250818Command,
+            construct_type_unchecked(value=input, type_=cast(Any, BetaMemoryTool20250818Command)),
+        )
+        return await self.execute(command)
+
+    async def execute(self, command: BetaMemoryTool20250818Command) -> BetaFunctionToolResultType:
+        """Execute a memory command and return the result.
+
+        This method dispatches to the appropriate handler method based on the
+        command type (view, create, str_replace, insert, delete, rename).
+
+        You typically don't need to override this method.
+ """ + if command.command == "view": + return await self.view(command) + elif command.command == "create": + return await self.create(command) + elif command.command == "str_replace": + return await self.str_replace(command) + elif command.command == "insert": + return await self.insert(command) + elif command.command == "delete": + return await self.delete(command) + elif command.command == "rename": + return await self.rename(command) + elif TYPE_CHECKING: # type: ignore[unreachable] + assert_never(command) + else: + raise NotImplementedError(f"Unknown command: {command.command}") + + @abstractmethod + async def view(self, command: BetaMemoryTool20250818ViewCommand) -> BetaFunctionToolResultType: + """View the contents of a memory path.""" + pass + + @abstractmethod + async def create(self, command: BetaMemoryTool20250818CreateCommand) -> BetaFunctionToolResultType: + """Create a new memory file with the specified content.""" + pass + + @abstractmethod + async def str_replace(self, command: BetaMemoryTool20250818StrReplaceCommand) -> BetaFunctionToolResultType: + """Replace text in a memory file.""" + pass + + @abstractmethod + async def insert(self, command: BetaMemoryTool20250818InsertCommand) -> BetaFunctionToolResultType: + """Insert text at a specific line number in a memory file.""" + pass + + @abstractmethod + async def delete(self, command: BetaMemoryTool20250818DeleteCommand) -> BetaFunctionToolResultType: + """Delete a memory file or directory.""" + pass + + @abstractmethod + async def rename(self, command: BetaMemoryTool20250818RenameCommand) -> BetaFunctionToolResultType: + """Rename or move a memory file or directory.""" + pass + + async def clear_all_memory(self) -> BetaFunctionToolResultType: + """Clear all memory data.""" + raise NotImplementedError("clear_all_memory not implemented") diff --git a/src/anthropic/lib/tools/_beta_functions.py b/src/anthropic/lib/tools/_beta_functions.py index 0dc1c67..bc2dfa7 100644 --- 
a/src/anthropic/lib/tools/_beta_functions.py +++ b/src/anthropic/lib/tools/_beta_functions.py @@ -1,6 +1,7 @@ from __future__ import annotations import logging +from abc import ABC, abstractmethod from typing import Any, Union, Generic, TypeVar, Callable, Iterable, Coroutine, cast, overload from inspect import iscoroutinefunction from typing_extensions import TypeAlias, override @@ -13,6 +14,7 @@ from ..._utils import is_dict from ..._compat import cached_property from ..._models import TypeAdapter +from ...types.beta import BetaToolUnionParam from ..._utils._utils import CallableT from ...types.tool_param import ToolParam, InputSchema from ...types.beta.beta_tool_result_block_param import Content as BetaContent @@ -28,6 +30,30 @@ AsyncFunctionT = TypeVar("AsyncFunctionT", bound=AsyncFunction) +class BetaBuiltinFunctionTool(ABC): + @abstractmethod + def to_dict(self) -> BetaToolUnionParam: ... + + @abstractmethod + def call(self, input: object) -> BetaFunctionToolResultType: ... + + @property + def name(self) -> str: + return self.to_dict()["name"] + + +class BetaAsyncBuiltinFunctionTool(ABC): + @abstractmethod + def to_dict(self) -> BetaToolUnionParam: ... + + @abstractmethod + async def call(self, input: object) -> BetaFunctionToolResultType: ... 
+ + @property + def name(self) -> str: + return self.to_dict()["name"] + + class BaseFunctionTool(Generic[CallableT]): func: CallableT """The function this tool is wrapping""" @@ -287,3 +313,7 @@ def decorator(func: AsyncFunctionT) -> BetaAsyncFunctionTool[AsyncFunctionT]: ) return decorator + + +BetaRunnableTool = Union[BetaFunctionTool[Any], BetaBuiltinFunctionTool] +BetaAsyncRunnableTool = Union[BetaAsyncFunctionTool[Any], BetaAsyncBuiltinFunctionTool] diff --git a/src/anthropic/lib/tools/_beta_runner.py b/src/anthropic/lib/tools/_beta_runner.py index 2de1b8f..f466c15 100644 --- a/src/anthropic/lib/tools/_beta_runner.py +++ b/src/anthropic/lib/tools/_beta_runner.py @@ -22,7 +22,14 @@ from ..._types import Body, Query, Headers, NotGiven from ..._utils import consume_sync_iterator, consume_async_iterator from ...types.beta import BetaMessage, BetaContentBlock, BetaMessageParam -from ._beta_functions import BetaFunctionTool, BetaAsyncFunctionTool +from ._beta_functions import ( + BetaFunctionTool, + BetaRunnableTool, + BetaAsyncFunctionTool, + BetaAsyncRunnableTool, + BetaBuiltinFunctionTool, + BetaAsyncBuiltinFunctionTool, +) from ..streaming._beta_messages import BetaMessageStream, BetaAsyncMessageStream from ...types.beta.message_create_params import MessageCreateParamsBase from ...types.beta.beta_tool_result_block_param import BetaToolResultBlockParam @@ -31,7 +38,12 @@ from ..._client import Anthropic, AsyncAnthropic -AnyFunctionToolT = TypeVar("AnyFunctionToolT", bound=Union[BetaFunctionTool[Any], BetaAsyncFunctionTool[Any]]) +AnyFunctionToolT = TypeVar( + "AnyFunctionToolT", + bound=Union[ + BetaFunctionTool[Any], BetaAsyncFunctionTool[Any], BetaBuiltinFunctionTool, BetaAsyncBuiltinFunctionTool + ], +) RunnerItemT = TypeVar("RunnerItemT") log = logging.getLogger(__name__) @@ -97,13 +109,13 @@ def _should_stop(self) -> bool: return False -class BaseSyncToolRunner(BaseToolRunner[BetaFunctionTool[Any]], Generic[RunnerItemT], ABC): +class 
BaseSyncToolRunner(BaseToolRunner[BetaRunnableTool], Generic[RunnerItemT], ABC): def __init__( self, *, params: MessageCreateParamsBase, options: RequestOptions, - tools: Iterable[BetaFunctionTool[Any]], + tools: Iterable[BetaRunnableTool], client: Anthropic, max_iterations: int | None = None, ) -> None: @@ -250,13 +262,13 @@ def __run__(self) -> Iterator[BetaMessageStream]: message = stream.get_final_message() -class BaseAsyncToolRunner(BaseToolRunner[BetaAsyncFunctionTool[Any]], Generic[RunnerItemT], ABC): +class BaseAsyncToolRunner(BaseToolRunner[BetaAsyncRunnableTool], Generic[RunnerItemT], ABC): def __init__( self, *, params: MessageCreateParamsBase, options: RequestOptions, - tools: Iterable[BetaAsyncFunctionTool[Any]], + tools: Iterable[BetaAsyncRunnableTool], client: AsyncAnthropic, max_iterations: int | None = None, ) -> None: diff --git a/src/anthropic/resources/beta/messages/messages.py b/src/anthropic/resources/beta/messages/messages.py index ffbc2ec..4b53ccd 100644 --- a/src/anthropic/resources/beta/messages/messages.py +++ b/src/anthropic/resources/beta/messages/messages.py @@ -3,7 +3,7 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, List, Union, Iterable, Optional, cast +from typing import TYPE_CHECKING, List, Union, Iterable, Optional, cast from functools import partial from itertools import chain from typing_extensions import Literal, overload @@ -26,9 +26,7 @@ from ...._response import to_streamed_response_wrapper, async_to_streamed_response_wrapper from ....lib.tools import ( BetaToolRunner, - BetaFunctionTool, BetaAsyncToolRunner, - BetaAsyncFunctionTool, BetaStreamingToolRunner, BetaAsyncStreamingToolRunner, ) @@ -44,6 +42,7 @@ from ...messages.messages import DEPRECATED_MODELS from ....types.model_param import ModelParam from ....types.beta.beta_message import BetaMessage +from ....lib.tools._beta_functions import BetaRunnableTool, BetaAsyncRunnableTool from ....types.anthropic_beta_param import 
AnthropicBetaParam from ....types.beta.beta_message_param import BetaMessageParam from ....types.beta.beta_metadata_param import BetaMetadataParam @@ -53,6 +52,7 @@ from ....types.beta.beta_message_tokens_count import BetaMessageTokensCount from ....types.beta.beta_thinking_config_param import BetaThinkingConfigParam from ....types.beta.beta_raw_message_stream_event import BetaRawMessageStreamEvent +from ....types.beta.beta_context_management_config_param import BetaContextManagementConfigParam from ....types.beta.beta_request_mcp_server_url_definition_param import BetaRequestMCPServerURLDefinitionParam if TYPE_CHECKING: @@ -93,6 +93,7 @@ def create( messages: Iterable[BetaMessageParam], model: ModelParam, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -203,6 +204,8 @@ def create( container: Container identifier for reuse across requests. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request metadata: An object describing metadata about the request. @@ -371,6 +374,7 @@ def create( model: ModelParam, stream: Literal[True], container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -485,6 +489,8 @@ def create( container: Container identifier for reuse across requests. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request metadata: An object describing metadata about the request. 
@@ -648,6 +654,7 @@ def create( model: ModelParam, stream: bool, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -762,6 +769,8 @@ def create( container: Container identifier for reuse across requests. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request metadata: An object describing metadata about the request. @@ -924,6 +933,7 @@ def create( messages: Iterable[BetaMessageParam], model: ModelParam, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -968,6 +978,7 @@ def create( "messages": messages, "model": model, "container": container, + "context_management": context_management, "mcp_servers": mcp_servers, "metadata": metadata, "service_tier": service_tier, @@ -1000,9 +1011,10 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaFunctionTool[Any]], + tools: Iterable[BetaRunnableTool], max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -1030,10 +1042,11 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaFunctionTool[Any]], + tools: Iterable[BetaRunnableTool], stream: 
Literal[True], max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -1060,10 +1073,11 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaFunctionTool[Any]], + tools: Iterable[BetaRunnableTool], stream: bool, max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -1089,9 +1103,10 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaFunctionTool[Any]], + tools: Iterable[BetaRunnableTool], max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -1132,6 +1147,7 @@ def tool_runner( "messages": messages, "model": model, "container": container, + "context_management": context_management, "mcp_servers": mcp_servers, "metadata": metadata, "service_tier": service_tier, @@ -1179,6 +1195,7 @@ def stream( messages: Iterable[BetaMessageParam], model: ModelParam, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: 
Literal["auto", "standard_only"] | Omit = omit, @@ -1221,6 +1238,7 @@ def stream( "model": model, "metadata": metadata, "container": container, + "context_management": context_management, "mcp_servers": mcp_servers, "service_tier": service_tier, "stop_sequences": stop_sequences, @@ -1249,6 +1267,7 @@ def count_tokens( *, messages: Iterable[BetaMessageParam], model: ModelParam, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, system: Union[str, Iterable[BetaTextBlockParam]] | Omit = omit, thinking: BetaThinkingConfigParam | Omit = omit, @@ -1342,6 +1361,8 @@ def count_tokens( [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request system: System prompt. @@ -1466,6 +1487,7 @@ def count_tokens( { "messages": messages, "model": model, + "context_management": context_management, "mcp_servers": mcp_servers, "system": system, "thinking": thinking, @@ -1513,6 +1535,7 @@ async def create( messages: Iterable[BetaMessageParam], model: ModelParam, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -1623,6 +1646,8 @@ async def create( container: Container identifier for reuse across requests. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request metadata: An object describing metadata about the request. 
@@ -1791,6 +1816,7 @@ async def create( model: ModelParam, stream: Literal[True], container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -1905,6 +1931,8 @@ async def create( container: Container identifier for reuse across requests. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request metadata: An object describing metadata about the request. @@ -2068,6 +2096,7 @@ async def create( model: ModelParam, stream: bool, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -2182,6 +2211,8 @@ async def create( container: Container identifier for reuse across requests. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request metadata: An object describing metadata about the request. 
@@ -2344,6 +2375,7 @@ async def create( messages: Iterable[BetaMessageParam], model: ModelParam, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -2388,6 +2420,7 @@ async def create( "messages": messages, "model": model, "container": container, + "context_management": context_management, "mcp_servers": mcp_servers, "metadata": metadata, "service_tier": service_tier, @@ -2420,9 +2453,10 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaAsyncFunctionTool[Any]], + tools: Iterable[BetaAsyncRunnableTool], max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -2450,10 +2484,11 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaAsyncFunctionTool[Any]], + tools: Iterable[BetaAsyncRunnableTool], stream: Literal[True], max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -2480,10 +2515,11 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaAsyncFunctionTool[Any]], + tools: Iterable[BetaAsyncRunnableTool], stream: bool, max_iterations: int | Omit = omit, container: 
Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -2509,9 +2545,10 @@ def tool_runner( max_tokens: int, messages: Iterable[BetaMessageParam], model: ModelParam, - tools: Iterable[BetaAsyncFunctionTool[Any]], + tools: Iterable[BetaAsyncRunnableTool], max_iterations: int | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, metadata: BetaMetadataParam | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, @@ -2552,6 +2589,7 @@ def tool_runner( "messages": messages, "model": model, "container": container, + "context_management": context_management, "mcp_servers": mcp_servers, "metadata": metadata, "service_tier": service_tier, @@ -2600,6 +2638,7 @@ def stream( model: ModelParam, metadata: BetaMetadataParam | Omit = omit, container: Optional[str] | Omit = omit, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, service_tier: Literal["auto", "standard_only"] | Omit = omit, stop_sequences: SequenceNotStr[str] | Omit = omit, @@ -2639,6 +2678,7 @@ def stream( "model": model, "metadata": metadata, "container": container, + "context_management": context_management, "mcp_servers": mcp_servers, "service_tier": service_tier, "stop_sequences": stop_sequences, @@ -2667,6 +2707,7 @@ async def count_tokens( *, messages: Iterable[BetaMessageParam], model: ModelParam, + context_management: Optional[BetaContextManagementConfigParam] | Omit = omit, mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] | Omit = omit, system: Union[str, 
Iterable[BetaTextBlockParam]] | Omit = omit, thinking: BetaThinkingConfigParam | Omit = omit, @@ -2760,6 +2801,8 @@ async def count_tokens( [models](https://docs.anthropic.com/en/docs/models-overview) for additional details and options. + context_management: Configuration for context management operations. + mcp_servers: MCP servers to be utilized in this request system: System prompt. @@ -2884,6 +2927,7 @@ async def count_tokens( { "messages": messages, "model": model, + "context_management": context_management, "mcp_servers": mcp_servers, "system": system, "thinking": thinking, diff --git a/src/anthropic/types/beta/__init__.py b/src/anthropic/types/beta/__init__.py index 52ee4e8..e3a5ca9 100644 --- a/src/anthropic/types/beta/__init__.py +++ b/src/anthropic/types/beta/__init__.py @@ -44,6 +44,7 @@ from .beta_message_tokens_count import BetaMessageTokensCount as BetaMessageTokensCount from .beta_thinking_block_param import BetaThinkingBlockParam as BetaThinkingBlockParam from .beta_tool_use_block_param import BetaToolUseBlockParam as BetaToolUseBlockParam +from .beta_tool_uses_keep_param import BetaToolUsesKeepParam as BetaToolUsesKeepParam from .beta_url_pdf_source_param import BetaURLPDFSourceParam as BetaURLPDFSourceParam from .beta_mcp_tool_result_block import BetaMCPToolResultBlock as BetaMCPToolResultBlock from .beta_server_tool_use_block import BetaServerToolUseBlock as BetaServerToolUseBlock @@ -69,6 +70,7 @@ from .beta_raw_message_start_event import BetaRawMessageStartEvent as BetaRawMessageStartEvent from .beta_redacted_thinking_block import BetaRedactedThinkingBlock as BetaRedactedThinkingBlock from .beta_tool_result_block_param import BetaToolResultBlockParam as BetaToolResultBlockParam +from .beta_tool_uses_trigger_param import BetaToolUsesTriggerParam as BetaToolUsesTriggerParam from .beta_web_search_result_block import BetaWebSearchResultBlock as BetaWebSearchResultBlock from .beta_mcp_tool_use_block_param import BetaMCPToolUseBlockParam as 
BetaMCPToolUseBlockParam from .beta_raw_message_stream_event import BetaRawMessageStreamEvent as BetaRawMessageStreamEvent @@ -78,13 +80,17 @@ from .beta_search_result_block_param import BetaSearchResultBlockParam as BetaSearchResultBlockParam from .beta_content_block_source_param import BetaContentBlockSourceParam as BetaContentBlockSourceParam from .beta_file_document_source_param import BetaFileDocumentSourceParam as BetaFileDocumentSourceParam +from .beta_input_tokens_trigger_param import BetaInputTokensTriggerParam as BetaInputTokensTriggerParam +from .beta_memory_tool_20250818_param import BetaMemoryTool20250818Param as BetaMemoryTool20250818Param from .beta_code_execution_output_block import BetaCodeExecutionOutputBlock as BetaCodeExecutionOutputBlock from .beta_code_execution_result_block import BetaCodeExecutionResultBlock as BetaCodeExecutionResultBlock +from .beta_context_management_response import BetaContextManagementResponse as BetaContextManagementResponse from .beta_server_tool_use_block_param import BetaServerToolUseBlockParam as BetaServerToolUseBlockParam from .beta_web_fetch_tool_result_block import BetaWebFetchToolResultBlock as BetaWebFetchToolResultBlock from .beta_citation_char_location_param import BetaCitationCharLocationParam as BetaCitationCharLocationParam from .beta_citation_page_location_param import BetaCitationPageLocationParam as BetaCitationPageLocationParam from .beta_container_upload_block_param import BetaContainerUploadBlockParam as BetaContainerUploadBlockParam +from .beta_memory_tool_20250818_command import BetaMemoryTool20250818Command as BetaMemoryTool20250818Command from .beta_raw_content_block_stop_event import BetaRawContentBlockStopEvent as BetaRawContentBlockStopEvent from .beta_request_document_block_param import BetaRequestDocumentBlockParam as BetaRequestDocumentBlockParam from .beta_web_search_tool_result_block import BetaWebSearchToolResultBlock as BetaWebSearchToolResultBlock @@ -100,6 +106,7 @@ from 
.beta_web_search_tool_20250305_param import BetaWebSearchTool20250305Param as BetaWebSearchTool20250305Param from .beta_citation_content_block_location import BetaCitationContentBlockLocation as BetaCitationContentBlockLocation from .beta_citation_search_result_location import BetaCitationSearchResultLocation as BetaCitationSearchResultLocation +from .beta_context_management_config_param import BetaContextManagementConfigParam as BetaContextManagementConfigParam from .beta_tool_text_editor_20241022_param import BetaToolTextEditor20241022Param as BetaToolTextEditor20241022Param from .beta_tool_text_editor_20250124_param import BetaToolTextEditor20250124Param as BetaToolTextEditor20250124Param from .beta_tool_text_editor_20250429_param import BetaToolTextEditor20250429Param as BetaToolTextEditor20250429Param @@ -117,6 +124,10 @@ from .beta_code_execution_result_block_param import ( BetaCodeExecutionResultBlockParam as BetaCodeExecutionResultBlockParam, ) +from .beta_input_tokens_clear_at_least_param import BetaInputTokensClearAtLeastParam as BetaInputTokensClearAtLeastParam +from .beta_memory_tool_20250818_view_command import ( + BetaMemoryTool20250818ViewCommand as BetaMemoryTool20250818ViewCommand, +) from .beta_web_fetch_tool_result_block_param import BetaWebFetchToolResultBlockParam as BetaWebFetchToolResultBlockParam from .beta_web_fetch_tool_result_error_block import BetaWebFetchToolResultErrorBlock as BetaWebFetchToolResultErrorBlock from .beta_web_search_tool_result_error_code import BetaWebSearchToolResultErrorCode as BetaWebSearchToolResultErrorCode @@ -132,6 +143,21 @@ from .beta_web_search_tool_result_block_param import ( BetaWebSearchToolResultBlockParam as BetaWebSearchToolResultBlockParam, ) +from .beta_clear_tool_uses_20250919_edit_param import ( + BetaClearToolUses20250919EditParam as BetaClearToolUses20250919EditParam, +) +from .beta_memory_tool_20250818_create_command import ( + BetaMemoryTool20250818CreateCommand as 
BetaMemoryTool20250818CreateCommand, +) +from .beta_memory_tool_20250818_delete_command import ( + BetaMemoryTool20250818DeleteCommand as BetaMemoryTool20250818DeleteCommand, +) +from .beta_memory_tool_20250818_insert_command import ( + BetaMemoryTool20250818InsertCommand as BetaMemoryTool20250818InsertCommand, +) +from .beta_memory_tool_20250818_rename_command import ( + BetaMemoryTool20250818RenameCommand as BetaMemoryTool20250818RenameCommand, +) from .beta_request_mcp_tool_result_block_param import ( BetaRequestMCPToolResultBlockParam as BetaRequestMCPToolResultBlockParam, ) @@ -165,6 +191,9 @@ from .beta_bash_code_execution_result_block_param import ( BetaBashCodeExecutionResultBlockParam as BetaBashCodeExecutionResultBlockParam, ) +from .beta_clear_tool_uses_20250919_edit_response import ( + BetaClearToolUses20250919EditResponse as BetaClearToolUses20250919EditResponse, +) from .beta_code_execution_tool_result_block_param import ( BetaCodeExecutionToolResultBlockParam as BetaCodeExecutionToolResultBlockParam, ) @@ -180,6 +209,12 @@ from .beta_code_execution_tool_result_block_content import ( BetaCodeExecutionToolResultBlockContent as BetaCodeExecutionToolResultBlockContent, ) +from .beta_count_tokens_context_management_response import ( + BetaCountTokensContextManagementResponse as BetaCountTokensContextManagementResponse, +) +from .beta_memory_tool_20250818_str_replace_command import ( + BetaMemoryTool20250818StrReplaceCommand as BetaMemoryTool20250818StrReplaceCommand, +) from .beta_citation_web_search_result_location_param import ( BetaCitationWebSearchResultLocationParam as BetaCitationWebSearchResultLocationParam, ) diff --git a/src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_param.py b/src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_param.py new file mode 100644 index 0000000..64b901f --- /dev/null +++ b/src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_param.py @@ -0,0 +1,38 @@ +# File generated from our OpenAPI spec 
by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Union, Optional +from typing_extensions import Literal, Required, TypeAlias, TypedDict + +from ..._types import SequenceNotStr +from .beta_tool_uses_keep_param import BetaToolUsesKeepParam +from .beta_tool_uses_trigger_param import BetaToolUsesTriggerParam +from .beta_input_tokens_trigger_param import BetaInputTokensTriggerParam +from .beta_input_tokens_clear_at_least_param import BetaInputTokensClearAtLeastParam + +__all__ = ["BetaClearToolUses20250919EditParam", "Trigger"] + +Trigger: TypeAlias = Union[BetaInputTokensTriggerParam, BetaToolUsesTriggerParam] + + +class BetaClearToolUses20250919EditParam(TypedDict, total=False): + type: Required[Literal["clear_tool_uses_20250919"]] + + clear_at_least: Optional[BetaInputTokensClearAtLeastParam] + """Minimum number of tokens that must be cleared when triggered. + + Context will only be modified if at least this many tokens can be removed. + """ + + clear_tool_inputs: Union[bool, SequenceNotStr[str], None] + """Whether to clear all tool inputs (bool) or specific tool inputs to clear (list)""" + + exclude_tools: Optional[SequenceNotStr[str]] + """Tool names whose uses are preserved from clearing""" + + keep: BetaToolUsesKeepParam + """Number of tool uses to retain in the conversation""" + + trigger: Trigger + """Condition that triggers the context management strategy""" diff --git a/src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_response.py b/src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_response.py new file mode 100644 index 0000000..42a7dfa --- /dev/null +++ b/src/anthropic/types/beta/beta_clear_tool_uses_20250919_edit_response.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaClearToolUses20250919EditResponse"] + + +class BetaClearToolUses20250919EditResponse(BaseModel): + cleared_input_tokens: int + """Number of input tokens cleared by this edit.""" + + cleared_tool_uses: int + """Number of tool uses that were cleared.""" + + type: Literal["clear_tool_uses_20250919"] + """The type of context management edit applied.""" diff --git a/src/anthropic/types/beta/beta_context_management_config_param.py b/src/anthropic/types/beta/beta_context_management_config_param.py new file mode 100644 index 0000000..ba3d354 --- /dev/null +++ b/src/anthropic/types/beta/beta_context_management_config_param.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Iterable +from typing_extensions import TypedDict + +from .beta_clear_tool_uses_20250919_edit_param import BetaClearToolUses20250919EditParam + +__all__ = ["BetaContextManagementConfigParam"] + + +class BetaContextManagementConfigParam(TypedDict, total=False): + edits: Iterable[BetaClearToolUses20250919EditParam] + """List of context management edits to apply""" diff --git a/src/anthropic/types/beta/beta_context_management_response.py b/src/anthropic/types/beta/beta_context_management_response.py new file mode 100644 index 0000000..57c9c4d --- /dev/null +++ b/src/anthropic/types/beta/beta_context_management_response.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List + +from ..._models import BaseModel +from .beta_clear_tool_uses_20250919_edit_response import BetaClearToolUses20250919EditResponse + +__all__ = ["BetaContextManagementResponse"] + + +class BetaContextManagementResponse(BaseModel): + applied_edits: List[BetaClearToolUses20250919EditResponse] + """List of context management edits that were applied.""" diff --git a/src/anthropic/types/beta/beta_count_tokens_context_management_response.py b/src/anthropic/types/beta/beta_count_tokens_context_management_response.py new file mode 100644 index 0000000..15dd44f --- /dev/null +++ b/src/anthropic/types/beta/beta_count_tokens_context_management_response.py @@ -0,0 +1,10 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from ..._models import BaseModel + +__all__ = ["BetaCountTokensContextManagementResponse"] + + +class BetaCountTokensContextManagementResponse(BaseModel): + original_input_tokens: int + """The original token count before context management was applied""" diff --git a/src/anthropic/types/beta/beta_input_tokens_clear_at_least_param.py b/src/anthropic/types/beta/beta_input_tokens_clear_at_least_param.py new file mode 100644 index 0000000..e3a137b --- /dev/null +++ b/src/anthropic/types/beta/beta_input_tokens_clear_at_least_param.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["BetaInputTokensClearAtLeastParam"] + + +class BetaInputTokensClearAtLeastParam(TypedDict, total=False): + type: Required[Literal["input_tokens"]] + + value: Required[int] diff --git a/src/anthropic/types/beta/beta_input_tokens_trigger_param.py b/src/anthropic/types/beta/beta_input_tokens_trigger_param.py new file mode 100644 index 0000000..1d5f15c --- /dev/null +++ b/src/anthropic/types/beta/beta_input_tokens_trigger_param.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["BetaInputTokensTriggerParam"] + + +class BetaInputTokensTriggerParam(TypedDict, total=False): + type: Required[Literal["input_tokens"]] + + value: Required[int] diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_command.py new file mode 100644 index 0000000..7ce68f8 --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_command.py @@ -0,0 +1,26 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import Union +from typing_extensions import Annotated, TypeAlias + +from ..._utils import PropertyInfo +from .beta_memory_tool_20250818_view_command import BetaMemoryTool20250818ViewCommand +from .beta_memory_tool_20250818_create_command import BetaMemoryTool20250818CreateCommand +from .beta_memory_tool_20250818_delete_command import BetaMemoryTool20250818DeleteCommand +from .beta_memory_tool_20250818_insert_command import BetaMemoryTool20250818InsertCommand +from .beta_memory_tool_20250818_rename_command import BetaMemoryTool20250818RenameCommand +from .beta_memory_tool_20250818_str_replace_command import BetaMemoryTool20250818StrReplaceCommand + +__all__ = ["BetaMemoryTool20250818Command"] + +BetaMemoryTool20250818Command: TypeAlias = Annotated[ + Union[ + BetaMemoryTool20250818ViewCommand, + BetaMemoryTool20250818CreateCommand, + BetaMemoryTool20250818StrReplaceCommand, + BetaMemoryTool20250818InsertCommand, + BetaMemoryTool20250818DeleteCommand, + BetaMemoryTool20250818RenameCommand, + ], + PropertyInfo(discriminator="command"), +] diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_create_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_create_command.py new file mode 100644 index 0000000..bd51cdd --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_create_command.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaMemoryTool20250818CreateCommand"] + + +class BetaMemoryTool20250818CreateCommand(BaseModel): + command: Literal["create"] + """Command type identifier""" + + file_text: str + """Content to write to the file""" + + path: str + """Path where the file should be created""" diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_delete_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_delete_command.py new file mode 100644 index 0000000..044d932 --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_delete_command.py @@ -0,0 +1,15 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaMemoryTool20250818DeleteCommand"] + + +class BetaMemoryTool20250818DeleteCommand(BaseModel): + command: Literal["delete"] + """Command type identifier""" + + path: str + """Path to the file or directory to delete""" diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_insert_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_insert_command.py new file mode 100644 index 0000000..1970dba --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_insert_command.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaMemoryTool20250818InsertCommand"] + + +class BetaMemoryTool20250818InsertCommand(BaseModel): + command: Literal["insert"] + """Command type identifier""" + + insert_line: int + """Line number where text should be inserted""" + + insert_text: str + """Text to insert at the specified line""" + + path: str + """Path to the file where text should be inserted""" diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_param.py b/src/anthropic/types/beta/beta_memory_tool_20250818_param.py new file mode 100644 index 0000000..67f0f20 --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_param.py @@ -0,0 +1,23 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing import Optional +from typing_extensions import Literal, Required, TypedDict + +from .beta_cache_control_ephemeral_param import BetaCacheControlEphemeralParam + +__all__ = ["BetaMemoryTool20250818Param"] + + +class BetaMemoryTool20250818Param(TypedDict, total=False): + name: Required[Literal["memory"]] + """Name of the tool. + + This is how the tool will be called by the model and in `tool_use` blocks. + """ + + type: Required[Literal["memory_20250818"]] + + cache_control: Optional[BetaCacheControlEphemeralParam] + """Create a cache control breakpoint at this content block.""" diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_rename_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_rename_command.py new file mode 100644 index 0000000..46e27d8 --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_rename_command.py @@ -0,0 +1,18 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaMemoryTool20250818RenameCommand"] + + +class BetaMemoryTool20250818RenameCommand(BaseModel): + command: Literal["rename"] + """Command type identifier""" + + new_path: str + """New path for the file or directory""" + + old_path: str + """Current path of the file or directory""" diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_str_replace_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_str_replace_command.py new file mode 100644 index 0000000..1d018b1 --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_str_replace_command.py @@ -0,0 +1,21 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaMemoryTool20250818StrReplaceCommand"] + + +class BetaMemoryTool20250818StrReplaceCommand(BaseModel): + command: Literal["str_replace"] + """Command type identifier""" + + new_str: str + """Text to replace with""" + + old_str: str + """Text to search for and replace""" + + path: str + """Path to the file where text should be replaced""" diff --git a/src/anthropic/types/beta/beta_memory_tool_20250818_view_command.py b/src/anthropic/types/beta/beta_memory_tool_20250818_view_command.py new file mode 100644 index 0000000..8d540bc --- /dev/null +++ b/src/anthropic/types/beta/beta_memory_tool_20250818_view_command.py @@ -0,0 +1,19 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from typing import List, Optional +from typing_extensions import Literal + +from ..._models import BaseModel + +__all__ = ["BetaMemoryTool20250818ViewCommand"] + + +class BetaMemoryTool20250818ViewCommand(BaseModel): + command: Literal["view"] + """Command type identifier""" + + path: str + """Path to directory or file to view""" + + view_range: Optional[List[int]] = None + """Optional line range for viewing specific lines""" diff --git a/src/anthropic/types/beta/beta_message.py b/src/anthropic/types/beta/beta_message.py index 9fb6236..ea62223 100644 --- a/src/anthropic/types/beta/beta_message.py +++ b/src/anthropic/types/beta/beta_message.py @@ -9,6 +9,7 @@ from .beta_container import BetaContainer from .beta_stop_reason import BetaStopReason from .beta_content_block import BetaContentBlock, BetaContentBlock as BetaContentBlock +from .beta_context_management_response import BetaContextManagementResponse __all__ = ["BetaMessage"] @@ -61,6 +62,9 @@ class BetaMessage(BaseModel): ``` """ + context_management: Optional[BetaContextManagementResponse] = None + """Information about context management operations applied during the request.""" + model: Model """ The model that will complete your prompt.\n\nSee diff --git a/src/anthropic/types/beta/beta_message_tokens_count.py b/src/anthropic/types/beta/beta_message_tokens_count.py index 8effd45..6a27a2b 100644 --- a/src/anthropic/types/beta/beta_message_tokens_count.py +++ b/src/anthropic/types/beta/beta_message_tokens_count.py @@ -1,11 +1,17 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+from typing import Optional + from ..._models import BaseModel +from .beta_count_tokens_context_management_response import BetaCountTokensContextManagementResponse __all__ = ["BetaMessageTokensCount"] class BetaMessageTokensCount(BaseModel): + context_management: Optional[BetaCountTokensContextManagementResponse] = None + """Information about context management applied to the message.""" + input_tokens: int """ The total number of tokens across the provided list of messages, system prompt, diff --git a/src/anthropic/types/beta/beta_raw_message_delta_event.py b/src/anthropic/types/beta/beta_raw_message_delta_event.py index 666f159..a4ef718 100644 --- a/src/anthropic/types/beta/beta_raw_message_delta_event.py +++ b/src/anthropic/types/beta/beta_raw_message_delta_event.py @@ -7,6 +7,7 @@ from .beta_container import BetaContainer from .beta_stop_reason import BetaStopReason from .beta_message_delta_usage import BetaMessageDeltaUsage +from .beta_context_management_response import BetaContextManagementResponse __all__ = ["BetaRawMessageDeltaEvent", "Delta"] @@ -24,6 +25,9 @@ class Delta(BaseModel): class BetaRawMessageDeltaEvent(BaseModel): + context_management: Optional[BetaContextManagementResponse] = None + """Information about context management operations applied during the request.""" + delta: Delta type: Literal["message_delta"] diff --git a/src/anthropic/types/beta/beta_stop_reason.py b/src/anthropic/types/beta/beta_stop_reason.py index f9eff6e..14baeed 100644 --- a/src/anthropic/types/beta/beta_stop_reason.py +++ b/src/anthropic/types/beta/beta_stop_reason.py @@ -4,4 +4,6 @@ __all__ = ["BetaStopReason"] -BetaStopReason: TypeAlias = Literal["end_turn", "max_tokens", "stop_sequence", "tool_use", "pause_turn", "refusal"] +BetaStopReason: TypeAlias = Literal[ + "end_turn", "max_tokens", "stop_sequence", "tool_use", "pause_turn", "refusal", "model_context_window_exceeded" +] diff --git a/src/anthropic/types/beta/beta_tool_union_param.py 
b/src/anthropic/types/beta/beta_tool_union_param.py index 41eea83..e3590d9 100644 --- a/src/anthropic/types/beta/beta_tool_union_param.py +++ b/src/anthropic/types/beta/beta_tool_union_param.py @@ -8,6 +8,7 @@ from .beta_tool_param import BetaToolParam from .beta_tool_bash_20241022_param import BetaToolBash20241022Param from .beta_tool_bash_20250124_param import BetaToolBash20250124Param +from .beta_memory_tool_20250818_param import BetaMemoryTool20250818Param from .beta_web_fetch_tool_20250910_param import BetaWebFetchTool20250910Param from .beta_web_search_tool_20250305_param import BetaWebSearchTool20250305Param from .beta_tool_text_editor_20241022_param import BetaToolTextEditor20241022Param @@ -28,6 +29,7 @@ BetaCodeExecutionTool20250522Param, BetaCodeExecutionTool20250825Param, BetaToolComputerUse20241022Param, + BetaMemoryTool20250818Param, BetaToolComputerUse20250124Param, BetaToolTextEditor20241022Param, BetaToolTextEditor20250124Param, diff --git a/src/anthropic/types/beta/beta_tool_uses_keep_param.py b/src/anthropic/types/beta/beta_tool_uses_keep_param.py new file mode 100644 index 0000000..3c67ea6 --- /dev/null +++ b/src/anthropic/types/beta/beta_tool_uses_keep_param.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. + +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["BetaToolUsesKeepParam"] + + +class BetaToolUsesKeepParam(TypedDict, total=False): + type: Required[Literal["tool_uses"]] + + value: Required[int] diff --git a/src/anthropic/types/beta/beta_tool_uses_trigger_param.py b/src/anthropic/types/beta/beta_tool_uses_trigger_param.py new file mode 100644 index 0000000..15eafd4 --- /dev/null +++ b/src/anthropic/types/beta/beta_tool_uses_trigger_param.py @@ -0,0 +1,13 @@ +# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
+ +from __future__ import annotations + +from typing_extensions import Literal, Required, TypedDict + +__all__ = ["BetaToolUsesTriggerParam"] + + +class BetaToolUsesTriggerParam(TypedDict, total=False): + type: Required[Literal["tool_uses"]] + + value: Required[int] diff --git a/src/anthropic/types/beta/message_count_tokens_params.py b/src/anthropic/types/beta/message_count_tokens_params.py index 02a1006..0d09629 100644 --- a/src/anthropic/types/beta/message_count_tokens_params.py +++ b/src/anthropic/types/beta/message_count_tokens_params.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import List, Union, Iterable +from typing import List, Union, Iterable, Optional from typing_extensions import Required, Annotated, TypeAlias, TypedDict from ..._utils import PropertyInfo @@ -15,8 +15,10 @@ from .beta_thinking_config_param import BetaThinkingConfigParam from .beta_tool_bash_20241022_param import BetaToolBash20241022Param from .beta_tool_bash_20250124_param import BetaToolBash20250124Param +from .beta_memory_tool_20250818_param import BetaMemoryTool20250818Param from .beta_web_fetch_tool_20250910_param import BetaWebFetchTool20250910Param from .beta_web_search_tool_20250305_param import BetaWebSearchTool20250305Param +from .beta_context_management_config_param import BetaContextManagementConfigParam from .beta_tool_text_editor_20241022_param import BetaToolTextEditor20241022Param from .beta_tool_text_editor_20250124_param import BetaToolTextEditor20250124Param from .beta_tool_text_editor_20250429_param import BetaToolTextEditor20250429Param @@ -106,6 +108,9 @@ class MessageCountTokensParams(TypedDict, total=False): details and options. 
""" + context_management: Optional[BetaContextManagementConfigParam] + """Configuration for context management operations.""" + mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] """MCP servers to be utilized in this request""" @@ -225,6 +230,7 @@ class MessageCountTokensParams(TypedDict, total=False): BetaCodeExecutionTool20250522Param, BetaCodeExecutionTool20250825Param, BetaToolComputerUse20241022Param, + BetaMemoryTool20250818Param, BetaToolComputerUse20250124Param, BetaToolTextEditor20241022Param, BetaToolTextEditor20250124Param, diff --git a/src/anthropic/types/beta/message_create_params.py b/src/anthropic/types/beta/message_create_params.py index 53d1c48..ba63013 100644 --- a/src/anthropic/types/beta/message_create_params.py +++ b/src/anthropic/types/beta/message_create_params.py @@ -15,6 +15,7 @@ from .beta_tool_union_param import BetaToolUnionParam from .beta_tool_choice_param import BetaToolChoiceParam from .beta_thinking_config_param import BetaThinkingConfigParam +from .beta_context_management_config_param import BetaContextManagementConfigParam from .beta_request_mcp_server_url_definition_param import BetaRequestMCPServerURLDefinitionParam __all__ = ["MessageCreateParamsBase", "MessageCreateParamsNonStreaming", "MessageCreateParamsStreaming"] @@ -109,6 +110,9 @@ class MessageCreateParamsBase(TypedDict, total=False): container: Optional[str] """Container identifier for reuse across requests.""" + context_management: Optional[BetaContextManagementConfigParam] + """Configuration for context management operations.""" + mcp_servers: Iterable[BetaRequestMCPServerURLDefinitionParam] """MCP servers to be utilized in this request""" diff --git a/src/anthropic/types/model.py b/src/anthropic/types/model.py index 26d7743..fb0491a 100644 --- a/src/anthropic/types/model.py +++ b/src/anthropic/types/model.py @@ -14,6 +14,8 @@ "claude-sonnet-4-20250514", "claude-sonnet-4-0", "claude-4-sonnet-20250514", + "claude-sonnet-4-5", + "claude-sonnet-4-5-20250929", 
"claude-3-5-sonnet-latest", "claude-3-5-sonnet-20241022", "claude-3-5-sonnet-20240620", diff --git a/src/anthropic/types/model_param.py b/src/anthropic/types/model_param.py index 8481d17..e82c7c8 100644 --- a/src/anthropic/types/model_param.py +++ b/src/anthropic/types/model_param.py @@ -16,6 +16,8 @@ "claude-sonnet-4-20250514", "claude-sonnet-4-0", "claude-4-sonnet-20250514", + "claude-sonnet-4-5", + "claude-sonnet-4-5-20250929", "claude-3-5-sonnet-latest", "claude-3-5-sonnet-20241022", "claude-3-5-sonnet-20240620", diff --git a/src/anthropic/types/stop_reason.py b/src/anthropic/types/stop_reason.py index 3d37159..a4bfdbb 100644 --- a/src/anthropic/types/stop_reason.py +++ b/src/anthropic/types/stop_reason.py @@ -4,4 +4,6 @@ __all__ = ["StopReason"] -StopReason: TypeAlias = Literal["end_turn", "max_tokens", "stop_sequence", "tool_use", "pause_turn", "refusal"] +StopReason: TypeAlias = Literal[ + "end_turn", "max_tokens", "stop_sequence", "tool_use", "pause_turn", "refusal", "model_context_window_exceeded" +] diff --git a/tests/api_resources/beta/messages/test_batches.py b/tests/api_resources/beta/messages/test_batches.py index cea933d..cb54d4f 100644 --- a/tests/api_resources/beta/messages/test_batches.py +++ b/tests/api_resources/beta/messages/test_batches.py @@ -63,6 +63,27 @@ def test_method_create_with_all_params(self, client: Anthropic) -> None: ], "model": "claude-sonnet-4-20250514", "container": "container", + "context_management": { + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, "mcp_servers": [ { "name": "name", @@ -453,6 +474,27 @@ async def test_method_create_with_all_params(self, async_client: AsyncAnthropic) ], "model": "claude-sonnet-4-20250514", "container": "container", + 
"context_management": { + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, "mcp_servers": [ { "name": "name", diff --git a/tests/api_resources/beta/test_messages.py b/tests/api_resources/beta/test_messages.py index c1de960..c933481 100644 --- a/tests/api_resources/beta/test_messages.py +++ b/tests/api_resources/beta/test_messages.py @@ -48,6 +48,27 @@ def test_method_create_with_all_params_overload_1(self, client: Anthropic) -> No ], model="claude-sonnet-4-20250514", container="container", + context_management={ + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, mcp_servers=[ { "name": "name", @@ -194,6 +215,27 @@ def test_method_create_with_all_params_overload_2(self, client: Anthropic) -> No model="claude-sonnet-4-20250514", stream=True, container="container", + context_management={ + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, mcp_servers=[ { "name": "name", @@ -335,6 +377,27 @@ def test_method_count_tokens_with_all_params(self, client: Anthropic) -> None: } ], model="claude-3-7-sonnet-latest", + context_management={ + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + 
"keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, mcp_servers=[ { "name": "name", @@ -476,6 +539,27 @@ async def test_method_create_with_all_params_overload_1(self, async_client: Asyn ], model="claude-sonnet-4-20250514", container="container", + context_management={ + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, mcp_servers=[ { "name": "name", @@ -622,6 +706,27 @@ async def test_method_create_with_all_params_overload_2(self, async_client: Asyn model="claude-sonnet-4-20250514", stream=True, container="container", + context_management={ + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, mcp_servers=[ { "name": "name", @@ -763,6 +868,27 @@ async def test_method_count_tokens_with_all_params(self, async_client: AsyncAnth } ], model="claude-3-7-sonnet-latest", + context_management={ + "edits": [ + { + "type": "clear_tool_uses_20250919", + "clear_at_least": { + "type": "input_tokens", + "value": 0, + }, + "clear_tool_inputs": True, + "exclude_tools": ["string"], + "keep": { + "type": "tool_uses", + "value": 0, + }, + "trigger": { + "type": "input_tokens", + "value": 1, + }, + } + ] + }, mcp_servers=[ { "name": "name", diff --git a/tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/d105e140-a30c-4d6b-91df-257247da3623.json b/tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/d105e140-a30c-4d6b-91df-257247da3623.json 
deleted file mode 100644 index 947d7d8..0000000 --- a/tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/d105e140-a30c-4d6b-91df-257247da3623.json +++ /dev/null @@ -1,4 +0,0 @@ -[ - "{\"id\": \"msg_012fGiuDQBxNoEfdbdneHqrX\", \"type\": \"message\", \"role\": \"assistant\", \"model\": \"claude-3-5-sonnet-20241022\", \"content\": [{\"type\": \"text\", \"text\": \"I'll help you check the weather for these cities. Since the temperature unit wasn't specified, I'll use Celsius. I'll check each city one by one.\\n\\nLet's start with San Francisco:\"}, {\"type\": \"tool_use\", \"id\": \"toolu_01PeNQ4nbMcyDpCGiuKSfkMM\", \"name\": \"get_weather\", \"input\": {\"location\": \"San Francisco, CA\", \"units\": \"c\"}}], \"stop_reason\": \"tool_use\", \"stop_sequence\": null, \"usage\": {\"input_tokens\": 522, \"cache_creation_input_tokens\": 0, \"cache_read_input_tokens\": 0, \"cache_creation\": {\"ephemeral_5m_input_tokens\": 0, \"ephemeral_1h_input_tokens\": 0}, \"output_tokens\": 116, \"service_tier\": \"standard\"}}", - "{\"id\": \"msg_01KMmffjmDxyYLUKj7tR71gz\", \"type\": \"message\", \"role\": \"assistant\", \"model\": \"claude-3-5-sonnet-20241022\", \"content\": [{\"type\": \"text\", \"text\": \"Now for New York:\"}, {\"type\": \"tool_use\", \"id\": \"toolu_01WcZvizPr9EybXFMyGXRxYA\", \"name\": \"get_weather\", \"input\": {\"location\": \"New York, NY\", \"units\": \"c\"}}], \"stop_reason\": \"tool_use\", \"stop_sequence\": null, \"usage\": {\"input_tokens\": 677, \"cache_creation_input_tokens\": 0, \"cache_read_input_tokens\": 0, \"cache_creation\": {\"ephemeral_5m_input_tokens\": 0, \"ephemeral_1h_input_tokens\": 0}, \"output_tokens\": 80, \"service_tier\": \"standard\"}}" -] \ No newline at end of file diff --git a/tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/ef758469-6fa6-454c-b2e6-19d0b450a8c5.json 
b/tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/ef758469-6fa6-454c-b2e6-19d0b450a8c5.json new file mode 100644 index 0000000..926e8e2 --- /dev/null +++ b/tests/lib/tools/__inline_snapshot__/test_runners/TestSyncRunTools.test_max_iterations/ef758469-6fa6-454c-b2e6-19d0b450a8c5.json @@ -0,0 +1,4 @@ +[ + "{\"id\": \"msg_0184u1shM6AjCNAaoknBoEpf\", \"type\": \"message\", \"role\": \"assistant\", \"model\": \"claude-3-5-sonnet-20241022\", \"content\": [{\"type\": \"text\", \"text\": \"I'll help you check the weather for each city one by one. I'll use Fahrenheit units by default. Let me check them sequentially.\\n\\nFirst, let's check San Francisco:\"}, {\"type\": \"tool_use\", \"id\": \"toolu_01GiQJzt5d2ThB4fSUsRCSML\", \"name\": \"get_weather\", \"input\": {\"location\": \"San Francisco, CA\", \"units\": \"f\"}}], \"stop_reason\": \"tool_use\", \"stop_sequence\": null, \"usage\": {\"input_tokens\": 518, \"cache_creation_input_tokens\": 0, \"cache_read_input_tokens\": 0, \"cache_creation\": {\"ephemeral_5m_input_tokens\": 0, \"ephemeral_1h_input_tokens\": 0}, \"output_tokens\": 114, \"service_tier\": \"standard\"}}", + "{\"id\": \"msg_015zyD3V5WXG8r3hgkUaNGCZ\", \"type\": \"message\", \"role\": \"assistant\", \"model\": \"claude-3-5-sonnet-20241022\", \"content\": [{\"type\": \"text\", \"text\": \"Now for New York:\"}, {\"type\": \"tool_use\", \"id\": \"toolu_015yzRQ92SwYGz5Veoq7A3P7\", \"name\": \"get_weather\", \"input\": {\"location\": \"New York, NY\", \"units\": \"f\"}}], \"stop_reason\": \"tool_use\", \"stop_sequence\": null, \"usage\": {\"input_tokens\": 671, \"cache_creation_input_tokens\": 0, \"cache_read_input_tokens\": 0, \"cache_creation\": {\"ephemeral_5m_input_tokens\": 0, \"ephemeral_1h_input_tokens\": 0}, \"output_tokens\": 80, \"service_tier\": \"standard\"}}" +] \ No newline at end of file diff --git a/tests/lib/tools/test_runners.py b/tests/lib/tools/test_runners.py index 0d34362..28040a2 100644 --- 
a/tests/lib/tools/test_runners.py +++ b/tests/lib/tools/test_runners.py @@ -31,12 +31,11 @@ "basic": { "responses": snapshot( [ - '{"id": "msg_011VcyTSZL4mKtyjRLHBuqA5", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco. I\'ll use the get_weather function, and I\'ll show you the temperature in both Celsius and Fahrenheit for completeness."}, {"type": "tool_use", "id": "toolu_013nheddwxiFJt4C4Q8eGUXJ", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "c"}}, {"type": "tool_use", "id": "toolu_01Vg4JstpLEp3JiQadw9aTU1", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "f"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 473, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 169, "service_tier": "standard"}}', - '{"id": "msg_0151Rxp5cbUKiA6TJbEoG1U1", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco is currently sunny with a temperature of 20\\u00b0C (68\\u00b0F)."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 760, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 25, "service_tier": "standard"}}', + '{"id": "msg_01Lf1uRSXq1sB9df6EigSkXA", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco. 
I\'ll use the get_weather function, and I\'ll show you the temperature in both Celsius and Fahrenheit for completeness."}, {"type": "tool_use", "id": "toolu_013bzsyqF4LyvJj6CF5gYCEn", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "c"}}, {"type": "tool_use", "id": "toolu_01Ugb5BSmDUth8vbdkUsNYrs", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "f"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 473, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 169, "service_tier": "standard"}}', + '{"id": "msg_01SUujjdE6BMF3CYWCTR4vHF", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco is currently sunny with a temperature of 20\\u00b0C (68\\u00b0F)."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 760, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 25, "service_tier": "standard"}}', ] ), - "result": snapshot( - """\ + "result": snapshot("""\ BetaMessage( container=None, content=[ @@ -46,7 +45,8 @@ type='text' ) ], - id='msg_0151Rxp5cbUKiA6TJbEoG1U1', + context_management=None, + id='msg_01SUujjdE6BMF3CYWCTR4vHF', model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', @@ -62,14 +62,13 @@ service_tier='standard' ) ) -""" - ), +"""), }, "custom": { "responses": snapshot( [ - '{"id": "msg_01Xabmr29SxRofCJKx6dShd1", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco. 
Since you want it in Celsius, I\'ll use \'c\' for the units."}, {"type": "tool_use", "id": "toolu_01TndJ8oicsz1CBQvnKa6XYM", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "c"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 476, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 104, "service_tier": "standard"}}', - '{"id": "msg_01JyRBYxoqpZHJh5tanTRPqU", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco is currently 20\\u00b0C and it\'s Sunny."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 619, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 21, "service_tier": "standard"}}', + '{"id": "msg_01QebvpjSMHnjRVYDQpthDCM", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco using the get_weather function. 
Since you want it in Celsius, I\'ll use \'c\' for the units."}, {"type": "tool_use", "id": "toolu_01W8QFaZz5X8w6UezBfvJaHG", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "c"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 476, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 110, "service_tier": "standard"}}', + '{"id": "msg_01GQD2QBjkCMtD8rEfbF7J7y", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco is currently 20\\u00b0C and it\'s sunny."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 625, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 20, "service_tier": "standard"}}', ] ), "result": snapshot("""\ @@ -78,11 +77,12 @@ content=[ BetaTextBlock( citations=None, - text="The weather in San Francisco is currently 20°C and it's Sunny.", + text="The weather in San Francisco is currently 20°C and it's sunny.", type='text' ) ], - id='msg_01JyRBYxoqpZHJh5tanTRPqU', + context_management=None, + id='msg_01GQD2QBjkCMtD8rEfbF7J7y', model='claude-3-5-sonnet-20241022', role='assistant', stop_reason='end_turn', @@ -92,8 +92,8 @@ cache_creation=BetaCacheCreation(ephemeral_1h_input_tokens=0, ephemeral_5m_input_tokens=0), cache_creation_input_tokens=0, cache_read_input_tokens=0, - input_tokens=619, - output_tokens=21, + input_tokens=625, + output_tokens=20, server_tool_use=None, service_tier='standard' ) @@ -107,6 +107,7 @@ content=[ BetaTextBlock(citations=None, text='The weather in San Francisco is currently 68°F and sunny.', type='text') ], + context_management=None, id='msg_01FtWrpBLsm99NpQCoFrhuf9', model='claude-3-5-sonnet-20241022', role='assistant', @@ 
-128,16 +129,16 @@ "tool_call": { "responses": snapshot( [ - '{"id": "msg_01N73bKQGcVyRtRFmYKS3nF7", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco using Celsius units."}, {"type": "tool_use", "id": "toolu_01KBWEMjDHXQMrtG3Mb4ifsr", "name": "get_weather", "input": {"location": "SF", "units": "c"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 414, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 86, "service_tier": "standard"}}', - '{"id": "msg_01LYmChWYohv9p2EbNojAQUD", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco is currently 20\\u00b0C and it\'s sunny."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 536, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 20, "service_tier": "standard"}}', + '{"id": "msg_01CcxTJKA7URvATmjs9yemNw", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco using Celsius units."}, {"type": "tool_use", "id": "toolu_01X4rAg6afq9WTkdXDwNdo9g", "name": "get_weather", "input": {"location": "SF", "units": "c"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 414, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 86, "service_tier": "standard"}}', + '{"id": "msg_01Hswpqi8rjN9k6Erfof4NML", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": 
[{"type": "text", "text": "The weather in San Francisco is currently 20\\u00b0C and it\'s sunny."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 536, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 20, "service_tier": "standard"}}', ] ), }, "tool_call_error": { "responses": snapshot( [ - '{"id": "msg_014RTukBtZkatJqx6AQNJmz5", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco. Since the temperature unit wasn\'t specified, I\'ll show it in both Celsius and Fahrenheit."}, {"type": "tool_use", "id": "toolu_01Eqm7dFsQRKLFSSecctffe1", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "c"}}, {"type": "tool_use", "id": "toolu_01E7AD7aA4uR7cRk3kWs4oxa", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "f"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 473, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 162, "service_tier": "standard"}}', - '{"id": "msg_011W15YiUj9QAvCEQ1gDYCKB", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco, CA is currently sunny with a temperature of 68\\u00b0F (the Celsius reading encountered an error, but you can see the Fahrenheit temperature)."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 735, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 41, "service_tier": "standard"}}', + '{"id": "msg_01UCU1h4ayreA2Ridzbpk5ut", "type": "message", "role": 
"assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "I\'ll help you check the weather in San Francisco. Since the location format should include the state, I\'ll use \\"San Francisco, CA\\". I\'ll provide the temperature in both Celsius and Fahrenheit for completeness."}, {"type": "tool_use", "id": "toolu_01ECouLXJaT6yocMNDstufPc", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "c"}}, {"type": "tool_use", "id": "toolu_01FHQTcVXvPoLL3bzxsAUtJJ", "name": "get_weather", "input": {"location": "San Francisco, CA", "units": "f"}}], "stop_reason": "tool_use", "stop_sequence": null, "usage": {"input_tokens": 473, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 176, "service_tier": "standard"}}', + '{"id": "msg_01PYwhqAdduuZYymTokQ4JQU", "type": "message", "role": "assistant", "model": "claude-3-5-sonnet-20241022", "content": [{"type": "text", "text": "The weather in San Francisco, CA is currently sunny with a temperature of 68\\u00b0F."}], "stop_reason": "end_turn", "stop_sequence": null, "usage": {"input_tokens": 749, "cache_creation_input_tokens": 0, "cache_read_input_tokens": 0, "cache_creation": {"ephemeral_5m_input_tokens": 0, "ephemeral_1h_input_tokens": 0}, "output_tokens": 23, "service_tier": "standard"}}', ] ) }, @@ -241,13 +242,13 @@ def tool_runner(client: Anthropic) -> List[Union[BetaMessageParam, None]]: 'content': [ { 'type': 'tool_result', - 'tool_use_id': 'toolu_01Eqm7dFsQRKLFSSecctffe1', + 'tool_use_id': 'toolu_01ECouLXJaT6yocMNDstufPc', 'content': "RuntimeError('Unexpected error, try again')", 'is_error': True }, { 'type': 'tool_result', - 'tool_use_id': 'toolu_01E7AD7aA4uR7cRk3kWs4oxa', + 'tool_use_id': 'toolu_01FHQTcVXvPoLL3bzxsAUtJJ', 'content': '{"location": "San Francisco, CA", "temperature": "68\\\\u00b0F", "condition": "Sunny"}' } ] @@ -423,7 +424,7 @@ def 
get_weather_answers(client: Anthropic) -> List[Union[BetaMessageParam, None] answers = make_snapshot_request( get_weather_answers, - content_snapshot=snapshot(external("uuid:d105e140-a30c-4d6b-91df-257247da3623.json")), + content_snapshot=snapshot(external("uuid:ef758469-6fa6-454c-b2e6-19d0b450a8c5.json")), path="/v1/messages", mock_client=client, respx_mock=respx_mock, @@ -436,8 +437,8 @@ def get_weather_answers(client: Anthropic) -> List[Union[BetaMessageParam, None] 'content': [ { 'type': 'tool_result', - 'tool_use_id': 'toolu_01PeNQ4nbMcyDpCGiuKSfkMM', - 'content': '{"location": "San Francisco, CA", "temperature": "20\\\\u00b0C", "condition": "Sunny"}' + 'tool_use_id': 'toolu_01GiQJzt5d2ThB4fSUsRCSML', + 'content': '{"location": "San Francisco, CA", "temperature": "68\\\\u00b0F", "condition": "Sunny"}' } ] }, @@ -446,8 +447,8 @@ def get_weather_answers(client: Anthropic) -> List[Union[BetaMessageParam, None] 'content': [ { 'type': 'tool_result', - 'tool_use_id': 'toolu_01WcZvizPr9EybXFMyGXRxYA', - 'content': '{"location": "New York, NY", "temperature": "20\\\\u00b0C", "condition": "Sunny"}' + 'tool_use_id': 'toolu_015yzRQ92SwYGz5Veoq7A3P7', + 'content': '{"location": "New York, NY", "temperature": "68\\\\u00b0F", "condition": "Sunny"}' } ] } diff --git a/tools.md b/tools.md index c134603..9e5470a 100644 --- a/tools.md +++ b/tools.md @@ -54,7 +54,7 @@ message = client.beta.messages.create( tools=[get_weather.to_dict()], # ... 
max_tokens=1024, - model="claude-sonnet-4-20250514", + model="claude-sonnet-4-5-20250929", messages=[{"role": "user", "content": "What is 2 + 2?"}], ) ``` @@ -68,7 +68,7 @@ We provide a `client.beta.messages.tool_runner()` method that can automatically ```py runner = client.beta.messages.tool_runner( max_tokens=1024, - model="claude-3-5-sonnet-latest", + model="claude-sonnet-4-5-20250929", tools=[sum], messages=[{"role": "user", "content": "What is 9 + 10?"}], ) diff --git a/uv.lock b/uv.lock index 7f8bcc6..2e04b39 100644 --- a/uv.lock +++ b/uv.lock @@ -299,7 +299,7 @@ wheels = [ [[package]] name = "anthropic" -version = "0.67.0" +version = "0.68.0" source = { editable = "." } dependencies = [ { name = "anyio", version = "4.5.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.9' or (extra == 'group-9-anthropic-pydantic-v1' and extra == 'group-9-anthropic-pydantic-v2')" }, From 1f233dfdeee145af6f86039b3cd6f9c89b49efd9 Mon Sep 17 00:00:00 2001 From: "stainless-app[bot]" <142633134+stainless-app[bot]@users.noreply.github.com> Date: Mon, 29 Sep 2025 16:50:35 +0000 Subject: [PATCH 2/2] release: 0.69.0 --- .release-please-manifest.json | 2 +- CHANGELOG.md | 8 ++++++++ pyproject.toml | 2 +- src/anthropic/_version.py | 2 +- 4 files changed, 11 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b0f0a18..a9d0cc1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.68.2" + ".": "0.69.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index b1d5149..41e2fa5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 0.69.0 (2025-09-29) + +Full Changelog: [v0.68.2...v0.69.0](https://github.com/anthropics/anthropic-sdk-python/compare/v0.68.2...v0.69.0) + +### Features + +* **api:** adds support for Claude Sonnet 4.5 and context management features 
([f93eb12](https://github.com/anthropics/anthropic-sdk-python/commit/f93eb12dbfeaa68fb24590391ec72243836eb47a)) + ## 0.68.2 (2025-09-29) Full Changelog: [v0.68.1...v0.68.2](https://github.com/anthropics/anthropic-sdk-python/compare/v0.68.1...v0.68.2) diff --git a/pyproject.toml b/pyproject.toml index 99ef901..a17c50b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "anthropic" -version = "0.68.2" +version = "0.69.0" description = "The official Python library for the anthropic API" dynamic = ["readme"] license = "MIT" diff --git a/src/anthropic/_version.py b/src/anthropic/_version.py index 1f24199..018ae55 100644 --- a/src/anthropic/_version.py +++ b/src/anthropic/_version.py @@ -1,4 +1,4 @@ # File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. __title__ = "anthropic" -__version__ = "0.68.2" # x-release-please-version +__version__ = "0.69.0" # x-release-please-version