-
Notifications
You must be signed in to change notification settings - Fork 1k
feat: OpenAI compatible route /api/v1/responses #792
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 2 commits
7d7df18
2018850
500fa9f
3a87c0b
510b277
683af24
76d1aee
bacea87
688fd4e
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,3 @@ | ||
| from cognee.api.v1.responses.routers import get_responses_router | ||
|
|
||
| __all__ = ["get_responses_router"] |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,66 @@ | ||
| DEFAULT_TOOLS = [ | ||
| { | ||
| "type": "function", | ||
| "name": "search", | ||
| "description": "Search for information within the knowledge graph", | ||
| "parameters": { | ||
| "type": "object", | ||
| "properties": { | ||
| "search_query": { | ||
| "type": "string", | ||
| "description": "The query to search for in the knowledge graph", | ||
| }, | ||
| "search_type": { | ||
| "type": "string", | ||
| "description": "Type of search to perform", | ||
| "enum": [ | ||
| "INSIGHTS", | ||
| "CODE", | ||
| "GRAPH_COMPLETION", | ||
| "SEMANTIC", | ||
| "NATURAL_LANGUAGE", | ||
| ], | ||
| }, | ||
| "top_k": { | ||
| "type": "integer", | ||
| "description": "Maximum number of results to return", | ||
| "default": 10, | ||
| }, | ||
| "datasets": { | ||
| "type": "array", | ||
| "items": {"type": "string"}, | ||
| "description": "Optional list of dataset names to search within", | ||
| }, | ||
| }, | ||
| "required": ["search_query"], | ||
| }, | ||
| }, | ||
| { | ||
| "type": "function", | ||
| "name": "cognify_text", | ||
| "description": "Convert text into a knowledge graph or process all added content", | ||
| "parameters": { | ||
| "type": "object", | ||
| "properties": { | ||
| "text": { | ||
| "type": "string", | ||
| "description": "Text content to be converted into a knowledge graph", | ||
| }, | ||
| "graph_model_name": { | ||
| "type": "string", | ||
| "description": "Name of the graph model to use", | ||
| }, | ||
| "graph_model_file": { | ||
| "type": "string", | ||
| "description": "Path to a custom graph model file", | ||
| }, | ||
| }, | ||
| }, | ||
| }, | ||
| # Commented as dangerous | ||
| # { | ||
| # "type": "function", | ||
| # "name": "prune", | ||
| # "description": "Prune memory", | ||
| # }, | ||
| ] | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,107 @@ | ||
| import json | ||
| import logging | ||
| from typing import Any, Dict, Union | ||
|
|
||
| from cognee.api.v1.responses.models import ToolCall | ||
| from cognee.modules.search.types import SearchType | ||
| from cognee.api.v1.add import add | ||
dm1tryG marked this conversation as resolved.
Show resolved
Hide resolved
|
||
| from cognee.api.v1.search import search | ||
| from cognee.api.v1.cognify import cognify | ||
| from cognee.api.v1.prune import prune | ||
|
|
||
|
|
||
| from cognee.modules.users.methods import get_default_user | ||
| from cognee.api.v1.responses.default_tools import DEFAULT_TOOLS | ||
|
|
||
| logger = logging.getLogger(__name__) | ||
|
|
||
|
|
||
async def dispatch_function(tool_call: Union[ToolCall, Dict[str, Any]]) -> Any:
    """
    Dispatch a tool call to the matching Cognee handler.

    Accepts either a ToolCall model or a plain dict in OpenAI tool-call
    format ({"function": {"name": ..., "arguments": "<json string>"}}).

    Returns the handler's result (a string, or a result list from
    handle_search), or an "Error: ..." string for an unknown function
    name or malformed arguments.
    """
    # Support both raw dict payloads and the validated ToolCall model.
    if isinstance(tool_call, dict):
        function_data = tool_call.get("function", {})
        function_name = function_data.get("name", "")
        arguments_str = function_data.get("arguments", "{}")
    else:
        function_name = tool_call.function.name
        arguments_str = tool_call.function.arguments

    # Arguments arrive as a JSON-encoded string produced by the model; a
    # malformed payload previously propagated a JSONDecodeError out of the
    # route. Report it with the same "Error: ..." convention used for
    # unknown functions below instead of crashing.
    try:
        arguments = json.loads(arguments_str)
    except json.JSONDecodeError as error:
        logger.warning(f"Invalid JSON arguments for {function_name}: {error}")
        return f"Error: Invalid JSON arguments for function {function_name}"

    logger.info(f"Dispatching function: {function_name} with args: {arguments}")

    user = await get_default_user()

    if function_name == "search":
        return await handle_search(arguments, user)
    elif function_name == "cognify_text":
        return await handle_cognify(arguments, user)
    elif function_name == "prune":
        return await handle_prune(arguments, user)
    else:
        return f"Error: Unknown function {function_name}"
async def handle_search(arguments: Dict[str, Any], user) -> Union[list, str]:
    """
    Handle the "search" tool call.

    Validates the arguments against the "search" entry in DEFAULT_TOOLS,
    then runs the search. Returns the list of search results, or an
    "Error: ..." string when a required parameter is missing.

    NOTE: the original annotation was `-> list`, but the missing-parameter
    branch returns a string; the annotation now reflects both outcomes.
    """
    # Pull validation metadata from the advertised tool schema so this
    # handler and DEFAULT_TOOLS cannot drift apart; fall back to hard-coded
    # values if the tool definition is ever removed.
    search_tool = next((tool for tool in DEFAULT_TOOLS if tool["name"] == "search"), None)
    required_params = (
        search_tool["parameters"].get("required", []) if search_tool else ["search_query"]
    )

    query = arguments.get("search_query")
    if not query and "search_query" in required_params:
        return "Error: Missing required 'search_query' parameter"

    search_type_str = arguments.get("search_type", "GRAPH_COMPLETION")
    valid_search_types = (
        search_tool["parameters"]["properties"]["search_type"]["enum"]
        if search_tool
        else ["INSIGHTS", "CODE", "GRAPH_COMPLETION", "SEMANTIC", "NATURAL_LANGUAGE"]
    )

    # Unknown search types degrade gracefully to the default instead of
    # failing the whole call.
    if search_type_str not in valid_search_types:
        logger.warning(f"Invalid search_type: {search_type_str}, defaulting to GRAPH_COMPLETION")
        search_type_str = "GRAPH_COMPLETION"

    query_type = search_type_str

    top_k = arguments.get("top_k")
    datasets = arguments.get("datasets")
    system_prompt_path = arguments.get("system_prompt_path", "answer_simple_question.txt")

    results = await search(
        query_text=query,
        query_type=query_type,
        datasets=datasets,
        user=user,
        system_prompt_path=system_prompt_path,
        # Guard against a non-integer top_k coming from the model.
        top_k=top_k if isinstance(top_k, int) else 10,
    )

    return results
async def handle_cognify(arguments: Dict[str, Any], user) -> str:
    """
    Handle the "cognify_text" tool call.

    Optionally ingests new text first, then runs cognify over the user's
    content. Returns a human-readable status message.
    """
    text = arguments.get("text")
    graph_model_file = arguments.get("graph_model_file")

    # Any new text must be added before cognify can process it.
    if text:
        await add(data=text, user=user)

    # Falsy (missing/empty) paths are normalized to None so cognify uses
    # its default ontology.
    await cognify(user=user, ontology_file_path=graph_model_file or None)

    if text:
        return "Text successfully converted into knowledge graph."
    return "Knowledge graph successfully updated with new information."
async def handle_prune(arguments: Dict[str, Any], user) -> str:
    """
    Handle the "prune" tool call by wiping stored memory.

    `arguments` and `user` are accepted for dispatcher symmetry with the
    other handlers but are not used by prune itself.
    """
    await prune()
    return "Memory has been pruned successfully."
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,102 @@ | ||
| import time | ||
| import uuid | ||
| from typing import Any, Dict, List, Optional, Union | ||
|
|
||
| from pydantic import BaseModel, Field | ||
|
|
||
| from enum import Enum | ||
|
|
||
| from cognee.api.DTO import InDTO, OutDTO | ||
|
|
||
|
|
||
class CogneeModel(str, Enum):
    """Enum for supported model types.

    Inherits from str so values serialize directly as plain strings in
    request/response bodies.
    """

    # The only model identifier currently accepted by the endpoint.
    COGNEEV1 = "cognee-v1"
class FunctionParameters(BaseModel):
    """JSON Schema for function parameters (OpenAI function-tool format)."""

    # Top-level JSON-Schema type; OpenAI function parameters are an object.
    type: str = "object"
    # Map of parameter name -> JSON-Schema fragment describing it.
    properties: Dict[str, Dict[str, Any]]
    # Names of parameters the caller must supply; None means none required.
    required: Optional[List[str]] = None
class Function(BaseModel):
    """Function definition compatible with OpenAI's format."""

    # Function name the model refers to when emitting a tool call.
    name: str
    # Human-readable description shown to the model.
    description: str
    # JSON-Schema description of the accepted arguments.
    parameters: FunctionParameters
class ToolFunction(BaseModel):
    """Tool function wrapper (for OpenAI compatibility)."""

    # Tool discriminator; only "function" tools are modeled here.
    type: str = "function"
    function: Function
class FunctionCall(BaseModel):
    """Function call made by the assistant."""

    # Name of the function to invoke (should match a tool definition).
    name: str
    # JSON-encoded string of keyword arguments, per OpenAI's format.
    arguments: str
class ToolCall(BaseModel):
    """Tool call made by the assistant."""

    # OpenAI-style call id, generated as "call_<hex uuid>" by default.
    id: str = Field(default_factory=lambda: f"call_{uuid.uuid4().hex}")
    type: str = "function"
    function: FunctionCall
class ChatUsage(BaseModel):
    """Token usage information, mirroring OpenAI's usage object."""

    prompt_tokens: int = 0
    completion_tokens: int = 0
    total_tokens: int = 0
class ResponseRequest(InDTO):
    """Request body for the new responses endpoint (OpenAI Responses API format)."""

    # Model identifier; only cognee-v1 is defined (see CogneeModel).
    model: CogneeModel = CogneeModel.COGNEEV1
    # The user's input text (single-string form of the Responses API input).
    input: str
    # Optional OpenAI-format tool definitions supplied by the caller.
    tools: Optional[List[ToolFunction]] = None
    # "auto", another string mode, or a dict selecting a specific tool,
    # following OpenAI tool_choice semantics.
    tool_choice: Optional[Union[str, Dict[str, Any]]] = "auto"
    # Opaque end-user identifier, as in the OpenAI API.
    user: Optional[str] = None
    temperature: Optional[float] = 1.0
    max_tokens: Optional[int] = None
class ToolCallOutput(BaseModel):
    """Output of a tool call in the responses API."""

    # Outcome of the call: "success" or "error".
    status: str = "success"  # success/error
    # Structured result payload, when the tool produced one.
    data: Optional[Dict[str, Any]] = None
class ResponseToolCall(BaseModel):
    """Tool call in a response.

    Same shape as ToolCall, extended with the result of executing the call.
    """

    # OpenAI-style call id, generated as "call_<hex uuid>" by default.
    id: str = Field(default_factory=lambda: f"call_{uuid.uuid4().hex}")
    type: str = "function"
    function: FunctionCall
    # Result of executing the call, when available.
    output: Optional[ToolCallOutput] = None
class ResponseResponse(OutDTO):
    """Response body for the new responses endpoint."""

    # Unique response id, generated as "resp_<hex uuid>" by default.
    id: str = Field(default_factory=lambda: f"resp_{uuid.uuid4().hex}")
    # Unix timestamp (seconds) at which the response object was created.
    created: int = Field(default_factory=lambda: int(time.time()))
    model: str
    object: str = "response"
    status: str = "completed"
    tool_calls: List[ResponseToolCall]
    usage: Optional[ChatUsage] = None
    # Was `Dict[str, Any] = None`: a non-Optional field with a None default,
    # which pydantic/mypy reject. Optional makes the None default valid
    # while keeping the wire format unchanged.
    metadata: Optional[Dict[str, Any]] = None
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,3 @@ | ||
| from cognee.api.v1.responses.routers.get_responses_router import get_responses_router | ||
|
|
||
| __all__ = ["get_responses_router"] |
Uh oh!
There was an error while loading. Please reload this page.