Skip to content
12 changes: 10 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,14 @@ Build dynamic Agent memory using scalable, modular ECL (Extract, Cognify, Load)

More on [use-cases](https://docs.cognee.ai/use-cases) and [evals](https://github.com/topoteretes/cognee/tree/main/evals)

<p align="center">
🌐 Available Languages:
<a href="community/README.pt.md">🇵🇹 Português</a>
·
<a href="community/README.zh.md">🇨🇳 中文</a>
</p>

<div style="text-align: center">
<img src="https://raw.githubusercontent.com/topoteretes/cognee/refs/heads/main/assets/cognee_benefits.png" alt="Why cognee?" width="50%" />
</div>
Expand Down Expand Up @@ -117,8 +125,8 @@ Example output:

```
Graph visualization:
<a href="https://rawcdn.githack.com/topoteretes/cognee/refs/heads/add-visualization-readme/assets/graph_visualization.html"><img src="assets/graph_visualization.png" width="100%" alt="Graph Visualization"></a>
Open in [browser](https://rawcdn.githack.com/topoteretes/cognee/refs/heads/add-visualization-readme/assets/graph_visualization.html).
<a href="https://rawcdn.githack.com/topoteretes/cognee/refs/heads/main/assets/graph_visualization.html"><img src="assets/graph_visualization.png" width="100%" alt="Graph Visualization"></a>
Open in [browser](https://rawcdn.githack.com/topoteretes/cognee/refs/heads/main/assets/graph_visualization.html).

For more advanced usage, have a look at our <a href="https://docs.cognee.ai"> documentation</a>.

Expand Down
3 changes: 3 additions & 0 deletions cognee/api/client.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
from cognee.api.v1.search.routers import get_search_router
from cognee.api.v1.add.routers import get_add_router
from cognee.api.v1.delete.routers import get_delete_router
from cognee.api.v1.responses.routers import get_responses_router
from fastapi import Request
from fastapi.encoders import jsonable_encoder
from fastapi.exceptions import RequestValidationError
Expand Down Expand Up @@ -167,6 +168,8 @@ def health_check():

app.include_router(get_delete_router(), prefix="/api/v1/delete", tags=["delete"])

app.include_router(get_responses_router(), prefix="/api/v1/responses", tags=["responses"])

codegraph_routes = get_code_pipeline_router()
if codegraph_routes:
app.include_router(codegraph_routes, prefix="/api/v1/code-pipeline", tags=["code-pipeline"])
Expand Down
3 changes: 3 additions & 0 deletions cognee/api/v1/responses/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from cognee.api.v1.responses.routers import get_responses_router

__all__ = ["get_responses_router"]
66 changes: 66 additions & 0 deletions cognee/api/v1/responses/default_tools.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
DEFAULT_TOOLS = [
{
"type": "function",
"name": "search",
"description": "Search for information within the knowledge graph",
"parameters": {
"type": "object",
"properties": {
"search_query": {
"type": "string",
"description": "The query to search for in the knowledge graph",
},
"search_type": {
"type": "string",
"description": "Type of search to perform",
"enum": [
"INSIGHTS",
"CODE",
"GRAPH_COMPLETION",
"SEMANTIC",
"NATURAL_LANGUAGE",
],
},
"top_k": {
"type": "integer",
"description": "Maximum number of results to return",
"default": 10,
},
"datasets": {
"type": "array",
"items": {"type": "string"},
"description": "Optional list of dataset names to search within",
},
},
"required": ["search_query"],
},
},
{
"type": "function",
"name": "cognify_text",
"description": "Convert text into a knowledge graph or process all added content",
"parameters": {
"type": "object",
"properties": {
"text": {
"type": "string",
"description": "Text content to be converted into a knowledge graph",
},
"graph_model_name": {
"type": "string",
"description": "Name of the graph model to use",
},
"graph_model_file": {
"type": "string",
"description": "Path to a custom graph model file",
},
},
},
},
# Commented as dangerous
# {
# "type": "function",
# "name": "prune",
# "description": "Prune memory",
# },
]
107 changes: 107 additions & 0 deletions cognee/api/v1/responses/dispatch_function.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
import json
import logging
from typing import Any, Dict, Union

from cognee.api.v1.responses.models import ToolCall
from cognee.modules.search.types import SearchType
from cognee.api.v1.add import add
from cognee.api.v1.search import search
from cognee.api.v1.cognify import cognify
from cognee.api.v1.prune import prune


from cognee.modules.users.methods import get_default_user
from cognee.api.v1.responses.default_tools import DEFAULT_TOOLS

logger = logging.getLogger(__name__)


async def dispatch_function(tool_call: Union[ToolCall, Dict[str, Any]]) -> str:
"""
Dispatches a function call to the appropriate Cognee function.
"""
if isinstance(tool_call, dict):
function_data = tool_call.get("function", {})
function_name = function_data.get("name", "")
arguments_str = function_data.get("arguments", "{}")
else:
function_name = tool_call.function.name
arguments_str = tool_call.function.arguments

arguments = json.loads(arguments_str)

logger.info(f"Dispatching function: {function_name} with args: {arguments}")

user = await get_default_user()

if function_name == "search":
return await handle_search(arguments, user)
elif function_name == "cognify_text":
return await handle_cognify(arguments, user)
elif function_name == "prune":
return await handle_prune(arguments, user)
else:
return f"Error: Unknown function {function_name}"


async def handle_search(arguments: Dict[str, Any], user) -> list:
"""Handle search function call"""
search_tool = next((tool for tool in DEFAULT_TOOLS if tool["name"] == "search"), None)
required_params = (
search_tool["parameters"].get("required", []) if search_tool else ["search_query"]
)

query = arguments.get("search_query")
if not query and "search_query" in required_params:
return "Error: Missing required 'search_query' parameter"

search_type_str = arguments.get("search_type", "GRAPH_COMPLETION")
valid_search_types = (
search_tool["parameters"]["properties"]["search_type"]["enum"]
if search_tool
else ["INSIGHTS", "CODE", "GRAPH_COMPLETION", "SEMANTIC", "NATURAL_LANGUAGE"]
)

if search_type_str not in valid_search_types:
logger.warning(f"Invalid search_type: {search_type_str}, defaulting to GRAPH_COMPLETION")
search_type_str = "GRAPH_COMPLETION"

query_type = search_type_str

top_k = arguments.get("top_k")
datasets = arguments.get("datasets")
system_prompt_path = arguments.get("system_prompt_path", "answer_simple_question.txt")

results = await search(
query_text=query,
query_type=query_type,
datasets=datasets,
user=user,
system_prompt_path=system_prompt_path,
top_k=top_k if isinstance(top_k, int) else 10,
)

return results


async def handle_cognify(arguments: Dict[str, Any], user) -> str:
"""Handle cognify function call"""
text = arguments.get("text")
graph_model_file = arguments.get("graph_model_file")

if text:
await add(data=text, user=user)

await cognify(user=user, ontology_file_path=graph_model_file if graph_model_file else None)
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I see mixed ontology_file_path and graph_model usage.
If you want to set a custom graph model, you can do it with graph_model argument. But that one expects a pydantic model, so not suitable for this case. If you want to set a custom ontology, then ontology_file_path is the right property, but then the argument name is wrong.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

ohh, yes, I missed that


return (
"Text successfully converted into knowledge graph."
if text
else "Knowledge graph successfully updated with new information."
)


async def handle_prune(arguments: Dict[str, Any], user) -> str:
"""Handle prune function call"""
await prune()
return "Memory has been pruned successfully."
102 changes: 102 additions & 0 deletions cognee/api/v1/responses/models.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
import time
import uuid
from typing import Any, Dict, List, Optional, Union

from pydantic import BaseModel, Field

from enum import Enum

from cognee.api.DTO import InDTO, OutDTO


class CogneeModel(str, Enum):
"""Enum for supported model types"""

COGNEEV1 = "cognee-v1"


class FunctionParameters(BaseModel):
"""JSON Schema for function parameters"""

type: str = "object"
properties: Dict[str, Dict[str, Any]]
required: Optional[List[str]] = None


class Function(BaseModel):
"""Function definition compatible with OpenAI's format"""

name: str
description: str
parameters: FunctionParameters


class ToolFunction(BaseModel):
"""Tool function wrapper (for OpenAI compatibility)"""

type: str = "function"
function: Function


class FunctionCall(BaseModel):
"""Function call made by the assistant"""

name: str
arguments: str


class ToolCall(BaseModel):
"""Tool call made by the assistant"""

id: str = Field(default_factory=lambda: f"call_{uuid.uuid4().hex}")
type: str = "function"
function: FunctionCall


class ChatUsage(BaseModel):
"""Token usage information"""

prompt_tokens: int = 0
completion_tokens: int = 0
total_tokens: int = 0


class ResponseRequest(InDTO):
"""Request body for the new responses endpoint (OpenAI Responses API format)"""

model: CogneeModel = CogneeModel.COGNEEV1
input: str
tools: Optional[List[ToolFunction]] = None
tool_choice: Optional[Union[str, Dict[str, Any]]] = "auto"
user: Optional[str] = None
temperature: Optional[float] = 1.0
max_tokens: Optional[int] = None


class ToolCallOutput(BaseModel):
"""Output of a tool call in the responses API"""

status: str = "success" # success/error
data: Optional[Dict[str, Any]] = None


class ResponseToolCall(BaseModel):
"""Tool call in a response"""

id: str = Field(default_factory=lambda: f"call_{uuid.uuid4().hex}")
type: str = "function"
function: FunctionCall
output: Optional[ToolCallOutput] = None


class ResponseResponse(OutDTO):
"""Response body for the new responses endpoint"""

id: str = Field(default_factory=lambda: f"resp_{uuid.uuid4().hex}")
created: int = Field(default_factory=lambda: int(time.time()))
model: str
object: str = "response"
status: str = "completed"
tool_calls: List[ResponseToolCall]
usage: Optional[ChatUsage] = None
metadata: Dict[str, Any] = None
3 changes: 3 additions & 0 deletions cognee/api/v1/responses/routers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
from cognee.api.v1.responses.routers.get_responses_router import get_responses_router

__all__ = ["get_responses_router"]
Loading
Loading