Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 4 additions & 9 deletions cognee/api/v1/cognify/code_graph_pipeline.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
import os
import pathlib
import asyncio
from cognee.shared.logging_utils import get_logger
from uuid import NAMESPACE_OID, uuid5
from cognee.shared.logging_utils import get_logger
from cognee.modules.observability.get_observe import get_observe

from cognee.api.v1.search import SearchType, search
from cognee.api.v1.visualize.visualize import visualize_graph
from cognee.base_config import get_base_config
from cognee.modules.cognify.config import get_cognify_config
from cognee.modules.pipelines import run_tasks
from cognee.modules.pipelines.tasks.task import Task
from cognee.modules.users.methods import get_default_user
from cognee.shared.data_models import KnowledgeGraph, MonitoringTool
from cognee.shared.utils import render_graph
from cognee.shared.data_models import KnowledgeGraph
from cognee.tasks.documents import classify_documents, extract_chunks_from_documents
from cognee.tasks.graph import extract_graph_from_data
from cognee.tasks.ingestion import ingest_data
Expand All @@ -22,11 +21,7 @@
from cognee.tasks.summarization import summarize_text
from cognee.infrastructure.llm import get_max_chunk_tokens

monitoring = get_base_config().monitoring_tool

if monitoring == MonitoringTool.LANGFUSE:
from langfuse.decorators import observe

observe = get_observe()

logger = get_logger("code_graph_pipeline")

Expand Down
6 changes: 3 additions & 3 deletions cognee/base_config.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,14 @@
import os
from typing import Optional
from functools import lru_cache
from pydantic_settings import BaseSettings, SettingsConfigDict
from cognee.root_dir import get_absolute_path
from cognee.shared.data_models import MonitoringTool
from cognee.modules.observability.observers import Observer
from pydantic_settings import BaseSettings, SettingsConfigDict


class BaseConfig(BaseSettings):
data_root_directory: str = get_absolute_path(".data_storage")
monitoring_tool: object = MonitoringTool.LANGFUSE
monitoring_tool: object = Observer.LANGFUSE
graphistry_username: Optional[str] = os.getenv("GRAPHISTRY_USERNAME")
graphistry_password: Optional[str] = os.getenv("GRAPHISTRY_PASSWORD")
langfuse_public_key: Optional[str] = os.getenv("LANGFUSE_PUBLIC_KEY")
Expand Down
16 changes: 6 additions & 10 deletions cognee/infrastructure/llm/gemini/adapter.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,20 @@
from typing import Type, Optional
from pydantic import BaseModel
from cognee.shared.logging_utils import get_logger
import litellm
from pydantic import BaseModel
from typing import Type, Optional
from litellm import acompletion, JSONSchemaValidationError
from cognee.shared.data_models import MonitoringTool

from cognee.shared.logging_utils import get_logger
from cognee.modules.observability.get_observe import get_observe
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt
from cognee.infrastructure.llm.rate_limiter import (
rate_limit_async,
sleep_and_retry_async,
)
from cognee.base_config import get_base_config

logger = get_logger()

monitoring = get_base_config().monitoring_tool

if monitoring == MonitoringTool.LANGFUSE:
from langfuse.decorators import observe
observe = get_observe()


class GeminiAdapter(LLMInterface):
Expand Down
12 changes: 3 additions & 9 deletions cognee/infrastructure/llm/openai/adapter.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,11 @@
import os
import base64
from pathlib import Path
from typing import Type

import litellm
import instructor
from typing import Type
from pydantic import BaseModel

from cognee.modules.data.processing.document_types.open_data_file import open_data_file
from cognee.shared.data_models import MonitoringTool
from cognee.exceptions import InvalidValueError
from cognee.infrastructure.llm.llm_interface import LLMInterface
from cognee.infrastructure.llm.prompts import read_query_prompt
Expand All @@ -18,12 +15,9 @@
sleep_and_retry_async,
sleep_and_retry_sync,
)
from cognee.base_config import get_base_config

monitoring = get_base_config().monitoring_tool
from cognee.modules.observability.get_observe import get_observe

if monitoring == MonitoringTool.LANGFUSE:
from langfuse.decorators import observe
observe = get_observe()


class OpenAIAdapter(LLMInterface):
Expand Down
11 changes: 11 additions & 0 deletions cognee/modules/observability/get_observe.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from cognee.base_config import get_base_config
from .observers import Observer


def get_observe():
    """Return the decorator used to instrument functions for observability.

    Reads the configured monitoring tool from the base config. When the tool
    is Langfuse, returns langfuse's ``observe`` decorator (imported lazily so
    the dependency is only required when actually enabled). For any other
    tool, returns a no-op decorator instead of ``None`` — previously this
    fell through and implicitly returned ``None``, which made every
    ``@observe`` usage at the call sites raise ``TypeError`` for
    non-Langfuse configurations.

    Returns:
        A decorator usable both bare (``@observe``) and parameterized
        (``@observe(...)``), matching langfuse's calling conventions.
    """
    monitoring = get_base_config().monitoring_tool

    if monitoring == Observer.LANGFUSE:
        # Lazy import: langfuse is an optional dependency, only needed
        # when this monitoring back-end is selected.
        from langfuse.decorators import observe

        return observe

    def _noop_observe(*args, **kwargs):
        # Bare usage: ``@observe`` passes the function directly.
        if len(args) == 1 and callable(args[0]) and not kwargs:
            return args[0]

        # Parameterized usage: ``@observe(...)`` — return a pass-through
        # decorator that leaves the function unchanged.
        def decorator(func):
            return func

        return decorator

    return _noop_observe
9 changes: 9 additions & 0 deletions cognee/modules/observability/observers.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from enum import Enum


class Observer(str, Enum):
    """Supported observability/monitoring back-ends.

    Inherits from ``str`` so members compare equal to their plain string
    values (e.g. ``Observer.LANGFUSE == "langfuse"``), allowing the
    configured tool to be read directly from string-based settings.
    """

    LANGFUSE = "langfuse"
    LLMLITE = "llmlite"  # NOTE(review): possibly a typo for "litellm" — confirm before changing the value
    LANGSMITH = "langsmith"
8 changes: 0 additions & 8 deletions cognee/shared/data_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -350,11 +350,3 @@ class ChunkSummaries(BaseModel):
"""Relevant summary and chunk id"""

summaries: List[ChunkSummary]


class MonitoringTool(str, Enum):
"""Monitoring tools"""

LANGFUSE = "langfuse"
LLMLITE = "llmlite"
LANGSMITH = "langsmith"
Loading