diff --git a/.github/workflows/e2e_tests.yml b/.github/workflows/e2e_tests.yml
index cb69e9ef60..8cd62910c6 100644
--- a/.github/workflows/e2e_tests.yml
+++ b/.github/workflows/e2e_tests.yml
@@ -237,6 +237,31 @@ jobs:
           EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
         run: uv run python ./cognee/tests/test_dataset_database_handler.py
 
+  test-dataset-database-deletion:
+    name: Test dataset database deletion in Cognee
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Check out repository
+        uses: actions/checkout@v4
+
+      - name: Cognee Setup
+        uses: ./.github/actions/cognee_setup
+        with:
+          python-version: '3.11.x'
+
+      - name: Run dataset database deletion test
+        env:
+          ENV: 'dev'
+          LLM_MODEL: ${{ secrets.LLM_MODEL }}
+          LLM_ENDPOINT: ${{ secrets.LLM_ENDPOINT }}
+          LLM_API_KEY: ${{ secrets.LLM_API_KEY }}
+          LLM_API_VERSION: ${{ secrets.LLM_API_VERSION }}
+          EMBEDDING_MODEL: ${{ secrets.EMBEDDING_MODEL }}
+          EMBEDDING_ENDPOINT: ${{ secrets.EMBEDDING_ENDPOINT }}
+          EMBEDDING_API_KEY: ${{ secrets.EMBEDDING_API_KEY }}
+          EMBEDDING_API_VERSION: ${{ secrets.EMBEDDING_API_VERSION }}
+        run: uv run python ./cognee/tests/test_dataset_delete.py
+
   test-permissions:
     name: Test permissions with different situations in Cognee
     runs-on: ubuntu-22.04
diff --git a/cognee/api/v1/cognify/cognify.py b/cognee/api/v1/cognify/cognify.py
index 9371f7ffda..ffc903d689 100644
--- a/cognee/api/v1/cognify/cognify.py
+++ b/cognee/api/v1/cognify/cognify.py
@@ -53,7 +53,7 @@ async def cognify(
     custom_prompt: Optional[str] = None,
     temporal_cognify: bool = False,
     data_per_batch: int = 20,
-    **kwargs
+    **kwargs,
 ):
     """
     Transform ingested data into a structured knowledge graph.
diff --git a/cognee/api/v1/datasets/routers/get_datasets_router.py b/cognee/api/v1/datasets/routers/get_datasets_router.py
index eff87b3afb..ca738dfbe2 100644
--- a/cognee/api/v1/datasets/routers/get_datasets_router.py
+++ b/cognee/api/v1/datasets/routers/get_datasets_router.py
@@ -208,14 +208,14 @@ async def delete_dataset(dataset_id: UUID, user: User = Depends(get_authenticate
         },
     )
 
-    from cognee.modules.data.methods import get_dataset, delete_dataset
+    from cognee.modules.data.methods import delete_dataset
 
-    dataset = await get_dataset(user.id, dataset_id)
+    dataset = await get_authorized_existing_datasets([dataset_id], "delete", user)
 
-    if dataset is None:
+    if not dataset:
         raise DatasetNotFoundError(message=f"Dataset ({str(dataset_id)}) not found.")
 
-    await delete_dataset(dataset)
+    await delete_dataset(dataset[0])
 
 @router.delete(
     "/{dataset_id}/data/{data_id}",
diff --git a/cognee/infrastructure/databases/utils/__init__.py b/cognee/infrastructure/databases/utils/__init__.py
index f31d1e0dc3..3907b43255 100644
--- a/cognee/infrastructure/databases/utils/__init__.py
+++ b/cognee/infrastructure/databases/utils/__init__.py
@@ -1,2 +1,4 @@
 from .get_or_create_dataset_database import get_or_create_dataset_database
 from .resolve_dataset_database_connection_info import resolve_dataset_database_connection_info
+from .get_graph_dataset_database_handler import get_graph_dataset_database_handler
+from .get_vector_dataset_database_handler import get_vector_dataset_database_handler
diff --git a/cognee/infrastructure/databases/utils/get_graph_dataset_database_handler.py b/cognee/infrastructure/databases/utils/get_graph_dataset_database_handler.py
new file mode 100644
index 0000000000..d88685b48b
--- /dev/null
+++ b/cognee/infrastructure/databases/utils/get_graph_dataset_database_handler.py
@@ -0,0 +1,10 @@
+from cognee.modules.users.models.DatasetDatabase import DatasetDatabase
+
+
+def get_graph_dataset_database_handler(dataset_database: DatasetDatabase) -> dict:
+    from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
+        supported_dataset_database_handlers,
+    )
+
+    handler = supported_dataset_database_handlers[dataset_database.graph_dataset_database_handler]
+    return handler
diff --git a/cognee/infrastructure/databases/utils/get_vector_dataset_database_handler.py b/cognee/infrastructure/databases/utils/get_vector_dataset_database_handler.py
new file mode 100644
index 0000000000..5d1152c040
--- /dev/null
+++ b/cognee/infrastructure/databases/utils/get_vector_dataset_database_handler.py
@@ -0,0 +1,10 @@
+from cognee.modules.users.models.DatasetDatabase import DatasetDatabase
+
+
+def get_vector_dataset_database_handler(dataset_database: DatasetDatabase) -> dict:
+    from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
+        supported_dataset_database_handlers,
+    )
+
+    handler = supported_dataset_database_handlers[dataset_database.vector_dataset_database_handler]
+    return handler
diff --git a/cognee/infrastructure/databases/utils/resolve_dataset_database_connection_info.py b/cognee/infrastructure/databases/utils/resolve_dataset_database_connection_info.py
index d331696427..561268eaf3 100644
--- a/cognee/infrastructure/databases/utils/resolve_dataset_database_connection_info.py
+++ b/cognee/infrastructure/databases/utils/resolve_dataset_database_connection_info.py
@@ -1,24 +1,12 @@
+from cognee.infrastructure.databases.utils.get_graph_dataset_database_handler import (
+    get_graph_dataset_database_handler,
+)
+from cognee.infrastructure.databases.utils.get_vector_dataset_database_handler import (
+    get_vector_dataset_database_handler,
+)
 from cognee.modules.users.models.DatasetDatabase import DatasetDatabase
 
 
-async def _get_vector_db_connection_info(dataset_database: DatasetDatabase) -> DatasetDatabase:
-    from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
-        supported_dataset_database_handlers,
-    )
-
-    handler = supported_dataset_database_handlers[dataset_database.vector_dataset_database_handler]
-    return await handler["handler_instance"].resolve_dataset_connection_info(dataset_database)
-
-
-async def _get_graph_db_connection_info(dataset_database: DatasetDatabase) -> DatasetDatabase:
-    from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
-        supported_dataset_database_handlers,
-    )
-
-    handler = supported_dataset_database_handlers[dataset_database.graph_dataset_database_handler]
-    return await handler["handler_instance"].resolve_dataset_connection_info(dataset_database)
-
-
 async def resolve_dataset_database_connection_info(
     dataset_database: DatasetDatabase,
 ) -> DatasetDatabase:
@@ -31,6 +19,12 @@ async def resolve_dataset_database_connection_info(
     Returns:
         DatasetDatabase instance with resolved connection info
     """
-    dataset_database = await _get_vector_db_connection_info(dataset_database)
-    dataset_database = await _get_graph_db_connection_info(dataset_database)
+    vector_dataset_database_handler = get_vector_dataset_database_handler(dataset_database)
+    graph_dataset_database_handler = get_graph_dataset_database_handler(dataset_database)
+    dataset_database = await vector_dataset_database_handler[
+        "handler_instance"
+    ].resolve_dataset_connection_info(dataset_database)
+    dataset_database = await graph_dataset_database_handler[
+        "handler_instance"
+    ].resolve_dataset_connection_info(dataset_database)
     return dataset_database
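Both new helpers follow the same lookup pattern: they index the supported_dataset_database_handlers registry by the handler name stored on the DatasetDatabase row and return the registry entry, a dict whose "handler_instance" key holds the live adapter. A minimal sketch of how callers combine them (resolve_both is a hypothetical name; the entry shape and method name are taken from the diff above):

    from cognee.infrastructure.databases.utils import (
        get_graph_dataset_database_handler,
        get_vector_dataset_database_handler,
    )
    from cognee.modules.users.models.DatasetDatabase import DatasetDatabase


    async def resolve_both(dataset_database: DatasetDatabase) -> DatasetDatabase:
        # Look up the registry entry for each database type, then let the adapter
        # stored under "handler_instance" fill in the connection info.
        for get_handler in (
            get_vector_dataset_database_handler,
            get_graph_dataset_database_handler,
        ):
            handler = get_handler(dataset_database)
            dataset_database = await handler[
                "handler_instance"
            ].resolve_dataset_connection_info(dataset_database)
        return dataset_database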
diff --git a/cognee/infrastructure/llm/LLMGateway.py b/cognee/infrastructure/llm/LLMGateway.py
index fd42eb55e4..7bec9ca01b 100644
--- a/cognee/infrastructure/llm/LLMGateway.py
+++ b/cognee/infrastructure/llm/LLMGateway.py
@@ -31,7 +31,10 @@ def acreate_structured_output(
         llm_client = get_llm_client()
 
         return llm_client.acreate_structured_output(
-            text_input=text_input, system_prompt=system_prompt, response_model=response_model, **kwargs
+            text_input=text_input,
+            system_prompt=system_prompt,
+            response_model=response_model,
+            **kwargs,
         )
 
     @staticmethod
diff --git a/cognee/modules/data/deletion/prune_system.py b/cognee/modules/data/deletion/prune_system.py
index 645e1a2232..22a0fde5fe 100644
--- a/cognee/modules/data/deletion/prune_system.py
+++ b/cognee/modules/data/deletion/prune_system.py
@@ -5,6 +5,10 @@
 from cognee.infrastructure.databases.vector import get_vector_engine
 from cognee.infrastructure.databases.graph.get_graph_engine import get_graph_engine
 from cognee.infrastructure.databases.relational import get_relational_engine
+from cognee.infrastructure.databases.utils import (
+    get_graph_dataset_database_handler,
+    get_vector_dataset_database_handler,
+)
 from cognee.shared.cache import delete_cache
 from cognee.modules.users.models import DatasetDatabase
 from cognee.shared.logging_utils import get_logger
@@ -13,22 +17,13 @@
 
 async def prune_graph_databases():
-    async def _prune_graph_db(dataset_database: DatasetDatabase) -> dict:
-        from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
-            supported_dataset_database_handlers,
-        )
-
-        handler = supported_dataset_database_handlers[
-            dataset_database.graph_dataset_database_handler
-        ]
-        return await handler["handler_instance"].delete_dataset(dataset_database)
-
     db_engine = get_relational_engine()
 
     try:
-        data = await db_engine.get_all_data_from_table("dataset_database")
+        dataset_databases = await db_engine.get_all_data_from_table("dataset_database")
         # Go through each dataset database and delete the graph database
-        for data_item in data:
-            await _prune_graph_db(data_item)
+        for dataset_database in dataset_databases:
+            handler = get_graph_dataset_database_handler(dataset_database)
+            await handler["handler_instance"].delete_dataset(dataset_database)
     except (OperationalError, EntityNotFoundError) as e:
         logger.debug(
             "Skipping pruning of graph DB. Error when accessing dataset_database table: %s",
@@ -38,22 +33,13 @@ async def _prune_graph_db(dataset_database: DatasetDatabase) -> dict:
 
 async def prune_vector_databases():
-    async def _prune_vector_db(dataset_database: DatasetDatabase) -> dict:
-        from cognee.infrastructure.databases.dataset_database_handler.supported_dataset_database_handlers import (
-            supported_dataset_database_handlers,
-        )
-
-        handler = supported_dataset_database_handlers[
-            dataset_database.vector_dataset_database_handler
-        ]
-        return await handler["handler_instance"].delete_dataset(dataset_database)
-
    db_engine = get_relational_engine()
 
     try:
-        data = await db_engine.get_all_data_from_table("dataset_database")
+        dataset_databases = await db_engine.get_all_data_from_table("dataset_database")
         # Go through each dataset database and delete the vector database
-        for data_item in data:
-            await _prune_vector_db(data_item)
+        for dataset_database in dataset_databases:
+            handler = get_vector_dataset_database_handler(dataset_database)
+            await handler["handler_instance"].delete_dataset(dataset_database)
     except (OperationalError, EntityNotFoundError) as e:
         logger.debug(
             "Skipping pruning of vector DB. Error when accessing dataset_database table: %s",
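With the nested closures gone, both prune helpers reduce to the same loop over the dataset_database table. A condensed sketch of the shared pattern (_prune_all and its get_handler parameter are illustrative, not part of the diff):

    from cognee.infrastructure.databases.relational import get_relational_engine
    from cognee.infrastructure.databases.utils import get_graph_dataset_database_handler


    async def _prune_all(get_handler=get_graph_dataset_database_handler):
        # Every dataset_database row names the handler that owns its per-dataset
        # database; dispatch deletion through that handler's adapter.
        db_engine = get_relational_engine()
        dataset_databases = await db_engine.get_all_data_from_table("dataset_database")
        for dataset_database in dataset_databases:
            handler = get_handler(dataset_database)
            await handler["handler_instance"].delete_dataset(dataset_database)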
diff --git a/cognee/modules/data/methods/delete_dataset.py b/cognee/modules/data/methods/delete_dataset.py
index ff20ff9e75..dea10e741d 100644
--- a/cognee/modules/data/methods/delete_dataset.py
+++ b/cognee/modules/data/methods/delete_dataset.py
@@ -1,8 +1,34 @@
+from cognee.modules.users.models import DatasetDatabase
+from sqlalchemy import select
+
 from cognee.modules.data.models import Dataset
+from cognee.infrastructure.databases.utils.get_vector_dataset_database_handler import (
+    get_vector_dataset_database_handler,
+)
+from cognee.infrastructure.databases.utils.get_graph_dataset_database_handler import (
+    get_graph_dataset_database_handler,
+)
 from cognee.infrastructure.databases.relational import get_relational_engine
 
 
 async def delete_dataset(dataset: Dataset):
     db_engine = get_relational_engine()
 
+    async with db_engine.get_async_session() as session:
+        stmt = select(DatasetDatabase).where(
+            DatasetDatabase.dataset_id == dataset.id,
+        )
+        dataset_database: DatasetDatabase = await session.scalar(stmt)
+        if dataset_database:
+            graph_dataset_database_handler = get_graph_dataset_database_handler(dataset_database)
+            vector_dataset_database_handler = get_vector_dataset_database_handler(dataset_database)
+            await graph_dataset_database_handler["handler_instance"].delete_dataset(
+                dataset_database
+            )
+            await vector_dataset_database_handler["handler_instance"].delete_dataset(
+                dataset_database
+            )
+    # TODO: Remove the dataset from pipeline_run_status in Data objects related to the dataset as well.
+    # This blocks recreation of a dataset with the same name and data after deletion, as the run
+    # is marked as completed and will just be skipped even though the dataset is empty.
     return await db_engine.delete_entity_by_id(dataset.__tablename__, dataset.id)
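The net effect is that deleting a dataset now tears down its graph and vector databases before the relational row is removed. A hedged caller sketch (remove_dataset is a hypothetical wrapper; get_dataset and delete_dataset are used exactly as in the test below):

    from uuid import UUID

    from cognee.modules.data.methods.delete_dataset import delete_dataset
    from cognee.modules.data.methods.get_dataset import get_dataset


    async def remove_dataset(user_id: UUID, dataset_id: UUID) -> None:
        # Fetch the user's dataset, then delete it; delete_dataset drops the
        # per-dataset graph and vector databases before the dataset row itself.
        dataset = await get_dataset(user_id=user_id, dataset_id=dataset_id)
        if dataset is not None:
            await delete_dataset(dataset)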
diff --git a/cognee/tests/test_dataset_delete.py b/cognee/tests/test_dataset_delete.py
new file mode 100644
index 0000000000..372945bdb4
--- /dev/null
+++ b/cognee/tests/test_dataset_delete.py
@@ -0,0 +1,76 @@
+import os
+import asyncio
+import pathlib
+from uuid import UUID
+
+import cognee
+from cognee.shared.logging_utils import setup_logging, ERROR
+from cognee.modules.data.methods.delete_dataset import delete_dataset
+from cognee.modules.data.methods.get_dataset import get_dataset
+from cognee.modules.users.methods import get_default_user
+
+
+async def main():
+    # Set data and system directory paths
+    data_directory_path = str(
+        pathlib.Path(
+            os.path.join(pathlib.Path(__file__).parent, ".data_storage/test_dataset_delete")
+        ).resolve()
+    )
+    cognee.config.data_root_directory(data_directory_path)
+    cognee_directory_path = str(
+        pathlib.Path(
+            os.path.join(pathlib.Path(__file__).parent, ".cognee_system/test_dataset_delete")
+        ).resolve()
+    )
+    cognee.config.system_root_directory(cognee_directory_path)
+
+    # Create a clean slate for cognee -- reset data and system state
+    print("Resetting cognee data...")
+    await cognee.prune.prune_data()
+    await cognee.prune.prune_system(metadata=True)
+    print("Data reset complete.\n")
+
+    # The cognee knowledge graph will be created based on this text
+    text = """
+    Natural language processing (NLP) is an interdisciplinary
+    subfield of computer science and information retrieval.
+    """
+
+    # Add the text and make it available for cognify
+    await cognee.add(text, "nlp_dataset")
+    await cognee.add("Quantum computing is the study of quantum computers.", "quantum_dataset")
+
+    # Use LLMs and cognee to create the knowledge graph
+    ret_val = await cognee.cognify()
+    user = await get_default_user()
+
+    for val in ret_val:
+        dataset_id = str(val)
+        vector_db_path = os.path.join(
+            cognee_directory_path, "databases", str(user.id), dataset_id + ".lance.db"
+        )
+        graph_db_path = os.path.join(
+            cognee_directory_path, "databases", str(user.id), dataset_id + ".pkl"
+        )
+
+        # Check that the databases were properly created and exist before deletion
+        assert os.path.exists(graph_db_path), "Graph database file not found."
+        assert os.path.exists(vector_db_path), "Vector database file not found."
+
+        dataset = await get_dataset(user_id=user.id, dataset_id=UUID(dataset_id))
+        await delete_dataset(dataset)
+
+        # Confirm the databases have been deleted
+        assert not os.path.exists(graph_db_path), "Graph database file still found."
+        assert not os.path.exists(vector_db_path), "Vector database file still found."
+
+
+if __name__ == "__main__":
+    logger = setup_logging(log_level=ERROR)
+    loop = asyncio.new_event_loop()
+    asyncio.set_event_loop(loop)
+    try:
+        loop.run_until_complete(main())
+    finally:
+        loop.run_until_complete(loop.shutdown_asyncgens())
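Like the other e2e suites, the test runs standalone; the CI job added above invokes it with "uv run python ./cognee/tests/test_dataset_delete.py". Note that the file-existence assertions assume the default local backends, where each dataset's vector store lives at databases/<user_id>/<dataset_id>.lance.db and its graph at databases/<user_id>/<dataset_id>.pkl under the system root, so they would need adjusting for remote database handlers.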