Skip to content
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
fix: Address review comments
Signed-off-by: Marcel Klehr <mklehr@gmx.net>
  • Loading branch information
marcelklehr committed Jul 29, 2024
commit 7247abbc127fdbca8234db358574985285a5ef07
2 changes: 1 addition & 1 deletion context_chat_backend/controller.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from .config_parser import get_config
from .download import background_init, ensure_models
from .dyn_loader import EmbeddingModelLoader, LLMModelLoader, LoaderException, VectorDBLoader
from .models.nc_texttotext import LlmException
from .models import LlmException
from .ocs_utils import AppAPIAuthMiddleware
from .setup_functions import ensure_config_file, repair_run, setup_env_vars
from .utils import JSONResponse, enabled_guard, update_progress, value_of
Expand Down
5 changes: 4 additions & 1 deletion context_chat_backend/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
'llm': _llm_models,
}

__all__ = ['init_model', 'load_model', 'models']
__all__ = ['init_model', 'load_model', 'models', 'LlmException']


def load_model(model_type: str, model_info: tuple[str, dict]) -> Embeddings | LLM | None:
Expand Down Expand Up @@ -54,3 +54,6 @@ def init_model(model_type: str, model_info: tuple[str, dict]):
raise AssertionError(f'Error: {model} does not implement "llm" type or has returned an invalid object')

return model

class LlmException(Exception):
    """Raised when a Nextcloud TaskProcessing LLM task fails or its result cannot be parsed."""
21 changes: 12 additions & 9 deletions context_chat_backend/models/nc_texttotext.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
import json
import time
from typing import Any, Dict, List, Optional
from pydantic import BaseModel
from pydantic import BaseModel, ValidationError

from nc_py_api import Nextcloud
from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM

from context_chat_backend.models import LlmException


def get_model_for(model_type: str, model_config: dict):
if model_config is None:
return None
Expand Down Expand Up @@ -62,13 +65,16 @@ def _call(
}
})

task = Task.model_validate(response["task"])

while task.status != 'STATUS_SUCCESSFUL' and task.status != 'STATUS_FAILED':
time.sleep(5)
response = nc.ocs("GET", f"/ocs/v1.php/taskprocessing/task/{task.id}")
try:
task = Task.model_validate(response["task"])

while task.status != 'STATUS_SUCCESSFUL' and task.status != 'STATUS_FAILED':
time.sleep(5)
response = nc.ocs("GET", f"/ocs/v1.php/taskprocessing/task/{task.id}")
task = Task.model_validate(response["task"])
except ValidationError as e:
raise LlmException('Failed to parse Nextcloud TaskProcessing task result')

if task.status == 'STATUS_FAILED':
raise LlmException('Nextcloud TaskProcessing Task failed')

Expand All @@ -89,6 +95,3 @@ def _identifying_params(self) -> Dict[str, Any]:
def _llm_type(self) -> str:
"""Get the type of language model used by this chat model. Used for logging purposes only."""
return "nc_texttotetx"

class LlmException(Exception):
...