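"""chatllm.py

LangChain LLM wrapper around a PaddleNLP ``text2text_generation`` Taskflow,
exposing the generation pipeline through LangChain's ``LLM`` interface.
"""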
from typing import List, Optional

from langchain.llms.base import LLM
from langchain.llms.utils import enforce_stop_tokens
from paddlenlp import Taskflow

# Load the PaddleNLP text-to-text generation pipeline once at import time so
# that every ChatLLM instance shares the same underlying model.
chatbot = Taskflow("text2text_generation", batch_size=2)


class ChatLLM(LLM):
    """LangChain-compatible wrapper around the PaddleNLP chatbot Taskflow."""

    max_token: int = 10000
    temperature: float = 0.1
    top_p: float = 0.9
    history: List = []
    # Unused here; the Taskflow pipeline manages its own tokenizer and model.
    # Kept for interface compatibility.
    tokenizer: object = None
    model: object = None

    @property
    def _llm_type(self) -> str:
        return "ChatLLM"

    def _call(self,
              prompt: str,
              stop: Optional[List[str]] = None) -> str:
        # Taskflow expects a batch of inputs and returns a dict whose
        # "result" key holds one generated string per input.
        results = chatbot([prompt])
        response = results["result"][0]
        # Truncate the generation at the first stop token, if any were given.
        if stop is not None:
            response = enforce_stop_tokens(response, stop)
        return response
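

if __name__ == "__main__":
    # Minimal usage sketch (an illustrative addition, not in the original
    # file). Assumes the default text2text_generation weights are available
    # to PaddleNLP (they are downloaded on first use).
    llm = ChatLLM()
    # LangChain's LLM.__call__ forwards to _call, so the wrapper can be
    # invoked like a function; stop tokens are optional.
    print(llm("你好"))  # "Hello" — the default pipeline targets Chinese text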