Skip to content
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
fix stop detections
  • Loading branch information
mingfang committed May 20, 2023
commit c08392069d062592f5ef467713a78bf1651d6fe8
34 changes: 25 additions & 9 deletions fastchat/serve/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,31 +160,47 @@ def generate_stream(
skip_special_tokens=True,
spaces_between_special_tokens=False,
)

def maybeStop(output, stop_str):
    """Return True if `output` could be in the middle of producing `stop_str`.

    Checks whether any non-empty suffix of `output` is a prefix of
    `stop_str` — i.e. the tail of the generated text looks like the
    beginning of the stop string, so the caller should hold back the
    current chunk instead of yielding it.

    Args:
        output: the text generated so far.
        stop_str: the stop sequence to watch for.

    Returns:
        True if `output` ends with a (possibly partial) prefix of
        `stop_str`; False otherwise (including when either string is
        empty).
    """
    # A suffix longer than stop_str can never be a prefix of it, so only
    # suffix lengths 1..min(len(output), len(stop_str)) need checking.
    # (The original looped from i = 0 and leaned on the output[-0:] ==
    # output quirk to cover the full-string case — same behavior, far
    # less obvious.)
    longest = min(len(output), len(stop_str))
    for length in range(1, longest + 1):
        if stop_str.startswith(output[-length:]):
            return True
    return False

if stop_str:
if isinstance(stop_str, str):
pos = output.rfind(stop_str, rfind_start)
if pos != -1:
output = output[:pos]
stopped = True
else:
maybeStop = maybeStop(output, stop_str)
Comment thread
merrymercy marked this conversation as resolved.
Outdated
elif isinstance(stop_str, Iterable):
for each_stop in stop_str:
pos = output.rfind(each_stop, rfind_start)
if pos != -1:
output = output[:pos]
stopped = True
break
else:
maybeStop = maybeStop(output, each_stop)
if maybeStop:
break
else:
raise ValueError("Invalid stop field type.")

yield {
"text": output,
"usage": {
"prompt_tokens": input_echo_len,
"completion_tokens": i,
"total_tokens": input_echo_len + i,
},
"finish_reason": None,
}
if not maybeStop:
yield {
"text": output,
"usage": {
"prompt_tokens": input_echo_len,
"completion_tokens": i,
"total_tokens": input_echo_len + i,
},
"finish_reason": None,
}

if stopped:
break
Expand Down