Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
0eab437
Fix agents; only stream the current chunk back
jordanrfrazier Sep 24, 2025
bb65ba9
Fix length access
jordanrfrazier Sep 24, 2025
3992e04
[autofix.ci] apply automated fixes
autofix-ci[bot] Sep 24, 2025
755bd78
[autofix.ci] apply automated fixes (attempt 2/3)
autofix-ci[bot] Sep 24, 2025
53cd69d
move the empty text skips to the agent
jordanrfrazier Sep 24, 2025
e380871
[autofix.ci] apply automated fixes
autofix-ci[bot] Sep 24, 2025
9e621de
Fix message type - passes token now
jordanrfrazier Sep 24, 2025
aafbdf0
rebase fixes
jordanrfrazier Oct 10, 2025
3e84c65
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 10, 2025
56dcb12
Fix tests for new behavior with eventmgr
jordanrfrazier Oct 10, 2025
2acf2fc
Update vector store rag starter project
erichare Oct 10, 2025
f2c2171
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 10, 2025
5de3091
Ruff fixes
jordanrfrazier Oct 10, 2025
2e1c798
Add back params
jordanrfrazier Oct 10, 2025
1d17d69
Add event manager
jordanrfrazier Oct 11, 2025
65b3ed4
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 11, 2025
decf529
[autofix.ci] apply automated fixes (attempt 2/3)
autofix-ci[bot] Oct 11, 2025
e8bfb76
combine if statements
jordanrfrazier Oct 11, 2025
7b3ea57
Cleanup passing of callbacks
jordanrfrazier Oct 13, 2025
99268a7
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 13, 2025
abed50d
remove added content param on component
jordanrfrazier Oct 13, 2025
f6182c7
Update how empty data is passed to fastapi endpoint -- removes embed=…
jordanrfrazier Oct 13, 2025
00adb50
Revert "Update how empty data is passed to fastapi endpoint -- remove…
jordanrfrazier Oct 13, 2025
f8100de
Add uv lock from 1.6.4 to not upgrade deps
jordanrfrazier Oct 13, 2025
361732a
Ruff, tests, mypy
jordanrfrazier Oct 13, 2025
991e936
Ruff, tests, mypy
jordanrfrazier Oct 13, 2025
9c0df28
Fix tests to use token callback instead of event manager
jordanrfrazier Oct 13, 2025
ea9b33d
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 13, 2025
70de4ca
rebase redundant fixes
jordanrfrazier Oct 13, 2025
0ce2576
uv lock
jordanrfrazier Oct 13, 2025
9d53282
simplify the run agent method
jordanrfrazier Oct 13, 2025
007c220
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 13, 2025
b3ebf72
Ensure the input is the text content
jordanrfrazier Oct 13, 2025
d47be51
Use actual text instead of lc_message content
jordanrfrazier Oct 13, 2025
1042480
[autofix.ci] apply automated fixes
autofix-ci[bot] Oct 13, 2025
e1d65f3
keep message id consistent throughout streaming
jordanrfrazier Oct 14, 2025
afddccd
remove comment that lint didn't like
jordanrfrazier Oct 14, 2025
2ea0290
ruff
jordanrfrazier Oct 14, 2025
15ade5f
Merge branch 'release-1.6.5' into fix-agent-streaming-tokens
jordanrfrazier Oct 14, 2025
f289de7
Be a little more lenient in sorting tests
jordanrfrazier Oct 14, 2025
86d0931
Fix test to not call persistence layer
jordanrfrazier Oct 14, 2025
ca54754
Add multimodal test
jordanrfrazier Oct 14, 2025
f54c461
mypy
jordanrfrazier Oct 14, 2025
8f43960
lint
jordanrfrazier Oct 14, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
104 changes: 65 additions & 39 deletions src/backend/base/langflow/base/agents/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,11 @@

from langchain.agents import AgentExecutor, BaseMultiActionAgent, BaseSingleActionAgent
from langchain.agents.agent import RunnableAgent
from langchain_core.messages import HumanMessage
from langchain_core.messages import BaseMessage, HumanMessage
from langchain_core.runnables import Runnable

from langflow.base.agents.callback import AgentAsyncHandler
from langflow.base.agents.events import ExceptionWithMessageError, process_agent_events
from langflow.base.agents.utils import data_to_messages
from langflow.custom.custom_component.component import Component, _get_component_toolkit
from langflow.field_typing import Tool
from langflow.inputs.inputs import InputTypes, MultilineInput
Expand All @@ -23,9 +22,7 @@
from langflow.utils.constants import MESSAGE_SENDER_AI

if TYPE_CHECKING:
from langchain_core.messages import BaseMessage

from langflow.schema.log import SendMessageFunctionType
from langflow.schema.log import OnTokenFunctionType, SendMessageFunctionType


DEFAULT_TOOLS_DESCRIPTION = "A helpful assistant with access to the following tools:"
Expand Down Expand Up @@ -118,6 +115,24 @@ def get_chat_history_data(self) -> list[Data] | None:
# might be overridden in subclasses
return None

def _data_to_messages_skip_empty(self, data: list[Data]) -> list[BaseMessage]:
    """Convert data to messages, filtering only empty text while preserving non-text content.

    Note: added to fix issue with certain providers failing when given empty text.
    """

    def _is_blank_text(item: Data) -> bool:
        # Drop an item only when it carries a text attribute that is an
        # empty or whitespace-only string; items without a ``text``
        # attribute (or with non-string text) are kept.
        candidate = getattr(item, "text", None)
        return isinstance(candidate, str) and not candidate.strip()

    return [item.to_lc_message() for item in data if not _is_blank_text(item)]

async def run_agent(
self,
agent: Runnable | BaseSingleActionAgent | BaseMultiActionAgent | AgentExecutor,
Expand All @@ -137,49 +152,52 @@ async def run_agent(
max_iterations=max_iterations,
)
# Convert input_value to proper format for agent
if hasattr(self.input_value, "to_lc_message") and callable(self.input_value.to_lc_message):
lc_message = None
if isinstance(self.input_value, Message):
lc_message = self.input_value.to_lc_message()
input_text = lc_message.content if hasattr(lc_message, "content") else str(lc_message)
input_dict: dict[str, str | list[BaseMessage] | BaseMessage] = {"input": lc_message}
else:
lc_message = None
input_text = self.input_value

input_dict: dict[str, str | list[BaseMessage]] = {
"input": self.input_value.to_lc_message() if isinstance(self.input_value, Message) else self.input_value
}
input_dict = {"input": self.input_value}

if hasattr(self, "system_prompt"):
input_dict["system_prompt"] = self.system_prompt

# Handle chat_history conversion
if hasattr(self, "chat_history") and self.chat_history:
if isinstance(self.chat_history, Data):
input_dict["chat_history"] = data_to_messages(self.chat_history)
# Handle both lfx.schema.message.Message and langflow.schema.message.Message types
if all(hasattr(m, "to_data") and callable(m.to_data) and "text" in m.data for m in self.chat_history):
input_dict["chat_history"] = data_to_messages(self.chat_history)
if all(isinstance(m, Message) for m in self.chat_history):
input_dict["chat_history"] = data_to_messages([m.to_data() for m in self.chat_history])

if (
hasattr(lc_message, "content")
and isinstance(lc_message.content, list)
and all(isinstance(m, Message) for m in self.chat_history)
):
input_dict["chat_history"] = data_to_messages([m.to_data() for m in self.chat_history])

if isinstance(self.chat_history, Data) or all(
hasattr(m, "to_data") and callable(m.to_data) and "text" in m.data for m in self.chat_history
):
input_dict["chat_history"] = self._data_to_messages_skip_empty(self.chat_history)
elif all(isinstance(m, Message) for m in self.chat_history):
input_dict["chat_history"] = self._data_to_messages_skip_empty([m.to_data() for m in self.chat_history])

# Handle multimodal input (images + text)
# Note: Agent input must be a string, so we extract text and move images to chat_history
if lc_message and hasattr(lc_message, "content") and isinstance(lc_message.content, list):
# ! Because the input has to be a string, we must pass the images in the chat_history

# Extract images and text content
image_dicts = [item for item in lc_message.content if item.get("type") == "image"]
lc_message.content = [item for item in lc_message.content if item.get("type") != "image"]

if "chat_history" not in input_dict:
input_dict["chat_history"] = []
if isinstance(input_dict["chat_history"], list):
input_dict["chat_history"].extend(HumanMessage(content=[image_dict]) for image_dict in image_dicts)
else:
input_dict["chat_history"] = [HumanMessage(content=[image_dict]) for image_dict in image_dicts]
input_dict["input"] = input_text
text_content = [item for item in lc_message.content if item.get("type") != "image"]

# Extract text strings from text content items
text_strings = [
item.get("text", "")
for item in text_content
if item.get("type") == "text" and item.get("text", "").strip()
]

# Set input to concatenated text or empty string
input_dict["input"] = " ".join(text_strings) if text_strings else ""

# Add images to chat_history
if image_dicts:
if "chat_history" not in input_dict:
input_dict["chat_history"] = []
if isinstance(input_dict["chat_history"], list):
input_dict["chat_history"].extend(HumanMessage(content=[img]) for img in image_dicts)
elif lc_message:
# Simple text message - extract string content
input_dict["input"] = lc_message.content if hasattr(lc_message, "content") else str(lc_message)

if hasattr(self, "graph"):
session_id = self.graph.session_id
elif hasattr(self, "_session_id"):
Expand All @@ -194,6 +212,13 @@ async def run_agent(
content_blocks=[ContentBlock(title="Agent Steps", contents=[])],
session_id=session_id,
)

# Create token callback if event_manager is available
# This wraps the event_manager's on_token method to match OnTokenFunctionType Protocol
on_token_callback: OnTokenFunctionType | None = None
if self._event_manager:
on_token_callback = cast("OnTokenFunctionType", self._event_manager.on_token)

try:
result = await process_agent_events(
runnable.astream_events(
Expand All @@ -203,6 +228,7 @@ async def run_agent(
),
agent_message,
cast("SendMessageFunctionType", self.send_message),
on_token_callback,
)
except ExceptionWithMessageError as e:
if hasattr(e, "agent_message") and hasattr(e.agent_message, "id"):
Expand Down
Loading
Loading