Merged
2 changes: 1 addition & 1 deletion src/backend/base/langflow/components/logic/run_flow.py
@@ -7,7 +7,7 @@


class RunFlowComponent(RunFlowBaseComponent):
display_name = "Run Flow"
display_name = "Workflow"
description = (
"Creates a tool component from a Flow that takes all its inputs and runs it. "
" \n **Select a Flow to use the tool mode**"
@@ -1,3 +1,4 @@
import os
from typing import Any

from langchain_openai import OpenAIEmbeddings
@@ -49,6 +50,7 @@ class EmbeddingModelComponent(LCEmbeddingsModel):
required=True,
show=True,
real_time_refresh=True,
value=os.getenv("OPENAI_API_KEY", ""),
),
MessageTextInput(
name="api_base",
@@ -78,7 +80,7 @@ class EmbeddingModelComponent(LCEmbeddingsModel):
def build_embeddings(self) -> Embeddings:
provider = self.provider
model = self.model
api_key = self.api_key
api_key = self.api_key or os.getenv("OPENAI_API_KEY")
api_base = self.api_base
dimensions = self.dimensions
chunk_size = self.chunk_size
@@ -110,5 +112,6 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
build_config["model"]["options"] = OPENAI_EMBEDDING_MODEL_NAMES
build_config["model"]["value"] = OPENAI_EMBEDDING_MODEL_NAMES[0]
build_config["api_key"]["display_name"] = "OpenAI API Key"
build_config["api_key"]["value"] = os.getenv("OPENAI_API_KEY", "")
build_config["api_base"]["display_name"] = "OpenAI API Base URL"
return build_config
return build_config
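
The embedding hunks above apply the same precedence twice: prefill the `api_key` input from `OPENAI_API_KEY`, then fall back to the environment again when `build_embeddings` runs. A minimal, self-contained sketch of that precedence (the `resolve_api_key` helper below is a hypothetical illustration, not code from this PR):

```python
import os


def resolve_api_key(field_value: str | None, env_var: str = "OPENAI_API_KEY") -> str:
    """Prefer the value typed into the component field; otherwise read the environment."""
    api_key = field_value or os.getenv(env_var)
    if not api_key:
        msg = f"No API key provided and {env_var} is not set"
        raise ValueError(msg)
    return api_key


# With OPENAI_API_KEY exported, an empty field still resolves to a usable key.
os.environ.setdefault("OPENAI_API_KEY", "sk-example")
print(resolve_api_key(""))
```
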
65 changes: 41 additions & 24 deletions src/backend/base/langflow/components/models/language_model.py
@@ -1,3 +1,4 @@
import os
from typing import Any

from langchain_anthropic import ChatAnthropic
@@ -48,26 +49,29 @@ class LanguageModelComponent(LCModelComponent):
required=False,
show=True,
real_time_refresh=True,
value=os.getenv("OPENAI_API_KEY", ""),
),
MessageTextInput(
name="azure_endpoint",
display_name="Azure Endpoint",
info="Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`",
show=False,
real_time_refresh=True,
value=os.getenv("AZURE_OPENAI_ENDPOINT", ""),
),
MessageTextInput(
name="azure_deployment",
display_name="Deployment Name",
info="The name of your Azure OpenAI deployment",
show=False,
real_time_refresh=True,
value=os.getenv("AZURE_DEPLOYMENT_NAME", ""),
),
DropdownInput(
name="api_version",
display_name="API Version",
options=["2024-06-01", "2024-07-01-preview", "2024-08-01-preview", "2024-09-01-preview"],
value="2024-06-01",
value=os.getenv("AZURE_API_VERSION", "2024-06-01"),
show=False,
real_time_refresh=True,
),
@@ -106,7 +110,8 @@ def build_model(self) -> LanguageModel:
stream = self.stream

if provider == "OpenAI":
if not self.api_key:
api_key = self.api_key or os.getenv("OPENAI_API_KEY")
if not api_key:
msg = "OpenAI API key is required when using OpenAI provider"
raise ValueError(msg)

@@ -118,47 +123,43 @@ def build_model(self) -> LanguageModel:
model_name=model_name,
temperature=temperature,
streaming=stream,
openai_api_key=self.api_key,
openai_api_key=api_key,
)
if provider == "Azure OpenAI":
if not self.api_key:
api_key = self.api_key or os.getenv("AZURE_OPENAI_API_KEY")
azure_endpoint = self.azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT")
azure_deployment = self.azure_deployment or os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4o")
api_version = self.api_version or os.getenv("AZURE_API_VERSION", "2024-06-01")

if not api_key:
msg = "Azure OpenAI API key is required when using Azure OpenAI provider"
raise ValueError(msg)
if not self.azure_endpoint:
if not azure_endpoint:
msg = "Azure endpoint is required when using Azure OpenAI provider"
raise ValueError(msg)
if not self.azure_deployment:
if not azure_deployment:
msg = "Azure deployment name is required when using Azure OpenAI provider"
raise ValueError(msg)

return AzureChatOpenAI(
azure_endpoint=self.azure_endpoint,
azure_deployment=self.azure_deployment,
api_version=self.api_version,
api_key=self.api_key,
azure_endpoint=azure_endpoint,
azure_deployment=azure_deployment,
api_version=api_version,
api_key=api_key,
temperature=temperature,
streaming=stream,
)
if provider == "Anthropic":
if not self.api_key:
api_key = self.api_key or os.getenv("ANTHROPIC_API_KEY")
if not api_key:
msg = "Anthropic API key is required when using Anthropic provider"
raise ValueError(msg)
return ChatAnthropic(
model=model_name,
temperature=temperature,
streaming=stream,
anthropic_api_key=self.api_key,
anthropic_api_key=api_key,
)
# if provider == "Google":
# if not self.api_key:
# msg = "Google API key is required when using Google provider"
# raise ValueError(msg)
# return ChatGoogleGenerativeAI(
# model=model_name,
# temperature=temperature,
# streaming=stream,
# google_api_key=self.api_key,
# )
if provider == "Google":
# Allow both API key and service account (managed identity) authentication
if self.api_key:
@@ -170,6 +171,15 @@ def build_model(self) -> LanguageModel:
google_api_key=self.api_key,
)
else:
# Check for API key in environment
google_api_key = os.getenv("GOOGLE_API_KEY")
if google_api_key:
return ChatGoogleGenerativeAI(
model=model_name,
temperature=temperature,
streaming=stream,
google_api_key=google_api_key,
)
# Use default Google credentials (service account or managed identity)
# This will automatically pick up credentials from environment or GCP runtime
return ChatGoogleGenerativeAI(
@@ -178,7 +188,6 @@ def build_model(self) -> LanguageModel:
streaming=stream,
)


msg = f"Unknown provider: {provider}"
raise ValueError(msg)

@@ -194,25 +203,33 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
build_config["model_name"]["options"] = OPENAI_CHAT_MODEL_NAMES + OPENAI_REASONING_MODEL_NAMES
build_config["model_name"]["value"] = OPENAI_CHAT_MODEL_NAMES[0]
build_config["api_key"]["display_name"] = "OpenAI API Key"
build_config["api_key"]["value"] = os.getenv("OPENAI_API_KEY", "")
elif field_value == "Azure OpenAI":
# Show Azure-specific fields
build_config["azure_endpoint"]["show"] = True
build_config["azure_deployment"]["show"] = True
build_config["api_version"]["show"] = True
build_config["model_name"]["show"] = False # Azure uses deployment name instead
build_config["api_key"]["display_name"] = "Azure OpenAI API Key"
# Prefill Azure fields from environment variables
build_config["api_key"]["value"] = os.getenv("AZURE_OPENAI_API_KEY", "")
build_config["azure_endpoint"]["value"] = os.getenv("AZURE_OPENAI_ENDPOINT", "")
build_config["azure_deployment"]["value"] = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4o")
build_config["api_version"]["value"] = os.getenv("AZURE_API_VERSION", "2024-06-01")
elif field_value == "Anthropic":
build_config["model_name"]["options"] = ANTHROPIC_MODELS
build_config["model_name"]["value"] = ANTHROPIC_MODELS[0]
build_config["api_key"]["display_name"] = "Anthropic API Key"
build_config["api_key"]["value"] = os.getenv("ANTHROPIC_API_KEY", "")
elif field_value == "Google":
build_config["model_name"]["options"] = GOOGLE_GENERATIVE_AI_MODELS
build_config["model_name"]["value"] = GOOGLE_GENERATIVE_AI_MODELS[0]
build_config["api_key"]["display_name"] = "Google API Key"
build_config["api_key"]["value"] = os.getenv("GOOGLE_API_KEY", "")
elif field_name == "model_name" and field_value.startswith("o1") and self.provider == "OpenAI":
# Hide system_message for o1 models - currently unsupported
if "system_message" in build_config:
build_config["system_message"]["show"] = False
elif field_name == "model_name" and not field_value.startswith("o1") and "system_message" in build_config:
build_config["system_message"]["show"] = True
return build_config
return build_config
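
After these changes, `build_model` and `update_build_config` resolve credentials per provider in the same order: the explicit field value wins, then the provider's environment variable, and only Google may additionally fall back to default credentials (service account or managed identity). A compact sketch of that precedence (the mapping and `resolve_provider_key` helper are hypothetical illustrations, assuming the provider names used in the diff):

```python
import os

# Environment variables the updated component consults, per the diff above.
PROVIDER_ENV_VARS = {
    "OpenAI": "OPENAI_API_KEY",
    "Azure OpenAI": "AZURE_OPENAI_API_KEY",
    "Anthropic": "ANTHROPIC_API_KEY",
    "Google": "GOOGLE_API_KEY",
}


def resolve_provider_key(provider: str, field_value: str | None) -> str | None:
    """Field value first, then the provider's env var; only Google may return None,
    meaning 'defer to default Google credentials'."""
    env_var = PROVIDER_ENV_VARS[provider]
    api_key = field_value or os.getenv(env_var)
    if not api_key and provider != "Google":
        msg = f"{provider} API key is required (set {env_var} or fill in the field)"
        raise ValueError(msg)
    return api_key or None
```

Azure additionally reads `AZURE_OPENAI_ENDPOINT`, `AZURE_OPENAI_DEPLOYMENT_NAME`, and `AZURE_API_VERSION` for its endpoint, deployment name, and API version, with the same field-then-environment precedence.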