diff --git a/src/backend/base/langflow/components/agents/agent.py b/src/backend/base/langflow/components/agents/agent.py index ca6c5927cf04..a147c981b39b 100644 --- a/src/backend/base/langflow/components/agents/agent.py +++ b/src/backend/base/langflow/components/agents/agent.py @@ -1,4 +1,5 @@ import json +import os import re from langchain_core.tools import StructuredTool @@ -525,9 +526,18 @@ async def update_build_config( # Add provider-specific fields build_config.update(fields_to_add) - # Apply provider-specific defaults (only for Azure OpenAI currently) + # Apply provider-specific defaults if field_value == "Azure OpenAI": build_config = apply_provider_defaults(field_value, build_config) + elif field_value == "Google Generative AI": + # Prefill Google API Key from environment variable + if "api_key" in build_config: + build_config["api_key"]["value"] = os.getenv("GOOGLE_API_KEY", "") + # Prefill Google Model from environment variable + if "model_name" in build_config: + google_model = os.getenv("GOOGLE_GENAI_MODEL", "") + if google_model: + build_config["model_name"]["value"] = google_model # Reset input types for agent_llm build_config["agent_llm"]["input_types"] = [] @@ -617,4 +627,4 @@ async def _get_tools(self) -> list[Tool]: ) if hasattr(self, "tools_metadata"): tools = component_toolkit(component=self, metadata=self.tools_metadata).update_tools_metadata(tools=tools) - return tools + return tools \ No newline at end of file diff --git a/src/backend/base/langflow/components/azure/azure_openai_embeddings.py b/src/backend/base/langflow/components/azure/azure_openai_embeddings.py index 84eef7bbb6b9..dcc3107fa8d1 100644 --- a/src/backend/base/langflow/components/azure/azure_openai_embeddings.py +++ b/src/backend/base/langflow/components/azure/azure_openai_embeddings.py @@ -1,3 +1,5 @@ +import os + from langchain_openai import AzureOpenAIEmbeddings from langflow.base.models.model import LCModelComponent @@ -35,11 +37,13 @@ class 
AzureOpenAIEmbeddingsComponent(LCModelComponent): display_name="Azure Endpoint", required=True, info="Your Azure endpoint, including the resource. Example: `https://example-resource.azure.openai.com/`", + value=os.getenv("AZURE_OPENAI_EMBEDDING_ENDPOINT", ""), ), MessageTextInput( name="azure_deployment", display_name="Deployment Name", required=True, + value=os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME", ""), ), DropdownInput( name="api_version", @@ -51,7 +55,10 @@ class AzureOpenAIEmbeddingsComponent(LCModelComponent): SecretStrInput( name="api_key", display_name="Azure OpenAI API Key", - required=True, + required=False, + show=True, + real_time_refresh=True, + value=os.getenv("AZURE_OPENAI_EMBEDDING_API_KEY", ""), ), IntInput( name="dimensions", @@ -67,17 +74,22 @@ class AzureOpenAIEmbeddingsComponent(LCModelComponent): ] def build_embeddings(self) -> Embeddings: + # Add fallback to environment variables if fields are empty + azure_endpoint = self.azure_endpoint or os.getenv("AZURE_OPENAI_EMBEDDING_ENDPOINT") + azure_deployment = self.azure_deployment or os.getenv("AZURE_OPENAI_EMBEDDING_DEPLOYMENT_NAME") + api_key = self.api_key or os.getenv("AZURE_OPENAI_EMBEDDING_API_KEY") + try: embeddings = AzureOpenAIEmbeddings( model=self.model, - azure_endpoint=self.azure_endpoint, - azure_deployment=self.azure_deployment, + azure_endpoint=azure_endpoint, + azure_deployment=azure_deployment, api_version=self.api_version, - api_key=self.api_key, + api_key=api_key, dimensions=self.dimensions or None, ) except Exception as e: msg = f"Could not connect to AzureOpenAIEmbeddings API: {e}" raise ValueError(msg) from e - return embeddings + return embeddings \ No newline at end of file diff --git a/src/backend/base/langflow/components/models/embedding_model.py b/src/backend/base/langflow/components/models/embedding_model.py index 9ed1ea26b96e..52e32c0655b1 100644 --- a/src/backend/base/langflow/components/models/embedding_model.py +++ 
b/src/backend/base/langflow/components/models/embedding_model.py @@ -112,6 +112,6 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam build_config["model"]["options"] = OPENAI_EMBEDDING_MODEL_NAMES build_config["model"]["value"] = OPENAI_EMBEDDING_MODEL_NAMES[0] build_config["api_key"]["display_name"] = "OpenAI API Key" - build_config["api_key"]["value"] = os.getenv("OPENAI_API_KEY", "") + build_config["api_key"]["value"] = os.getenv("OPENAI_API_KEY", "") build_config["api_base"]["display_name"] = "OpenAI API Base URL" return build_config \ No newline at end of file diff --git a/src/backend/base/langflow/components/models/language_model.py b/src/backend/base/langflow/components/models/language_model.py index 3d3744e39124..ae7d3eea1ff4 100644 --- a/src/backend/base/langflow/components/models/language_model.py +++ b/src/backend/base/langflow/components/models/language_model.py @@ -128,7 +128,7 @@ def build_model(self) -> LanguageModel: if provider == "Azure OpenAI": api_key = self.api_key or os.getenv("AZURE_OPENAI_API_KEY") azure_endpoint = self.azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT") - azure_deployment = self.azure_deployment or os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4o") + azure_deployment = self.azure_deployment or os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4.1") api_version = self.api_version or os.getenv("AZURE_API_VERSION", "2024-06-01") if not api_key: @@ -214,7 +214,7 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam # Prefill Azure fields from environment variables build_config["api_key"]["value"] = os.getenv("AZURE_OPENAI_API_KEY", "") build_config["azure_endpoint"]["value"] = os.getenv("AZURE_OPENAI_ENDPOINT", "") - build_config["azure_deployment"]["value"] = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4o") + build_config["azure_deployment"]["value"] = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4.1") build_config["api_version"]["value"] = os.getenv("AZURE_API_VERSION", 
"2024-06-01") elif field_value == "Anthropic": build_config["model_name"]["options"] = ANTHROPIC_MODELS @@ -223,7 +223,9 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam build_config["api_key"]["value"] = os.getenv("ANTHROPIC_API_KEY", "") elif field_value == "Google": build_config["model_name"]["options"] = GOOGLE_GENERATIVE_AI_MODELS - build_config["model_name"]["value"] = GOOGLE_GENERATIVE_AI_MODELS[0] + # Prefill Google Model from environment variable if available, otherwise use default + google_model = os.getenv("GOOGLE_GENAI_MODEL", "") + build_config["model_name"]["value"] = google_model if google_model else GOOGLE_GENERATIVE_AI_MODELS[0] build_config["api_key"]["display_name"] = "Google API Key" build_config["api_key"]["value"] = os.getenv("GOOGLE_API_KEY", "") elif field_name == "model_name" and field_value.startswith("o1") and self.provider == "OpenAI": @@ -232,4 +234,4 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam build_config["system_message"]["show"] = False elif field_name == "model_name" and not field_value.startswith("o1") and "system_message" in build_config: build_config["system_message"]["show"] = True - return build_config \ No newline at end of file + return build_config diff --git a/src/backend/base/langflow/components/qdrant/qdrant.py b/src/backend/base/langflow/components/qdrant/qdrant.py index 72a5003d6693..f7b8bf54f771 100644 --- a/src/backend/base/langflow/components/qdrant/qdrant.py +++ b/src/backend/base/langflow/components/qdrant/qdrant.py @@ -1,7 +1,9 @@ from langchain.embeddings.base import Embeddings from langchain_community.vectorstores import Qdrant +import os from langflow.base.vectorstores.model import LCVectorStoreComponent, check_cached_vector_store +from typing import Dict from langflow.helpers.data import docs_to_data from langflow.inputs.inputs import MessageTextInput from langflow.io import ( @@ -10,6 +12,7 @@ IntInput, SecretStrInput, StrInput, + 
BoolInput ) from langflow.schema.data import Data import uuid @@ -26,7 +29,7 @@ class QdrantVectorStoreComponent(LCVectorStoreComponent): StrInput(name="host", display_name="Host", value="localhost", advanced=True), IntInput(name="port", display_name="Port", value=6333, advanced=True), IntInput(name="grpc_port", display_name="gRPC Port", value=6334, advanced=True), - SecretStrInput(name="api_key", display_name="Qdrant API Key", advanced=True), + SecretStrInput(name="api_key", display_name="Qdrant API Key", required=False), StrInput(name="prefix", display_name="Prefix", advanced=True), IntInput(name="timeout", display_name="Timeout", advanced=True), StrInput(name="path", display_name="Path", advanced=True), @@ -66,11 +69,13 @@ def build_vector_store(self) -> Qdrant: "metadata_payload_key": self.metadata_payload_key, } + api_key = self.api_key or os.getenv("QDRANT_API_KEY", "") + server_kwargs = { "host": self.host or None, "port": int(self.port), # Ensure port is an integer "grpc_port": int(self.grpc_port), # Ensure grpc_port is an integer - "api_key": self.api_key, + "api_key": api_key, "prefix": self.prefix, # Ensure timeout is an integer "timeout": int(self.timeout) if self.timeout else None, diff --git a/src/backend/base/langflow/custom/default_providers.py b/src/backend/base/langflow/custom/default_providers.py index addec2cc42bc..fd87398e7b06 100644 --- a/src/backend/base/langflow/custom/default_providers.py +++ b/src/backend/base/langflow/custom/default_providers.py @@ -34,7 +34,7 @@ def _get_azure_openai_defaults(build_config: dict) -> Dict[str, Any]: }, "azure_deployment": { **build_config.get("azure_deployment", {}), - "value": os.environ.get("AZURE_DEPLOYMENT_NAME", "gpt-4o") + "value": os.environ.get("AZURE_OPENAI_DEPLOYMENT_NAME", "gpt-4.1") }, "azure_endpoint": { **build_config.get("azure_endpoint", {}),