Agent Secret Store Docs
📘 Guides

LangChain Integration

Build LangChain agents that fetch credentials from the vault at runtime — no hardcoded API keys, full audit trail for every access.

Installation

Shell
pip install agentsecretstore langchain langchain-openai

VaultSecretTool

Create a custom LangChain tool that wraps the vault SDK. The tool receives a scoped token (not the master key) so the agent can only access secrets within its allowed scope:

vault_tool.py
import asyncio
from langchain.tools import BaseTool
from langchain_core.callbacks import CallbackManagerForToolRun
from agentsecretstore import AgentVault
from pydantic import BaseModel, Field

class GetSecretInput(BaseModel):
    """Input schema for the get_secret tool: the vault path of the secret to fetch."""

    path: str = Field(description="Secret path, e.g. 'production/openai/api-key'")

class VaultSecretTool(BaseTool):
    """LangChain tool that retrieves a secret from Agent Secret Store.

    The tool holds a *scoped* vault token — never the master key — so the
    agent can only read secrets within the token's scope, and each read is
    attributed to this agent in the vault's audit trail.
    """

    name: str = "get_secret"
    description: str = (
        "Retrieve a credential or API key from the secure vault. "
        "Use this before making any external API calls that require authentication. "
        "Input: the secret path (e.g. 'production/openai/api-key')."
    )
    args_schema: type[BaseModel] = GetSecretInput

    # The vault token is injected — NOT the master key. Underscore-prefixed
    # annotated attributes are treated as private attributes by pydantic v2,
    # so this is excluded from the tool's model fields and serialization.
    _vault_token: str

    def __init__(self, vault_token: str):
        super().__init__()
        self._vault_token = vault_token

    def _run(
        self,
        path: str,
        run_manager: CallbackManagerForToolRun | None = None,
    ) -> str:
        """Sync wrapper for the async vault call.

        asyncio.run() creates, installs, and tears down a fresh event loop
        (and shuts down async generators) — safer than a bare
        new_event_loop()/run_until_complete pair, which never installs the
        loop via set_event_loop().
        """
        return asyncio.run(self._async_run(path))

    async def _arun(
        self,
        path: str,
        run_manager: CallbackManagerForToolRun | None = None,
    ) -> str:
        """Async entry point used when the agent already runs under an event loop."""
        return await self._async_run(path)

    async def _async_run(self, path: str) -> str:
        """Fetch the secret at *path* using the injected scoped token."""
        # AgentVault is imported at module level; no local re-import needed.
        async with AgentVault() as vault:
            secret = await vault.get_secret(path, token=self._vault_token)
        # Return only the value — the agent sees just the credential
        return secret.value

Full agent example

Complete example showing how the orchestrator issues a scoped token and passes it to the LangChain agent. Even the LLM's own API key is fetched from the vault:

langchain_agent.py
import asyncio

from langchain.agents import AgentExecutor, create_openai_tools_agent
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_openai import ChatOpenAI

from agentsecretstore import AgentVault
from vault_tool import VaultSecretTool

async def build_agent(scoped_token: str) -> AgentExecutor:
    """
    Build a LangChain agent with vault-backed credential access.

    scoped_token: A token issued by your orchestrator with scope like
        "secrets:read:production/*"

    Returns a ready-to-run AgentExecutor whose only credential access path
    is the vault, via the scoped token.
    """
    # Create the vault tool — agent uses scoped token, not master key
    vault_tool = VaultSecretTool(vault_token=scoped_token)

    # Agent only has access to secrets within the token's scope
    tools = [vault_tool]

    prompt = ChatPromptTemplate.from_messages([
        ("system",
         "You are an AI assistant with access to a secure credential vault. "
         "When you need to call an external API, first retrieve the required "
         "credentials using the get_secret tool. Never ask the user for API keys."),
        MessagesPlaceholder(variable_name="chat_history"),
        ("human", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ])

    # Even the LLM's own API key comes from the vault — and it is fetched
    # with the same scoped token, so the read is audited under this agent's
    # identity and the master key is never used here.
    async with AgentVault() as v:
        openai_secret = await v.get_secret(
            "production/openai-api-key", token=scoped_token
        )

    llm = ChatOpenAI(
        model="gpt-4o",
        api_key=openai_secret.value,  # fetched from vault!
        temperature=0,
    )

    agent = create_openai_tools_agent(llm, tools, prompt)

    return AgentExecutor(
        agent=agent,
        tools=tools,
        verbose=True,
        max_iterations=10,
    )

async def main():
    """Issue a scoped token, then build and run the vault-backed agent."""
    # Orchestrator step: mint a short-lived, read-only token for this run.
    async with AgentVault() as vault:
        token = await vault.request_token(
            scope="secrets:read:production/*",
            ttl_seconds=1800,          # 30 minutes
            description="LangChain research agent",
        )

    # Agent step: everything from here on sees only the scoped token.
    executor = await build_agent(scoped_token=token.value)

    request = {
        "input": "Search for recent AI research papers using Tavily and summarize them.",
        "chat_history": [],
    }
    result = await executor.ainvoke(request)

    print(result["output"])

asyncio.run(main())

Key insight

Notice that even the ChatOpenAI client's API key is fetched from the vault — not from an environment variable. Your LangChain agent has zero hardcoded credentials.

Multi-service agent

Each tool fetches its own credential from the vault just-in-time. The scoped token controls which paths the agent can access:

multi_service_agent.py
from langchain.tools import StructuredTool
from agentsecretstore import AgentVault

async def create_research_agent(scoped_token: str):
    """Build tools for a multi-service agent; each credential is fetched
    from the vault just-in-time with the scoped token."""

    async def search_web(query: str) -> str:
        """Search using Tavily. Fetches API key from vault."""
        from langchain_community.tools.tavily_search import TavilySearchResults

        async with AgentVault() as vault:
            tavily_key = await vault.get_secret(
                "production/tavily-api-key", token=scoped_token
            )
        searcher = TavilySearchResults(api_key=tavily_key.value, max_results=5)
        results = await searcher.ainvoke(query)
        return str(results)

    async def send_slack_notification(message: str) -> str:
        """Send Slack notification. Fetches token from vault."""
        import httpx

        async with AgentVault() as vault:
            bot_token = await vault.get_secret(
                "production/slack-bot-token", token=scoped_token
            )
        headers = {"Authorization": f"Bearer {bot_token.value}"}
        payload = {"channel": "#ai-agents", "text": message}
        async with httpx.AsyncClient() as client:
            resp = await client.post(
                "https://slack.com/api/chat.postMessage",
                headers=headers,
                json=payload,
            )
        return f"Slack notification sent: {resp.status_code}"

    web_tool = StructuredTool.from_function(
        coroutine=search_web,
        name="web_search",
        description="Search the web for information",
    )
    slack_tool = StructuredTool.from_function(
        coroutine=send_slack_notification,
        name="slack_notify",
        description="Send a Slack notification to the team",
    )
    return [web_tool, slack_tool]

CrewAI Integration

Use the vault with CrewAI multi-agent systems.

Python SDK

Complete Python SDK reference.