>_ Bashlet
Python SDK

bashlet

Python SDK for bashlet — provides sandboxed bash execution as tools for AI agents. Supports both sync and async clients.

01 Installation

Terminal
# Core package
pip install bashlet

# With framework support
pip install "bashlet[langchain]"  # LangChain
pip install "bashlet[openai]"     # OpenAI
pip install "bashlet[anthropic]"  # Anthropic
pip install "bashlet[mcp]"        # MCP
pip install "bashlet[all]"        # All frameworks

Make sure you also have the bashlet binary installed on your system — the SDK invokes it (see the `binary_path` option in the API reference), and the pip package does not bundle it.

02 Quick Start

Synchronous Client

Python
from bashlet import Bashlet, Mount

# Expose the local ./src directory as /workspace inside the sandbox.
workspace_mount = Mount("./src", "/workspace")
bashlet = Bashlet(mounts=[workspace_mount])

# Run a one-shot command and print its captured output.
listing = bashlet.exec("ls -la /workspace")
print(listing.stdout)

Asynchronous Client

Python
import asyncio
from bashlet import AsyncBashlet

async def main() -> None:
    # AsyncBashlet mirrors Bashlet, but every call is awaitable.
    bashlet = AsyncBashlet()
    response = await bashlet.exec("echo hello")
    print(response.stdout)

asyncio.run(main())

03 AI Framework Support

LangChain

BaseTool integration for LangChain agents

OpenAI

Function calling for OpenAI API

Anthropic

Tool use for Claude API

MCP

Model Context Protocol support

LangChain

Python
from langchain_openai import ChatOpenAI
from bashlet import Bashlet

# Mounts may be given as dicts (host_path/guest_path) as well as Mount objects.
bashlet = Bashlet(
    mounts=[{"host_path": "./project", "guest_path": "/workspace"}],
)

# Get LangChain tools (a collection; .all() below returns the full list)
tools = bashlet.to_langchain_tools()

# Bind tools to LLM
llm = ChatOpenAI(model="gpt-4-turbo")
llm_with_tools = llm.bind_tools(tools.all())

# Use in agent — the model can now request sandboxed bashlet tool calls
response = llm_with_tools.invoke("List files in /workspace")

OpenAI Function Calling

Python
from openai import OpenAI
from bashlet import Bashlet

client = OpenAI()
bashlet = Bashlet()

# Get OpenAI tools: handler.definitions holds the function schemas,
# handler.handle() executes a named tool with JSON-encoded arguments.
handler = bashlet.to_openai_tools()

# Create completion with tools
response = client.chat.completions.create(
    model="gpt-4-turbo",
    tools=handler.definitions,
    messages=[{"role": "user", "content": "List files"}],
)

# Handle tool calls (tool_calls may be None when the model answered directly)
for tool_call in response.choices[0].message.tool_calls or []:
    result = handler.handle(
        tool_call.function.name,
        tool_call.function.arguments
    )
    print(result)

Anthropic Tool Use

Python
from anthropic import Anthropic
from bashlet import Bashlet

client = Anthropic()
bashlet = Bashlet()

# Get Anthropic tools: handler.definitions holds the tool schemas,
# handler.handle() executes a named tool with its input dict.
handler = bashlet.to_anthropic_tools()

# Create message with tools
response = client.messages.create(
    model="claude-3-opus-20240229",
    max_tokens=1024,
    tools=handler.definitions,
    messages=[{"role": "user", "content": "List files"}],
)

# Handle tool use — content is a list of blocks; only tool_use blocks
# carry a tool invocation to execute.
for block in response.content:
    if block.type == "tool_use":
        result = handler.handle(block.name, block.input)
        print(result)

MCP Server

Python
import asyncio

from mcp.server import Server
from mcp.server.stdio import stdio_server
from bashlet import Bashlet

bashlet = Bashlet()
handler = bashlet.to_mcp_tools()

server = Server("bashlet-server")

@server.list_tools()
async def list_tools():
    # Advertise the bashlet tool schemas to the MCP client.
    return handler.definitions

@server.call_tool()
async def call_tool(name: str, arguments: dict):
    # Dispatch the call into the sandbox and return MCP content blocks.
    result = handler.handle(name, arguments)
    return result.content

async def main():
    # Serve over stdio; Server.run() also expects initialization options
    # in the MCP Python SDK.
    async with stdio_server() as (read_stream, write_stream):
        await server.run(
            read_stream,
            write_stream,
            server.create_initialization_options(),
        )

if __name__ == "__main__":
    asyncio.run(main())

04 Available Tools

Tool Description
bashlet_exec Execute shell commands in the sandbox
bashlet_read_file Read file contents from the sandbox
bashlet_write_file Write content to a file in the sandbox
bashlet_list_dir List directory contents

05 API Reference

Bashlet / AsyncBashlet Class

Constructor
Bashlet(
    binary_path: str = "bashlet",    # Path to bashlet binary
    preset: str | None = None,       # Default preset name
    mounts: list | None = None,      # Default mounts
    env_vars: list | None = None,    # Environment variables
    workdir: str | None = None,      # Working directory
    timeout: int = 300,              # Timeout in seconds
    config_path: str | None = None,  # Config file path
)

Methods

Method Description
exec(command, **options) Execute a one-shot command
create_session(**options) Create a persistent session
run_in_session(id, command) Run command in existing session
terminate(session_id) Terminate a session
list_sessions() List all active sessions
read_file(path) Read file from sandbox
write_file(path, content) Write file to sandbox
list_dir(path) List directory contents
to_langchain_tools() Generate LangChain tools
to_openai_tools() Generate OpenAI tools
to_anthropic_tools() Generate Anthropic tools
to_mcp_tools() Generate MCP tools
to_generic_tools() Generate framework-agnostic tools