Documentation Index
Fetch the complete documentation index at: https://launchdarkly-preview.mintlify.app/llms.txt
Use this file to discover all available pages before exploring further.
Example 1: Your first chatbot
Start by building a minimal chatbot application using a LaunchDarkly AI Config with Anthropic’s Claude.

Step 1.1: Project setup
First, create a new directory for your project: mkdir simple-ai-chatbot
cd simple-ai-chatbot
python3 -m venv venv
source venv/bin/activate
# On Windows: venv\Scripts\activate
Step 1.2: Install dependencies
Install the required packages: pip install launchdarkly-server-sdk \
launchdarkly-server-sdk-ai \
anthropic \
openai \
google-genai \
python-dotenv
Alternatively, pin the dependencies in a requirements.txt file:
launchdarkly-server-sdk>=9.0.0
launchdarkly-server-sdk-ai>=0.1.0
anthropic>=0.25.0
openai>=1.0.0
google-genai>=0.1.0
python-dotenv>=1.0.0
Step 1.3: Environment configuration
First, add .env to your .gitignore file to keep credentials secure:
echo ".env" >> .gitignore
Then create a .env file in your project root:
# LaunchDarkly Configuration
LD_PROJECT_KEY=simple-chatbot
LD_SDK_KEY=your-launchdarkly-sdk-key
LAUNCHDARKLY_AGENT_CONFIG_KEY=simple-config
# AI Provider API Keys (add the ones you plan to use)
ANTHROPIC_API_KEY=your-anthropic-api-key
OPENAI_API_KEY=your-openai-api-key
GEMINI_API_KEY=your-google-api-key
Step 1.4: Create the basic chatbot
Create a file called simple_chatbot.py and add the following:
Click to expand the complete `simple_chatbot.py`
"""
Simple AI Chatbot
Multi-provider support: Anthropic, OpenAI, and Google
Direct API integration with automatic provider selection
"""
import os
import logging
from typing import Dict, List, Optional
from abc import ABC, abstractmethod
import dotenv
# AI Provider imports
import anthropic
import openai
import google.genai as genai
# Set up logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
# Suppress HTTP request logs from libraries
logging.getLogger("httpx").setLevel(logging.WARNING)
logging.getLogger("httpcore").setLevel(logging.WARNING)
logging.getLogger("openai").setLevel(logging.WARNING)
logging.getLogger("anthropic").setLevel(logging.WARNING)
# Load environment variables
dotenv.load_dotenv()
class BaseAIProvider(ABC):
    """Abstract base for chat-model providers.

    Concrete subclasses supply a client factory and a send implementation;
    the shared message-formatting and parameter-extraction helpers live here.
    """

    def __init__(self, api_key: Optional[str] = None):
        self.api_key = api_key
        # Build a client only when a key is available; otherwise the
        # provider exists but is unconfigured (client stays None).
        self.client = self._initialize_client() if api_key else None

    @abstractmethod
    def _initialize_client(self):
        """Return a configured SDK client for this provider."""
        ...

    @abstractmethod
    def send_message(self, model: str, messages: List[Dict], system_prompt: str, params: Dict) -> str:
        """Send a conversation to the provider and return the reply text."""
        ...

    def format_messages(self, messages: List[Dict], system_prompt: str) -> List[Dict]:
        """Prepend the system prompt (if any) to a copy of the history."""
        prefix = [{"role": "system", "content": system_prompt}] if system_prompt else []
        history = [{"role": m["role"], "content": m["content"]} for m in messages]
        return prefix + history

    def extract_params(self, params: Dict) -> Dict:
        """Pull the common generation parameters, applying defaults."""
        return {
            "temperature": params.get("temperature", 0.7),
            "max_tokens": params.get("max_tokens", 500),
        }
class AnthropicProvider(BaseAIProvider):
    """Provider backed by Anthropic's Messages API (Claude models)."""

    def _initialize_client(self):
        """Build an Anthropic SDK client bound to our API key."""
        return anthropic.Anthropic(api_key=self.api_key)

    def send_message(self, model: str, messages: List[Dict], system_prompt: str, params: Dict) -> str:
        """Call the Messages API and return the first content block's text.

        Raises:
            ValueError: if no API key was configured at construction time.
        """
        if self.client is None:
            raise ValueError("Anthropic API key not configured")
        opts = self.extract_params(params)
        # Anthropic takes the system prompt as a dedicated argument rather
        # than as a "system" entry in the messages list.
        reply = self.client.messages.create(
            model=model,
            max_tokens=opts["max_tokens"],
            temperature=opts["temperature"],
            system=system_prompt,
            messages=messages,
        )
        return reply.content[0].text
class OpenAIProvider(BaseAIProvider):
    """Provider backed by OpenAI's Chat Completions API."""

    def _initialize_client(self):
        """Build an OpenAI SDK client bound to our API key."""
        return openai.OpenAI(api_key=self.api_key)

    def send_message(self, model: str, messages: List[Dict], system_prompt: str, params: Dict) -> str:
        """Call chat.completions and return the first choice's text.

        Raises:
            ValueError: if no API key was configured at construction time.
        """
        if self.client is None:
            raise ValueError("OpenAI API key not configured")
        # OpenAI expects the system prompt inline as the first message, so
        # use the shared formatter from the base class.
        payload = self.format_messages(messages, system_prompt)
        opts = self.extract_params(params)
        reply = self.client.chat.completions.create(
            model=model,
            messages=payload,
            **opts,
        )
        return reply.choices[0].message.content
class GoogleProvider(BaseAIProvider):
    """Provider backed by Google's Gemini models via the google-genai SDK."""

    def _initialize_client(self):
        """Build a google-genai client.

        The new SDK picks its key up from the GEMINI_API_KEY environment
        variable, so export the key we were given before constructing the
        client. (Fix: dropped a redundant function-local `import os` — the
        module already imports os at the top of the file.)
        """
        if self.api_key:
            # NOTE: mutates the process-wide environment; genai.Client()
            # reads GEMINI_API_KEY during construction.
            os.environ['GEMINI_API_KEY'] = self.api_key
        return genai.Client()

    def send_message(self, model: str, messages: List[Dict], system_prompt: str, params: Dict) -> str:
        """Flatten the conversation into one prompt and generate a reply.

        This generate_content call passes a single text blob, so the system
        prompt and the role-tagged history are joined with newlines.

        Raises:
            ValueError: if no API key was configured at construction time.
        """
        if not self.client:
            raise ValueError("Google API key not configured")
        opts = self.extract_params(params)

        # Format the conversation (system prompt first, then each turn
        # labeled "User:"/"Assistant:") as a single prompt string.
        lines = []
        if system_prompt:
            lines.append(f"{system_prompt}\n")
        for msg in messages:
            role = "User" if msg["role"] == "user" else "Assistant"
            lines.append(f"{role}: {msg['content']}")
        full_prompt = "\n".join(lines)

        response = self.client.models.generate_content(
            model=model,
            contents=full_prompt,
            config={
                "temperature": opts["temperature"],
                # Gemini names the token cap differently from the
                # "max_tokens" used by the other providers in this file.
                "max_output_tokens": opts["max_tokens"],
            }
        )
        return response.text
class AIProviderRegistry:
    """Holds one instance of each supported provider and routes chat calls."""

    def __init__(self):
        # Every provider is constructed up front; those without a key in the
        # environment remain unconfigured (their client is None).
        self.providers = {
            "anthropic": AnthropicProvider(os.getenv("ANTHROPIC_API_KEY")),
            "openai": OpenAIProvider(os.getenv("OPENAI_API_KEY")),
            "google": GoogleProvider(os.getenv("GEMINI_API_KEY")),
        }

    def send_message(self, provider: str, model_id: str, messages: List[Dict],
                     system_prompt: str, parameters: Dict) -> str:
        """Dispatch a chat request to the named provider.

        Raises:
            ValueError: if the provider name is not recognized.
        """
        key = provider.lower()
        if key not in self.providers:
            raise ValueError(f"Unsupported provider: {provider}")
        return self.providers[key].send_message(model_id, messages, system_prompt, parameters)

    def get_available_providers(self) -> List[str]:
        """Names of the providers that were given an API key."""
        return [name for name, p in self.providers.items() if p.api_key]

    def get_default_provider(self) -> tuple:
        """Pick a (provider, model) pair by key availability, in priority order.

        Raises:
            ValueError: when no provider key is set at all.
        """
        preference = (
            ("ANTHROPIC_API_KEY", ("anthropic", "claude-3-haiku-20240307")),
            ("OPENAI_API_KEY", ("openai", "chatgpt-4o-latest")),
            ("GEMINI_API_KEY", ("google", "gemini-2.5-flash-lite")),
        )
        for env_var, choice in preference:
            if os.getenv(env_var):
                return choice
        raise ValueError("No AI provider API keys found")
def run_chatbot():
    """Main chatbot loop.

    Initializes the provider registry, picks a default provider/model from
    the available API keys, then repeatedly reads user input and prints the
    model's reply until the user exits.
    """
    print("=" * 70)
    print(" Simple AI Chatbot")
    print("=" * 70)
    print("\nSupporting: Anthropic Claude, OpenAI GPT, Google Gemini")
    print("Type 'exit' or 'quit' to end the conversation\n")

    # Initialize AI provider registry
    try:
        ai_registry = AIProviderRegistry()
        available = ai_registry.get_available_providers()
        if not available:
            logger.error("No AI provider API keys found. Please configure at least one provider.")
            return
        # Get default provider
        provider, model_id = ai_registry.get_default_provider()
        logger.info(f"✓ Using {provider} with model {model_id}")
        logger.info(f"Available providers: {', '.join(available)}")
    except Exception as e:
        logger.error(f"Failed to initialize AI providers: {e}")
        return

    # Default system prompt
    system_prompt = "You are a helpful AI assistant. Provide clear, concise, and friendly responses."
    # Default parameters
    parameters = {
        "temperature": 0.7,
        "max_tokens": 500
    }
    conversation_history = []

    # Main chat loop
    while True:
        try:
            user_input = input("You: ").strip()
            if user_input.lower() in ['exit', 'quit', 'q']:
                print("\nGoodbye! Thanks for chatting.")
                break
            if not user_input:
                continue

            # Add user message to history
            conversation_history.append({"role": "user", "content": user_input})

            # Send to AI provider
            print("\nAssistant: ", end="", flush=True)
            response = ai_registry.send_message(
                provider=provider,
                model_id=model_id,
                messages=conversation_history,
                system_prompt=system_prompt,
                parameters=parameters
            )
            print(response)

            # Add assistant response to history
            conversation_history.append({"role": "assistant", "content": response})
        except KeyboardInterrupt:
            print("\n\nInterrupted. Goodbye!")
            break
        except Exception as e:
            # Fix: drop the user turn that just failed so the history never
            # holds two consecutive "user" messages — some providers (e.g.
            # Anthropic) reject that shape on the next request, which would
            # make every subsequent turn fail too.
            if conversation_history and conversation_history[-1]["role"] == "user":
                conversation_history.pop()
            logger.error(f"Error in chat loop: {e}")
            print(f"\nError: {e}")
            # Provide helpful guidance for common errors
            if "API key not valid" in str(e) and "googleapis.com" in str(e):
                print("\n💡 Tip: For Google Gemini, you need an API key from Google AI Studio:")
                print("   1. Go to https://aistudio.google.com/app/apikey")
                print("   2. Click 'Get API Key' and create a new key")
                print("   3. Add it to your .env file as GEMINI_API_KEY=your-key-here")
            elif "API key" in str(e).lower():
                print("\n💡 Tip: Check that your API key is correct and has the necessary permissions.")
if __name__ == "__main__":
# Check for at least one AI provider key
provider_keys = ["ANTHROPIC_API_KEY", "OPENAI_API_KEY", "GEMINI_API_KEY"]
if not any(os.getenv(key) for key in provider_keys):
logger.error("No AI provider API keys found. Please add at least one:")
for key in provider_keys:
logger.error(f" - {key}")
exit(1)
# Run the chatbot
run_chatbot()
Step 1.5: Run your basic chatbot
Run the chatbot: python simple_chatbot.py
======================================================================
Simple AI Chatbot
======================================================================
Supporting: Anthropic Claude, OpenAI GPT, Google Gemini
Type 'exit' or 'quit' to end the conversation
2026-01-14 11:48:03,603 - INFO - ✓ Using anthropic with model claude-3-haiku-20240307
2026-01-14 11:48:03,603 - INFO - Available providers: anthropic, openai
You: Hello! What can you do?