Skip to content

Commit 2cac15a

Browse files
docs: add AI provider integration examples (#466)
* feat: Add AI provider integration examples Add self-contained examples for each AI provider supported by posthog.ai: - Anthropic (chat, streaming, extended thinking) - OpenAI (Chat Completions, Responses, streaming, embeddings, transcription, image generation) - Google Gemini (chat, streaming, image generation) - LangChain (callback handler, OTEL) - LiteLLM (chat, streaming) - Pydantic AI (agent with OTEL) - OpenAI Agents SDK (multi-agent, single agent, guardrails, custom spans) Each example directory is self-contained with its own requirements.txt, .env.example, and README. Files are designed to be copy-pasted by users as starting points for their own integrations. * style: apply ruff formatting to AI examples * fix: add __init__.py to example directories for mypy Mypy reports duplicate module names when multiple example directories contain files with the same name (e.g. chat.py). Adding __init__.py makes each directory a proper package so mypy can disambiguate them. * fix: exclude examples from mypy and sync baseline Example directory names contain hyphens (e.g. example-ai-anthropic) which aren't valid Python package names, so __init__.py doesn't help. Exclude them from mypy instead and sync the baseline. * fix: fix image gen, transcription, and gemini logging in examples - Use images.generate API instead of responses API for image generation - Gracefully skip transcription when audio file is missing - Use privacy mode for Gemini image gen to avoid huge event payloads - Suppress Gemini SDK base64 logging * fix: add missing run commands to example READMEs * Add comment about incomplete tool-calling loop in examples
1 parent 7c58d11 commit 2cac15a

45 files changed

Lines changed: 1220 additions & 2 deletions

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
POSTHOG_API_KEY=phc_your_project_api_key
POSTHOG_HOST=https://us.i.posthog.com
ANTHROPIC_API_KEY=sk-ant-your_api_key
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
# Anthropic + PostHog AI Examples

Track Anthropic Claude API calls with PostHog.

## Setup

```bash
pip install -r requirements.txt
cp .env.example .env
# Fill in your API keys in .env
```

## Examples

- **chat.py** - Basic chat with tool calling
- **streaming.py** - Streaming responses
- **extended_thinking.py** - Claude's extended thinking feature

## Run

```bash
source .env
python chat.py
python streaming.py
python extended_thinking.py
```
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
"""Anthropic chat with tool calling, tracked by PostHog."""

import os
import json
import urllib.request
from posthog import Posthog
from posthog.ai.anthropic import Anthropic

# PostHog client; the Anthropic wrapper reports each messages.create call to it.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog)

# Tool schema advertised to Claude; it mirrors get_weather()'s signature.
tools = [
    {
        "name": "get_weather",
        "description": "Get current weather for a location",
        "input_schema": {
            "type": "object",
            "properties": {
                "latitude": {"type": "number"},
                "longitude": {"type": "number"},
                "location_name": {"type": "string"},
            },
            "required": ["latitude", "longitude", "location_name"],
        },
    }
]


def get_weather(latitude: float, longitude: float, location_name: str) -> str:
    """Fetch current conditions from Open-Meteo and format them as one line."""
    endpoint = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m"
    with urllib.request.urlopen(endpoint) as response:
        payload = json.loads(response.read())
    readings = payload["current"]
    return f"Weather in {location_name}: {readings['temperature_2m']}°C, humidity {readings['relative_humidity_2m']}%, wind {readings['wind_speed_10m']} km/h"


message = client.messages.create(
    model="claude-sonnet-4-5-20250929",
    max_tokens=1024,
    posthog_distinct_id="example-user",
    tools=tools,
    messages=[{"role": "user", "content": "What's the weather like in San Francisco?"}],
)

# Handle tool use if the model requests it.
# In production, send tool results back to the model for a final response.
for block in message.content:
    if block.type == "tool_use":
        print(get_weather(**block.input))
    elif block.type == "text":
        print(block.text)

posthog.shutdown()
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
"""Anthropic extended thinking, tracked by PostHog.

Extended thinking lets Claude show its reasoning process before responding.
"""

import os
from posthog import Posthog
from posthog.ai.anthropic import Anthropic

# PostHog client; the Anthropic wrapper reports each messages.create call to it.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog)

question = "What is the probability of rolling at least one six in four rolls of a fair die?"

# max_tokens must exceed budget_tokens so the final answer has room after thinking.
message = client.messages.create(
    model="claude-sonnet-4-5-20250929",
    max_tokens=16000,
    posthog_distinct_id="example-user",
    thinking={"type": "enabled", "budget_tokens": 10000},
    messages=[{"role": "user", "content": question}],
)

# The response interleaves "thinking" blocks (reasoning) with "text" blocks (answer).
for block in message.content:
    block_type = block.type
    if block_type == "text":
        print(f"Answer: {block.text}")
    elif block_type == "thinking":
        print(f"Thinking: {block.thinking}\n")

posthog.shutdown()
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
posthog>=6.6.1
anthropic
Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
"""Anthropic streaming chat, tracked by PostHog."""

import os
from posthog import Posthog
from posthog.ai.anthropic import Anthropic

# PostHog client; the Anthropic wrapper reports the streamed call to it.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
client = Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"], posthog_client=posthog)

# stream=True yields server-sent events instead of a single Message object.
stream = client.messages.create(
    model="claude-sonnet-4-5-20250929",
    max_tokens=1024,
    posthog_distinct_id="example-user",
    messages=[{"role": "user", "content": "Write a haiku about observability."}],
    stream=True,
)

# Print text deltas as they arrive; skip every other event type.
for event in stream:
    if not hasattr(event, "type"):
        continue
    if event.type != "content_block_delta":
        continue
    delta = event.delta
    if hasattr(delta, "text"):
        print(delta.text, end="", flush=True)

print()
posthog.shutdown()
Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
POSTHOG_API_KEY=phc_your_project_api_key
POSTHOG_HOST=https://us.i.posthog.com
GEMINI_API_KEY=your_gemini_api_key
Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
# Google Gemini + PostHog AI Examples
2+
3+
Track Google Gemini API calls with PostHog.
4+
5+
## Setup
6+
7+
```bash
8+
pip install -r requirements.txt
9+
cp .env.example .env
10+
# Fill in your API keys in .env
11+
```
12+
13+
## Examples
14+
15+
- **chat.py** - Chat with tool calling
16+
- **streaming.py** - Streaming responses
17+
- **image_generation.py** - Image generation
18+
19+
## Run
20+
21+
```bash
22+
source .env
23+
python chat.py
24+
python streaming.py
25+
python image_generation.py
26+
```

examples/example-ai-gemini/chat.py

Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
"""Google Gemini chat with tool calling, tracked by PostHog."""
2+
3+
import os
4+
import json
5+
import urllib.request
6+
from google.genai import types
7+
from posthog import Posthog
8+
from posthog.ai.gemini import Client
9+
10+
posthog = Posthog(
11+
os.environ["POSTHOG_API_KEY"],
12+
host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
13+
)
14+
client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog)
15+
16+
tool_declarations = [
17+
{
18+
"name": "get_weather",
19+
"description": "Get current weather for a location",
20+
"parameters": {
21+
"type": "object",
22+
"properties": {
23+
"latitude": {"type": "number"},
24+
"longitude": {"type": "number"},
25+
"location_name": {"type": "string"},
26+
},
27+
"required": ["latitude", "longitude", "location_name"],
28+
},
29+
}
30+
]
31+
32+
33+
def get_weather(latitude: float, longitude: float, location_name: str) -> str:
34+
url = f"https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m,relative_humidity_2m,wind_speed_10m"
35+
with urllib.request.urlopen(url) as resp:
36+
data = json.loads(resp.read())
37+
current = data["current"]
38+
return f"Weather in {location_name}: {current['temperature_2m']}°C, humidity {current['relative_humidity_2m']}%, wind {current['wind_speed_10m']} km/h"
39+
40+
41+
config = types.GenerateContentConfig(
42+
tools=[types.Tool(function_declarations=tool_declarations)]
43+
)
44+
45+
response = client.models.generate_content(
46+
model="gemini-2.5-flash",
47+
posthog_distinct_id="example-user",
48+
contents=[{"role": "user", "parts": [{"text": "What's the weather in London?"}]}],
49+
config=config,
50+
)
51+
52+
# In production, send tool results back to the model for a final response.
53+
for candidate in response.candidates:
54+
for part in candidate.content.parts:
55+
if hasattr(part, "function_call") and part.function_call:
56+
result = get_weather(
57+
latitude=part.function_call.args["latitude"],
58+
longitude=part.function_call.args["longitude"],
59+
location_name=part.function_call.args["location_name"],
60+
)
61+
print(result)
62+
elif hasattr(part, "text"):
63+
print(part.text)
64+
65+
posthog.shutdown()
Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,34 @@
"""Google Gemini image generation, tracked by PostHog."""

import logging
import os

from posthog import Posthog
from posthog.ai.gemini import Client

# Suppress verbose Gemini SDK logging of base64 image data
logging.getLogger("google.genai").setLevel(logging.WARNING)

# PostHog client; the Gemini wrapper reports each generate_content call to it.
posthog = Posthog(
    os.environ["POSTHOG_API_KEY"],
    host=os.environ.get("POSTHOG_HOST", "https://us.i.posthog.com"),
)
client = Client(api_key=os.environ["GEMINI_API_KEY"], posthog_client=posthog)

response = client.models.generate_content(
    model="gemini-2.5-flash-image",
    posthog_distinct_id="example-user",
    posthog_privacy_mode=True,  # Redact base64 image data from the PostHog event
    contents=[{"role": "user", "parts": [{"text": "Generate a pixel art hedgehog"}]}],
)

# Report image metadata only; the raw bytes are in part.inline_data.data.
for candidate in response.candidates:
    for part in candidate.content.parts:
        # NOTE(review): genai Part objects appear to expose every field, so a
        # bare hasattr() check is always truthy and would print "None" for
        # parts carrying neither inline_data nor text — test the value instead.
        if getattr(part, "inline_data", None):
            print(
                f"Generated image: {part.inline_data.mime_type}, {len(part.inline_data.data)} bytes"
            )
        elif getattr(part, "text", None):
            print(part.text)

posthog.shutdown()

0 commit comments

Comments
 (0)