Skip to content

Commit 11466c6

Browse files
authored
feat(llma): support prompt versions in prompts sdk (#454)
* feat(llma): support prompt versions in prompts sdk * fix(llma): enforce clear_cache version requires name
1 parent ef5e135 commit 11466c6

3 files changed

Lines changed: 227 additions & 22 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
pypi/posthog: patch
3+
---
4+
5+
feat(llma): support fetching versioned prompts from the prompts sdk

posthog/ai/prompts.py

Lines changed: 65 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
DEFAULT_CACHE_TTL_SECONDS = 300 # 5 minutes
2020

2121
PromptVariables = Dict[str, Union[str, int, float, bool]]
22+
PromptCacheKey = tuple[str, Optional[int]]
2223

2324

2425
class CachedPrompt:
@@ -29,6 +30,19 @@ def __init__(self, prompt: str, fetched_at: float):
2930
self.fetched_at = fetched_at
3031

3132

33+
def _cache_key(name: str, version: Optional[int]) -> PromptCacheKey:
34+
"""Build a cache key for latest or versioned prompt fetches."""
35+
return (name, version)
36+
37+
38+
def _prompt_reference(name: str, version: Optional[int]) -> str:
39+
"""Format a prompt reference for logs and errors."""
40+
label = f'prompt "{name}"'
41+
if version is not None:
42+
return f"{label} version {version}"
43+
return label
44+
45+
3246
def _is_prompt_api_response(data: Any) -> bool:
3347
"""Check if the response is a valid prompt API response."""
3448
return (
@@ -63,6 +77,9 @@ class Prompts:
6377
# Fetch with caching and fallback
6478
template = prompts.get('support-system-prompt', fallback='You are a helpful assistant.')
6579
80+
# Fetch a specific published version
81+
prompt_v1 = prompts.get('support-system-prompt', version=1)
82+
6683
# Compile with variables
6784
system_prompt = prompts.compile(template, {
6885
'company': 'Acme Corp',
@@ -93,7 +110,7 @@ def __init__(
93110
self._default_cache_ttl_seconds = (
94111
default_cache_ttl_seconds or DEFAULT_CACHE_TTL_SECONDS
95112
)
96-
self._cache: Dict[str, CachedPrompt] = {}
113+
self._cache: Dict[PromptCacheKey, CachedPrompt] = {}
97114

98115
if posthog is not None:
99116
self._personal_api_key = getattr(posthog, "personal_api_key", None) or ""
@@ -112,6 +129,7 @@ def get(
112129
*,
113130
cache_ttl_seconds: Optional[int] = None,
114131
fallback: Optional[str] = None,
132+
version: Optional[int] = None,
115133
) -> str:
116134
"""
117135
Fetch a prompt by name from the PostHog API.
@@ -126,6 +144,8 @@ def get(
126144
name: The name of the prompt to fetch
127145
cache_ttl_seconds: Cache TTL in seconds (defaults to instance default)
128146
fallback: Fallback prompt to use if fetch fails and no cache available
147+
version: Specific prompt version to fetch. If None, fetches the latest
148+
version
129149
130150
Returns:
131151
The prompt string
@@ -138,9 +158,10 @@ def get(
138158
if cache_ttl_seconds is not None
139159
else self._default_cache_ttl_seconds
140160
)
161+
cache_key = _cache_key(name, version)
141162

142163
# Check cache first
143-
cached = self._cache.get(name)
164+
cached = self._cache.get(cache_key)
144165
now = time.time()
145166

146167
if cached is not None:
@@ -151,30 +172,31 @@ def get(
151172

152173
# Try to fetch from API
153174
try:
154-
prompt = self._fetch_prompt_from_api(name)
175+
prompt = self._fetch_prompt_from_api(name, version)
155176
fetched_at = time.time()
156177

157178
# Update cache
158-
self._cache[name] = CachedPrompt(prompt=prompt, fetched_at=fetched_at)
179+
self._cache[cache_key] = CachedPrompt(prompt=prompt, fetched_at=fetched_at)
159180

160181
return prompt
161182

162183
except Exception as error:
184+
prompt_reference = _prompt_reference(name, version)
163185
# Fallback order:
164186
# 1. Return stale cache (with warning)
165187
if cached is not None:
166188
log.warning(
167-
'[PostHog Prompts] Failed to fetch prompt "%s", using stale cache: %s',
168-
name,
189+
"[PostHog Prompts] Failed to fetch %s, using stale cache: %s",
190+
prompt_reference,
169191
error,
170192
)
171193
return cached.prompt
172194

173195
# 2. Return fallback (with warning)
174196
if fallback is not None:
175197
log.warning(
176-
'[PostHog Prompts] Failed to fetch prompt "%s", using fallback: %s',
177-
name,
198+
"[PostHog Prompts] Failed to fetch %s, using fallback: %s",
199+
prompt_reference,
178200
error,
179201
)
180202
return fallback
@@ -207,27 +229,43 @@ def replace_variable(match: re.Match) -> str:
207229

208230
return re.sub(r"\{\{([\w.-]+)\}\}", replace_variable, prompt)
209231

def clear_cache(
    self, name: Optional[str] = None, *, version: Optional[int] = None
) -> None:
    """Evict cached prompts.

    With no arguments, every cached entry is dropped. Given only ``name``,
    all cached entries for that prompt (every version plus the "latest"
    entry) are dropped. Given both ``name`` and ``version``, only that one
    entry is dropped.

    Args:
        name: Prompt name to evict. If None, clears the entire cache.
        version: Specific prompt version to evict; only valid with ``name``.

    Raises:
        ValueError: If ``version`` is supplied without ``name``.
    """
    if name is None:
        if version is not None:
            raise ValueError("'version' requires 'name' to be provided")
        self._cache.clear()
    elif version is not None:
        # Cache keys are (name, version) tuples; version=None marks "latest".
        self._cache.pop((name, version), None)
    else:
        # Snapshot matching keys first so we never mutate while iterating.
        for key in [k for k in self._cache if k[0] == name]:
            del self._cache[key]
221256

222-
def _fetch_prompt_from_api(self, name: str) -> str:
257+
def _fetch_prompt_from_api(self, name: str, version: Optional[int] = None) -> str:
223258
"""
224259
Fetch prompt from PostHog API.
225260
226-
Endpoint: {host}/api/environments/@current/llm_prompts/name/{encoded_name}/?token={encoded_project_api_key}
261+
Endpoint:
262+
{host}/api/environments/@current/llm_prompts/name/{encoded_name}/
263+
?token={encoded_project_api_key}[&version={version}]
227264
Auth: Bearer {personal_api_key}
228265
229266
Args:
230267
name: The name of the prompt to fetch
268+
version: Specific prompt version to fetch. If None, fetches the latest
231269
232270
Returns:
233271
The prompt string
@@ -247,8 +285,13 @@ def _fetch_prompt_from_api(self, name: str) -> str:
247285
)
248286

249287
encoded_name = urllib.parse.quote(name, safe="")
250-
encoded_project_api_key = urllib.parse.quote(self._project_api_key, safe="")
251-
url = f"{self._host}/api/environments/@current/llm_prompts/name/{encoded_name}/?token={encoded_project_api_key}"
288+
query_params: Dict[str, Union[str, int]] = {"token": self._project_api_key}
289+
if version is not None:
290+
query_params["version"] = version
291+
encoded_query = urllib.parse.urlencode(query_params)
292+
url = f"{self._host}/api/environments/@current/llm_prompts/name/{encoded_name}/?{encoded_query}"
293+
prompt_reference = _prompt_reference(name, version)
294+
prompt_label = prompt_reference[:1].upper() + prompt_reference[1:]
252295

253296
headers = {
254297
"Authorization": f"Bearer {self._personal_api_key}",
@@ -259,28 +302,28 @@ def _fetch_prompt_from_api(self, name: str) -> str:
259302

260303
if not response.ok:
261304
if response.status_code == 404:
262-
raise Exception(f'[PostHog Prompts] Prompt "{name}" not found')
305+
raise Exception(f"[PostHog Prompts] {prompt_label} not found")
263306

264307
if response.status_code == 403:
265308
raise Exception(
266-
f'[PostHog Prompts] Access denied for prompt "{name}". '
309+
f"[PostHog Prompts] Access denied for {prompt_reference}. "
267310
"Check that your personal_api_key has the correct permissions and the LLM prompts feature is enabled."
268311
)
269312

270313
raise Exception(
271-
f'[PostHog Prompts] Failed to fetch prompt "{name}": HTTP {response.status_code}'
314+
f"[PostHog Prompts] Failed to fetch {prompt_label}: HTTP {response.status_code}"
272315
)
273316

274317
try:
275318
data = response.json()
276319
except Exception:
277320
raise Exception(
278-
f'[PostHog Prompts] Invalid response format for prompt "{name}"'
321+
f"[PostHog Prompts] Invalid response format for {prompt_label}"
279322
)
280323

281324
if not _is_prompt_api_response(data):
282325
raise Exception(
283-
f'[PostHog Prompts] Invalid response format for prompt "{name}"'
326+
f"[PostHog Prompts] Invalid response format for {prompt_label}"
284327
)
285328

286329
return data["prompt"]

0 commit comments

Comments
 (0)