Skip to content

Commit c230911

Browse files
authored
Merge pull request #98 from Miyamura80/migrate-langfuse-v3
🔨 Migrate Langfuse to v3.x and update Gemini model names
2 parents f09aa82 + f3955e2 commit c230911

4 files changed

Lines changed: 32 additions & 21 deletions

File tree

.cursor/rules/langfuse.mdc

Lines changed: 17 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,17 +3,28 @@ description:
33
globs: *.py
44
alwaysApply: false
55
---
6-
LangFuse is an LLM observability tool, to see how LLMs are behaving in our application. It uses callbacks to record LLM inference. You should always use it when using LLM applications, to track if the LLM is behaving as we want it to.
6+
Langfuse is an LLM observability tool used to monitor LLM behavior in our application. It uses decorators and callbacks to record LLM inference. Always use it in LLM-related modules to ensure traceability.
77

8-
Below is the most typical usage.
8+
### Langfuse v3 Migration Guide
9+
As of Langfuse SDK v3.x, the `langfuse.decorators` module structure has changed.
10+
11+
**Typical Usage:**
912

1013
```python
11-
from langfuse.decorators import observe, langfuse_context
14+
from langfuse import observe, get_client
1215

13-
@observe
16+
@observe()
1417
def function_name(...):
18+
# To rename the span/observation or update metadata:
19+
get_client().update_current_span(name=f"descriptive_name_{id}")
1520

16-
# If we don't want the name of the span to be "function_name", and want to give a more descriptive name like with email, we should use the following
17-
langfuse_context.update_current_observation(name=f"{email}")
21+
# To get trace or observation IDs:
22+
trace_id = get_client().get_current_trace_id()
23+
observation_id = get_client().get_current_observation_id()
1824
```
1925

26+
**Key Changes from v2:**
27+
1. Import `observe` and `get_client` directly from `langfuse`.
28+
2. Replace `langfuse_context` with `get_client()`.
29+
3. Use `update_current_span()` instead of `update_current_observation()`.
30+

common/global_config.yaml

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
1-
model_name: gemini/gemini-3-flash
1+
model_name: gemini/gemini-3-flash-preview
22
dot_global_config_health_check: true
3+
DEV_ENV: dev
34

45
example_parent:
56
example_child: "example_value"
@@ -8,8 +9,8 @@ example_parent:
89
# LLMs
910
########################################################
1011
default_llm:
11-
default_model: gemini/gemini-3-flash
12-
fallback_model: gemini/gemini-2.5-flash
12+
default_model: gemini/gemini-3-flash-preview
13+
fallback_model: gemini/gemini-2.5-flash-preview
1314
default_temperature: 0.5
1415
default_max_tokens: 100000
1516

utils/llm/dspy_inference.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
from typing import Any
33

44
import dspy
5-
from langfuse.decorators import observe # type: ignore
5+
from langfuse import observe
66
from litellm.exceptions import RateLimitError, ServiceUnavailableError
77
from loguru import logger as log
88
from tenacity import (

utils/llm/dspy_langfuse.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,7 @@
44
from dspy.adapters import Image as dspy_Image
55
from dspy.signatures import Signature as dspy_Signature
66
from dspy.utils.callback import BaseCallback
7-
from langfuse.client import Langfuse, StatefulGenerationClient # type: ignore
8-
from langfuse.decorators import langfuse_context # type: ignore
7+
from langfuse import Langfuse, LangfuseGeneration, get_client
98
from litellm.cost_calculator import completion_cost
109
from loguru import logger as log
1110
from pydantic import BaseModel, Field, ValidationError
@@ -52,7 +51,7 @@ def __init__(self, signature: type[dspy_Signature]) -> None:
5251
)
5352
self.current_prompt = contextvars.ContextVar[str]("current_prompt")
5453
self.current_completion = contextvars.ContextVar[str]("current_completion")
55-
self.current_span = contextvars.ContextVar[StatefulGenerationClient | None](
54+
self.current_span = contextvars.ContextVar[LangfuseGeneration | None](
5655
"current_span"
5756
)
5857
self.model_name_at_span_creation = contextvars.ContextVar[str | None](
@@ -91,8 +90,8 @@ def on_module_end( # noqa
9190
exception: Exception | None = None, # noqa
9291
) -> None:
9392
metadata = {
94-
"existing_trace_id": langfuse_context.get_current_trace_id(),
95-
"parent_observation_id": langfuse_context.get_current_observation_id(),
93+
"existing_trace_id": get_client().get_current_trace_id(),
94+
"parent_observation_id": get_client().get_current_observation_id(),
9695
}
9796
outputs_extracted = {} # Default to empty dict
9897
if outputs is not None:
@@ -102,7 +101,7 @@ def on_module_end( # noqa
102101
outputs_extracted = {"value": outputs}
103102
except Exception as e:
104103
outputs_extracted = {"error_extracting_module_output": str(e)}
105-
langfuse_context.update_current_observation(
104+
get_client().update_current_span(
106105
input=self.input_field_values.get(None) or {},
107106
output=outputs_extracted,
108107
metadata=metadata,
@@ -134,9 +133,9 @@ def on_lm_start( # noqa
134133
self.current_system_prompt.set(system_prompt)
135134
self.current_prompt.set(user_input)
136135
self.model_name_at_span_creation.set(model_name)
137-
trace_id = langfuse_context.get_current_trace_id()
138-
parent_observation_id = langfuse_context.get_current_observation_id()
139-
span_obj: StatefulGenerationClient | None = None
136+
trace_id = get_client().get_current_trace_id()
137+
parent_observation_id = get_client().get_current_observation_id()
138+
span_obj: LangfuseGeneration | None = None
140139
if trace_id:
141140
span_obj = self.langfuse.generation(
142141
input=user_input,
@@ -392,8 +391,8 @@ def on_tool_start( # noqa
392391

393392
log.debug(f"Tool call started: {tool_name} with args: {tool_args}")
394393

395-
trace_id = langfuse_context.get_current_trace_id()
396-
parent_observation_id = langfuse_context.get_current_observation_id()
394+
trace_id = get_client().get_current_trace_id()
395+
parent_observation_id = get_client().get_current_observation_id()
397396

398397
if trace_id:
399398
# Create a span for the tool call

0 commit comments

Comments
 (0)