Upgrade the Langfuse SDK to v2 following this guide.
Apply with the Grit CLI
grit apply langfuse_v2
Rewrites the Pydantic interface argument (e.g. `InitialSpan`) into plain keyword arguments
BEFORE
from langfuse.model import InitialSpan langfuse.span( InitialSpan( name="span", startTime=timestamp, endTime=timestamp, input={"key": "value"}, output={"key": "value"}, ) )
AFTER
langfuse.span( name="span", start_time=timestamp, end_time=timestamp, input={"key": "value"}, output={"key": "value"}, )
Renames the `prompt` and `completion` parameters to `input` and `output`
BEFORE
from langfuse import * client.generation(InitialGeneration( name="some_generation", startTime=start_time, endTime=end_time, prompt=prompt, completion=completion, metadata=metadata ) )
AFTER
from langfuse import * client.generation( name="some_generation", start_time=start_time, end_time=end_time, input=prompt, output=completion, metadata=metadata )
Converts camelCase parameters to snake_case, even when no Pydantic interface is used
BEFORE
import langfuse generation = observation.generation( name='name', prompt=kwargs['messages'], startTime=dt.datetime.utcnow(), )
AFTER
import langfuse generation = observation.generation( name='name', input=kwargs['messages'], start_time=dt.datetime.utcnow(), )
Converts Pydantic enum values (e.g. `ObservationLevel.ERROR`) to string literals
BEFORE
from langfuse.model import InitialGeneration from langfuse.api.resources.commons.types.observation_level import ObservationLevel import langfuse langfuse.generation(InitialGeneration(level=ObservationLevel.ERROR))
AFTER
import langfuse langfuse.generation(level="ERROR")
Rewrites nested Pydantic interfaces (e.g. `Usage`) into plain dictionaries
BEFORE
from langfuse.model import InitialGeneration, Usage import langfuse as lf generation = lf.generation( InitialGeneration( name="chatgpt-completion", startTime=generationStartTime, endTime=datetime.now(), model=self.model, modelParameters={"temperature": str(temperature)}, prompt=history, completion=response["choices"][0]["message"]["content"], usage=Usage( promptTokens=50, completionTokens=50, ), ) )
AFTER
import langfuse as lf generation = lf.generation(name="chatgpt-completion", start_time=generationStartTime, end_time=datetime.now(), model=self.model, model_parameters={"temperature": str(temperature)}, input=history, output=response["choices"][0]["message"]["content"], usage={"promptTokens": 50, "completionTokens": 50}, )
Makes no changes to code that does not import langfuse
PYTHON
model.event( CreateEvent( name="span", startTime=timestamp, endTime=timestamp, ) )