Skip to content

Commit

Permalink
OPIK-651 Log usage information for LlamaIndex
Browse files Browse the repository at this point in the history
  • Loading branch information
japdubengsub committed Dec 26, 2024
1 parent f3b278a commit c4540de
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 1 deletion.
4 changes: 4 additions & 0 deletions sdks/python/src/opik/integrations/llama_index/callback.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,10 @@ def on_event_end(
# Log the output to the span with the matching id
if event_id in self._map_event_id_to_span_data:
span_data = self._map_event_id_to_span_data[event_id]

model, usage_info = event_parsing_utils.get_usage_data(payload)
span_data.update(usage=usage_info, model=model)

span_data.update(output=span_output).init_end_time()
self._opik_client.span(**span_data.__dict__)

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from typing import Optional, Dict, Any
from typing import Optional, Dict, Any, Tuple
from llama_index.core.callbacks import schema as llama_index_schema

from opik.types import UsageDict


def get_span_input_from_events(
event_type: llama_index_schema.CBEventType, payload: Optional[Dict[str, Any]]
Expand Down Expand Up @@ -109,3 +111,25 @@ def get_span_output_from_event(
return {"output": payload_copy}
else:
return None


def get_usage_data(
    payload: Optional[Dict[str, Any]],
) -> Tuple[Optional[str], Optional[UsageDict]]:
    """Extract the model name and token-usage information from an event payload.

    Args:
        payload: A LlamaIndex event payload dict (may be ``None``). The
            response object is read from ``EventPayload.RESPONSE``; model and
            usage are taken from its ``.raw`` attribute when present.

    Returns:
        A ``(model, usage_info)`` tuple. Either element is ``None`` when the
        payload does not carry the corresponding data.
    """
    if payload is None:
        return None, None

    response: Any = payload.get(llama_index_schema.EventPayload.RESPONSE)
    # getattr with a default collapses the original nested hasattr chain and
    # behaves identically when `response` has no `raw` attribute.
    raw = getattr(response, "raw", None)

    model: Optional[str] = getattr(raw, "model", None)

    usage_info: Optional[UsageDict] = None
    usage = getattr(raw, "usage", None)
    # Guard against a present-but-None `usage` attribute (seen on streaming /
    # partial responses): plain hasattr() would pass and `.model_dump()` would
    # then raise AttributeError on None.
    if usage is not None:
        usage_info = usage.model_dump()
        # Drop OpenAI's nested detail sub-dicts, which UsageDict does not accept.
        usage_info.pop("completion_tokens_details", None)
        usage_info.pop("prompt_tokens_details", None)

    return model, usage_info

0 comments on commit c4540de

Please sign in to comment.