Skip to content

Commit

Permalink
feat: add all other OpenAI functions
Browse files Browse the repository at this point in the history
  • Loading branch information
k11kirky committed Jan 10, 2025
1 parent d8e4557 commit 315d6e1
Show file tree
Hide file tree
Showing 5 changed files with 43 additions and 18 deletions.
7 changes: 7 additions & 0 deletions llm_observability_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@ def main_sync():
try:
basic_openai_call(distinct_id, trace_id, properties)
# streaming_openai_call(distinct_id, trace_id, properties)
non_instrumented_openai_call()
except Exception as e:
print("Error during OpenAI call:", str(e))

Expand Down Expand Up @@ -130,6 +131,12 @@ async def streaming_async_openai_call(distinct_id, trace_id, properties):
return response


def non_instrumented_openai_call():
    """Call the OpenAI Images API directly, bypassing PostHog usage tracking.

    Demonstrates that non-chat endpoints pass straight through to the
    underlying client (no ``$ai_generation`` event is captured).
    """
    result = openai_client.images.generate(
        model="dall-e-3",
        prompt="A cute baby sea otter",
        n=1,
        size="1024x1024",
    )
    print(result)
    return result


# HOW TO RUN:
# comment out one of these to run the other

Expand Down
1 change: 0 additions & 1 deletion posthog/ai/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,3 @@
from .providers.openai.openai_async import AsyncOpenAI

__all__ = ["OpenAI", "AsyncOpenAI"]
# TODO: add Azure OpenAI wrapper
24 changes: 17 additions & 7 deletions posthog/ai/providers/openai/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

from posthog.client import Client as PostHogClient
from posthog.ai.utils import (
track_usage,
call_llm_and_track_usage,
get_model_params,
)

Expand All @@ -33,6 +33,15 @@ def __init__(
self._openai_client = openai.OpenAI(**openai_config)
self._posthog_client = posthog_client

def __getattr__(self, name: str) -> Any:
    """
    Delegate attribute access to the wrapped ``openai.OpenAI`` instance.

    ``__getattr__`` is only invoked when normal attribute lookup fails, so
    the ``chat`` property defined on this class always takes precedence
    without any special-casing here.  (The previous explicit
    ``if name == "chat": return self.chat`` branch was dead code and, had
    it ever run, would have recursed indefinitely.)
    """
    # Guard against infinite recursion when the wrapped client itself is
    # absent — e.g. during unpickling or after a partially-run __init__ —
    # since looking it up would re-enter __getattr__.
    if name == "_openai_client":
        raise AttributeError(name)
    return getattr(self._openai_client, name)

@property
def chat(self) -> "ChatNamespace":
    # Replaces the native `chat` attribute with a PostHog-instrumented
    # namespace; all other attributes fall through via __getattr__.
    return ChatNamespace(self._posthog_client, self._openai_client)
Expand Down Expand Up @@ -61,7 +70,7 @@ def create(
**kwargs: Any,
):
distinct_id = posthog_distinct_id or "anonymous_ai_user"

if kwargs.get("stream", False):
return self._create_streaming(
distinct_id,
Expand All @@ -70,11 +79,10 @@ def create(
**kwargs,
)


def call_method(**call_kwargs):
return self._openai_client.chat.completions.create(**call_kwargs)

return track_usage(
return call_llm_and_track_usage(
distinct_id,
self._ph_client,
posthog_trace_id,
Expand Down Expand Up @@ -113,7 +121,9 @@ def generator():
end_time = time.time()
latency = end_time - start_time
output = "".join(accumulated_content)
self._capture_streaming_event(distinct_id, posthog_trace_id, posthog_properties, kwargs, usage_stats, latency, output)
self._capture_streaming_event(
distinct_id, posthog_trace_id, posthog_properties, kwargs, usage_stats, latency, output
)

return generator()

Expand All @@ -127,7 +137,7 @@ def _capture_streaming_event(
latency: float,
output: str,
):

event_properties = {
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
Expand All @@ -140,7 +150,7 @@ def _capture_streaming_event(
"role": "assistant",
}
]
},
},
"$ai_http_status": 200,
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
"$ai_output_tokens": usage_stats.get("completion_tokens", 0),
Expand Down
23 changes: 16 additions & 7 deletions posthog/ai/providers/openai/openai_async.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
raise ModuleNotFoundError("Please install OpenAI to use this feature: 'pip install openai'")

from posthog.client import Client as PostHogClient
from posthog.ai.utils import track_usage_async, get_model_params
from posthog.ai.utils import call_llm_and_track_usage_async, get_model_params


class AsyncOpenAI:
Expand All @@ -29,6 +29,15 @@ def __init__(
self._openai_client = openai.AsyncOpenAI(**openai_config)
self._posthog_client = posthog_client

def __getattr__(self, name: str) -> Any:
    """
    Delegate attribute access to the wrapped ``openai.AsyncOpenAI`` instance.

    ``__getattr__`` is only invoked when normal attribute lookup fails, so
    the ``chat`` property defined on this class always takes precedence
    without any special-casing here.  (The previous explicit
    ``if name == "chat": return self.chat`` branch was dead code and, had
    it ever run, would have recursed indefinitely.)
    """
    # Guard against infinite recursion when the wrapped client itself is
    # absent — e.g. during unpickling or after a partially-run __init__ —
    # since looking it up would re-enter __getattr__.
    if name == "_openai_client":
        raise AttributeError(name)
    return getattr(self._openai_client, name)

@property
def chat(self) -> "AsyncChatNamespace":
    # Replaces the native `chat` attribute with a PostHog-instrumented
    # async namespace; all other attributes fall through via __getattr__.
    return AsyncChatNamespace(self._posthog_client, self._openai_client)
Expand Down Expand Up @@ -71,11 +80,10 @@ async def create(
async def call_async_method(**call_kwargs):
return await self._openai_client.chat.completions.create(**call_kwargs)

response = await track_usage_async(
response = await call_llm_and_track_usage_async(
distinct_id, self._ph_client, posthog_trace_id, posthog_properties, call_async_method, **kwargs
)
return response


async def _create_streaming(
self,
Expand Down Expand Up @@ -106,7 +114,9 @@ async def async_generator():
end_time = time.time()
latency = end_time - start_time
output = "".join(accumulated_content)
self._capture_streaming_event(distinct_id, posthog_trace_id, posthog_properties, kwargs, usage_stats, latency, output)
self._capture_streaming_event(
distinct_id, posthog_trace_id, posthog_properties, kwargs, usage_stats, latency, output
)

return async_generator()

Expand All @@ -120,7 +130,7 @@ def _capture_streaming_event(
latency: float,
output: str,
):

event_properties = {
"$ai_provider": "openai",
"$ai_model": kwargs.get("model"),
Expand All @@ -133,7 +143,7 @@ def _capture_streaming_event(
"role": "assistant",
}
]
},
},
"$ai_http_status": 200,
"$ai_input_tokens": usage_stats.get("prompt_tokens", 0),
"$ai_output_tokens": usage_stats.get("completion_tokens", 0),
Expand All @@ -148,4 +158,3 @@ def _capture_streaming_event(
event="$ai_generation",
properties=event_properties,
)

6 changes: 3 additions & 3 deletions posthog/ai/utils.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, Dict, AsyncGenerator, Callable, Optional
from typing import Any, Dict, Callable, Optional
import time
from posthog.client import Client as PostHogClient

Expand Down Expand Up @@ -39,7 +39,7 @@ def format_response(response):
return output


def track_usage(
def call_llm_and_track_usage(
distinct_id: str,
ph_client: PostHogClient,
posthog_trace_id: Optional[str],
Expand Down Expand Up @@ -99,7 +99,7 @@ def track_usage(
return response


async def track_usage_async(
async def call_llm_and_track_usage_async(
distinct_id: str,
ph_client: PostHogClient,
posthog_trace_id: Optional[str],
Expand Down

0 comments on commit 315d6e1

Please sign in to comment.