diff --git a/posthog/test/ai/langchain/test_callbacks.py b/posthog/test/ai/langchain/test_callbacks.py
index 007f983..8a6fb0f 100644
--- a/posthog/test/ai/langchain/test_callbacks.py
+++ b/posthog/test/ai/langchain/test_callbacks.py
@@ -335,7 +335,7 @@ def test_metadata(mock_client):
     first_call_props = first_call_args["properties"]
     assert first_call_args["event"] == "$ai_generation"
     assert first_call_props["$ai_trace_id"] == "test-trace-id"
-    assert first_call_props["$ai_posthog_properties"] == {"foo": "bar"}
+    assert first_call_props["$foo"] == "bar"
     assert first_call_props["$ai_input"] == [{"role": "user", "content": "Foo"}]
     assert first_call_props["$ai_output"] == {"choices": [{"role": "assistant", "content": "Bar"}]}
     assert first_call_props["$ai_http_status"] == 200
@@ -422,7 +422,7 @@ def test_openai_chain(mock_client):
     assert first_call_props["$ai_trace_id"] == "test-trace-id"
     assert first_call_props["$ai_provider"] == "openai"
     assert first_call_props["$ai_model"] == "gpt-4o-mini"
-    assert first_call_props["$ai_posthog_properties"] == {"foo": "bar"}
+    assert first_call_props["$foo"] == "bar"
 
     # langchain-openai for langchain v3
     if "max_completion_tokens" in first_call_props["$ai_model_parameters"]:
diff --git a/posthog/test/ai/openai/test_openai.py b/posthog/test/ai/openai/test_openai.py
index a23563b..e75e549 100644
--- a/posthog/test/ai/openai/test_openai.py
+++ b/posthog/test/ai/openai/test_openai.py
@@ -73,24 +73,23 @@ def test_basic_completion(mock_client, mock_openai_response):
 
         assert response == mock_openai_response
         assert mock_client.capture.call_count == 1
-    
+
     call_args = mock_client.capture.call_args[1]
     props = call_args["properties"]
-    
+
     assert call_args["distinct_id"] == "test-id"
     assert call_args["event"] == "$ai_generation"
     assert props["$ai_provider"] == "openai"
     assert props["$ai_model"] == "gpt-4"
     assert props["$ai_input"] == [{"role": "user", "content": "Hello"}]
-    assert props["$ai_output"] == {
-        "choices": [{"role": "assistant", "content": "Test response"}]
-    }
+    assert props["$ai_output"] == {"choices": [{"role": "assistant", "content": "Test response"}]}
     assert props["$ai_input_tokens"] == 20
     assert props["$ai_output_tokens"] == 10
     assert props["$ai_http_status"] == 200
     assert props["foo"] == "bar"
     assert isinstance(props["$ai_latency"], float)
 
+
 def test_embeddings(mock_client, mock_embedding_response):
     with patch("openai.resources.embeddings.Embeddings.create", return_value=mock_embedding_response):
         client = OpenAI(api_key="test-key", posthog_client=mock_client)
@@ -103,10 +102,10 @@ def test_embeddings(mock_client, mock_embedding_response):
 
         assert response == mock_embedding_response
         assert mock_client.capture.call_count == 1
-    
+
     call_args = mock_client.capture.call_args[1]
     props = call_args["properties"]
-    
+
     assert call_args["distinct_id"] == "test-id"
     assert call_args["event"] == "$ai_embedding"
     assert props["$ai_provider"] == "openai"
@@ -115,4 +114,4 @@ def test_embeddings(mock_client, mock_embedding_response):
     assert props["$ai_input_tokens"] == 10
     assert props["$ai_http_status"] == 200
     assert props["foo"] == "bar"
-    assert isinstance(props["$ai_latency"], float)
\ No newline at end of file
+    assert isinstance(props["$ai_latency"], float)