[editor][client] Enable Telemetry based on User Config settings (#899)




This diff builds on top of @rholinshead's starting point for the telemetry diff. Most of that looked good to me, so I didn't touch it apart from @rossdanlm's feedback.


- Building off of #869, if `allow_usage_data_sharing` is set to True, initialize Datadog Browser logging with a session ID (see the sketch after this list).
- Disabled telemetry in dev mode, per @rholinshead's comment that hot reload spams the logs with something like "datadog logging already initialized".
- Moved the initialization logic away from `index.tsx` into `editor.tsx` so that it can be configurable.
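
For reference, here is a condensed sketch of that gating flow, assuming the `/get_aiconfigrc` endpoint returns the parsed aiconfigrc as JSON; the client token below is a placeholder, and the full implementation is in the `LocalEditor.tsx` diff further down:

```ts
import { datadogLogs } from "@datadog/browser-logs";
import { ufetch } from "ufetch";
import { ROUTE_TABLE } from "./utils/api";

async function setupTelemetryIfAllowed(): Promise<void> {
  // Skip telemetry in dev mode; hot reload re-runs initialization and spams the logs.
  if ((process.env.NODE_ENV ?? "development") === "development") {
    return;
  }

  // Ask the local server for the user's aiconfigrc settings.
  const res = await ufetch.get(ROUTE_TABLE.GET_AICONFIGRC, {});

  // Only initialize Datadog Browser logging if the user has opted in.
  if (res.allow_usage_data_sharing) {
    datadogLogs.init({
      clientToken: "<datadog client token>", // placeholder
      env: process.env.NODE_ENV ?? "development",
      service: "aiconfig-editor",
      site: "us5.datadoghq.com",
      forwardErrorsToLogs: true,
      sessionSampleRate: 100,
    });
  }
}
```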


## Testplan:

1. `yarn build`
2. Run in "prod" mode
3. Edit the AIConfig
4. Hit the Save button and validate that a Datadog log is sent

Repeat with `allow_usage_data_sharing` set to false in the aiconfigrc and validate that no logs are sent.

| <img width="963" alt="Screenshot 2024-01-12 at 7 24 33 PM" src="https://github.com/lastmile-ai/aiconfig/assets/141073967/c2f979df-327e-40c4-9f06-034531437a65"> | <img width="1455" alt="Screenshot 2024-01-12 at 7 23 51 PM" src="https://github.com/lastmile-ai/aiconfig/assets/141073967/edb022b5-4abd-4ddc-b9bb-97713d32e5dc"> |
| ------------- | ------------- |
| <img width="604" alt="Screenshot 2024-01-12 at 7 26 35 PM" src="https://github.com/lastmile-ai/aiconfig/assets/141073967/4a2e49a3-061e-404c-9681-13a15aca8e9c"> | -> No logs |




I tried taking a video, but Datadog doesn't update with logs immediately, which made the video too long to upload.

---
Stack created with [Sapling](https://sapling-scm.com). Best reviewed
with
[ReviewStack](https://reviewstack.dev/lastmile-ai/aiconfig/pull/899).
* __->__ #899
* #864
Ankush-lastmile authored Jan 14, 2024
2 parents 24086a9 + d881775 commit f110958
Showing 7 changed files with 76 additions and 13 deletions.
3 changes: 2 additions & 1 deletion python/src/aiconfig/editor/client/package.json
@@ -23,6 +23,7 @@
]
},
"dependencies": {
"@datadog/browser-logs": "^5.7.0",
"@emotion/react": "^11.11.1",
"@mantine/carousel": "^6.0.7",
"@mantine/core": "^6.0.7",
@@ -63,4 +64,4 @@
"eslint-plugin-react-hooks": "^4.6.0",
"typescript": "^5"
}
}
}
48 changes: 41 additions & 7 deletions python/src/aiconfig/editor/client/src/LocalEditor.tsx
@@ -5,17 +5,13 @@ import AIConfigEditor, {
RunPromptStreamErrorEvent,
} from "./components/AIConfigEditor";
import { Flex, Loader, MantineProvider, Image } from "@mantine/core";
import {
AIConfig,
InferenceSettings,
JSONObject,
Output,
Prompt,
} from "aiconfig";
import { AIConfig, InferenceSettings, JSONObject, Output, Prompt } from "aiconfig";
import { useCallback, useEffect, useMemo, useState } from "react";
import { ufetch } from "ufetch";
import { ROUTE_TABLE } from "./utils/api";
import { streamingApiChain } from "./utils/oboeHelpers";
import { datadogLogs } from "@datadog/browser-logs";
import { LogEvent, LogEventData } from "./shared/types";

export default function Editor() {
const [aiconfig, setAiConfig] = useState<AIConfig | undefined>();
@@ -29,6 +25,34 @@ export default function Editor() {
loadConfig();
}, [loadConfig]);

const setupTelemetryIfAllowed = useCallback(async () => {
const isDev = (process.env.NODE_ENV ?? "development") === "development";

// Don't enable telemetry in dev mode because hot reload will spam the logs.
if (isDev) {
return;
}

const res = await ufetch.get(ROUTE_TABLE.GET_AICONFIGRC, {});

const enableTelemetry = res.allow_usage_data_sharing;

if (enableTelemetry) {
datadogLogs.init({
clientToken: "pub356987caf022337989e492681d1944a8",
env: process.env.NODE_ENV ?? "development",
service: "aiconfig-editor",
site: "us5.datadoghq.com",
forwardErrorsToLogs: true,
sessionSampleRate: 100,
});
}
}, []);

useEffect(() => {
setupTelemetryIfAllowed();
}, [setupTelemetryIfAllowed]);

const save = useCallback(async (aiconfig: AIConfig) => {
const res = await ufetch.post(ROUTE_TABLE.SAVE, {
// path: file path,
@@ -171,6 +195,14 @@ export default function Editor() {
return await ufetch.get(ROUTE_TABLE.SERVER_STATUS);
}, []);

const logEvent = useCallback((event: LogEvent, data?: LogEventData) => {
try {
datadogLogs.logger.info(event, data);
} catch (e) {
// Ignore logger errors for now
}
}, []);

const callbacks: AIConfigCallbacks = useMemo(
() => ({
addPrompt,
@@ -179,6 +211,7 @@
deletePrompt,
getModels,
getServerStatus,
logEvent,
runPrompt,
save,
setConfigDescription,
@@ -194,6 +227,7 @@
deletePrompt,
getModels,
getServerStatus,
logEvent,
runPrompt,
save,
setConfigDescription,
@@ -1,12 +1,13 @@
import { createContext } from "react";
import { ClientAIConfig } from "../shared/types";
import { ClientAIConfig, LogEvent, LogEventData } from "../shared/types";

/**
* Context for overall editor config state. This context should
* be memoized to prevent unnecessary re-renders
*/
const AIConfigContext = createContext<{
getState: () => ClientAIConfig;
logEvent?: (event: LogEvent, data?: LogEventData) => void;
}>({
getState: () => ({ prompts: [], _ui: { isDirty: false } }),
});
@@ -31,6 +31,8 @@ import { v4 as uuidv4 } from "uuid";
import aiconfigReducer, { AIConfigReducerAction } from "./aiconfigReducer";
import {
ClientPrompt,
LogEvent,
LogEventData,
aiConfigToClientConfig,
clientConfigToAIConfig,
clientPromptToAIConfigPrompt,
@@ -98,6 +100,7 @@ export type AIConfigCallbacks = {
deletePrompt: (promptName: string) => Promise<void>;
getModels: (search: string) => Promise<string[]>;
getServerStatus?: () => Promise<{ status: "OK" | "ERROR" }>;
logEvent?: (event: LogEvent, data?: LogEventData) => void;
runPrompt: (
promptName: string,
onStream: RunPromptStreamCallback,
@@ -167,6 +170,8 @@ export default function EditorContainer({
const stateRef = useRef(aiconfigState);
stateRef.current = aiconfigState;

const logEventCallback = callbacks.logEvent;

const saveCallback = callbacks.save;
const onSave = useCallback(async () => {
setIsSaving(true);
@@ -515,6 +520,7 @@
};

dispatch(action);
logEventCallback?.("ADD_PROMPT", { model, promptIndex });

try {
const serverConfigRes = await addPromptCallback(
@@ -536,7 +542,7 @@
});
}
},
[addPromptCallback, dispatch]
[addPromptCallback, logEventCallback]
);

const deletePromptCallback = callbacks.deletePrompt;
@@ -761,8 +767,9 @@
const contextValue = useMemo(
() => ({
getState,
logEvent: logEventCallback,
}),
[getState]
[getState, logEventCallback]
);

const isDirty = aiconfigState._ui.isDirty !== false;
@@ -870,7 +877,10 @@
<Button
leftIcon={<IconDeviceFloppy />}
loading={isSaving}
onClick={onSave}
onClick={() => {
onSave();
logEventCallback?.("SAVE_BUTTON_CLICKED");
}}
disabled={!isDirty}
size="xs"
variant="gradient"
6 changes: 5 additions & 1 deletion python/src/aiconfig/editor/client/src/shared/types.ts
@@ -1,4 +1,4 @@
import { AIConfig, Prompt } from "aiconfig";
import { AIConfig, JSONObject, Prompt } from "aiconfig";
import { uniqueId } from "lodash";

export type EditorFile = {
@@ -62,3 +62,7 @@ export function aiConfigToClientConfig(aiconfig: AIConfig): ClientAIConfig {
},
};
}

export type LogEvent = "ADD_PROMPT" | "SAVE_BUTTON_CLICKED";
// TODO: schematize this
export type LogEventData = JSONObject;
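
For context, here is a hedged sketch of how a component inside the editor could emit one of these events through `AIConfigContext`. The `PromptToolbar` component and the context import path are hypothetical; only the optional `logEvent` signature and the `"ADD_PROMPT"` event name come from the diffs above:

```tsx
import { useContext } from "react";
// Import path is an assumption; the context file's full path is not shown above.
import AIConfigContext from "../contexts/AIConfigContext";

function PromptToolbar() {
  const { logEvent } = useContext(AIConfigContext);

  const onAddPrompt = () => {
    // logEvent is optional: it is only defined when the host (e.g. LocalEditor)
    // wired up a logger callback, so this safely no-ops otherwise.
    logEvent?.("ADD_PROMPT", { model: "gpt-4", promptIndex: 0 });
  };

  return <button onClick={onAddPrompt}>Add prompt</button>;
}

export default PromptToolbar;
```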
1 change: 1 addition & 0 deletions python/src/aiconfig/editor/client/src/utils/api.ts
@@ -14,6 +14,7 @@ export const ROUTE_TABLE = {
CANCEL: urlJoin(API_ENDPOINT, "/cancel"),
CLEAR_OUTPUTS: urlJoin(API_ENDPOINT, "/clear_outputs"),
DELETE_PROMPT: urlJoin(API_ENDPOINT, "/delete_prompt"),
GET_AICONFIGRC: urlJoin(API_ENDPOINT, "/get_aiconfigrc"),
SAVE: urlJoin(API_ENDPOINT, "/save"),
SET_DESCRIPTION: urlJoin(API_ENDPOINT, "/set_description"),
SERVER_STATUS: urlJoin(API_ENDPOINT, "/server_status"),
12 changes: 12 additions & 0 deletions python/src/aiconfig/editor/client/yarn.lock
@@ -1289,6 +1289,18 @@
enabled "2.0.x"
kuler "^2.0.0"

"@datadog/[email protected]":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@datadog/browser-core/-/browser-core-5.6.0.tgz#f7a0d809afede4520bb4786886b4648c0e1b890d"
integrity sha512-z6CvlJyEFbYNw2ZawY9fDHQjdl71yp0OcchvB/S4SGKdQVLPUd48Y528gv132VDnY2g0UipE9JK59wnAamyS9w==

"@datadog/browser-logs@^5.6.0":
version "5.6.0"
resolved "https://registry.yarnpkg.com/@datadog/browser-logs/-/browser-logs-5.6.0.tgz#2b4b62d87a315560e87d46f84504012f7b7bdecd"
integrity sha512-NyqkG+UfAgz86CbEbSrAlDR5GvjJHAUwaI38xo9JR+9O5KJkwMgRnvQBtmdmpjjm723yyWeDcTd+ZIdvgIP61g==
dependencies:
"@datadog/browser-core" "5.6.0"

"@emotion/babel-plugin@^11.11.0":
version "11.11.0"
resolved "https://registry.yarnpkg.com/@emotion/babel-plugin/-/babel-plugin-11.11.0.tgz#c2d872b6a7767a9d176d007f5b31f7d504bb5d6c"