Connected api for everything but updating backend with settings
LilyCaroline17 committed Aug 5, 2024
1 parent 843710f commit ebc81c4
Showing 5 changed files with 200 additions and 57 deletions.
18 changes: 10 additions & 8 deletions public/pages/Configuration/Configuration.tsx
@@ -45,10 +45,10 @@ const Configuration = ({
];

const minutesOptions = [
{ value: 'ONE', text: '1' },
{ value: 'FIVE', text: '5' },
{ value: 'TEN', text: '10' },
{ value: 'THIRTY', text: '30' },
{ value: '1', text: '1' },
{ value: '5', text: '5' },
{ value: '10', text: '10' },
{ value: '30', text: '30' },
];

const history = useHistory();
@@ -135,14 +135,16 @@ const Configuration = ({

const WindowChoice = time === timeUnits[0].value ? MinutesBox : HoursBox;

let changed;
let changed = false;
if (isEnabled != metricSettingsMap[metric].isEnabled){
changed = 'isEnabled';
changed = true;
}
else if (topNSize !== metricSettingsMap[metric].currTopN) {
changed = 'topN';
changed = true;
} else if (windowSize !== metricSettingsMap[metric].currWindowSize) {
changed = 'windowSize';
changed = true;
} else if (time !== metricSettingsMap[metric].currTimeUnit){
changed = true;
}

let valid = false;
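For reference, a minimal sketch of an equivalent change check (not part of this commit), assuming the same metricSettingsMap fields compared in the if/else chain above:

// Sketch only: collapses the chain above into a single boolean expression.
const current = metricSettingsMap[metric];
const changed =
  isEnabled !== current.isEnabled ||
  topNSize !== current.currTopN ||
  windowSize !== current.currWindowSize ||
  time !== current.currTimeUnit;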
12 changes: 1 addition & 11 deletions public/pages/QueryDetails/QueryDetails.tsx
@@ -84,17 +84,7 @@ const QueryDetails = ({ queries, core }: { queries: any; core: CoreStart }) => {
Plotly.newPlot('latency', data, layout, config);
}, [query]);

console.log(query);
console.log(query.source);
const queryString = JSON.stringify(JSON.parse(query.source, function(k, v) {
if (v && typeof v === 'object' && !Array.isArray(v)) {
return Object.assign(Object.create(null), v);
}
return v;
}).query, null, 2);

// const queryString = JSON.stringify(JSON.parse(query.source).query, null, 2);

const queryString = JSON.stringify(JSON.parse(JSON.stringify(query.source)), null, 2);
const queryDisplay = `{\n "query": ${queryString ? queryString.replace(/\n/g, '\n ') : ''}\n}`;

return (
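Note that JSON.parse(JSON.stringify(query.source)) is just a deep clone when query.source is a plain, JSON-serializable object; under that assumption the new display string reduces to a single stringify (sketch only, not part of the commit):

// Sketch: equivalent output when query.source is already a plain object.
const queryString = JSON.stringify(query.source, null, 2);
const queryDisplay = `{\n  "query": ${queryString ? queryString.replace(/\n/g, '\n  ') : ''}\n}`;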
93 changes: 64 additions & 29 deletions public/pages/TopNQueries/TopNQueries.tsx
@@ -43,12 +43,6 @@ const TopNQueries = ({ core }: { core: CoreStart }) => {
currTimeUnit: 'HOURS'
});

// const metricSettingsMap: { [key: string]: MetricSettings } = {
// latency: latencySettings,
// cpu: cpuSettings,
// memory: memorySettings,
// };

const setMetricSettings = (metricType: string, updates: Partial<MetricSettings>) => {
switch(metricType){
case 'latency':
@@ -60,6 +54,8 @@ const TopNQueries = ({ core }: { core: CoreStart }) => {
case 'memory':
setMemorySettings(prevSettings => ({ ...prevSettings, ...updates }));
break;
default:
console.error(`Unknown metric type: ${metricType}`);
}
};

@@ -103,8 +99,18 @@ const TopNQueries = ({ core }: { core: CoreStart }) => {
const retrieveQueries = useCallback(async (start: string, end: string) => {
setLoading(true);
try {
const resp = await core.http.get('/api/top_queries');
const newQueries = resp.response.top_queries;
const nullResponse = { response: { top_queries: [] } };
console.log(latencySettings);
console.log(cpuSettings);
console.log(memorySettings);
const respLatency = latencySettings.isEnabled ? await core.http.get('/api/top_queries/latency'): nullResponse;
const respCpu = cpuSettings.isEnabled ? await core.http.get('/api/top_queries/cpu') : nullResponse;
const respMemory = memorySettings.isEnabled ? await core.http.get('/api/top_queries/memory'): nullResponse;
const newQueries = [
...respLatency.response.top_queries,
...respCpu.response.top_queries,
...respMemory.response.top_queries
];
const startTimestamp = parseDateString(start);
const endTimestamp = parseDateString(end);
const noDuplicates = newQueries.filter((array, index, self) =>
@@ -116,7 +122,7 @@ const TopNQueries = ({ core }: { core: CoreStart }) => {
} finally {
setLoading(false);
}
}, []);
}, [latencySettings, cpuSettings, memorySettings, core]);

const retrieveConfigInfo = async (
get : boolean,
@@ -130,38 +136,67 @@
try {
const resp = await core.http.get('/api/settings');
console.log(resp);
// const settings = resp.persistent.search.insights.top_queries
// newTopN = settings.
// newWindowSize =
// newTimeUnit =
const settings = resp.response.persistent.search.insights.top_queries
const latency = settings.latency;
const cpu = settings.cpu;
const memory = settings.memory;
console.log(latency);
if (latency !== undefined && latency.enabled === "true") {
const [time, timeUnits] = latency.window_size.match(/\D+|\d+/g);
setMetricSettings('latency', {
isEnabled: true,
currTopN: latency.top_n_size,
currWindowSize: time,
currTimeUnit: timeUnits === 'm' ? 'MINUTES': 'HOURS',
});
}
if (cpu !== undefined && cpu.enabled === "true") {
const [time, timeUnits] = cpu.window_size.match(/\D+|\d+/g);
setMetricSettings('cpu', {
isEnabled: true,
currTopN: cpu.top_n_size,
currWindowSize: time,
currTimeUnit: timeUnits === 'm' ? 'MINUTES': 'HOURS',
});
}
if (memory !== undefined && memory.enabled === "true") {
const [time, timeUnits] = memory.window_size.match(/\D+|\d+/g);
setMetricSettings('memory', {
isEnabled: true,
currTopN: memory.top_n_size,
currWindowSize: time,
currTimeUnit: timeUnits === 'm' ? 'MINUTES': 'HOURS',
});
}
} catch (error) {
console.error('Failed to retrieve settings:', error);
}
} else {
try {
if (enabled){
setMetricSettings(metric, {
isEnabled: enabled,
});
} else {
setMetricSettings(metric, {
isEnabled: enabled,
currTopN: newTopN,
currWindowSize: newWindowSize,
currTimeUnit: newTimeUnit,
});
}
// const requestBody = {newTopN};
// core.http.put('/api/top_n_size', {body: JSON.stringify(requestBody)});
setMetricSettings(metric, {
isEnabled: enabled,
currTopN: newTopN,
currWindowSize: newWindowSize,
currTimeUnit: newTimeUnit,
});
const requestQuery = {metric: metric, enabled: enabled, top_n_size: newTopN, window_size: `${newWindowSize}${newTimeUnit == 'MINUTES' ? 'm': 'h'}`};
const resp = await core.http.put('/api/update_settings', {query: requestQuery});
console.log("Setting settings");
console.log(resp);
} catch (error) {
console.error('Failed to set units:', error);
console.error('Failed to set settings:', error);
}
}
// setTopN(newTopN);
// setWindowSize(newWindowSize);
// setTimeUnit(newTimeUnit);
};

const onQueriesChange = (start : string, end : string) => {
retrieveQueries(start, end);
retrieveConfigInfo(true);
}

useEffect(() => {
retrieveQueries(defaultStart, 'now');
}, [retrieveQueries, defaultStart]);
@@ -186,7 +221,7 @@ const TopNQueries = ({ core }: { core: CoreStart }) => {
<QueryInsights
queries={queries}
loading={loading}
onQueriesChange={retrieveQueries}
onQueriesChange={onQueriesChange}
defaultStart={defaultStart}
core={core}
/>
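For illustration, the new write path in retrieveConfigInfo sends a request like the following when a metric is reconfigured (values are hypothetical; the query string mirrors the requestQuery built above, with '30' minutes becoming the '30m' window_size suffix):

// Hypothetical example call from the frontend to the new update route.
await core.http.put('/api/update_settings', {
  query: { metric: 'latency', enabled: true, top_n_size: '10', window_size: '30m' },
});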
29 changes: 25 additions & 4 deletions server/clusters/queryInsightsPlugin.ts
@@ -15,12 +15,25 @@
method: 'GET',
});

queryInsights.setSettings = ca({
queryInsights.getTopNQueriesLatency = ca({
url: {
fmt: `_cluster/settings`,
fmt: `/_insights/top_queries?type=latency`,
},
method: 'PUT',
needBody: true,
method: 'GET',
});

queryInsights.getTopNQueriesCpu = ca({
url: {
fmt: `/_insights/top_queries?type=cpu`,
},
method: 'GET',
});

queryInsights.getTopNQueriesMemory = ca({
url: {
fmt: `/_insights/top_queries?type=memory`,
},
method: 'GET',
});

queryInsights.getSettings = ca({
@@ -29,4 +42,12 @@
},
method: 'GET',
});

queryInsights.setSettings = ca({
url: {
fmt: `_cluster/settings`,
},
method: 'PUT',
needBody: true,
});
};
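The getSettings action above wraps GET _cluster/settings; the parsing in TopNQueries.tsx assumes the wrapped response carries a nested persistent block shaped roughly like this (illustrative values only, with enabled and window_size returned as strings such as 'true' and '30m'):

// Illustrative shape, inferred from how retrieveConfigInfo reads resp.response.
const exampleSettingsResponse = {
  persistent: {
    search: {
      insights: {
        top_queries: {
          latency: { enabled: 'true', top_n_size: '10', window_size: '30m' },
          cpu: { enabled: 'false' },
          memory: { enabled: 'true', top_n_size: '5', window_size: '1h' },
        },
      },
    },
  },
};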
105 changes: 100 additions & 5 deletions server/routes/index.ts
@@ -42,6 +42,93 @@ export function defineRoutes(router: IRouter) {
}
);

router.get(
{
path: '/api/top_queries/latency',
validate: false,
},
async (context, request, response) => {
try {
const client = context.queryInsights_plugin.queryInsightsClient.asScoped(request).callAsCurrentUser;
const res = await client('queryInsights.getTopNQueriesLatency');
console.log(res);
return response.custom({
statusCode: 200,
body: {
ok: true,
response: res,
},
});
} catch (error) {
console.error("Unable to get top queries (latency): ", error);
return response.ok({
body: {
ok: false,
response: error.message,
}
});
}
}
);

router.get(
{
path: '/api/top_queries/cpu',
validate: false,
},
async (context, request, response) => {
try {
const client = context.queryInsights_plugin.queryInsightsClient.asScoped(request).callAsCurrentUser;
const res = await client('queryInsights.getTopNQueriesCpu');
console.log(res);
return response.custom({
statusCode: 200,
body: {
ok: true,
response: res,
},
});
} catch (error) {
console.error("Unable to get top queries (cpu): ", error);
return response.ok({
body: {
ok: false,
response: error.message,
}
});
}
}
);

router.get(
{
path: '/api/top_queries/memory',
validate: false,
},
async (context, request, response) => {
try {
const client = context.queryInsights_plugin.queryInsightsClient.asScoped(request).callAsCurrentUser;
const res = await client('queryInsights.getTopNQueriesMemory');
console.log(res);
return response.custom({
statusCode: 200,
body: {
ok: true,
response: res,
},
});
} catch (error) {
console.error("Unable to get top queries (memory): ", error);
return response.ok({
body: {
ok: false,
response: error.message,
}
});
}
}
);

router.get(
{
path: '/api/settings',
@@ -72,19 +159,27 @@ export function defineRoutes(router: IRouter)

router.put(
{
path: '/api/top_n_size',
path: '/api/update_settings',
validate: false,
},
async (context, request, response) => {
try {
// console.log("context is: ", context);
// console.log("response is: ", response);
const query = request.query;
console.log("----------------request is: ", query);
const client = context.queryInsights_plugin.queryInsightsClient.asScoped(request).callAsCurrentUser;
const res = await client('queryInsights.getTopNQueries', {
const params = {
"body": {
"persistent": {
"search.insights.top_queries.latency.top_n_size": "request.top_n_size"
[`search.insights.top_queries.${query.metric}.enabled`]: query.enabled,
[`search.insights.top_queries.${query.metric}.top_n_size`]: query.top_n_size,
[`search.insights.top_queries.${query.metric}.window_size`] : query.window_size,
}
}
});
};
console.log(params);
const res = await client('queryInsights.setSettings', params);
return response.custom({
statusCode: 200,
body: {
Expand All @@ -93,7 +188,7 @@ export function defineRoutes(router: IRouter) {
},
});
} catch (error) {
console.error("Unable to set top n size: ", error);
console.error("Unable to set settings: ", error);
return response.ok({
body: {
ok: false,
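As a worked example (hypothetical values), PUT /api/update_settings?metric=latency&enabled=true&top_n_size=10&window_size=30m builds and forwards a cluster-settings body like this:

// Hypothetical params assembled by the handler above; query values arrive as strings.
const params = {
  body: {
    persistent: {
      'search.insights.top_queries.latency.enabled': 'true',
      'search.insights.top_queries.latency.top_n_size': '10',
      'search.insights.top_queries.latency.window_size': '30m',
    },
  },
};
// Forwarded through the legacy client action registered in queryInsightsPlugin.ts:
// await client('queryInsights.setSettings', params);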
