diff --git a/.github/workflows/cypress-tests.yml b/.github/workflows/cypress-tests.yml index 00d66ef68..7846d2390 100644 --- a/.github/workflows/cypress-tests.yml +++ b/.github/workflows/cypress-tests.yml @@ -22,18 +22,10 @@ jobs: - name: Checkout uses: actions/checkout@v4 - - name: set up google creds - env: - DATA: ${{ secrets.JIGSAW_SERVICE_ACCT }} - run: | - echo $DATA > server/llm-service-account.json - - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Build and start Docker containers - env: - JIGSAW_SERVICE_ACCT: ${{ secrets.JIGSAW_SERVICE_ACCT }} run: | docker compose -f docker-compose.yml -f docker-compose.test.yml --env-file test.env --profile postgres up -d --build diff --git a/.gitignore b/.gitignore index 3ec5d9ac4..1658df67f 100644 --- a/.gitignore +++ b/.gitignore @@ -9,4 +9,3 @@ prod.env xids.csv preprod.env .venv -server/llm-service-account.json diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml index 508fbc2be..0aa471343 100644 --- a/docker-compose.dev.yml +++ b/docker-compose.dev.yml @@ -13,7 +13,6 @@ services: target: dev args: NODE_ENV: development - GOOGLE_APPLICATION_CREDENTIALS: /app/llm-service-account.json volumes: # This will mount your local polis/server directory so changes can be watched and reloaded. # But it will ignore your local node_modules and instead use a new container volume. diff --git a/docker-compose.yml b/docker-compose.yml index 812df2c24..ab5673ae6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -35,7 +35,6 @@ services: target: prod args: NODE_ENV: production - JIGSAW_SERVICE_ACCT: ${JIGSAW_SERVICE_ACCT} labels: polis_tag: ${TAG:-dev} depends_on: diff --git a/example.env b/example.env index 537cbd884..7ccce2f95 100644 --- a/example.env +++ b/example.env @@ -122,7 +122,6 @@ AWS_SECRET_ACCESS_KEY= # This value is written by the server app if SHOULD_USE_TRANSLATION_API is true. 
GOOGLE_APPLICATION_CREDENTIALS= GOOGLE_JIGSAW_PERSPECTIVE_API_KEY= -JIGSAW_SERVICE_ACCT= # A value in miliseconds for caching AI responses for narrativeReport MAX_REPORT_CACHE_DURATION= diff --git a/heroku.yml b/heroku.yml index 4bbb0ebfe..f896c1cd3 100644 --- a/heroku.yml +++ b/heroku.yml @@ -6,12 +6,6 @@ build: worker: math/Dockerfile # build and release static assets (js, index.html, etc) #release: Dockerfile -# release: -# image: web -# command: -# - echo "provisioning gcloud" # You can add other release commands if needed -# - printf "%s" "$JIGSAW_SERVICE_ACCT" > /app/llm-service-account.json -# - gcloud auth activate-service-account --key-file=/app/llm-service-account.json #release: #image: release #command: diff --git a/server/Dockerfile b/server/Dockerfile index 3df1895c6..61e929fed 100644 --- a/server/Dockerfile +++ b/server/Dockerfile @@ -16,13 +16,6 @@ WORKDIR /app # Install Python and necessary tools (including pip) RUN apk add --no-cache python3 py3-pip -# Install the gcloud CLI -RUN apk add --no-cache wget gnupg && \ - wget -q -O - https://dl.google.com/dl/cloudsdk/release/google-cloud-sdk.tar.gz | tar xvz -C /root && \ - /root/google-cloud-sdk/install.sh --quiet - -ENV PATH="/root/google-cloud-sdk/bin:${PATH}" - # This package is needed at runtime RUN apk add libpq-dev @@ -54,15 +47,6 @@ FROM base as prod # Set default NODE_ENV to production unless overridden at build time with --build-arg NODE_ENV=development ENV NODE_ENV ${NODE_ENV:-production} -# ARG JIGSAW_SERVICE_ACCT -# ENV JIGSAW_SERVICE_ACCT=${JIGSAW_SERVICE_ACCT} -# RUN echo ${JIGSAW_SERVICE_ACCT} > /app/llm-service-account.json - -# ENV GOOGLE_APPLICATION_CREDENTIALS=/app/llm-service-account.json - -# Authenticate with the service account -# RUN gcloud auth activate-service-account --key-file=/app/llm-service-account.json - RUN npm ci --production=false # For prod builds, to minimize the image size, remove the dependencies tagged with `--virtual .build` above. 
diff --git a/server/package-lock.json b/server/package-lock.json index 0d1b9ef36..76ce8f906 100644 --- a/server/package-lock.json +++ b/server/package-lock.json @@ -18,7 +18,7 @@ "@google-cloud/vertexai": "^1.9.0", "@google/generative-ai": "^0.21.0", "@sinclair/typebox": "^0.34.3", - "@tevko/sensemaking-tools": "^1.0.5", + "@tevko/sensemaking-tools": "^1.0.15", "@typescript-eslint/eslint-plugin": "^8.16.0", "akismet": "~0.0.11", "async": "~0.1.22", @@ -3624,13 +3624,14 @@ } }, "node_modules/@tevko/sensemaking-tools": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@tevko/sensemaking-tools/-/sensemaking-tools-1.0.5.tgz", - "integrity": "sha512-ry8csckNRM8pfeldqaE8mPll5ggMOLXXwUy62/oUg2JOeDzXHxQTeGieB0BPTkXD0wO4lOXeu+ls6Q9HyUe5Lw==", + "version": "1.0.15", + "resolved": "https://registry.npmjs.org/@tevko/sensemaking-tools/-/sensemaking-tools-1.0.15.tgz", + "integrity": "sha512-l8e5s8eIG8T4KT+TSpjkJ11PkYpd2PMNzCgVV4OxMUxkorXrt3fyGmmlm8nLX56swgAlo6cesun1YnrtsoDLBA==", "license": "ISC", "dependencies": { "@babel/preset-env": "^7.25.4", "@google-cloud/vertexai": "^1.9.0", + "@google/generative-ai": "^0.21.0", "@sinclair/typebox": "^0.34.3", "@typescript-eslint/eslint-plugin": "^8.16.0", "babel-jest": "^29.7.0", diff --git a/server/package.json b/server/package.json index 87e61998e..2c71c6930 100644 --- a/server/package.json +++ b/server/package.json @@ -34,7 +34,7 @@ "@google-cloud/vertexai": "^1.9.0", "@google/generative-ai": "^0.21.0", "@sinclair/typebox": "^0.34.3", - "@tevko/sensemaking-tools": "^1.0.5", + "@tevko/sensemaking-tools": "^1.0.15", "@typescript-eslint/eslint-plugin": "^8.16.0", "akismet": "~0.0.11", "async": "~0.1.22", diff --git a/server/src/report_experimental/topics-example/index.ts b/server/src/report_experimental/topics-example/index.ts index 4529270a7..fd8a0d279 100644 --- a/server/src/report_experimental/topics-example/index.ts +++ b/server/src/report_experimental/topics-example/index.ts @@ -5,9 +5,9 @@ // - subtopics: Array of 
subtopics, each with their specific citations import { sendCommentGroupsSummary } from "../../routes/export"; -import { VertexModel } from "@tevko/sensemaking-tools/src/models/vertex_model"; import { Sensemaker } from "@tevko/sensemaking-tools/src/sensemaker"; -import { Comment, VoteTally } from "@tevko/sensemaking-tools/src/types"; +import { GoogleAIModel } from "@tevko/sensemaking-tools/src/models/aiStudio_model"; +import { Comment, VoteTally, Topic } from "@tevko/sensemaking-tools/src/types"; import { parse } from "csv-parse"; async function parseCsvString(csvString: string) { @@ -16,7 +16,7 @@ async function parseCsvString(csvString: string) { const parser = parse({ columns: true, // Use first row as headers skip_empty_lines: true, // Ignore empty lines - relax_column_count: true + relax_column_count: true, }); parser.on("error", (error) => reject(error)); @@ -52,27 +52,44 @@ async function parseCsvString(csvString: string) { } export async function getTopicsFromRID(zId: number) { - const resp = await sendCommentGroupsSummary(zId, undefined, false); - const modified = (resp as string).split("\n"); - modified[0] = `comment-id,comment_text,total-votes,total-agrees,total-disagrees,total-passes,group-a-votes,group-0-agree-count,group-0-disagree-count,group-0-pass-count,group-b-votes,group-1-agree-count,group-1-disagree-count,group-1-pass-count`; - - const comments = await parseCsvString(modified.join("\n")); - const topics = await new Sensemaker({ - defaultModel: new VertexModel( - "jigsaw-vertex-integration", - "us-central1", - // "llm-service-account" - ), - }).learnTopics(comments as Comment[], true); - return topics; -} - - - - - - - - - + try { + const resp = await sendCommentGroupsSummary(zId, undefined, false); + const modified = (resp as string).split("\n"); + modified[0] = 
`comment-id,comment_text,total-votes,total-agrees,total-disagrees,total-passes,group-a-votes,group-0-agree-count,group-0-disagree-count,group-0-pass-count,group-b-votes,group-1-agree-count,group-1-disagree-count,group-1-pass-count`; + + const comments = await parseCsvString(modified.join("\n")); + const topics = await new Sensemaker({ + defaultModel: new GoogleAIModel( + process.env.GEMINI_API_KEY as string, + "gemini-exp-1206" + ), + }).learnTopics(comments as Comment[], false); + const categorizedComments = await new Sensemaker({ + defaultModel: new GoogleAIModel( + process.env.GEMINI_API_KEY as string, + "gemini-1.5-flash-8b" + ), + }).categorizeComments(comments as Comment[], false, topics); + + const topics_master_list = new Map(); + + categorizedComments.forEach((c: Comment) => { + c.topics?.forEach((t: Topic) => { + const existingTopic = topics_master_list.get(t.name); + if (existingTopic) { + existingTopic.citations.push(Number(c.id)); + } else { + topics_master_list.set(t.name, { citations: [Number(c.id)] }); + } + }); + }); + return Array.from(topics_master_list, ([name, value]) => ({ + name, + citations: value.citations, + })); + } catch (error) { + console.error(error); + return []; + } +} diff --git a/server/src/routes/reportNarrative.ts b/server/src/routes/reportNarrative.ts index c1cde4e80..ad93069a8 100644 --- a/server/src/routes/reportNarrative.ts +++ b/server/src/routes/reportNarrative.ts @@ -7,18 +7,13 @@ import { GenerateContentRequest, GoogleGenerativeAI, } from "@google/generative-ai"; -import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; -import { - PutCommand, - QueryCommand, - DeleteCommand -} from "@aws-sdk/lib-dynamodb"; import { convertXML } from "simple-xml-to-json"; import fs from "fs/promises"; import { parse } from "csv-parse/sync"; import { create } from "xmlbuilder2"; import { sendCommentGroupsSummary } from "./export"; import { getTopicsFromRID } from "../report_experimental/topics-example"; +import DynamoStorageService from 
"../utils/storage"; const js2xmlparser = require("js2xmlparser"); @@ -26,66 +21,6 @@ interface PolisRecord { [key: string]: string; // Allow any string keys } -const dynamoClient = new DynamoDBClient({ region: process.env.AWS_REGION ?? "us-west-1" }); -const tableName = "report_narrative_store"; - -const putReportItem = async (item: Record | undefined) => { - const params = { - TableName: tableName, - Item: item, - }; - - const command = new PutCommand(params); - - try { - const response = await dynamoClient.send(command); - console.log("Item added successfully:", response); - return response; - } catch (error) { - console.error("Error adding item:", error); - } -}; - -const queryItemsByRidSectionModel = async (rid_section_model: string) => { - const params = { - TableName: tableName, - KeyConditionExpression: "rid_section_model = :rid_section_model", - ExpressionAttributeValues: { - ":rid_section_model": rid_section_model, - }, - }; - - const command = new QueryCommand(params); - - try { - const data = await dynamoClient.send(command); - console.log("Items retrieved successfully:", data.Items); - return data.Items; - } catch (error) { - console.error("Error querying items:", error); - } -}; - -const deleteReportItem = async (rid_section_model: string, timestamp: string) => { - const params = { - TableName: tableName, - Key: { - rid_section_model: rid_section_model, - timestamp: timestamp, - }, - }; - - const command = new DeleteCommand(params); - - try { - const response = await dynamoClient.send(command); - console.log("Item deleted successfully:", response); - return response; - } catch (error) { - console.error("Error deleting item:", error); - } -}; - export class PolisConverter { static convertToXml(csvContent: string): string { // Parse CSV content @@ -181,7 +116,7 @@ const gemeniModel = genAI.getGenerativeModel({ generationConfig: { // https://cloud.google.com/vertex-ai/docs/reference/rest/v1/GenerationConfig responseMimeType: "application/json", - 
maxOutputTokens: 5000, // high for reliability for now. + maxOutputTokens: 50000, // high for reliability for now. }, }); @@ -201,7 +136,8 @@ const getCommentsAsXML = async ( const resp = await sendCommentGroupsSummary(id, undefined, false, filter); const xml = PolisConverter.convertToXml(resp as string); // eslint-disable-next-line no-console - if (xml.trim().length === 0) console.error("No data has been returned by sendCommentGroupsSummary"); + if (xml.trim().length === 0) + console.error("No data has been returned by sendCommentGroupsSummary"); return xml; } catch (e) { console.error("Error in getCommentsAsXML:", e); @@ -225,28 +161,30 @@ interface ReportSection { } // Define the report sections with filters -const getReportSections = (topics: {name: string, citations: number[]}[]) => { +const getReportSections = (topics: { name: string; citations: number[] }[]) => { return [ { name: "uncertainty", templatePath: "src/report_experimental/subtaskPrompts/uncertainty.xml", // Revert to original simple pass ratio check - filter: (v: {passes: number, votes: number}) => v.passes / v.votes >= 0.2, + filter: (v: { passes: number; votes: number }) => + v.passes / v.votes >= 0.2, }, { name: "group_informed_consensus", templatePath: "src/report_experimental/subtaskPrompts/group_informed_consensus.xml", - filter: (v: {group_aware_consensus: number}) => (v.group_aware_consensus ?? 0) > 0.7, + filter: (v: { group_aware_consensus: number }) => + (v.group_aware_consensus ?? 0) > 0.7, }, { name: "groups", templatePath: "src/report_experimental/subtaskPrompts/groups.xml", - filter: (v: {comment_extremity: number}) => { + filter: (v: { comment_extremity: number }) => { return (v.comment_extremity ?? 
0) > 1; }, }, - ...topics.map((topic: {name: string, citations: number[]}) => ({ + ...topics.map((topic: { name: string; citations: number[] }) => ({ name: `topic_${topic.name.toLowerCase().replace(/\s+/g, "_")}`, templatePath: "src/report_experimental/subtaskPrompts/topics.xml", filter: (v: { comment_id: number }) => { @@ -254,7 +192,7 @@ const getReportSections = (topics: {name: string, citations: number[]}[]) => { return topic.citations.includes(v.comment_id); }, })), - ] + ]; }; type QueryParams = { @@ -263,15 +201,25 @@ type QueryParams = { const isFreshData = (timestamp: string) => { const now = new Date().getTime(); - const then = new Date(timestamp).getTime(); + const then = new Date(timestamp).getTime(); const elapsed = Math.abs(now - then); - return elapsed < ((process.env.MAX_REPORT_CACHE_DURATION as unknown as number) || 3600000); -} + return ( + elapsed < + (((process.env.MAX_REPORT_CACHE_DURATION as unknown) as number) || 3600000) + ); +}; export async function handle_GET_reportNarrative( req: { p: { rid: string }; query: QueryParams }, res: Response ) { + let storage; + if (process.env.AWS_REGION && process.env.AWS_REGION?.trim().length > 0) { + storage = new DynamoStorageService( + process.env.AWS_REGION, + "report_narrative_store" + ); + } const sectionParam = req.query.section; const modelParam = req.query.model; let tpcs; @@ -281,12 +229,16 @@ export async function handle_GET_reportNarrative( }); const { rid } = req.p; - res.write(`POLIS-PING: AI bootstrap`); // @ts-expect-error flush - calling due to use of compression res.flush(); + const system_lore = await fs.readFile( + "src/report_experimental/system.xml", + "utf8" + ); + try { const zid = await getZidForRid(rid); if (!zid) { @@ -298,13 +250,18 @@ export async function handle_GET_reportNarrative( // @ts-expect-error flush - calling due to use of compression res.flush(); - const cachedTopics = await queryItemsByRidSectionModel(`${rid}#topics`); + const cachedTopics = await 
storage?.queryItemsByRidSectionModel( + `${rid}#topics` + ); if (cachedTopics?.length && isFreshData(cachedTopics[0].timestamp)) { - tpcs = cachedTopics[0].report_data + tpcs = cachedTopics[0].report_data; } else { if (cachedTopics?.length) { - deleteReportItem(cachedTopics[0].rid_section_model, cachedTopics[0].timestamp); + storage?.deleteReportItem( + cachedTopics[0].rid_section_model, + cachedTopics[0].timestamp + ); } tpcs = await getTopicsFromRID(zid); const reportItemTopics = { @@ -312,21 +269,14 @@ export async function handle_GET_reportNarrative( timestamp: new Date().toISOString(), report_data: tpcs, }; - - putReportItem(reportItemTopics) - .then(data => console.log(data)) - .catch(err => console.error(err)); + + storage?.putItem(reportItemTopics); } - const reportSections = getReportSections(tpcs) + const reportSections = getReportSections(tpcs); res.write(`POLIS-PING: retrieving system lore`); - const system_lore = await fs.readFile( - "src/report_experimental/system.xml", - "utf8" - ); - // @ts-expect-error flush - calling due to use of compression res.flush(); @@ -334,40 +284,66 @@ export async function handle_GET_reportNarrative( const s = sectionParam ? 
reportSections.find((s) => s.name === sectionParam) || section : section; - const cachedResponseClaude = await queryItemsByRidSectionModel(`${rid}#${s.name}#claude`); - const cachedResponseGemini = await queryItemsByRidSectionModel(`${rid}#${s.name}#gemini`); + const cachedResponseClaude = await storage?.queryItemsByRidSectionModel( + `${rid}#${s.name}#claude` + ); + const cachedResponseGemini = await storage?.queryItemsByRidSectionModel( + `${rid}#${s.name}#gemini` + ); const fileContents = await fs.readFile(s.templatePath, "utf8"); const json = await convertXML(fileContents); // @ts-expect-error function args ignore temp const structured_comments = await getCommentsAsXML(zid, s.filter); // send cached response first if avalable - if (cachedResponseClaude?.length && cachedResponseGemini?.length && isFreshData(cachedResponseClaude[0].timestamp) && isFreshData(cachedResponseGemini[0].timestamp)) { + if ( + Array.isArray(cachedResponseClaude) && + cachedResponseClaude?.length && + Array.isArray(cachedResponseGemini) && + cachedResponseGemini?.length && + isFreshData(cachedResponseClaude[0].timestamp) && + isFreshData(cachedResponseGemini[0].timestamp) + ) { res.write( JSON.stringify({ [s.name]: { responseGemini: cachedResponseGemini[0].report_data, responseClaude: cachedResponseClaude[0].report_data, - errors: structured_comments?.trim().length === 0 ? "NO_CONTENT_AFTER_FILTER" : undefined, + errors: + structured_comments?.trim().length === 0 + ?
"NO_CONTENT_AFTER_FILTER" + : undefined, }, }) ); } else { - if (cachedResponseClaude?.length) { - deleteReportItem(cachedResponseClaude[0].rid_section_model, cachedResponseClaude[0].timestamp); + if ( + Array.isArray(cachedResponseClaude) && + cachedResponseClaude?.length + ) { + storage?.deleteReportItem( + cachedResponseClaude[0].rid_section_model, + cachedResponseClaude[0].timestamp + ); } - if (cachedResponseGemini?.length) { - deleteReportItem(cachedResponseGemini[0].rid_section_model, cachedResponseGemini[0].timestamp); + if ( + Array.isArray(cachedResponseGemini) && + cachedResponseGemini?.length + ) { + storage?.deleteReportItem( + cachedResponseGemini[0].rid_section_model, + cachedResponseGemini[0].timestamp + ); } json.polisAnalysisPrompt.children[ json.polisAnalysisPrompt.children.length - 1 ].data.content = { structured_comments }; - + const prompt_xml = js2xmlparser.parse( "polis-comments-and-group-demographics", json ); - + if ((modelParam as string)?.trim()) { const responseClaude = await anthropic.messages.create({ model: "claude-3-5-haiku-20241022", @@ -389,7 +365,10 @@ export async function handle_GET_reportNarrative( JSON.stringify({ [s.name]: { responseClaude, - errors: structured_comments?.trim().length === 0 ? "NO_CONTENT_AFTER_FILTER" : undefined, + errors: + structured_comments?.trim().length === 0 + ? "NO_CONTENT_AFTER_FILTER" + : undefined, }, }) ); @@ -410,7 +389,7 @@ export async function handle_GET_reportNarrative( }, ], }); - + const gemeniModelprompt: GenerateContentRequest = { contents: [ { @@ -424,45 +403,49 @@ export async function handle_GET_reportNarrative( ], systemInstruction: system_lore, }; - + const respGem = await gemeniModel.generateContent(gemeniModelprompt); const responseGemini = await respGem.response.text(); - + const reportItemClaude = { rid_section_model: `${rid}#${s.name}#claude`, timestamp: new Date().toISOString(), report_data: responseClaude, - errors: structured_comments?.trim().length === 0 ? 
"NO_CONTENT_AFTER_FILTER" : undefined, + errors: + structured_comments?.trim().length === 0 + ? "NO_CONTENT_AFTER_FILTER" + : undefined, }; - - putReportItem(reportItemClaude) - .then(data => console.log(data)) - .catch(err => console.error(err)); - + + storage?.putItem(reportItemClaude); + const reportItemGemini = { rid_section_model: `${rid}#${s.name}#gemini`, timestamp: new Date().toISOString(), report_data: responseGemini, - errors: structured_comments?.trim().length === 0 ? "NO_CONTENT_AFTER_FILTER" : undefined, + errors: + structured_comments?.trim().length === 0 + ? "NO_CONTENT_AFTER_FILTER" + : undefined, }; - - putReportItem(reportItemGemini) - .then(data => console.log(data)) - .catch(err => console.error(err)); - + + storage?.putItem(reportItemGemini); + res.write( JSON.stringify({ [s.name]: { responseGemini, responseClaude, - errors: structured_comments?.trim().length === 0 ? "NO_CONTENT_AFTER_FILTER" : undefined, + errors: + structured_comments?.trim().length === 0 + ? "NO_CONTENT_AFTER_FILTER" + : undefined, }, }) ); } } - // @ts-expect-error flush - calling due to use of compression res.flush(); @@ -473,6 +456,9 @@ export async function handle_GET_reportNarrative( res.end(); } catch (err) { + // @ts-expect-error flush - calling due to use of compression + res.flush(); + console.log(err); const msg = err instanceof Error && err.message && err.message.startsWith("polis_") ? 
err.message diff --git a/server/src/utils/storage.ts b/server/src/utils/storage.ts new file mode 100644 index 000000000..f1e921370 --- /dev/null +++ b/server/src/utils/storage.ts @@ -0,0 +1,73 @@ +/* eslint-disable no-console */ +import { DynamoDBClient } from "@aws-sdk/client-dynamodb"; +import { + PutCommand, + QueryCommand, + DeleteCommand +} from "@aws-sdk/lib-dynamodb"; + +export default class DynamoStorageService { + private client: DynamoDBClient; + private tableName: string; + + constructor(region: string, tableName: string) { + const dynamoClient = new DynamoDBClient({ region }); + this.client = dynamoClient; + this.tableName = tableName; + } + + async putItem(item: Record | undefined) { + const params = { + TableName: this.tableName, + Item: item, + }; + + const command = new PutCommand(params); + + try { + const response = await this.client.send(command); + return response; + } catch (error) { + console.error(error) + } + } + + async queryItemsByRidSectionModel(rid_section_model: string) { + const params = { + TableName: this.tableName, + KeyConditionExpression: "rid_section_model = :rid_section_model", + ExpressionAttributeValues: { + ":rid_section_model": rid_section_model, + }, + }; + + const command = new QueryCommand(params); + + try { + const data = await this.client.send(command); + return data.Items; + } catch (error) { + console.error("Error querying items:", error); + } + } + + async deleteReportItem(rid_section_model: string, timestamp: string) { + const params = { + TableName: this.tableName, + Key: { + rid_section_model: rid_section_model, + timestamp: timestamp, + }, + }; + + const command = new DeleteCommand(params); + + try { + const response = await this.client.send(command); + console.log("Item deleted successfully:", response); + return response; + } catch (error) { + console.error("Error deleting item:", error); + } + } +} \ No newline at end of file