Commit

bug fix for Ask AI button
Previously, the button did not include the highlighted text as context when sending follow-up questions to the model; a short illustration of the new behavior follows the change summary below.
jarodise committed Dec 13, 2024
1 parent 9361286 commit ccbe36f
Showing 4 changed files with 27 additions and 23 deletions.
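A minimal standalone sketch (not part of the commit) of the behavior this fix introduces in dialogs.lua: for a follow-up question, the highlighted passage is now prepended to the question before it is added to the message history. The highlight and question strings here are made-up placeholder values.

-- Sketch only: mirrors the new handleNewQuestion logic shown in the dialogs.lua diff below.
-- highlightedText and question are placeholder values, not values from the plugin.
local highlightedText = "All models are wrong, but some are useful."
local question = "Can you give a concrete example?"

-- Before this commit, the bare question was inserted into message_history;
-- now the highlighted passage is prepended as context:
local contextual_question = "Regarding the text: \"" .. highlightedText
    .. "\"\n\nQuestion: " .. question
print(contextual_question)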
Binary file modified .DS_Store
Binary file not shown.
6 changes: 3 additions & 3 deletions configuration.lua
@@ -10,15 +10,15 @@ local CONFIGURATION = {
 
     -- Default system prompt used when asking ChatGPT a question
     -- It can be overridden by setting a custom prompt in the plugin settings
-    prompt1 = "translate to Spanish.",
+    prompt1 = "你是一名有二十年经验的中文人文社科杂志主编,擅长用通俗易懂的语言和形象的类比来解释艰深晦涩的哲学/社会科学文本。请直接对文本进行解读,无需在首尾进行任何形式的总结,将你的输出限制在600字以内",
 
     -- Default system prompt used when asking ChatGPT a question
     -- It can be overridden by setting a custom prompt in the plugin settings
-    prompt2 = "translate to Chinese.",
+    prompt2 = "你是一名有二十年经验的专业英译中翻译,擅长将人文社科类作品翻译成高质量的中文译文,你的翻译文笔流畅优美且没有翻译腔。",
 
     -- Default system prompt used when asking ChatGPT a question
     -- It can be overridden by setting a custom prompt in the plugin settings
-    prompt3 = "explain this text in detail.",
+    prompt3 = "You Stephen West, the host of popular podcast Philosophize This!",
 }
 
 return CONFIGURATION
29 changes: 16 additions & 13 deletions dialogs.lua
@@ -38,7 +38,7 @@ local function createResultText(highlightedText, message_history)
         if message_history[i].role == "user" then
             result_text = result_text .. _("User: ") .. message_history[i].content .. "\n\n"
         else
-            result_text = result_text .. _("ChatGPT: ") .. message_history[i].content .. "\n\n"
+            result_text = result_text .. _("Assistant: ") .. message_history[i].content .. "\n\n"
         end
     end
 
@@ -68,9 +68,12 @@ local function showChatGPTDialog(ui, highlightedText, message_history)
     end
 
     local function handleNewQuestion(chatgpt_viewer, question)
+        -- For follow-up questions, include the context of the highlighted text
+        local contextual_question = "Regarding the text: \"" .. highlightedText .. "\"\n\nQuestion: " .. question
+
         table.insert(message_history, {
             role = "user",
-            content = question
+            content = contextual_question
         })
 
         local answer = queryChatGPT(message_history)
@@ -111,18 +114,18 @@ local function showChatGPTDialog(ui, highlightedText, message_history)
             showLoadingDialog()
 
             UIManager:scheduleIn(0.1, function()
-                local context_message = {
+                -- Combine the context and question into a single message
+                local combined_message = {
                     role = "user",
-                    content = "I'm reading something titled '" .. title .. "' by " .. author ..
-                        ". I have a question about the following highlighted text: " .. highlightedText
+                    content = string.format(
+                        "Context: I'm reading \"%s\" by %s.\n\nText: \"%s\"\n\nQuestion: %s",
+                        title,
+                        author,
+                        highlightedText,
+                        question
+                    )
                 }
-                table.insert(message_history, context_message)
-
-                local question_message = {
-                    role = "user",
-                    content = question
-                }
-                table.insert(message_history, question_message)
+                table.insert(message_history, combined_message)
 
                 local answer
                 pcall(function()
@@ -148,7 +151,7 @@ local function showChatGPTDialog(ui, highlightedText, message_history)
             else
                 UIManager:close(loading)
                 UIManager:show(InfoMessage:new{
-                    text = _("Error querying ChatGPT. Please check your configuration and try again."),
+                    text = _("Error querying AI. Please check your configuration and try again."),
                     timeout = 5
                 })
             end
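For reference, a small standalone sketch (with assumed example values, not code from the plugin) of what the combined first user message produced by the string.format call above looks like:

-- Sketch only: reproduces the message format built in showChatGPTDialog above.
-- title, author, highlightedText, and question are placeholder values.
local title, author = "Example Book", "Example Author"
local highlightedText = "Some highlighted passage from the book."
local question = "What does the author mean here?"

local content = string.format(
    "Context: I'm reading \"%s\" by %s.\n\nText: \"%s\"\n\nQuestion: %s",
    title, author, highlightedText, question
)
print(content)
-- Context: I'm reading "Example Book" by Example Author.
--
-- Text: "Some highlighted passage from the book."
--
-- Question: What does the author mean here?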
15 changes: 8 additions & 7 deletions gpt_query.lua
@@ -28,25 +28,26 @@ local function isGeminiEndpoint(url)
 end
 
 local function formatGeminiRequest(message_history)
-    -- Find system prompt and last user message
+    -- Find system prompt and combine user messages
     local systemPrompt = ""
-    local userContent = ""
+    local userContent = {}
 
     for _, msg in ipairs(message_history) do
         if msg.role == "system" then
             systemPrompt = msg.content
         elseif msg.role == "user" then
-            userContent = msg.content
+            table.insert(userContent, msg.content)
         end
     end
 
-    -- Combine system prompt with user content
+    -- Combine all user content with proper context
     local combinedPrompt = ""
     if systemPrompt ~= "" then
-        combinedPrompt = "Instructions: " .. systemPrompt .. "\n\nText: " .. userContent
-    else
-        combinedPrompt = userContent
+        combinedPrompt = "Instructions: " .. systemPrompt .. "\n\n"
     end
 
+    -- Join all user messages with proper context
+    combinedPrompt = combinedPrompt .. table.concat(userContent, "\n\nFollow-up: ")
+
     return {
         contents = {
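To see the effect of the gpt_query.lua change, here is a short self-contained sketch (the message_history entries are invented placeholders) of how the revised formatGeminiRequest flattens a system prompt and several user messages into a single Gemini prompt:

-- Sketch only: mirrors the combining logic in formatGeminiRequest above.
-- The message_history entries are made-up placeholder values.
local message_history = {
    { role = "system", content = "You are a helpful reading assistant." },
    { role = "user", content = "Context: I'm reading \"Example Book\" by Example Author.\n\nText: \"...\"\n\nQuestion: What does this mean?" },
    { role = "user", content = "Regarding the text: \"...\"\n\nQuestion: Can you expand on that?" },
}

local systemPrompt = ""
local userContent = {}
for _, msg in ipairs(message_history) do
    if msg.role == "system" then
        systemPrompt = msg.content
    elseif msg.role == "user" then
        table.insert(userContent, msg.content)
    end
end

local combinedPrompt = ""
if systemPrompt ~= "" then
    combinedPrompt = "Instructions: " .. systemPrompt .. "\n\n"
end
combinedPrompt = combinedPrompt .. table.concat(userContent, "\n\nFollow-up: ")
print(combinedPrompt)
-- Prints the system prompt prefixed with "Instructions: ", followed by the first
-- user message, with each later user message joined on via "\n\nFollow-up: ".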
