diff --git a/DESCRIPTION b/DESCRIPTION
index c3da98d..d7d3491 100644
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -14,5 +14,7 @@ RoxygenNote: 7.3.2
 URL: https://github.com/tadascience/valentine, https://valentine.tada.science/
 BugReports: https://github.com/tadascience/valentine/issues
 Imports:
+    cli,
     ellmer,
-    glue
+    glue,
+    rlang
diff --git a/NAMESPACE b/NAMESPACE
index cd35af1..97e423a 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -2,5 +2,7 @@
 
 export(prompt)
 export(roses)
+importFrom(cli,cli_abort)
 importFrom(ellmer,chat_openai)
 importFrom(glue,glue)
+importFrom(rlang,current_env)
diff --git a/R/roses.R b/R/roses.R
index 02db719..9557a07 100644
--- a/R/roses.R
+++ b/R/roses.R
@@ -6,12 +6,12 @@
 #' @param pkg A package
 #' @param hint extra information to add to the prompt
 #' @param emoji Should the poem include emojis ?
-#' @param chat A Chat object, e.g. [ellmer::chat_openai()]. The default uses
+#' @param chat An [ellmer::Chat] object, e.g. [ellmer::chat_openai()]. The default uses
 #'   the 'gpt-3.5-turbo' model from OpenAI.
+#' @inheritParams rlang::args_error_context
 #'
 #' @return A lovely poem for your package crush
 #'
-#' @importFrom glue glue
 #' @examples
 #' prompt("dplyr")
 #'
@@ -21,11 +21,31 @@
 #' roses("dplyr")
 #' }
 #'
+#' @importFrom glue glue
 #' @importFrom ellmer chat_openai
+#' @importFrom cli cli_abort
+#' @importFrom rlang current_env
 #' @export
-roses <- function(pkg, hint = "", emoji = TRUE, chat = chat_openai(model = "gpt-3.5-turbo")) {
+roses <- function(pkg, hint = "", emoji = TRUE, chat = chat_openai(model = "gpt-3.5-turbo"), error_call = current_env()) {
+  error_handler <- function(e) {
+    cli_abort(c(
+      "Problem creating or communicating with the LLM.",
+      i = "See {.fn ellmer::chat_openai} and friends for details on how to create a {.cls Chat} object."
+    ), parent = e, call = error_call)
+  }
+
   prompt <- prompt(pkg, hint = hint, emoji = emoji)
-  chat$chat(prompt, echo = "text")
+
+  chat <- withCallingHandlers(force(chat), error = error_handler)
+
+  if (!inherits(chat, "Chat")) {
+    cli_abort(
+      c("The {.arg chat} argument must be a {.cls Chat} object, not {.obj_type_friendly {chat}}."),
+      call = error_call
+    )
+  }
+
+  withCallingHandlers(chat$chat(prompt, echo = "text"), error = error_handler)
 }
 
 #' @rdname roses
diff --git a/man/roses.Rd b/man/roses.Rd
index 27fc1fd..54def81 100644
--- a/man/roses.Rd
+++ b/man/roses.Rd
@@ -9,7 +9,8 @@ roses(
   pkg,
   hint = "",
   emoji = TRUE,
-  chat = chat_openai(model = "gpt-3.5-turbo")
+  chat = chat_openai(model = "gpt-3.5-turbo"),
+  error_call = current_env()
 )
 
 prompt(pkg, hint = "", emoji = TRUE)
@@ -21,8 +22,13 @@ prompt(pkg, hint = "", emoji = TRUE)
 
 \item{emoji}{Should the poem include emojis ?}
 
-\item{chat}{A Chat object, e.g. \code{\link[ellmer:chat_openai]{ellmer::chat_openai()}}. The default uses
+\item{chat}{An \link[ellmer:Chat]{ellmer::Chat} object, e.g. \code{\link[ellmer:chat_openai]{ellmer::chat_openai()}}. The default uses
 the 'gpt-3.5-turbo' model from OpenAI.}
+
+\item{error_call}{The execution environment of a currently
+running function, e.g. \code{caller_env()}. The function will be
+mentioned in error messages as the source of the error. See the
+\code{call} argument of \code{\link[rlang:abort]{abort()}} for more information.}
 }
 \value{
 A lovely poem for your package crush
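
A minimal usage sketch of the behaviour this patch introduces, assuming the package is installed with this change and ellmer is available; the hint string and the OPENAI_API_KEY check below are illustrative assumptions, not part of the diff, and the exact error wording comes from the cli_abort() calls above.

library(valentine)

# A value that is not an ellmer Chat object should now fail early with a
# classed cli error attributed to `roses()` (via `error_call`), instead of
# an opaque `chat$chat` method failure.
try(roses("dplyr", chat = "not a chat object"))

# Passing an explicit Chat object still works as before; constructing one
# and chatting requires an OpenAI API key.
if (nzchar(Sys.getenv("OPENAI_API_KEY"))) {
  chat <- ellmer::chat_openai(model = "gpt-3.5-turbo")
  roses("dplyr", hint = "mention the pipe", chat = chat)
}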