From b506423a18e136e07c94993722253744492d1465 Mon Sep 17 00:00:00 2001
From: Hause Lin
Date: Sun, 28 Jul 2024 00:06:26 -0400
Subject: [PATCH] Add ohelp

---
 NAMESPACE                  |  1 +
 R/ollama.R                 | 41 ++++++++++++++++++++++++++++++++++++++
 _pkgdown.yml               |  1 +
 man/image_encode_base64.Rd |  2 +-
 man/ohelp.Rd               | 25 +++++++++++++++++++++++
 5 files changed, 69 insertions(+), 1 deletion(-)
 create mode 100644 man/ohelp.Rd

diff --git a/NAMESPACE b/NAMESPACE
index 1db4b1b..ab4ef2e 100644
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -14,6 +14,7 @@ export(generate)
 export(image_encode_base64)
 export(insert_message)
 export(list_models)
+export(ohelp)
 export(prepend_message)
 export(pull)
 export(resp_process)
diff --git a/R/ollama.R b/R/ollama.R
index f8f0d81..9198e2d 100644
--- a/R/ollama.R
+++ b/R/ollama.R
@@ -568,3 +568,44 @@ embeddings <- function(model, prompt, normalize = TRUE, keep_alive = "5m", endpo
 
 
 
+#' Chat with a model in real time in the R console.
+#'
+#' @param model A character string of the model name, such as "llama3". Defaults to "codegemma:7b", which is a decent coding model as of 2024-07-27.
+#' @param ... Additional options. Currently only first_prompt is recognized: first_prompt = "quit" makes the function exit immediately (used in examples and tests).
+#'
+#' @return Does not return anything. It prints the conversation in the console.
+#' @export
+#'
+#' @examplesIf test_connection()$status_code == 200
+#' ohelp(first_prompt = "quit")
+#' # regular usage: ohelp()
+ohelp <- function(model = "codegemma:7b", ...) {
+
+    cat("Say something or type /q to quit or end the conversation.\n\n")
+
+    n_messages <- 0
+    opts <- list(...)
+    # let examples and tests exit immediately via first_prompt = "quit"
+    if (length(opts) > 0 && identical(opts$first_prompt, "quit")) {
+        prompt <- "/q"
+    } else {
+        # otherwise read the user's first message from the console
+        prompt <- readline()
+    }
+
+    while (prompt != "/q") {
+        if (n_messages == 0) {
+            messages <- create_message(prompt, role = "user")
+        } else {
+            messages <- append(messages, create_message(prompt, role = "user"))
+        }
+        n_messages <- n_messages + 1
+        response <- chat(model, messages = messages, output = "text", stream = TRUE)
+        messages <- append_message(response, "assistant", messages)
+        n_messages <- n_messages + 1
+        prompt <- readline()
+    }
+
+    cat("Goodbye!\n")
+
+}
diff --git a/_pkgdown.yml b/_pkgdown.yml
index ff2af29..099d494 100644
--- a/_pkgdown.yml
+++ b/_pkgdown.yml
@@ -14,6 +14,7 @@ reference:
 - pull
 - embed
 - embeddings
+- ohelp
 - test_connection
 - create_request
diff --git a/man/image_encode_base64.Rd b/man/image_encode_base64.Rd
index 2415b0d..5e036e3 100644
--- a/man/image_encode_base64.Rd
+++ b/man/image_encode_base64.Rd
@@ -16,6 +16,6 @@ A base64 encoded string.
 Read image file and encode it to base64.
 }
 \examples{
-image_path <- file.path(system.file('extdata', package = "ollamar"), "image1.png")
+image_path <- file.path(system.file("extdata", package = "ollamar"), "image1.png")
 image_encode_base64(image_path)
 }
diff --git a/man/ohelp.Rd b/man/ohelp.Rd
new file mode 100644
index 0000000..ab83b40
--- /dev/null
+++ b/man/ohelp.Rd
@@ -0,0 +1,25 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/ollama.R
+\name{ohelp}
+\alias{ohelp}
+\title{Chat with a model in real time in the R console.}
+\usage{
+ohelp(model = "codegemma:7b", ...)
+}
+\arguments{
+\item{model}{A character string of the model name, such as "llama3". Defaults to "codegemma:7b", which is a decent coding model as of 2024-07-27.}
+
+\item{...}{Additional options. Currently only first_prompt is recognized: first_prompt = "quit" makes the function exit immediately (used in examples and tests).}
+}
+\value{
+Does not return anything. It prints the conversation in the console.
+}
+\description{
+Chat with a model in real time in the R console.
+}
+\examples{
+\dontshow{if (test_connection()$status_code == 200) (if (getRversion() >= "3.4") withAutoprint else force)(\{ # examplesIf}
+ohelp(first_prompt = "quit")
+# regular usage: ohelp()
+\dontshow{\}) # examplesIf}
+}
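
A minimal sketch of the conversation pattern that ohelp() automates, using the same ollamar helpers the new function calls (create_message(), chat(), append_message()). The model name and prompts below are placeholders, and a running local Ollama server with the model already pulled is assumed.

    library(ollamar)

    model <- "codegemma:7b"  # any locally pulled model works here

    # first user turn: build the message history and stream the reply
    messages <- create_message("How do I read a CSV file in R?", role = "user")
    response <- chat(model, messages = messages, output = "text", stream = TRUE)

    # keep the assistant's reply in the history, then add a follow-up question
    messages <- append_message(response, "assistant", messages)
    messages <- append(messages, create_message("Show the same thing with data.table.", role = "user"))
    chat(model, messages = messages, output = "text", stream = TRUE)

ohelp() repeats exactly this append-and-chat cycle in a readline() loop until the user types /q.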