diff --git a/README.md b/README.md
index 4a2eff9..55d3a5c 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 
 This repo is an experimental sandbox for testing the use of language models for smart search in documents. The content here is a POC for exploring functionality and possibilities and is not meant for production.
 
-The main program is in the file ```lmDocSearchUtils.js```. To test it, call the function(s) from the file ```index.js```
+The main program is in the file ```llmDocSearchUtils.js```. To test it, call the function(s) from the file ```index.js```
 
 The function(s) are built with the [langchain](https://js.langchain.com/docs/get_started/introduction) framework, which is designed to make it easy to work with language models. The code in this repo uses the same API as chatVTFK, but with its own data source.
diff --git a/index.js b/index.js
index 555039b..4ed375e 100644
--- a/index.js
+++ b/index.js
@@ -1,5 +1,20 @@
-import { askDok } from "./llmDocSearchUtils.js";
+// import { askDok, askDokOpenAI } from "./utils/docChatUtilsLangchain.js";
+// import { askRag, showThread, createNewThread, runAssistant } from "./utils/docChatUtilsTemp.js";
+import { createSpeech, createNewThread, askDok } from "./utils/docChatUtilsOpenAI.js";
 
-// console.log(askDok("html", "https://snl.no/Vestfold_og_Telemark", "Finnes det noen stavkirker i fylket?"));
-console.log(askDok("md", "docs/delingsinfo.md", "Hvem er primærkontakter i Vestfold?"))
+const artikkel = "https://www.utdanningsnytt.no/eksamen-karakterer-kvalitetsvurderingssystem/utvalg-foreslar-a-avvikle-dagens-nasjonale-prover/380387";
+const spørsmål = "Hva er den kommutative lov? Forklar på en enkel måte";
+
+
+// console.log(askDok("html", artikkel, spørsmål));
+// console.log(askDok("pdf", "./docs/MB_bm.pdf", spørsmål));
+
+// console.log(createNewThread());
+// console.log(askRag());
+// console.log(showThread());
+// console.log(runAssistant());
+// console.log(createSpeech(`Hei hei.`));
+
+// console.log(createNewThread());
+console.log(await askDok("asst_PP9eODyAvv3Qtd7VJYTfmKEL", "thread_xGR6QNsAbB6LZIOBoHYoN6QE", "Punkt 2 var spennende. Kan du si mer om det?"))
 
diff --git a/speech.mp3 b/speech.mp3
new file mode 100644
index 0000000..e24366a
Binary files /dev/null and b/speech.mp3 differ
diff --git a/llmDocSearchUtils.js b/utils/docChatUtilsLangchain.js
similarity index 77%
rename from llmDocSearchUtils.js
rename to utils/docChatUtilsLangchain.js
index 0bd3729..4fbc78c 100644
--- a/llmDocSearchUtils.js
+++ b/utils/docChatUtilsLangchain.js
@@ -11,6 +11,8 @@ import { loadQAStuffChain } from "langchain/chains";
 import { HtmlToTextTransformer } from "langchain/document_transformers/html_to_text";
 import fs from "fs";
 
+import OpenAI from "openai";
+
 import dotenv from "dotenv";
 dotenv.config();
 
@@ -72,3 +74,31 @@ export const askDok = async (dokType, dokPath, question) => {
   console.log(res.text);
   return true;
 };
+
+
+export const askDokOpenAI = async () => {
+
+  const openai = new OpenAI();
+  const completion = await openai.chat.completions.create({
+    messages: [{ role: "system", content: "Du er en hyggelig assistent. Du skal kun svare på spørsmål med utgangspunkt i vedlagte fil." }, { role: "user", content: "Hva står om underveisvurdering?" }],
+    model: "gpt-3.5-turbo",
+  });
+
+  const myAssistant = await openai.beta.assistants.retrieve(
+    "asst_PP9eODyAvv3Qtd7VJYTfmKEL"
+  );
+
+  // const emptyThread = await openai.beta.threads.create();
+  // console.log(emptyThread);
+
+  const run = await openai.beta.threads.runs.create(
+    "thread_9IScPMsgbKKmGuBM0W78NWzy",
+    { assistant_id: "asst_PP9eODyAvv3Qtd7VJYTfmKEL", instructions: "Hva står om kompetansemål i vedlagte fil?" }
+  );
+
+  console.log(run);
+  console.log(completion.choices[0].message);
+  return true;
+}
+
+
diff --git a/utils/docChatUtilsOpenAI.js b/utils/docChatUtilsOpenAI.js
new file mode 100644
index 0000000..63bae6c
--- /dev/null
+++ b/utils/docChatUtilsOpenAI.js
@@ -0,0 +1,58 @@
+import OpenAI from "openai";
+import fs from "fs";
+import path from "path";
+import dotenv from "dotenv";
+dotenv.config();
+
+// The client constructor takes an options object, not a bare API key string
+const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
+const speechFile = path.resolve("./speech.mp3");
+
+export const createNewThread = async () => {
+  const thread = await openai.beta.threads.create();
+  console.log(thread);
+  return thread;
+};
+
+export const askDok = async (assistantID, threadID, prompt) => {
+  console.log(assistantID || process.env.ASSISTANT, threadID, prompt);
+  const message = await openai.beta.threads.messages.create(threadID, {
+    role: "user",
+    content: prompt,
+  });
+  const run = await openai.beta.threads.runs.create(threadID, {
+    assistant_id: assistantID,
+    instructions: "Svar på norsk.",
+  });
+
+  let response = new Promise((resolve, reject) => {
+    let intervalId = setInterval(async () => {
+      // Poll the run status until the assistant has finished
+      const updatedRun = await openai.beta.threads.runs.retrieve(
+        threadID,
+        run.id
+      );
+      console.log("Waiting for completion: ", updatedRun.status);
+      if (updatedRun.status === "completed") {
+        clearInterval(intervalId);
+        // List the thread's messages first, then read the newest reply
+        const messages = await openai.beta.threads.messages.list(threadID);
+        resolve(messages.data[0].content[0].text.value);
+      }
+    }, 1000);
+  });
+  return response;
+};
+
+export const createSpeech = async (speech) => {
+  const mp3 = await openai.audio.speech.create({
+    model: "tts-1",
+    voice: "shimmer",
+    input: speech,
+  });
+  console.log(speechFile);
+  const buffer = Buffer.from(await mp3.arrayBuffer());
+  await fs.promises.writeFile(speechFile, buffer);
+  // We could return something here, e.g. the path to the audio file
+  return true;
+};
diff --git a/utils/docChatUtilsTemp.js b/utils/docChatUtilsTemp.js
new file mode 100644
index 0000000..d79061a
--- /dev/null
+++ b/utils/docChatUtilsTemp.js
@@ -0,0 +1,44 @@
+import OpenAI from "openai";
+import dotenv from "dotenv";
+dotenv.config();
+
+const openai = new OpenAI();
+
+export const askRag = async () => {
+  const message = await openai.beta.threads.messages.create(
+    'thread_UQI9QX4rs0CHMJNwMVey6F0Z',
+    {
+      role: "user",
+      content: "Hvilke kjerneelementer er det?"
+    }
+  )
+  console.log(message.content);
+  return message
+}
+
+export const createNewThread = async () => {
+  const thread = await openai.beta.threads.create();
+  console.log(thread);
+  return thread;
+}
+
+export const showThread = async () => {
+  const threadMessages = await openai.beta.threads.messages.list('thread_UQI9QX4rs0CHMJNwMVey6F0Z')
+
+  for (const m of threadMessages.data) {
+    console.log(m.content);
+  }
+  console.log(threadMessages);
+  return threadMessages;
+}
+
+export const runAssistant = async () => {
+  const run = await openai.beta.threads.runs.create(
+    'thread_UQI9QX4rs0CHMJNwMVey6F0Z',
+    {
+      assistant_id: 'asst_PP9eODyAvv3Qtd7VJYTfmKEL',
+      instructions: "Svar på norsk."
+    }
+  )
+  console.log(run);
+};
\ No newline at end of file
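Note on usage: below is a minimal sketch of how the helpers added in `utils/docChatUtilsOpenAI.js` could be chained end to end. The flow (create a thread, ask a question, read the answer aloud) and the use of the `ASSISTANT` environment variable as the assistant ID are assumptions about intended use, not something this patch wires up itself.

```js
// Hypothetical end-to-end flow, built only from the exports added in this patch.
import { createNewThread, askDok, createSpeech } from "./utils/docChatUtilsOpenAI.js";

const assistantId = process.env.ASSISTANT; // or a hard-coded "asst_..." id
const thread = await createNewThread();    // create a fresh Assistants thread

// askDok posts the question to the thread, starts a run and polls until it completes
const answer = await askDok(assistantId, thread.id, "Hva står om underveisvurdering?");
console.log(answer);

// Optional: read the answer aloud; createSpeech writes ./speech.mp3
await createSpeech(answer);
```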
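A possible hardening of the polling in `askDok` (a suggestion, not part of the patch): the Assistants API reports other terminal run statuses besides `completed`, so a run that fails or expires currently leaves the returned promise pending forever. A self-contained sketch of a helper that also treats those statuses as terminal, using the same `runs.retrieve(threadId, runId)` call the patch uses:

```js
// Hypothetical polling helper; not part of this patch.
// Resolves when the run completes, throws on the other terminal statuses.
import OpenAI from "openai";

const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });
const TERMINAL_STATUSES = ["completed", "failed", "cancelled", "expired"];

export const waitForRun = async (threadId, runId, intervalMs = 1000) => {
  for (;;) {
    const run = await openai.beta.threads.runs.retrieve(threadId, runId);
    if (TERMINAL_STATUSES.includes(run.status)) {
      if (run.status !== "completed") {
        throw new Error(`Run ended with status "${run.status}"`);
      }
      return run;
    }
    // Wait a bit before polling again
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
};
```

`askDok` could then await this helper right after creating the run and fetch the newest thread message afterwards, instead of nesting that lookup inside `setInterval`.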