Chat Queue Persistence (#33)

* fix: workflow env

* update: center title on readme

* update: readme goals and format

* add: icons in readme

* fix: plus margin

* update: environment variables in contr.

* add: queue for chat history

* add: set -e for workflow failure

* update: version increment

* fix: client null info

* fix: shutoff issues
This commit is contained in:
Kevin Dang
2024-04-02 22:04:09 -07:00
committed by GitHub
parent 5f8b513269
commit 727731695e
16 changed files with 139 additions and 31 deletions

View File

@@ -1,6 +1,7 @@
import { EmbedBuilder, Message } from 'discord.js'
import { ChatResponse, Ollama } from 'ollama'
import { UserMessage } from './events.js'
+import { Queue } from '../queues/queue.js'
/**
* Method to send replies as normal text on discord like any other user
@@ -15,7 +16,7 @@ export async function embedMessage(
channel: string,
model: string
},
-    msgHist: UserMessage[]
+    msgHist: Queue<UserMessage>
) {
// bot response
let response: ChatResponse
@@ -33,7 +34,7 @@ export async function embedMessage(
// Attempt to query model for message
response = await ollama.chat({
model: tokens.model,
-        messages: msgHist,
+        messages: msgHist.getItems(),
options: {
num_thread: 8, // remove if optimization needed further
mirostat: 1,