Chat Queue Persistence (#33)

* fix: workflow env

* update: center title on readme

* update: readme goals and format

* add: icons in readme

* fix: plus margin

* update: environment variables in contr.

* add: queue for chat history

* add: set -e for workflow failure

* update: version increment

* fix: client null info

* fix: shutoff issues
This commit is contained in:
Kevin Dang
2024-04-02 22:04:09 -07:00
committed by GitHub
parent 5f8b513269
commit 727731695e
16 changed files with 139 additions and 31 deletions

View File

@@ -2,6 +2,7 @@ import { Client, GatewayIntentBits } from 'discord.js'
import { UserMessage, registerEvents } from './utils/events.js'
import Events from './events/index.js'
import { Ollama } from 'ollama'
import { Queue } from './queues/queue.js'
// Import keys/tokens
import Keys from './keys.js'
@@ -23,12 +24,7 @@ const ollama = new Ollama({
})
// Create Queue managed by Events
const messageHistory: [UserMessage] = [
{
role: 'system',
content: 'Your name is Ollama GU'
}
]
const messageHistory: Queue<UserMessage> = new Queue<UserMessage>
/**
* register events for bot to listen to in discord
@@ -44,4 +40,10 @@ await client.login(Keys.clientToken)
.catch((error) => {
console.error('[Login Error]', error)
process.exit(1)
})
// queue up bots name
messageHistory.enqueue({
role: 'assistant',
content: `My name is ${client.user?.username}`
})

View File

@@ -29,12 +29,18 @@ export const Shutoff: SlashCommand = {
const superUsers: string[] = JSON.parse(Keys.superUser.replace(/'/g, '"'))
// check if admin or false on shutdown
if (interaction.user.tag in superUsers || !(!interaction.options.get('are-you-sure')?.value && interaction.user.tag in superUsers)) {
if (interaction.user.tag !in superUsers) {
interaction.reply({
content: `Shutdown failed:\n\n${interaction.user.tag}, You do not have permission to shutoff **${client.user?.tag}**, otherwise, you just didn't want to.`,
content: `Shutdown failed:\n\n${interaction.user.tag}, You do not have permission to shutoff **${client.user?.tag}**.`,
ephemeral: true
})
return // stop from shutting down
} else if (!interaction.options.get('are-you-sure')?.value) {
interaction.reply({
content: `Shutdown failed:\n\n${interaction.user.tag}, You didn't want to shutoff **${client.user?.tag}**.`,
ephemeral: true
})
return
}
interaction.reply({

View File

@@ -18,8 +18,11 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
// Only respond if message mentions the bot
if (!message.mentions.has(tokens.clientUid)) return
// check if we can push, if not, remove oldest
if (msgHist.size() === msgHist.getCapacity()) msgHist.dequeue()
// push user response
msgHist.push({
msgHist.enqueue({
role: 'user',
content: message.content
})
@@ -43,8 +46,8 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
})
})
let response: ChatResponse
let response: ChatResponse
// undefined or false, use normal, otherwise use embed
if (config.options['message-style'])
response = await embedMessage(message, ollama, tokens, msgHist)
@@ -54,14 +57,17 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
// If something bad happened, remove user query and stop
if (response == undefined) { msgHist.pop(); return }
// if queue is full, remove the oldest message
if (msgHist.size() === msgHist.getCapacity()) msgHist.dequeue()
// successful query, save it as history
msgHist.push({
msgHist.enqueue({
role: 'assistant',
content: response.message.content
})
} catch (error: any) {
msgHist.pop() // remove message because of failure
openFile('config.json', 'message-style', true)
message.reply(`**Response generation failed.**\n\n**Reason:** *${error.message}*`)
message.reply(`**Error Occurred:**\n\n**Reason:** *${error.message}*`)
}
})

70
src/queues/queue.ts Normal file
View File

@@ -0,0 +1,70 @@
// Queue interface for any queue class to follow
interface IQueue<T> {
  enqueue(item: T): void
  dequeue(): T | undefined
  size(): number
}

/**
 * Bounded FIFO queue for UserMessages.
 * When the limit for messages is met, the oldest message in the
 * queue is cleared out so the newest one always fits.
 */
export class Queue<T> implements IQueue<T> {
  // backing store; index 0 is the oldest (front) item
  private storage: T[] = []

  /**
   * Set up Queue
   * @param capacity max length of queue (defaults to 5)
   */
  constructor(private capacity: number = 5) {}

  /**
   * Add item to the back of the queue.
   * If the queue is at capacity, the oldest item is evicted first
   * (per the class contract) instead of throwing.
   * @param item object of type T to add into queue
   */
  enqueue(item: T): void {
    if (this.size() === this.capacity) this.dequeue()
    this.storage.push(item)
  }

  /**
   * Remove the item at the front (oldest) of the queue
   * @returns the oldest object of type T, or undefined if empty
   */
  dequeue(): T | undefined {
    return this.storage.shift()
  }

  /**
   * Size of the queue
   * @returns current number of items in the queue
   */
  size(): number {
    return this.storage.length
  }

  /**
   * Remove the newest item (back of the queue), typically to roll
   * back the most recent message after an error
   */
  pop(): void {
    this.storage.pop()
  }

  /**
   * Get the queue as an array
   * @returns an array of T items, ordered oldest to newest
   */
  getItems(): T[] {
    return this.storage
  }

  /**
   * Get capacity of the queue
   * @returns maximum number of items the queue can hold
   */
  getCapacity(): number {
    return this.capacity
  }
}

View File

@@ -1,5 +1,6 @@
import type { ClientEvents, Awaitable, Client } from 'discord.js'
import type { ClientEvents, Awaitable, Client, User } from 'discord.js'
import { Ollama } from 'ollama'
import { Queue } from '../queues/queue.js'
// Export events through here to reduce amount of imports
export { Events } from 'discord.js'
@@ -33,7 +34,7 @@ export type UserMessage = {
export interface EventProps {
client: Client
log: LogMethod
msgHist: { role: string, content: string }[]
msgHist: Queue<UserMessage>
tokens: Tokens,
ollama: Ollama
}
@@ -63,7 +64,7 @@ export function event<T extends EventKeys>(key: T, callback: EventCallback<T>):
export function registerEvents(
client: Client,
events: Event[],
msgHist: UserMessage[],
msgHist: Queue<UserMessage>,
tokens: Tokens,
ollama: Ollama
): void {

View File

@@ -1,6 +1,7 @@
import { EmbedBuilder, Message } from 'discord.js'
import { ChatResponse, Ollama } from 'ollama'
import { UserMessage } from './events.js'
import { Queue } from '../queues/queue.js'
/**
* Method to send replies as normal text on discord like any other user
@@ -15,7 +16,7 @@ export async function embedMessage(
channel: string,
model: string
},
msgHist: UserMessage[]
msgHist: Queue<UserMessage>
) {
// bot response
let response: ChatResponse
@@ -33,7 +34,7 @@ export async function embedMessage(
// Attempt to query model for message
response = await ollama.chat({
model: tokens.model,
messages: msgHist,
messages: msgHist.getItems(),
options: {
num_thread: 8, // remove if optimization needed further
mirostat: 1,

View File

@@ -1,6 +1,7 @@
import { Message } from 'discord.js'
import { ChatResponse, Ollama } from 'ollama'
import { UserMessage } from './events.js'
import { Queue } from '../queues/queue.js'
/**
* Method to send replies as normal text on discord like any other user
@@ -15,7 +16,7 @@ export async function normalMessage(
channel: string,
model: string
},
msgHist: UserMessage[]
msgHist: Queue<UserMessage>
) {
// bot's response
let response: ChatResponse
@@ -25,7 +26,7 @@ export async function normalMessage(
// Attempt to query model for message
response = await ollama.chat({
model: tokens.model,
messages: msgHist,
messages: msgHist.getItems(),
options: {
num_thread: 8, // remove if optimization needed further
mirostat: 1,