Switch Model Command (#126)

This commit is contained in:
Kevin Dang
2024-10-12 22:03:31 -07:00
committed by GitHub
parent 5d02800c3f
commit 9f61f6bc6c
22 changed files with 334 additions and 376 deletions

View File

@@ -3,7 +3,8 @@ import { UserMessage } from './index.js'
export interface UserConfiguration {
'message-stream'?: boolean,
'message-style'?: boolean,
'modify-capacity': number
'modify-capacity': number,
'switch-model': string
}
export interface ServerConfiguration {

View File

@@ -8,14 +8,6 @@ export { Events } from 'discord.js'
export type LogMethod = (...args: unknown[]) => void
export type EventKeys = keyof ClientEvents // only wants keys of ClientEvents object
/**
* Tokens to run the bot as intended
* @param model chosen model for the ollama to utilize
*/
export type Tokens = {
model: string,
}
/**
* Parameters to run the chat query
* @param model the model to run
@@ -44,7 +36,6 @@ export interface EventProps {
client: Client
log: LogMethod
msgHist: Queue<UserMessage>
tokens: Tokens,
ollama: Ollama
}
export type EventCallback<T extends EventKeys> = (
@@ -67,14 +58,12 @@ export function event<T extends EventKeys>(key: T, callback: EventCallback<T>):
* @param client initialized bot client
* @param events all the exported events from the index.ts in the events dir
* @param msgHist The message history of the bot
* @param tokens the passed in environment tokens for the service
* @param ollama the initialized ollama instance
*/
export function registerEvents(
client: Client,
events: Event[],
msgHist: Queue<UserMessage>,
tokens: Tokens,
ollama: Ollama
): void {
for (const { key, callback } of events) {
@@ -84,7 +73,7 @@ export function registerEvents(
// Handle Errors, call callback, log errors as needed
try {
callback({ client, log, msgHist, tokens, ollama }, ...args)
callback({ client, log, msgHist, ollama }, ...args)
} catch (error) {
log('[Uncaught Error]', error)
}

View File

@@ -7,15 +7,13 @@ import { AbortableAsyncIterator } from 'ollama/src/utils.js'
/**
* Method to send replies as rich embed messages on discord (copy-pasted "normal text" wording removed)
* @param message message sent by the user
* @param tokens tokens to run query
* @param model name of model to run query
* @param msgHist message history between user and model
*/
export async function embedMessage(
message: Message,
ollama: Ollama,
tokens: {
model: string
},
model: string,
msgHist: Queue<UserMessage>,
stream: boolean
): Promise<string> {
@@ -34,7 +32,7 @@ export async function embedMessage(
// create params
const params: ChatParams = {
model: tokens.model,
model: model,
ollama: ollama,
msgHist: msgHist.getItems()
}

View File

@@ -7,15 +7,13 @@ import { AbortableAsyncIterator } from 'ollama/src/utils.js'
/**
* Method to send replies as normal text on discord like any other user
* @param message message sent by the user
* @param tokens tokens to run query
* @param model name of model to run query
* @param msgHist message history between user and model
*/
export async function normalMessage(
message: Message,
ollama: Ollama,
tokens: {
model: string
},
model: string,
msgHist: Queue<UserMessage>,
stream: boolean
): Promise<string> {
@@ -26,7 +24,7 @@ export async function normalMessage(
await message.channel.send('Generating Response . . .').then(async sentMessage => {
try {
const params: ChatParams = {
model: tokens.model,
model: model,
ollama: ollama,
msgHist: msgHist.getItems()
}