4 Commits

Author      SHA1        Message                                         Date
JT2M0L3Y    be6c64be82  Update: fix imports based on last pkg fix       2025-07-11 13:26:47 -07:00
JT2M0L3Y    427c1ecd3d  Added: defined objects directory                2025-07-11 13:22:44 -07:00
JT2M0L3Y    5eda32b185  Update: utility method logs use method name     2025-07-11 13:22:38 -07:00
Kevin Dang  b27cdfc162  Update Documentation with New Features (#185)   2025-06-22 20:43:50 -07:00
12 changed files with 83 additions and 27 deletions

View File

@@ -14,18 +14,28 @@
Ollama is an AI model management tool that allows users to install and use custom large language models locally.
The project aims to:
* [x] Create a Discord bot that will utilize Ollama and chat to chat with users!
* [x] User Preferences on Chat
* [x] Message Persistance on Channels and Threads
* [x] Threads
* [x] Channels
* [x] User and Server Preferences
* [x] Message Persistance
* [x] Containerization with Docker
* [x] Slash Commands Compatible
* [ ] Summary Command
* [ ] Model Info Command
* [ ] List Models Command
* [x] Pull Model Command
* [x] Switch Model Command
* [x] Delete Model Command
* [x] Create Thread Command
* [x] Create Private Thread Command
* [x] Message Stream Command
* [x] Change Message History Size Command
* [x] Clear Channel History Command (User Only)
* [x] Administrator Role Compatible
* [x] Generated Token Length Handling for >2000
* [x] Token Length Handling of any message size
* [x] User vs. Server Preferences
* [x] Administrator Role Compatible
* [x] Multi-User Chat Generation (Multiple users chatting at the same time) - This was built in from Ollama `v0.2.1+`
* [x] Automatic and Manual model pulling through the Discord client
* [x] Multi-User Chat Generation - This was built in from Ollama `v0.2.1+`
* [ ] Ollama Tool Support Implementation
* [ ] Enhanced Channel Context Awareness
* [ ] Improved User Replied Triggers
Further, Ollama provides the functionality to utilize custom models or provide context for the top-layer of any model available through the Ollama model library.
* [Customize a model](https://github.com/ollama/ollama#customize-a-model)

View File

@@ -1,6 +1,6 @@
import { Client, GatewayIntentBits } from 'discord.js'
import { Ollama } from 'ollama'
import { Queue } from './queues/queue.js'
import { Queue } from './components/index.js'
import { UserMessage, registerEvents } from './utils/index.js'
import Events from './events/index.js'
import Keys from './keys.js'

src/components/binder.ts (new file, 46 lines)
View File

@@ -0,0 +1,46 @@
/**
* @class Logger
* @description A class to handle logging messages
* @method log
*/
export class Logger {
private logPrefix: string = ''
private type: string = 'log'
private constructPrefix(component?: string, method?: string): string {
let prefix = this.type.toUpperCase()
if (component) {
prefix += ` [${component}`
if (method) prefix += `: ${method}`
prefix += ']'
}
return prefix
}
public bind(component?: string, method?: string): CallableFunction {
let tempPrefix = this.constructPrefix(component, method)
if (tempPrefix !== this.logPrefix) this.logPrefix = tempPrefix
switch (this.type) {
case 'warn':
return console.warn.bind(console, this.logPrefix)
case 'error':
return console.error.bind(console, this.logPrefix)
case 'log':
default:
return console.log.bind(console, this.logPrefix)
}
}
public log(type: string, message: unknown, component?: string, method?: string): void {
if (type && type !== this.type) this.type = type
let log = this.bind(component, method)
log(message)
}
}
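For context, a minimal usage sketch of the `Logger` class above; the instance name and the component/method strings are illustrative, not taken from the repository:

```ts
import { Logger } from './components/index.js'

const logger = new Logger()

// Prints "LOG [Queue: enqueue] queued one message" through console.log
logger.log('log', 'queued one message', 'Queue', 'enqueue')

// Switches the internal type to 'error' and prints
// "ERROR [Util: openConfig] failed to write config" through console.error
logger.log('error', 'failed to write config', 'Util', 'openConfig')

// bind() returns a prefixed console function that reuses the current type,
// so this call logs through console.error with the prefix "ERROR [Client: start]"
const log = logger.bind('Client', 'start')
log('missing client token')
```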

src/components/index.ts (new file, 2 lines)
View File

@@ -0,0 +1,2 @@
export * from './queue.js'
export * from './binder.js'

View File

@@ -1,6 +1,6 @@
import { TextChannel } from 'discord.js'
import { event, Events, normalMessage, UserMessage, clean } from '../utils/index.js'
import {
event, Events, normalMessage, UserMessage, clean,
getChannelInfo, getServerConfig, getUserConfig, openChannelInfo,
openConfig, UserConfig, getAttachmentData, getTextFileAttachmentData
} from '../utils/index.js'

View File

@@ -1,6 +1,6 @@
import type { ClientEvents, Awaitable, Client } from 'discord.js'
import { Ollama } from 'ollama'
import { Queue } from '../queues/queue.js'
import { Queue } from '../components/index.js'
// Export events through here to reduce amount of imports
export { Events } from 'discord.js'

View File

@@ -64,7 +64,7 @@ export async function clearChannelInfo(filename: string, channel: TextChannel, u
* @param user the user's name
* @param messages their messages
*/
export async function openChannelInfo(filename: string, channel: TextChannel | ThreadChannel, user: string, messages: UserMessage[] = []): Promise<void> {
export async function openChannelInfo(this: any, filename: string, channel: TextChannel | ThreadChannel, user: string, messages: UserMessage[] = []): Promise<void> {
const fullFileName = `data/${filename}-${user}.json`
if (fs.existsSync(fullFileName)) {
fs.readFile(fullFileName, 'utf8', (error, data) => {
@@ -95,7 +95,7 @@ export async function openChannelInfo(filename: string, channel: TextChannel | T
// only creating it, no need to add anything
fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
console.log(`[Util: openChannelInfo] Created '${fullFileName}' in working directory`)
console.log(`[Util: ${this.name}] Created '${fullFileName}' in working directory`)
}
}
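Note that the `${this.name}` prefix only resolves when `openChannelInfo` is invoked with a `this` value carrying a `name` property; called as a plain function in an ES module, `this` is `undefined` and the template would throw. A hedged sketch of one way a call site could supply it; the wrapper, bound object, and arguments are illustrative, not taken from this diff:

```ts
import { TextChannel } from 'discord.js'
import { openChannelInfo } from './utils/index.js'

// Illustrative wrapper: forwards a bound `name` so the util logs
// "[Util: openChannelInfo] Created ..." (the object shape is an assumption)
async function openChannelHistory(channel: TextChannel, username: string): Promise<void> {
    await openChannelInfo.call(
        { name: 'openChannelInfo' },  // anything with a `name` string works
        channel.id,                   // filename stem (illustrative)
        channel,
        username
    )
}
```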

View File

@@ -10,7 +10,7 @@ import path from 'path'
* @param value new value to assign
*/
// add type of change (server, user)
export function openConfig(filename: string, key: string, value: any) {
export function openConfig(this: any, filename: string, key: string, value: any) {
const fullFileName = `data/${filename}`
// check if the file exists, if not then make the config file
@@ -41,7 +41,7 @@ export function openConfig(filename: string, key: string, value: any) {
fs.mkdirSync(directory, { recursive: true })
fs.writeFileSync(`data/${filename}`, JSON.stringify(object, null, 2))
console.log(`[Util: openConfig] Created '${filename}' in working directory`)
console.log(`[Util: ${this.name}] Created '${filename}' in working directory`)
}
}
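The same caveat applies to `openConfig`. One alternative, shown purely as an illustration rather than what the PR does, is to bind the name once so existing call sites stay unchanged:

```ts
import { openConfig } from './utils/index.js'

// Rebinding once keeps the "[Util: openConfig]" prefix without touching call sites
const openConfigNamed = openConfig.bind({ name: 'openConfig' })

// Filename, key, and value are illustrative
openConfigNamed('server-config.json', 'default-model', 'llama3')
```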

View File

@@ -1,6 +1,5 @@
import { ChatResponse } from "ollama"
import { ChatResponse, AbortableAsyncIterator } from "ollama"
import { ChatParams } from "../index.js"
import { AbortableAsyncIterator } from "ollama/src/utils.js"
/**
* Method to query the Ollama client for async generation

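Since `AbortableAsyncIterator` is now re-exported from the package root instead of the internal `ollama/src/utils.js` path, helpers can be typed against the public API. A sketch assuming the standard `ollama.chat` streaming signature; the wrapper name and parameters are illustrative:

```ts
import { Ollama, ChatResponse, Message, AbortableAsyncIterator } from 'ollama'

// Illustrative helper: requests a streamed chat and hands back the abortable
// iterator so the caller can cancel generation mid-stream via abort()
async function streamChat(
    ollama: Ollama,
    model: string,
    messages: Message[]
): Promise<AbortableAsyncIterator<ChatResponse>> {
    return ollama.chat({ model, messages, stream: true })
}
```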
View File

@@ -1,8 +1,7 @@
import { Message, SendableChannels } from 'discord.js'
import { ChatResponse, Ollama } from 'ollama'
import { ChatResponse, Ollama, AbortableAsyncIterator } from 'ollama'
import { ChatParams, UserMessage, streamResponse, blockResponse } from './index.js'
import { Queue } from '../queues/queue.js'
import { AbortableAsyncIterator } from 'ollama/src/utils.js'
import { Queue } from '../components/index.js'
/**
* Method to send replies as normal text on discord like any other user
@@ -11,6 +10,7 @@ import { AbortableAsyncIterator } from 'ollama/src/utils.js'
* @param msgHist message history between user and model
*/
export async function normalMessage(
this: any,
message: Message,
ollama: Ollama,
model: string,
@@ -73,12 +73,11 @@ export async function normalMessage(
sentMessage.edit(result)
}
} catch (error: any) {
console.log(`[Util: messageNormal] Error creating message: ${error.message}`)
if (error.message.includes('fetch failed'))
error.message = 'Missing ollama service on machine'
else if (error.message.includes('try pulling it first'))
error.message = `You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`
sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
console.log(`[Util: ${this.name}] Error creating message: ${error.message}`)
if (error.message.includes('try pulling it first'))
sentMessage.edit(`**Response generation failed.**\n\nReason: You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`)
else
sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
}
})

View File

@@ -1,5 +1,5 @@
import { describe, expect, it } from 'vitest'
import { Queue } from '../src/queues/queue.js'
import { Queue } from '../src/components/index.js'
/**
* Queue test suite, tests the Queue class
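A minimal vitest sketch against the relocated import path; it assumes nothing about the `Queue` API beyond the class being exported from the new barrel:

```ts
import { describe, expect, it } from 'vitest'
import { Queue } from '../src/components/index.js'

// Smoke test: the class should still resolve after the move to src/components/
describe('components barrel', () => {
    it('re-exports the Queue class', () => {
        expect(Queue).toBeTypeOf('function')
    })
})
```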