3 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Kevin Dang | 32137dacb0 | Add: Filter out think tags in bot message (#190) | 2025-10-04 20:17:26 -07:00 |
| Kevin Dang | c00ea5de98 | Additional Channel Awareness (#186) | 2025-07-31 19:04:51 -07:00 |
| Kevin Dang | b27cdfc162 | Update Documentation with New Features (#185) | 2025-06-22 20:43:50 -07:00 |
9 changed files with 127 additions and 25 deletions


@@ -14,18 +14,28 @@
Ollama is an AI model management tool that allows users to install and use custom large language models locally.
The project aims to:
* [x] Create a Discord bot that will utilize Ollama and chat to chat with users!
-* [x] User Preferences on Chat
-* [x] Message Persistance on Channels and Threads
+* [x] User and Server Preferences
+* [x] Message Persistance
+  * [x] Threads
+  * [x] Channels
* [x] Containerization with Docker
* [x] Slash Commands Compatible
+  * [ ] Summary Command
+  * [ ] Model Info Command
+  * [ ] List Models Command
+  * [x] Pull Model Command
+  * [x] Switch Model Command
+  * [x] Delete Model Command
+  * [x] Create Thread Command
+  * [x] Create Private Thread Command
+  * [x] Message Stream Command
+  * [x] Change Message History Size Command
+  * [x] Clear Channel History Command (User Only)
+* [x] Administrator Role Compatible
* [x] Generated Token Length Handling for >2000
* [x] Token Length Handling of any message size
-* [x] User vs. Server Preferences
-* [x] Administrator Role Compatible
-* [x] Multi-User Chat Generation (Multiple users chatting at the same time) - This was built in from Ollama `v0.2.1+`
-* [x] Automatic and Manual model pulling through the Discord client
+* [x] Multi-User Chat Generation - This was built in from Ollama `v0.2.1+`
+* [ ] Ollama Tool Support Implementation
+* [ ] Enhanced Channel Context Awareness
+* [ ] Improved User Replied Triggers
Further, Ollama provides the functionality to utilize custom models or provide context for the top-layer of any model available through the Ollama model library.
* [Customize a model](https://github.com/ollama/ollama#customize-a-model)


@@ -7,7 +7,7 @@ services:
    build: ./ # find docker file in designated path
    container_name: discord
    restart: always # rebuild container always
-   image: kevinthedang/discord-ollama:0.8.6
+   image: kevinthedang/discord-ollama:0.8.7
    environment:
      CLIENT_TOKEN: ${CLIENT_TOKEN}
      OLLAMA_IP: ${OLLAMA_IP}

package-lock.json (generated)

@@ -1,12 +1,12 @@
{
  "name": "discord-ollama",
-  "version": "0.8.5",
+  "version": "0.8.7",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "": {
      "name": "discord-ollama",
-      "version": "0.8.5",
+      "version": "0.8.7",
      "license": "ISC",
      "dependencies": {
        "discord.js": "^14.20.0",


@@ -1,6 +1,6 @@
{
  "name": "discord-ollama",
-  "version": "0.8.6",
+  "version": "0.8.7",
  "description": "Ollama Integration into discord",
  "main": "build/index.js",
  "exports": "./build/index.js",


@@ -23,8 +23,11 @@ export const ollama = new Ollama({
// Create Queue managed by Events
const messageHistory: Queue<UserMessage> = new Queue<UserMessage>
+
+// Create Channel History Queue managed by Events
+const channelMessageHistory: Queue<UserMessage> = new Queue<UserMessage>

// register all events
-registerEvents(client, Events, messageHistory, ollama, Keys.defaultModel)
+registerEvents(client, Events, messageHistory, channelMessageHistory, ollama, Keys.defaultModel)

// Try to log in the client
await client.login(Keys.clientToken)
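The channel history queue introduced here is consumed later in the message handler through setQueue, enqueue, dequeue, size, capacity, and getItems. Purely for orientation, a minimal sketch of a queue exposing that surface (the project's actual Queue<UserMessage> implementation may differ):

```ts
// Sketch only: a bounded FIFO exposing the methods this changeset relies on.
// Names mirror the calls made in the messageCreate handler; the default
// capacity is an assumption.
class QueueSketch<T> {
    private items: T[] = []
    constructor(public capacity: number = 10) {}

    setQueue(items: T[]): void { this.items = [...items] }  // replace contents
    enqueue(item: T): void { this.items.push(item) }        // add newest
    dequeue(): T | undefined { return this.items.shift() }  // drop oldest
    size(): number { return this.items.length }
    getItems(): T[] { return [...this.items] }
}
```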


@@ -1,5 +1,5 @@
import { TextChannel } from 'discord.js'
-import { event, Events, normalMessage, UserMessage, clean } from '../utils/index.js'
+import { event, Events, normalMessage, UserMessage, clean, addToChannelContext } from '../utils/index.js'
import {
    getChannelInfo, getServerConfig, getUserConfig, openChannelInfo,
    openConfig, UserConfig, getAttachmentData, getTextFileAttachmentData
@@ -11,7 +11,7 @@ import {
 *
 * @param message the message received from the channel
 */
-export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client, defaultModel }, message) => {
+export default event(Events.MessageCreate, async ({ log, msgHist, channelHistory, ollama, client, defaultModel }, message) => {
    const clientId = client.user!!.id
    let cleanedMessage = clean(message.content, clientId)
    log(`Message \"${cleanedMessage}\" from ${message.author.tag} in channel/thread ${message.channelId}.`)
@@ -19,6 +19,61 @@ export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client
    // Do not respond if bot talks in the chat
    if (message.author.username === message.client.user.username) return
+
+    // Save User Chat even if not for the bot
+    let channelContextHistory: UserMessage[] = await new Promise((resolve) => {
+        getChannelInfo(`${message.channelId}-context.json`, (channelInfo) => {
+            if (channelInfo?.messages)
+                resolve(channelInfo.messages)
+            else {
+                log(`Channel/Thread ${message.channel}-context does not exist. File will be created shortly...`)
+                resolve([])
+            }
+        })
+    })
+
+    if (channelContextHistory.length === 0) {
+        channelContextHistory = await new Promise((resolve) => {
+            addToChannelContext(message.channelId,
+                message.channel as TextChannel
+            )
+            getChannelInfo(`${message.channelId}-context.json`, (channelInfo) => {
+                if (channelInfo?.messages)
+                    resolve(channelInfo.messages)
+                else {
+                    log(`Channel/Thread ${message.channel}-context does not exist. File will be created shortly...`)
+                }
+            })
+        })
+    }
+
+    // Set Channel History Queue
+    channelHistory.setQueue(channelContextHistory)
+
+    // get message attachment if exists
+    const attachment = message.attachments.first()
+    let messageAttachment: string[] = []
+    if (attachment && attachment.name?.endsWith(".txt"))
+        cleanedMessage += ' ' + await getTextFileAttachmentData(attachment)
+    else if (attachment)
+        messageAttachment = await getAttachmentData(attachment)
+
+    while (channelHistory.size() >= channelHistory.capacity) channelHistory.dequeue()
+
+    // push user response to channel history
+    console.log
+    channelHistory.enqueue({
+        role: 'user',
+        content: cleanedMessage,
+        images: messageAttachment || []
+    })
+
+    // Store in Channel Context
+    addToChannelContext(message.channelId,
+        message.channel as TextChannel,
+        channelHistory.getItems()
+    )
+
    // Only respond if message mentions the bot
    if (!message.mentions.has(clientId)) return
@@ -139,15 +194,6 @@ export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client
    if (!userConfig)
        throw new Error(`Failed to initialize User Preference for **${message.author.username}**.\n\nIt's likely you do not have a model set. Please use the \`switch-model\` command to do that.`)
-
-    // get message attachment if exists
-    const attachment = message.attachments.first()
-    let messageAttachment: string[] = []
-    if (attachment && attachment.name?.endsWith(".txt"))
-        cleanedMessage += await getTextFileAttachmentData(attachment)
-    else if (attachment)
-        messageAttachment = await getAttachmentData(attachment)

    const model: string = userConfig.options['switch-model']

    // set up new queue
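The lookup above wraps the callback-based getChannelInfo in a Promise in two places. For orientation only, a standalone sketch of that callback-to-Promise pattern; the types and the getChannelInfo signature are assumptions inferred from how the handler calls it, not the project's actual declarations:

```ts
// Sketch only: resolve the messages stored in data/<channelId>-context.json,
// or an empty list when the file does not exist yet.
type UserMessage = { role: string; content: string; images?: string[] }
type ChannelInfo = { id: string; name: string; messages?: UserMessage[] }

// Assumed signature, inferred from the calls in the handler above.
declare function getChannelInfo(
    filename: string,
    callback: (channelInfo?: ChannelInfo) => void
): void

// Hypothetical helper, not part of the repository.
function readChannelContext(channelId: string): Promise<UserMessage[]> {
    return new Promise((resolve) => {
        getChannelInfo(`${channelId}-context.json`, (channelInfo) => {
            resolve(channelInfo?.messages ?? [])
        })
    })
}
```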


@@ -37,6 +37,7 @@ export interface EventProps {
    client: Client,
    log: LogMethod,
    msgHist: Queue<UserMessage>,
+   channelHistory: Queue<UserMessage>,
    ollama: Ollama,
    defaultModel: String
}
@@ -78,6 +79,7 @@ export function registerEvents(
    client: Client,
    events: Event[],
    msgHist: Queue<UserMessage>,
+   channelHistory: Queue<UserMessage>,
    ollama: Ollama,
    defaultModel: String
): void {
@@ -88,7 +90,7 @@ export function registerEvents(
        // Handle Errors, call callback, log errors as needed
        try {
-           callback({ client, log, msgHist, ollama, defaultModel }, ...args)
+           callback({ client, log, msgHist, channelHistory, ollama, defaultModel }, ...args)
        } catch (error) {
            log('[Uncaught Error]', error)
        }
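To show where the new EventProps field surfaces, a minimal illustrative handler module that destructures channelHistory from its props; the log line is invented for the example, and the import path mirrors the one used in the message handler above:

```ts
import { event, Events } from '../utils/index.js'

// Illustrative only: any handler registered via registerEvents can now read the
// shared channel history queue from its props, alongside msgHist and ollama.
export default event(Events.MessageCreate, async ({ log, channelHistory }, message) => {
    log(`Channel history holds ${channelHistory.size()} messages before handling ${message.id}.`)
})
```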


@@ -56,6 +56,39 @@ export async function clearChannelInfo(filename: string, channel: TextChannel, u
    return cleanedHistory
}

+export async function addToChannelContext(filename: string, channel : TextChannel | ThreadChannel, messages: UserMessage[] = []): Promise<void> {
+    const fullFileName = `data/${filename}-context.json`
+
+    if (fs.existsSync(fullFileName)) {
+        fs.readFile(fullFileName, 'utf8', (error, data) => {
+            if (error)
+                console.log(`[Error: addToChannelContext] Incorrect file format`)
+            else {
+                const object = JSON.parse(data)
+                if (object['messages'].length === 0)
+                    object['messages'] = messages as []
+                else if (object['messages'].length !== 0 && messages.length !== 0)
+                    object['messages'] = messages as []
+                fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
+            }
+        })
+    } else { // channel context does not exist, create it
+        const object: Configuration = JSON.parse(
+            `{
+                \"id\": \"${channel?.id}\",
+                \"name\": \"${channel?.name}\",
+                \"messages\": []
+            }`
+        )
+        const directory = path.dirname(fullFileName)
+        if (!fs.existsSync(directory))
+            fs.mkdirSync(directory, { recursive: true })
+
+        fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
+        console.log(`[Util: addToChannelContext] Created '${fullFileName}' in working directory`)
+    }
+}
+
/**
 * Method to open the channel history
 *
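For readability, the JSON.parse-on-a-template-string branch above builds a small document before writing it to disk; an equivalent standalone sketch follows (the project's actual Configuration type may carry more fields than shown):

```ts
// Sketch only: the document written to data/<filename>-context.json when it
// does not exist yet, expressed as an object literal. Field names mirror the
// template string in the diff.
interface ChannelContextFile {
    id: string
    name: string
    messages: unknown[]
}

function emptyChannelContext(channel: { id: string; name: string }): ChannelContextFile {
    return { id: channel.id, name: channel.name, messages: [] }
}
```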


@@ -56,6 +56,10 @@ export async function normalMessage(
            response = await blockResponse(params)
            result = response.message.content
+
+            // check if there is a <think>...</think> sequence from the bot.
+            if (hasThinking(result))
+                result = result.replace(/<think>[\s\S]*?<\/think>/g, '').trim()

            // check if message length > discord max for normal messages
            if (result.length > 2000) {
                sentMessage.edit(result.slice(0, 2000))
@@ -85,3 +89,7 @@ export async function normalMessage(
    // return the string representation of ollama query response
    return result
}
+
+function hasThinking(message: string): boolean {
+    return /<think>[\s\S]*?<\/think>/i.test(message)
+}