3 Commits

Author      SHA1        Message                                        Date
JT2M0L3Y    cc7a3661b7  Update: fix imports based on last pkg fix      2025-02-23 21:24:35 -07:00
JT2M0L3Y    c026de5d62  Added: defined objects directory               2025-02-23 21:03:44 -07:00
JT2M0L3Y    f99bd2528d  Update: utility method logs use method name    2025-02-23 21:03:44 -07:00
21 changed files with 837 additions and 386 deletions


@@ -33,7 +33,6 @@ jobs:
echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
@@ -62,7 +61,6 @@ jobs:
echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env


@@ -30,13 +30,12 @@ jobs:
echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
- name: Collect Code Coverage
run: |
LINE_PCT=$(npm run coverage | tail -2 | head -1 | awk '{print $3}')
LINE_PCT=$(npm run test:coverage | tail -2 | head -1 | awk '{print $3}')
echo "COVERAGE=$LINE_PCT" >> $GITHUB_ENV
- name: Upload Code Coverage


@@ -21,7 +21,6 @@ jobs:
echo CLIENT_TOKEN = ${{ secrets.CLIENT }} >> .env
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo DISCORD_IP = ${{ secrets.DISCORD_IP }} >> .env
echo SUBNET_ADDRESS = ${{ secrets.SUBNET_ADDRESS }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env


@@ -41,10 +41,9 @@ jobs:
echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
- name: Test Application
run: |
npm run tests
npm run test:run


@@ -7,12 +7,11 @@ services:
build: ./ # find docker file in designated path
container_name: discord
restart: always # always restart the container
image: kevinthedang/discord-ollama:0.8.5
image: kevinthedang/discord-ollama:0.8.3
environment:
CLIENT_TOKEN: ${CLIENT_TOKEN}
OLLAMA_IP: ${OLLAMA_IP}
OLLAMA_PORT: ${OLLAMA_PORT}
MODEL: ${MODEL}
REDIS_IP: ${REDIS_IP}
REDIS_PORT: ${REDIS_PORT}
networks:

package-lock.json (generated, 1054 changed lines)

File diff suppressed because it is too large.


@@ -1,12 +1,12 @@
{
"name": "discord-ollama",
"version": "0.8.5",
"version": "0.8.3",
"description": "Ollama Integration into discord",
"main": "build/index.js",
"exports": "./build/index.js",
"scripts": {
"tests": "vitest run",
"coverage": "vitest run --coverage",
"test:run": "vitest run",
"test:coverage": "vitest run --coverage",
"watch": "tsx watch src",
"build": "tsc",
"prod": "node .",
@@ -28,16 +28,16 @@
"license": "ISC",
"dependencies": {
"discord.js": "^14.18.0",
"dotenv": "^16.5.0",
"ollama": "^0.5.15",
"dotenv": "^16.4.7",
"ollama": "^0.5.13",
"redis": "^4.7.0"
},
"devDependencies": {
"@types/node": "^22.13.14",
"@vitest/coverage-v8": "^3.0.9",
"@types/node": "^22.13.5",
"@vitest/coverage-v8": "^3.0.6",
"ts-node": "^10.9.2",
"tsx": "^4.19.3",
"typescript": "^5.8.2",
"typescript": "^5.7.3",
"vitest": "^3.0.4"
},
"type": "module",


@@ -1,7 +1,7 @@
import { Client, GatewayIntentBits } from 'discord.js'
import { Ollama } from 'ollama'
import { createClient } from 'redis'
import { Queue } from './queues/queue.js'
import { Queue } from './components/index.js'
import { UserMessage, registerEvents } from './utils/index.js'
import Events from './events/index.js'
import Keys from './keys.js'
@@ -34,14 +34,10 @@ registerEvents(client, Events, messageHistory, ollama, Keys.defaultModel)
// Try to connect to redis
await redis.connect()
.then(response => {
console.log('[Redis] Successfully Connected')
})
.catch(error => {
console.error('[Redis] Connection Error. See error below:\n', error)
console.warn('[Redis] Failed to connect to Redis Database, using local system')
// TODO: create boolean flag that will probably be used in messageCreate.ts if redis database is down
// When implementing this boolean flag, move connection to database BEFORE the registerEvents method
.then(() => console.log('[Redis] Connected'))
.catch((error) => {
console.error('[Redis] Connection Error', error)
process.exit(1)
})
// Try to log in the client


@@ -37,20 +37,8 @@ export const DeleteModel: SlashCommand = {
}
// check if model exists
const modelExists = await ollama.list()
const modelExists: boolean = await ollama.list()
.then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
.catch(error => {
console.error(`[Command: delete-model] Failed to connect with Ollama service. Error: ${error.message}`)
})
// Validate for any issue or if service is running
if (!modelExists) {
interaction.editReply({
content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
})
return
}
try {
// call ollama to delete model


@@ -36,21 +36,9 @@ export const PullModel: SlashCommand = {
return
}
// check if model was already pulled, if the ollama service isn't running throw error
const modelExists = await ollama.list()
// check if model was already pulled
const modelExists: boolean = await ollama.list()
.then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
.catch(error => {
console.error(`[Command: pull-model] Failed to connect with Ollama service. Error: ${error.message}`)
})
// Validate for any issue or if service is running
if (!modelExists) {
interaction.editReply({
content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
})
return
}
try {
// call ollama to pull desired model


@@ -45,9 +45,6 @@ export const SwitchModel: SlashCommand = {
}
}
})
.catch(error => {
console.error(`[Command: switch-model] Failed to connect with Ollama service. Error: ${error.message}`)
})
// todo: problem can be here if async messes up
if (switchSuccess) {
// set model now that it exists
@@ -59,13 +56,10 @@ export const SwitchModel: SlashCommand = {
interaction.editReply({
content: `Could not find **${modelInput}** in local model library.\n\nPlease contact a server admin for access to this model.`
})
} catch (error: any) {
} catch (error) {
// could not resolve user model switch
if (error.message.includes("fetch failed") as string)
error.message = "The Ollama service is not running. Please turn on/download the [service](https://ollama.com/)."
interaction.editReply({
content: `Unable to switch user preferred model to **${modelInput}**.\n\n${error.message}`
content: `Unable to switch user preferred model to **${modelInput}**.\n\n${error}\n\nA possible solution is to ask a server admin to run \`/pull-model ${modelInput}\` and try again.`
})
return
}

src/components/binder.ts (new file, 46 lines)

@@ -0,0 +1,46 @@
/**
* @class Logger
* @description A class to handle logging messages
* @method log
*/
export class Logger {
    private logPrefix: string = ''
    private type: string = 'log'

    private constructPrefix(component?: string, method?: string): string {
        let prefix = this.type.toUpperCase()
        if (component) {
            prefix += ` [${component}`
            if (method) prefix += `: ${method}`
            prefix += ']'
        }
        return prefix
    }

    public bind(component?: string, method?: string): CallableFunction {
        let tempPrefix = this.constructPrefix(component, method)
        if (tempPrefix !== this.logPrefix) this.logPrefix = tempPrefix
        switch (this.type) {
            case 'warn':
                return console.warn.bind(console, this.logPrefix)
            case 'error':
                return console.error.bind(console, this.logPrefix)
            case 'log':
            default:
                return console.log.bind(console, this.logPrefix)
        }
    }

    public log(type: string, message: unknown, component?: string, method?: string): void {
        if (type && type !== this.type) this.type = type
        let log = this.bind(component, method)
        log(message)
    }
}
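
A minimal usage sketch of this Logger (not part of the diff); the component and method names below are hypothetical:

const logger = new Logger()
logger.log('log', 'client ready', 'Client', 'login')             // prints: LOG [Client: login] client ready
logger.log('warn', 'config file missing', 'Util', 'openConfig')  // prints: WARN [Util: openConfig] config file missing

Any type other than 'warn' or 'error' falls through to the default case and logs via console.log.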

src/components/index.ts (new file, 2 lines)

@@ -0,0 +1,2 @@
export * from './queue.js'
export * from './binder.js'


@@ -1,6 +1,6 @@
import { TextChannel } from 'discord.js'
import { event, Events, normalMessage, UserMessage, clean } from '../utils/index.js'
import {
event, Events, normalMessage, UserMessage, clean,
getChannelInfo, getServerConfig, getUserConfig, openChannelInfo,
openConfig, UserConfig, getAttachmentData, getTextFileAttachmentData
} from '../utils/index.js'
@@ -71,8 +71,9 @@ export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client
userConfig = await new Promise((resolve, reject) => {
getUserConfig(`${message.author.username}-config.json`, (config) => {
if (config === undefined) {
openConfig(`${message.author.username}-config.json`, 'message-style', false)
openConfig(`${message.author.username}-config.json`, 'switch-model', defaultModel)
reject(new Error(`No user preferences are set up.\n\nCreating a new preferences file for ${message.author.username}\nPlease try chatting again.`))
reject(new Error('No user preferences are set up.\n\nCreating a preferences file with \`message-style\` set to \`false\` for regular message style.\nPlease try chatting again.'))
return
}


@@ -1,6 +1,6 @@
import type { ClientEvents, Awaitable, Client } from 'discord.js'
import { Ollama } from 'ollama'
import { Queue } from '../queues/queue.js'
import { Queue } from '../components/index.js'
// Export events through here to reduce amount of imports
export { Events } from 'discord.js'


@@ -64,7 +64,7 @@ export async function clearChannelInfo(filename: string, channel: TextChannel, u
* @param user the user's name
* @param messages their messages
*/
export async function openChannelInfo(filename: string, channel: TextChannel | ThreadChannel, user: string, messages: UserMessage[] = []): Promise<void> {
export async function openChannelInfo(this: any, filename: string, channel: TextChannel | ThreadChannel, user: string, messages: UserMessage[] = []): Promise<void> {
const fullFileName = `data/${filename}-${user}.json`
if (fs.existsSync(fullFileName)) {
fs.readFile(fullFileName, 'utf8', (error, data) => {
@@ -95,7 +95,7 @@ export async function openChannelInfo(filename: string, channel: TextChannel | T
// only creating it, no need to add anything
fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
console.log(`[Util: openChannelInfo] Created '${fullFileName}' in working directory`)
console.log(`[Util: ${this.name}] Created '${fullFileName}' in working directory`)
}
}
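
The this.name prefix introduced above (the same pattern appears in the openConfig diff below) only resolves when the utility is invoked with an explicit receiver; a hedged sketch with a hypothetical helper, not code from the repository:

// Hypothetical helper illustrating how this.name resolves via Function.prototype.call.
function logWithName(this: any, filename: string): void {
    console.log(`[Util: ${this.name}] Created '${filename}' in working directory`)
}

logWithName.call(logWithName, 'guild-config.json')            // [Util: logWithName] Created 'guild-config.json' ...
logWithName.call({ name: 'openConfig' }, 'user-config.json')  // [Util: openConfig] Created 'user-config.json' ...

If such a function is called without a receiver, this is undefined under ES modules and this.name throws, so the call sites presumably bind or .call these utilities.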


@@ -10,7 +10,7 @@ import path from 'path'
* @param value new value to assign
*/
// add type of change (server, user)
export function openConfig(filename: string, key: string, value: any) {
export function openConfig(this: any, filename: string, key: string, value: any) {
const fullFileName = `data/${filename}`
// check if the file exists, if not then make the config file
@@ -41,7 +41,7 @@ export function openConfig(filename: string, key: string, value: any) {
fs.mkdirSync(directory, { recursive: true })
fs.writeFileSync(`data/${filename}`, JSON.stringify(object, null, 2))
console.log(`[Util: openConfig] Created '${filename}' in working directory`)
console.log(`[Util: ${this.name}] Created '${filename}' in working directory`)
}
}


@@ -1,6 +1,5 @@
import { ChatResponse } from "ollama"
import { ChatResponse, AbortableAsyncIterator } from "ollama"
import { ChatParams } from "../index.js"
import { AbortableAsyncIterator } from "ollama/src/utils.js"
/**
* Method to query the Ollama client for async generation


@@ -1,8 +1,7 @@
import { Message, SendableChannels } from 'discord.js'
import { ChatResponse, Ollama } from 'ollama'
import { ChatResponse, Ollama, AbortableAsyncIterator } from 'ollama'
import { ChatParams, UserMessage, streamResponse, blockResponse } from './index.js'
import { Queue } from '../queues/queue.js'
import { AbortableAsyncIterator } from 'ollama/src/utils.js'
import { Queue } from '../components/index.js'
/**
* Method to send replies as normal text on discord like any other user
@@ -11,6 +10,7 @@ import { AbortableAsyncIterator } from 'ollama/src/utils.js'
* @param msgHist message history between user and model
*/
export async function normalMessage(
this: any,
message: Message,
ollama: Ollama,
model: string,
@@ -73,12 +73,11 @@ export async function normalMessage(
sentMessage.edit(result)
}
} catch (error: any) {
console.log(`[Util: messageNormal] Error creating message: ${error.message}`)
if (error.message.includes('fetch failed'))
error.message = 'Missing ollama service on machine'
else if (error.message.includes('try pulling it first'))
error.message = `You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`
sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
console.log(`[Util: ${this.name}] Error creating message: ${error.message}`)
if (error.message.includes('try pulling it first'))
sentMessage.edit(`**Response generation failed.**\n\nReason: You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`)
else
sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
}
})


@@ -1,5 +1,5 @@
import { describe, expect, it } from 'vitest'
import { Queue } from '../src/queues/queue.js'
import { Queue } from '../src/components/index.js'
/**
* Queue test suite, tests the Queue class