1 Commit

Author:  snyk-bot
SHA1:    69dd68bb5e
Date:    2025-05-29 09:09:32 +00:00
Message: fix: upgrade discord.js from 14.18.0 to 14.19.3

Snyk has created this PR to upgrade discord.js from 14.18.0 to 14.19.3.

See this package in npm:
discord.js

See this project in Snyk:
https://app.snyk.io/org/jt2m0l3y/project/d8b070a3-e4a3-457a-977b-7eb6a4a48346?utm_source=github&utm_medium=referral&page=upgrade-pr

11 changed files with 23 additions and 57 deletions

@@ -36,7 +36,7 @@ jobs:
       - name: Collect Code Coverage
         run: |
-          LINE_PCT=$(npm run coverage | tail -2 | head -1 | awk '{print $3}')
+          LINE_PCT=$(npm run test:coverage | tail -2 | head -1 | awk '{print $3}')
           echo "COVERAGE=$LINE_PCT" >> $GITHUB_ENV
       - name: Upload Code Coverage

@@ -47,4 +47,4 @@ jobs:
       - name: Test Application
         run: |
-          npm run tests
+          npm run test:run

@@ -7,7 +7,7 @@ services:
     build: ./ # find docker file in designated path
     container_name: discord
     restart: always # rebuild container always
-    image: kevinthedang/discord-ollama:0.8.5
+    image: kevinthedang/discord-ollama:0.8.4
     environment:
       CLIENT_TOKEN: ${CLIENT_TOKEN}
       OLLAMA_IP: ${OLLAMA_IP}

package-lock.json (generated)

@@ -1,12 +1,12 @@
 {
   "name": "discord-ollama",
-  "version": "0.8.5",
+  "version": "0.8.4",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "discord-ollama",
-      "version": "0.8.5",
+      "version": "0.8.4",
       "license": "ISC",
       "dependencies": {
         "discord.js": "^14.19.3",

@@ -1,12 +1,12 @@
 {
   "name": "discord-ollama",
-  "version": "0.8.5",
+  "version": "0.8.4",
   "description": "Ollama Integration into discord",
   "main": "build/index.js",
   "exports": "./build/index.js",
   "scripts": {
-    "tests": "vitest run",
-    "coverage": "vitest run --coverage",
+    "test:run": "vitest run",
+    "test:coverage": "vitest run --coverage",
     "watch": "tsx watch src",
     "build": "tsc",
     "prod": "node .",

@@ -34,14 +34,10 @@ registerEvents(client, Events, messageHistory, ollama, Keys.defaultModel)
 // Try to connect to redis
 await redis.connect()
-    .then(response => {
-        console.log('[Redis] Successfully Connected')
-    })
-    .catch(error => {
-        console.error('[Redis] Connection Error. See error below:\n', error)
-        console.warn('[Redis] Failed to connect to Redis Database, using local system')
-        // TODO: create boolean flag that will probably be used in messageCreate.ts if redis database is down
-        // When implementing this boolean flag, move connection to database BEFORE the registerEvents method
+    .then(() => console.log('[Redis] Connected'))
+    .catch((error) => {
+        console.error('[Redis] Connection Error', error)
+        process.exit(1)
     })
 // Try to log in the client
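
For orientation, a minimal sketch of the fail-fast pattern the new code adopts, assuming a node-redis v4 client; the client construction and the REDIS_URL variable are illustrative, not taken from the project:

import { createClient } from 'redis'

// Illustrative client setup; the real project builds its client from its own configuration.
const redis = createClient({ url: process.env.REDIS_URL })

// Fail fast: log the connection error and exit, instead of warning and
// falling back to local storage as the old code did.
await redis.connect()
    .then(() => console.log('[Redis] Connected'))
    .catch((error) => {
        console.error('[Redis] Connection Error', error)
        process.exit(1)
    })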

@@ -37,21 +37,9 @@ export const DeleteModel: SlashCommand = {
         }
         // check if model exists
-        const modelExists = await ollama.list()
+        const modelExists: boolean = await ollama.list()
             .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
-            .catch(error => {
-                console.error(`[Command: delete-model] Failed to connect with Ollama service. Error: ${error.message}`)
-            })
-        // Validate for any issue or if service is running
-        if (!modelExists) {
-            interaction.editReply({
-                content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
-            })
-            return
-        }
         try {
             // call ollama to delete model
             if (modelExists) {
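
For context, a minimal sketch of the existence check the command now relies on, using the ollama JavaScript client as the command file appears to; the standalone helper and the host value are illustrative. With the `.catch` removed, a rejection from `ollama.list()` (for example, an unreachable service) now propagates to the caller instead of being converted into a "service is not running" reply:

import { Ollama, ModelResponse } from 'ollama'

const ollama = new Ollama({ host: 'http://127.0.0.1:11434' }) // illustrative host

// Resolve to true when any locally available model name starts with the requested input.
async function modelExists(modelInput: string): Promise<boolean> {
    const response = await ollama.list()
    return response.models.some((model: ModelResponse) => model.name.startsWith(modelInput))
}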

@@ -36,21 +36,9 @@ export const PullModel: SlashCommand = {
             return
         }
-        // check if model was already pulled, if the ollama service isn't running throw error
-        const modelExists = await ollama.list()
+        // check if model was already pulled
+        const modelExists: boolean = await ollama.list()
             .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
-            .catch(error => {
-                console.error(`[Command: pull-model] Failed to connect with Ollama service. Error: ${error.message}`)
-            })
-        // Validate for any issue or if service is running
-        if (!modelExists) {
-            interaction.editReply({
-                content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
-            })
-            return
-        }
         try {
             // call ollama to pull desired model
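
As a rough sketch of how the pull itself might look with the same client, not code from this PR; the streaming shape and the progress logging are assumptions about the ollama-js API:

import { Ollama } from 'ollama'

const ollama = new Ollama({ host: 'http://127.0.0.1:11434' }) // illustrative host

// Pull a model and log streamed progress; a rejection (unknown model,
// unreachable service) is left to the caller's try/catch.
async function pullModel(modelInput: string): Promise<void> {
    const progress = await ollama.pull({ model: modelInput, stream: true })
    for await (const part of progress) {
        console.log(`[pull-model] ${part.status}`)
    }
}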

@@ -45,9 +45,6 @@ export const SwitchModel: SlashCommand = {
                     }
                 }
             })
-            .catch(error => {
-                console.error(`[Command: switch-model] Failed to connect with Ollama service. Error: ${error.message}`)
-            })
         // todo: problem can be here if async messes up
         if (switchSuccess) {
             // set model now that it exists
@@ -59,13 +56,10 @@ export const SwitchModel: SlashCommand = {
             interaction.editReply({
                 content: `Could not find **${modelInput}** in local model library.\n\nPlease contact an server admin for access to this model.`
             })
-        } catch (error: any) {
+        } catch (error) {
             // could not resolve user model switch
-            if (error.message.includes("fetch failed") as string)
-                error.message = "The Ollama service is not running. Please turn on/download the [service](https://ollama.com/)."
             interaction.editReply({
-                content: `Unable to switch user preferred model to **${modelInput}**.\n\n${error.message}`
+                content: `Unable to switch user preferred model to **${modelInput}**.\n\n${error}\n\nPossible solution is to request an server admin run \`/pull-model ${modelInput}\` and try again.`
             })
             return
         }

@@ -71,8 +71,9 @@ export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client
         userConfig = await new Promise((resolve, reject) => {
             getUserConfig(`${message.author.username}-config.json`, (config) => {
                 if (config === undefined) {
+                    openConfig(`${message.author.username}-config.json`, 'message-style', false)
                     openConfig(`${message.author.username}-config.json`, 'switch-model', defaultModel)
-                    reject(new Error(`No User Preferences is set up.\n\nCreating new preferences file for ${message.author.username}\nPlease try chatting again.`))
+                    reject(new Error('No User Preferences is set up.\n\nCreating preferences file with \`message-style\` set as \`false\` for regular message style.\nPlease try chatting again.'))
                     return
                 }
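
A condensed sketch of the promise-wrapping pattern above; the helper signatures and the config shape are assumptions inferred from the diff, not the project's actual types:

// Assumed signatures for the project-internal helpers shown in the diff.
declare function getUserConfig(file: string, cb: (config: UserConfig | undefined) => void): void
declare function openConfig(file: string, key: string, value: unknown): void

interface UserConfig { [key: string]: unknown } // illustrative shape only

// Wrap the callback API in a Promise; when no config exists yet, seed the new
// defaults ('message-style' false, the default model) and reject so the user
// is asked to try chatting again.
function loadOrSeedUserConfig(username: string, defaultModel: string): Promise<UserConfig> {
    const file = `${username}-config.json`
    return new Promise((resolve, reject) => {
        getUserConfig(file, (config) => {
            if (config === undefined) {
                openConfig(file, 'message-style', false)
                openConfig(file, 'switch-model', defaultModel)
                reject(new Error('No User Preferences is set up.\n\nPlease try chatting again.'))
                return
            }
            resolve(config)
        })
    })
}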

@@ -74,11 +74,10 @@ export async function normalMessage(
             }
         } catch (error: any) {
             console.log(`[Util: messageNormal] Error creating message: ${error.message}`)
-            if (error.message.includes('fetch failed'))
-                error.message = 'Missing ollama service on machine'
-            else if (error.message.includes('try pulling it first'))
-                error.message = `You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`
-            sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
+            if (error.message.includes('try pulling it first'))
+                sentMessage.edit(`**Response generation failed.**\n\nReason: You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`)
+            else
+                sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
         }
     })
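
To summarize the new branching, a minimal sketch with discord.js types; the standalone helper is illustrative. Note that the old special case mapping 'fetch failed' to 'Missing ollama service on machine' is gone, so that error now reaches the user verbatim:

import { Message } from 'discord.js'

// A missing local model gets a tailored hint; any other error is shown as-is.
async function reportGenerationFailure(sentMessage: Message, model: string, error: Error): Promise<void> {
    if (error.message.includes('try pulling it first'))
        await sentMessage.edit(`**Response generation failed.**\n\nReason: You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`)
    else
        await sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
}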