5 Commits

Author       SHA1         Message                                                 Date
Kevin Dang   0056d9173c   Update: version increment                               2025-06-12 21:25:48 -07:00
Kevin Dang   0b3f9998b7   Fix: Better Messages for Ollama service being offline   2025-06-12 21:21:18 -07:00
Kevin Dang   8d3a8f9098   Update: error message and config creation               2025-06-07 12:56:00 -07:00
Kevin Dang   a2c954136d   Fix: redis workaround for local non docker              2025-05-08 22:02:26 -07:00
Kevin Dang   5820583609   Update: simplify npm command to run tests               2025-05-08 21:16:32 -07:00
7 changed files with 31 additions and 41 deletions

View File

@@ -7,7 +7,7 @@ services:
     build: ./ # find docker file in designated path
     container_name: discord
     restart: always # rebuild container always
-    image: kevinthedang/discord-ollama:0.8.5
+    image: kevinthedang/discord-ollama:0.8.5s
     environment:
       CLIENT_TOKEN: ${CLIENT_TOKEN}
       OLLAMA_IP: ${OLLAMA_IP}

package-lock.json (generated, 16 lines changed)
View File

@@ -10,8 +10,8 @@
       "license": "ISC",
       "dependencies": {
         "discord.js": "^14.18.0",
-        "dotenv": "^16.5.0",
-        "ollama": "^0.5.15",
+        "dotenv": "^16.4.7",
+        "ollama": "^0.5.14",
         "redis": "^4.7.0"
       },
       "devDependencies": {
@@ -1595,9 +1595,9 @@
       }
     },
     "node_modules/dotenv": {
-      "version": "16.5.0",
-      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.5.0.tgz",
-      "integrity": "sha512-m/C+AwOAr9/W1UOIZUo232ejMNnJAJtYQjUbHoNTBNTJSvqzzDh7vnrei3o3r3m9blf6ZoDkvcw0VmozNRFJxg==",
+      "version": "16.4.7",
+      "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
+      "integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
       "license": "BSD-2-Clause",
       "engines": {
         "node": ">=12"
@@ -2003,9 +2003,9 @@
       }
     },
     "node_modules/ollama": {
-      "version": "0.5.15",
-      "resolved": "https://registry.npmjs.org/ollama/-/ollama-0.5.15.tgz",
-      "integrity": "sha512-TSaZSJyP7MQJFjSmmNsoJiriwa3U+/UJRw6+M8aucs5dTsaWNZsBIGpDb5rXnW6nXxJBB/z79gZY8IaiIQgelQ==",
+      "version": "0.5.14",
+      "resolved": "https://registry.npmjs.org/ollama/-/ollama-0.5.14.tgz",
+      "integrity": "sha512-pvOuEYa2WkkAumxzJP0RdEYHkbZ64AYyyUszXVX7ruLvk5L+EiO2G71da2GqEQ4IAk4j6eLoUbGk5arzFT1wJA==",
       "license": "MIT",
       "dependencies": {
         "whatwg-fetch": "^3.6.20"

View File

@@ -28,8 +28,8 @@
   "license": "ISC",
   "dependencies": {
     "discord.js": "^14.18.0",
-    "dotenv": "^16.5.0",
-    "ollama": "^0.5.15",
+    "dotenv": "^16.4.7",
+    "ollama": "^0.5.14",
     "redis": "^4.7.0"
   },
   "devDependencies": {

View File

@@ -33,16 +33,15 @@ const messageHistory: Queue<UserMessage> = new Queue<UserMessage>
 registerEvents(client, Events, messageHistory, ollama, Keys.defaultModel)
 
 // Try to connect to redis
-await redis.connect()
-    .then(response => {
-        console.log('[Redis] Successfully Connected')
-    })
-    .catch(error => {
-        console.error('[Redis] Connection Error. See error below:\n', error)
-        console.warn('[Redis] Failed to connect to Redis Database, using local system')
-        // TODO: create boolean flag that will probably be used in messageCreate.ts if redis database is down
-        // When implementing this boolean flag, move connection to database BEFORE the registerEvents method
-    })
+try {
+    await redis.connect()
+    console.log('[Redis] Successfully Connected')
+} catch(error) {
+    console.error('[Redis] Connection Error. See error below:\n', error)
+    console.warn('[Redis] Failed to connect to Redis Database, using local system')
+    // TODO: create boolean flag that will probably be used in messageCreate.ts if redis database is down
+    // When implementing this boolean flag, move connection to database BEFORE the registerEvents method
+}
 
 // Try to log in the client
 await client.login(Keys.clientToken)
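
The TODO retained in the new catch block points at a follow-up: record whether Redis is reachable in a boolean flag that messageCreate.ts can consult, and connect to the database before registerEvents so the flag is settled by the time events fire. A minimal sketch of that idea, assuming a small shared module (the file name redisStatus.ts and the setRedisAvailable/isRedisAvailable helpers are hypothetical, not part of this changeset):

    // hypothetical src/utils/redisStatus.ts (not in this changeset)
    let redisAvailable = false

    // record the outcome of the connection attempt above
    export const setRedisAvailable = (up: boolean): void => {
        redisAvailable = up
    }

    // consulted wherever storage is chosen, e.g. in messageCreate.ts
    export const isRedisAvailable = (): boolean => redisAvailable

With that in place, the connection code above would call setRedisAvailable(true) after a successful redis.connect() and setRedisAvailable(false) in the catch branch, and messageCreate.ts would branch on isRedisAvailable() to pick between the Redis store and the local system.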

View File

@@ -22,6 +22,7 @@ export const DeleteModel: SlashCommand = {
         // defer reply to avoid timeout
         await interaction.deferReply()
         const modelInput: string = interaction.options.get('model-name')!!.value as string
+        let modelExists: boolean
 
         // fetch channel and message
         const channel = await client.channels.fetch(interaction.channelId)
@@ -37,21 +38,17 @@
         }
 
         // check if model exists
-        const modelExists = await ollama.list()
-            .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
-            .catch(error => {
-                console.error(`[Command: delete-model] Failed to connect with Ollama service. Error: ${error.message}`)
-            })
-
-        // Validate for any issue or if service is running
-        if (!modelExists) {
+        try {
+            modelExists = await ollama.list()
+                .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
+        } catch (error) {
             interaction.editReply({
                 content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
             })
             return
         }
 
         try {
             // call ollama to delete model
             if (modelExists) {

View File

@@ -22,6 +22,7 @@ export const PullModel: SlashCommand = {
         // defer reply to avoid timeout
         await interaction.deferReply()
         const modelInput: string = interaction.options.get('model-to-pull')!!.value as string
+        let modelExists: boolean
 
         // fetch channel and message
         const channel = await client.channels.fetch(interaction.channelId)
@@ -37,14 +38,10 @@
         }
 
         // check if model was already pulled, if the ollama service isn't running throw error
-        const modelExists = await ollama.list()
-            .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
-            .catch(error => {
-                console.error(`[Command: pull-model] Failed to connect with Ollama service. Error: ${error.message}`)
-            })
-
-        // Validate for any issue or if service is running
-        if (!modelExists) {
+        try {
+            modelExists = await ollama.list()
+                .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
+        } catch (error) {
             interaction.editReply({
                 content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
             })
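
Taken together, the delete-model and pull-model hunks replace the .then/.catch chain, whose catch only logged the error and left modelExists undefined, with a try/catch around the ollama.list() call. The old if (!modelExists) check replied "service is not running" even when the service was up but no matching model was found; the new code only gives that reply when ollama.list() actually throws. A minimal sketch of the resulting pattern, assuming the surrounding command body shown above (interaction, ollama, ModelResponse, and modelInput come from the rest of each file):

    // sketch of the availability check now shared by both commands
    let modelExists: boolean
    try {
        // ollama.list() throws when the Ollama service is offline or unreachable
        const response = await ollama.list()
        modelExists = response.models.some((model: ModelResponse) => model.name.startsWith(modelInput))
    } catch (error) {
        await interaction.editReply({
            content: 'The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).'
        })
        return
    }

    // only reached when the service responded, so modelExists is definitely assigned
    if (modelExists) {
        // ... continue with the delete or pull as in the rest of each command
    }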

View File

@@ -45,9 +45,6 @@ export const SwitchModel: SlashCommand = {
                     }
                 }
             })
-            .catch(error => {
-                console.error(`[Command: switch-model] Failed to connect with Ollama service. Error: ${error.message}`)
-            })
         // todo: problem can be here if async messes up
         if (switchSuccess) {
             // set model now that it exists