From 0b3f9998b75f24a4c469b3421866feff046985a5 Mon Sep 17 00:00:00 2001
From: Kevin Dang
Date: Thu, 12 Jun 2025 21:21:18 -0700
Subject: [PATCH] Fix: Better Messages for Ollama service being offline

---
 src/commands/deleteModel.ts | 13 +++++++++++--
 src/commands/pullModel.ts   | 15 ++++++++++++---
 src/commands/switchModel.ts |  7 +++++--
 3 files changed, 28 insertions(+), 7 deletions(-)

diff --git a/src/commands/deleteModel.ts b/src/commands/deleteModel.ts
index e07193e..4812c6b 100644
--- a/src/commands/deleteModel.ts
+++ b/src/commands/deleteModel.ts
@@ -22,6 +22,7 @@ export const DeleteModel: SlashCommand = {
         // defer reply to avoid timeout
         await interaction.deferReply()
         const modelInput: string = interaction.options.get('model-name')!!.value as string
+        let modelExists: boolean
 
         // fetch channel and message
         const channel = await client.channels.fetch(interaction.channelId)
@@ -37,8 +38,16 @@ export const DeleteModel: SlashCommand = {
         }
 
         // check if model exists
-        const modelExists: boolean = await ollama.list()
-            .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
+        try {
+            modelExists = await ollama.list()
+                .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
+        } catch (error) {
+            interaction.editReply({
+                content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
+            })
+            return
+        }
+
 
         try {
             // call ollama to delete model
diff --git a/src/commands/pullModel.ts b/src/commands/pullModel.ts
index 7fba93d..9d5136c 100644
--- a/src/commands/pullModel.ts
+++ b/src/commands/pullModel.ts
@@ -22,6 +22,7 @@ export const PullModel: SlashCommand = {
         // defer reply to avoid timeout
         await interaction.deferReply()
         const modelInput: string = interaction.options.get('model-to-pull')!!.value as string
+        let modelExists: boolean
 
         // fetch channel and message
         const channel = await client.channels.fetch(interaction.channelId)
@@ -36,9 +37,17 @@ export const PullModel: SlashCommand = {
             return
         }
 
-        // check if model was already pulled
-        const modelExists: boolean = await ollama.list()
-            .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
+        // check if the model was already pulled; report an error if the Ollama service isn't running
+        try {
+            modelExists = await ollama.list()
+                .then(response => response.models.some((model: ModelResponse) => model.name.startsWith(modelInput)))
+        } catch (error) {
+            interaction.editReply({
+                content: `The Ollama service is not running. Please turn on/download the [service](https://ollama.com/).`
+            })
+            return
+        }
+
 
         try {
             // call ollama to pull desired model
diff --git a/src/commands/switchModel.ts b/src/commands/switchModel.ts
index c81c1e9..aabb847 100644
--- a/src/commands/switchModel.ts
+++ b/src/commands/switchModel.ts
@@ -56,10 +56,13 @@ export const SwitchModel: SlashCommand = {
                 interaction.editReply({
                     content: `Could not find **${modelInput}** in local model library.\n\nPlease contact an server admin for access to this model.`
                 })
-            } catch (error) {
+            } catch (error: any) {
                 // could not resolve user model switch
+                if (error.message.includes("fetch failed"))
+                    error.message = "The Ollama service is not running. Please turn on/download the [service](https://ollama.com/)."
+
                 interaction.editReply({
-                    content: `Unable to switch user preferred model to **${modelInput}**.\n\n${error}\n\nPossible solution is to request an server admin run \`/pull-model ${modelInput}\` and try again.`
+                    content: `Unable to switch user preferred model to **${modelInput}**.\n\n${error.message}`
                 })
                 return
             }