Pull Model Command (#125)

* Add: Pull Model Command

* Fix: Missing ollama mock for PullModel
This commit is contained in:
Kevin Dang
2024-10-12 17:53:34 -07:00
committed by GitHub
parent 5061dab335
commit 5d02800c3f
5 changed files with 63 additions and 7 deletions

View File

@@ -17,7 +17,7 @@ const client = new Client({
});
// initialize connection to ollama container
const ollama = new Ollama({
export const ollama = new Ollama({
host: `http://${Keys.ipAddress}:${Keys.portAddress}`,
})

View File

@@ -7,6 +7,7 @@ import { Shutoff } from './shutoff.js'
import { Capacity } from './capacity.js'
import { PrivateThreadCreate } from './threadPrivateCreate.js'
import { ClearUserChannelHistory } from './cleanUserChannelHistory.js'
import { PullModel } from './pullModel.js'
export default [
ThreadCreate,
@@ -16,5 +17,6 @@ export default [
Disable,
Shutoff,
Capacity,
ClearUserChannelHistory
ClearUserChannelHistory,
PullModel
] as SlashCommand[]

46
src/commands/pullModel.ts Normal file
View File

@@ -0,0 +1,46 @@
import { ApplicationCommandOptionType, ChannelType, Client, CommandInteraction } from "discord.js";
import { SlashCommand } from "../utils/commands.js";
import { ollama } from "../client.js";
/**
 * Slash command: pull a model from the Ollama model library into the
 * local model store via the shared ollama client.
 */
export const PullModel: SlashCommand = {
    name: 'pull-model',
    description: 'pulls a model from the ollama model library',
    // set available user options to pass to the command
    options: [
        {
            name: 'model-to-pull',
            description: 'the name of the model to pull',
            type: ApplicationCommandOptionType.String,
            required: true
        }
    ],
    // Pull the requested model from the Ollama library
    run: async (client: Client, interaction: CommandInteraction) => {
        // defer reply to avoid the interaction timeout while the pull runs
        await interaction.deferReply()

        // only act in guild text channels and threads
        // BUG FIX: the original compared against
        // (PrivateThread && PublicThread && GuildText), which evaluates to the
        // final operand (GuildText = 0), silently rejecting thread channels.
        const channel = await client.channels.fetch(interaction.channelId)
        const allowedChannelTypes = [
            ChannelType.GuildText,
            ChannelType.PublicThread,
            ChannelType.PrivateThread
        ]
        // NOTE(review): returning here leaves the deferred reply in the
        // "thinking" state — matches original behavior; confirm if a visible
        // error reply is preferred.
        if (!channel || !allowedChannelTypes.includes(channel.type)) return

        // option is declared required, so it is always present (single `!`)
        const modelName = interaction.options.get('model-to-pull')!.value as string
        try {
            // call ollama to pull the desired model
            await ollama.pull({ model: modelName })
        } catch (error) {
            // could not resolve pull or model unfound
            await interaction.editReply({
                content: `Could not pull/locate the **${modelName}** model within the [Ollama Model Library](https://ollama.com/library).\n\nPlease check the model library and try again.`
            })
            return
        }
        // successful pull
        await interaction.editReply({
            content: `Successfully added **${modelName}** into your local model library.`
        })
    }
}