Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 2bdc7b8583 |  |
|  | 727731695e |  |
|  | 5f8b513269 |  |
|  | fcb0267559 |  |
|  | 6b903cff5e |  |
@@ -21,4 +21,7 @@ OLLAMA_PORT = PORT
 DISCORD_IP = IP_ADDRESS

 # subnet address, ex. 172.18.0.0 as we use /16.
 SUBNET_ADDRESS = ADDRESS
+
+# list of admins to handle admin commands for the bot, use single quotes
+ADMINS=['username1', 'username2', 'username3', ...]
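The single-quoted `ADMINS` list is later read through `Keys.superUser` and normalised with `JSON.parse(Keys.superUser.replace(/'/g, '"'))` in `shutoff.ts`. A minimal sketch of that parsing step, with a hypothetical helper name and sample value:

```ts
// Sketch: turn the .env-style ADMINS value into a string[].
// Mirrors the JSON.parse(...replace(/'/g, '"')) call used in shutoff.ts;
// parseAdmins and the sample value are illustrative, not repo code.
function parseAdmins(raw: string): string[] {
    // "['username1', 'username2']" -> '["username1", "username2"]'
    return JSON.parse(raw.replace(/'/g, '"'))
}

const admins = parseAdmins("['username1', 'username2']")
console.log(admins.includes('username1')) // true
```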
.github/CONTRIBUTING.md (vendored, 5 changed lines)
@@ -23,10 +23,7 @@
 ## Environment
 * You will need two environment files:
   * `.env`: for running the bot
-    * `CLIENT_TOKEN`: the token for the bot to log in
-    * `CHANNEL_ID`: the id of the channel you wish for the bot to listen in
-    * `MODEL`: the mode you wish to use
-    * `BOT_UID`: the user id the bot goes by (the id of the discord user)
+    * Please refer to `.env.sample` for all environment variables to include
   * `.env.dev.local`: also runs the bot, but with development variables
     * Currently there are no differences between the two, but when needed, you may add environment variables as needed.

.github/workflows/build-test.yml (vendored, 11 changed lines)
@@ -1,4 +1,4 @@
-name: Test Discord-Ollama Builds
+name: Builds
 run-name: Validate Node and Docker Builds
 on:
   push:
@@ -37,9 +37,12 @@ jobs:
           echo CLIENT_UID = ${{ secrets.CLIENT_UID }} >> .env
           echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
           echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
+          echo ADMINS = ${{ secrets.ADMINS }} >> .env

+      # set -e ensures if nohup fails, this section fails
       - name: Startup Discord Bot Client
         run: |
+          set -e
           nohup npm run prod &

   Discord-Ollama-Container-Build: # test docker build and run
@@ -65,6 +68,7 @@ jobs:
           echo CLIENT_UID = ${{ secrets.CLIENT_UID }} >> .env
           echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
           echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
+          echo ADMINS = ${{ secrets.ADMINS }} >> .env

       - name: Setup Docker Network and Images
         run: |
@@ -74,8 +78,7 @@ jobs:
         run: |
           (docker images | grep -q 'discord/bot' && docker images | grep -qE 'ollama/ollama') || exit 1
-

-      - name: Check Images Exist
+      - name: Check Containers Exist
         run: |
           (docker ps | grep -q 'ollama' && docker ps | grep -q 'discord') || exit 1

README.md (25 changed lines)
@@ -1,5 +1,26 @@
-# Discord Ollama Integration [![License](https://img.shields.io/badge/License-CC_BY--NC_4.0-darkgreen.svg)](https://creativecommons.org/licenses/by-nc/4.0/) [![Release](https://img.shields.io/github/v/release/kevinthedang/discord-ollama?logo=github)](https://github.com/kevinthedang/discord-ollama/releases/latest)
-Ollama is an AI model management tool that allows users to install and use custom large language models locally. The goal is to create a discord bot that will utilize Ollama and chat with it on a Discord!
+<div align="center">
+    <p><a href="#"><a href="https://ollama.ai/"><img alt="ollama" src="./imgs/ollama-icon.png" width="200px" /></a><img alt="+" src="./imgs/grey-plus.png" width="100px" /></a><a href="https://discord.com/"><img alt="discord" src="./imgs/discord-icon.png" width="190px" /></a></p>
+    <h1>Discord Ollama Integration</h1>
+    <h3><a href="#"></a>Ollama as your Discord AI Assistant</h3>
+    <p><a href="#"></a><a href="https://creativecommons.org/licenses/by-nc/4.0/"><img alt="License" src="https://img.shields.io/badge/License-CC_BY--NC_4.0-darkgreen.svg" /></a>
+    <a href="#"></a><a href="https://github.com/kevinthedang/discord-ollama/releases/latest"><img alt="Release" src="https://img.shields.io/github/v/release/kevinthedang/discord-ollama?logo=github" /></a>
+    <a href="#"></a><a href="https://github.com/kevinthedang/discord-ollama/actions/workflows/build-test.yml"><img alt="Build Status" src="https://github.com/kevinthedang/discord-ollama/actions/workflows/build-test.yml/badge.svg" /></a>
+</div>
+
+## About/Goals
+Ollama is an AI model management tool that allows users to install and use custom large language models locally.
+The project aims to:
+* [x] Create a Discord bot that will utilize Ollama to chat with users!
+* [ ] User Preferences on Chat
+* [ ] Message Persistence on Channels and Threads
+* [x] Containerization with Docker
+* [x] Slash Commands Compatible
+* [ ] Generated Token Length Handling for >2000 or >6000 characters
+* [ ] External WebUI Integration
+* [ ] Administrator Role Compatible
+* [ ] Allow others to create their own models personalized for their own servers!
+* [ ] Documentation on creating your own LLM
+* [ ] Documentation on web scraping and cleaning

 ## Environment Setup
 * Clone this repo using `git clone https://github.com/kevinthedang/discord-ollama.git` or just use [GitHub Desktop](https://desktop.github.com/) to clone the repo.
@@ -8,7 +8,7 @@ services:
     build: ./ # find docker file in designated path
     container_name: discord
     restart: always # rebuild container always
-    image: discord/bot:0.2.0
+    image: discord/bot:0.3.6
     environment:
       CLIENT_TOKEN: ${CLIENT_TOKEN}
       GUILD_ID: ${GUILD_ID}
@@ -17,6 +17,7 @@ services:
       CLIENT_UID: ${CLIENT_UID}
       OLLAMA_IP: ${OLLAMA_IP}
       OLLAMA_PORT: ${OLLAMA_PORT}
+      ADMINS: ${ADMINS}
     networks:
       ollama-net:
         ipv4_address: ${DISCORD_IP}
imgs/discord-icon.png (new binary file, 94 KiB, not shown)
imgs/grey-plus.png (new binary file, 6.5 KiB, not shown)
imgs/ollama-icon.png (new binary file, 49 KiB, not shown)
package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "discord-ollama",
-  "version": "0.3.2",
+  "version": "0.3.6",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "discord-ollama",
-      "version": "0.3.2",
+      "version": "0.3.6",
       "license": "ISC",
       "dependencies": {
         "axios": "^1.6.2",
@@ -1,6 +1,6 @@
 {
   "name": "discord-ollama",
-  "version": "0.3.2",
+  "version": "0.3.6",
   "description": "Ollama Integration into discord",
   "main": "build/index.js",
   "exports": "./build/index.js",
@@ -10,14 +10,14 @@
     "build": "tsc",
     "prod": "node .",
     "client": "npm run build && npm run prod",
-    "clean": "docker compose down && docker rmi $(docker images | grep 0.2.0 | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
+    "clean": "docker compose down && docker rmi $(docker images | grep $(node -p \"require('./package.json').version\") | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
    "start": "docker compose build --no-cache && docker compose up -d",
    "docker:start": "npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama",
    "docker:start-cpu": "npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama-cpu",
    "docker:clean": "docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
    "docker:network": "docker network create --subnet=172.18.0.0/16 ollama-net",
-    "docker:build": "docker build --no-cache -t discord/bot:0.3.2 .",
-    "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 discord/bot:0.3.2",
+    "docker:build": "docker build --no-cache -t discord/bot:$(node -p \"require('./package.json').version\") .",
+    "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 discord/bot:$(node -p \"require('./package.json').version\")",
    "docker:ollama": "docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest",
    "docker:ollama-cpu": "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest"
   },
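The updated scripts derive the image tag from `package.json` instead of hard-coding a version. A minimal sketch of the same lookup done from TypeScript (the helper name is illustrative; the npm scripts use the equivalent `node -p "require('./package.json').version"`):

```ts
// Sketch: read the version field the docker:build / docker:client scripts query.
import { readFileSync } from 'node:fs'

function packageVersion(path: string = './package.json'): string {
    // parse the package manifest and return its version string
    const pkg = JSON.parse(readFileSync(path, 'utf-8'))
    return pkg.version as string
}

// e.g. build an image tag the same way docker:build now does
console.log(`discord/bot:${packageVersion()}`)
```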
@@ -2,6 +2,7 @@ import { Client, GatewayIntentBits } from 'discord.js'
 import { UserMessage, registerEvents } from './utils/events.js'
 import Events from './events/index.js'
 import { Ollama } from 'ollama'
+import { Queue } from './queues/queue.js'

 // Import keys/tokens
 import Keys from './keys.js'
@@ -23,12 +24,7 @@ const ollama = new Ollama({
 })

 // Create Queue managed by Events
-const messageHistory: [UserMessage] = [
-    {
-        role: 'system',
-        content: 'Your name is Ollama GU'
-    }
-]
+const messageHistory: Queue<UserMessage> = new Queue<UserMessage>

 /**
  * register events for bot to listen to in discord
@@ -44,4 +40,10 @@ await client.login(Keys.clientToken)
     .catch((error) => {
         console.error('[Login Error]', error)
         process.exit(1)
     })
+
+// queue up bot's name
+messageHistory.enqueue({
+    role: 'assistant',
+    content: `My name is ${client.user?.username}`
+})
src/commands/capacity.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { ChannelType, Client, CommandInteraction, ApplicationCommandOptionType } from 'discord.js'
import { SlashCommand } from '../utils/commands.js'
import { openFile } from '../utils/jsonHandler.js'

export const Capacity: SlashCommand = {
    name: 'modify-capacity',
    description: 'number of messages bot will hold for context.',

    // set available user options to pass to the command
    options: [
        {
            name: 'context-capacity',
            description: 'a number to set capacity',
            type: ApplicationCommandOptionType.Number,
            required: true
        }
    ],

    // Query for message information and set the context capacity
    run: async (client: Client, interaction: CommandInteraction) => {
        // fetch channel and message
        const channel = await client.channels.fetch(interaction.channelId)
        if (!channel || channel.type !== ChannelType.GuildText) return

        // write the new context capacity to the config
        openFile('config.json', interaction.commandName, interaction.options.get('context-capacity')?.value)

        interaction.reply({
            content: `Message History Capacity has been set to \`${interaction.options.get('context-capacity')?.value}\``,
            ephemeral: true
        })
    }
}
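The `SlashCommand` type these new commands implement comes from `../utils/commands.js`, which is not part of this diff. A sketch of the shape the command files rely on, under that assumption (the interface name and exact option typing here are illustrative):

```ts
// Sketch: the structure Capacity, Disable, and Shutoff all conform to.
// The real definition lives in src/utils/commands.ts and may differ.
import { ApplicationCommandOptionData, Client, CommandInteraction } from 'discord.js'

interface SlashCommandSketch {
    name: string                                   // slash command name, e.g. 'modify-capacity'
    description: string                            // shown in Discord's command picker
    options?: ApplicationCommandOptionData[]       // user-supplied arguments
    run: (client: Client, interaction: CommandInteraction) => Promise<void>
}
```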
src/commands/disable.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
import { ChannelType, Client, CommandInteraction, ApplicationCommandOptionType } from 'discord.js'
import { SlashCommand } from '../utils/commands.js'
import { openFile } from '../utils/jsonHandler.js'

export const Disable: SlashCommand = {
    name: 'toggle-chat',
    description: 'toggle all chat features, slash commands will still work.',

    // set available user options to pass to the command
    options: [
        {
            name: 'enabled',
            description: 'true = enabled, false = disabled',
            type: ApplicationCommandOptionType.Boolean,
            required: true
        }
    ],

    // Query for message information and toggle chat features
    run: async (client: Client, interaction: CommandInteraction) => {
        // fetch channel and message
        const channel = await client.channels.fetch(interaction.channelId)
        if (!channel || channel.type !== ChannelType.GuildText) return

        // set state of bot chat features
        openFile('config.json', interaction.commandName, interaction.options.get('enabled')?.value)

        interaction.reply({
            content: `Chat features have been \`${interaction.options.get('enabled')?.value ? "enabled" : "disabled"}\``,
            ephemeral: true
        })
    }
}
@@ -2,9 +2,15 @@ import { SlashCommand } from '../utils/commands.js'
 import { ThreadCreate } from './threadCreate.js'
 import { MessageStyle } from './messageStyle.js'
 import { MessageStream } from './messageStream.js'
+import { Disable } from './disable.js'
+import { Shutoff } from './shutoff.js'
+import { Capacity } from './capacity.js'

 export default [
     ThreadCreate,
     MessageStyle,
-    MessageStream
+    MessageStream,
+    Disable,
+    Shutoff,
+    Capacity
 ] as SlashCommand[]
src/commands/shutoff.ts (new file, 54 lines)
@@ -0,0 +1,54 @@
import { ChannelType, Client, CommandInteraction, ApplicationCommandOptionType } from 'discord.js'
import { SlashCommand } from '../utils/commands.js'
import Keys from '../keys.js'

export const Shutoff: SlashCommand = {
    name: 'shutoff',
    description: 'shutdown the bot. You will need to manually bring it online again.',

    // set available user options to pass to the command
    options: [
        {
            name: 'are-you-sure',
            description: 'true = yes, false = I\'m scared',
            type: ApplicationCommandOptionType.Boolean,
            required: true
        }
    ],

    // Verify the caller is an admin, then shut the bot down
    run: async (client: Client, interaction: CommandInteraction) => {
        // fetch channel and message
        const channel = await client.channels.fetch(interaction.channelId)
        if (!channel || channel.type !== ChannelType.GuildText) return

        // log this, it will probably be important for auditing who did this
        console.log(`User -> ${interaction.user.tag} attempting to shutdown ${client.user!.tag}`)

        // create list of superUsers based on string parse
        const superUsers: string[] = JSON.parse(Keys.superUser.replace(/'/g, '"'))

        // check if admin or false on shutdown
        if (!superUsers.includes(interaction.user.tag)) {
            interaction.reply({
                content: `Shutdown failed:\n\n${interaction.user.tag}, You do not have permission to shutoff **${client.user?.tag}**.`,
                ephemeral: true
            })
            return // stop from shutting down
        } else if (!interaction.options.get('are-you-sure')?.value) {
            interaction.reply({
                content: `Shutdown failed:\n\n${interaction.user.tag}, You didn't want to shutoff **${client.user?.tag}**.`,
                ephemeral: true
            })
            return
        }

        interaction.reply({
            content: `${client.user?.tag} is ${interaction.options.get('are-you-sure')?.value ? "shutting down now." : "not shutting down."}`,
            ephemeral: true
        })

        // clean up client instance and stop
        client.destroy()
    }
}
@@ -1,6 +1,6 @@
 import { ChatResponse } from 'ollama'
 import { embedMessage, event, Events, normalMessage } from '../utils/index.js'
-import { Configuration, getConfig } from '../utils/jsonHandler.js'
+import { Configuration, getConfig, openFile } from '../utils/jsonHandler.js'

 /**
  * Max Message length for free users is 2000 characters (bot or not).
@@ -18,26 +18,47 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
     // Only respond if message mentions the bot
     if (!message.mentions.has(tokens.clientUid)) return

-    // push user response
-    msgHist.push({
-        role: 'user',
-        content: message.content
-    })
-
     // Try to query and send embed
     try {
         const config: Configuration = await new Promise((resolve, reject) => {
             getConfig('config.json', (config) => {
                 // check if config.json exists
                 if (config === undefined) {
-                    reject(new Error('No Configuration is set up.'))
+                    reject(new Error('No Configuration is set up.\n\nCreating \`config.json\` with \`message-style\` set as \`true\` for embedded messages.\nPlease try chatting again.'))
                     return
                 }

+                // check if chat is disabled
+                if (!config.options['toggle-chat']) {
+                    reject(new Error('Admin(s) have disabled chat features.\n\n Please contact your server\'s admin(s).'))
+                    return
+                }
+
+                // check if there is a set capacity in config
+                if (typeof config.options['history-capacity'] !== 'number')
+                    log(`Capacity is undefined, using default capacity of ${msgHist.capacity}.`)
+                else if (config.options['history-capacity'] === msgHist.capacity)
+                    log(`Capacity matches config as ${msgHist.capacity}, no changes made.`)
+                else {
+                    log(`New Capacity found. Setting Context Capacity to ${config.options['history-capacity']}.`)
+                    msgHist.capacity = config.options['history-capacity']
+                }
+
                 resolve(config)
             })
         })

         let response: ChatResponse

+        // check if we can push, if not, remove oldest
+        if (msgHist.size() === msgHist.capacity) msgHist.dequeue()
+
+        // push user response before ollama query
+        msgHist.enqueue({
+            role: 'user',
+            content: message.content
+        })
+
         // undefined or false, use normal, otherwise use embed
         if (config.options['message-style'])
             response = await embedMessage(message, ollama, tokens, msgHist)
@@ -47,13 +68,17 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
         // If something bad happened, remove user query and stop
         if (response == undefined) { msgHist.pop(); return }

-        // successful query, save it as history
-        msgHist.push({
+        // if queue is full, remove the oldest message
+        if (msgHist.size() === msgHist.capacity) msgHist.dequeue()
+
+        // successful query, save it in context history
+        msgHist.enqueue({
             role: 'assistant',
             content: response.message.content
         })
     } catch (error: any) {
         msgHist.pop() // remove message because of failure
-        message.reply(`**Response generation failed.**\n\nReason: ${error.message}\n\nPlease use any config slash command.`)
+        openFile('config.json', 'message-style', true)
+        message.reply(`**Error Occurred:**\n\n**Reason:** *${error.message}*`)
     }
 })
@@ -7,7 +7,8 @@ export const Keys = {
     clientUid: getEnvVar('CLIENT_UID'),
     guildId: getEnvVar('GUILD_ID'),
     ipAddress: getEnvVar('OLLAMA_IP'),
-    portAddress: getEnvVar('OLLAMA_PORT')
+    portAddress: getEnvVar('OLLAMA_PORT'),
+    superUser: getEnvVar('ADMINS')
 } as const // readonly keys

 export default Keys
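The new `superUser` key reuses the existing `getEnvVar` helper, whose implementation is not part of this diff. A minimal sketch of what a helper like that typically does (the error handling shown here is an assumption, not the repo's code):

```ts
// Sketch of an environment lookup helper like getEnvVar (illustrative only).
function getEnvVar(name: string): string {
    const value = process.env[name]
    // fail fast if the variable was never written to .env
    if (value === undefined)
        throw new Error(`Missing environment variable: ${name}`)
    return value
}

// superUser would then hold the raw ADMINS string from .env
const superUser = getEnvVar('ADMINS')
console.log(superUser)
```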
src/queues/queue.ts (new file, 62 lines)
@@ -0,0 +1,62 @@
// Queue interface for any queue class to follow
interface IQueue<T> {
    enqueue(item: T): void
    dequeue(): T | undefined
    size(): number
}

/**
 * Queue for UserMessages
 * When the limit for messages is met, we want to clear
 * out the oldest message in the queue
 */
export class Queue<T> implements IQueue<T> {
    private storage: T[] = []

    /**
     * Set up Queue
     * @param capacity max length of queue
     */
    constructor(public capacity: number = 5) {}

    /**
     * Add an item to the back of the queue
     * @param item object of type T to add into queue
     */
    enqueue(item: T): void {
        if (this.size() === this.capacity)
            throw Error('Queue has reached max capacity, you cannot add more items.')
        this.storage.push(item)
    }

    /**
     * Remove the item at the front of the queue
     * @returns object of type T in queue, or undefined if empty
     */
    dequeue(): T | undefined {
        return this.storage.shift()
    }

    /**
     * Size of the queue
     * @returns length of queue as a number
     */
    size(): number {
        return this.storage.length
    }

    /**
     * Remove the newest item at the back of the queue, typically for errors
     */
    pop(): void {
        this.storage.pop()
    }

    /**
     * Get the queue as an array
     * @returns an array of T items
     */
    getItems(): T[] {
        return this.storage
    }
}
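For context, a short usage sketch of this queue as the message history, following the pattern messageCreate uses (the capacity and message values are illustrative; the import path assumes the new file above):

```ts
import { Queue } from './queues/queue.js' // path illustrative

// history queue with a small capacity for the example
const msgHist = new Queue<{ role: string, content: string }>(3)

// before enqueueing, drop the oldest entry if the queue is already full
if (msgHist.size() === msgHist.capacity) msgHist.dequeue()
msgHist.enqueue({ role: 'user', content: 'Hello there!' })

// the full history is handed to ollama.chat as a plain array
console.log(msgHist.getItems()) // [{ role: 'user', content: 'Hello there!' }]

// on a failed response, the newest entry is removed again
msgHist.pop()
```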
@@ -1,5 +1,6 @@
-import type { ClientEvents, Awaitable, Client } from 'discord.js'
+import type { ClientEvents, Awaitable, Client, User } from 'discord.js'
 import { Ollama } from 'ollama'
+import { Queue } from '../queues/queue.js'

 // Export events through here to reduce amount of imports
 export { Events } from 'discord.js'
@@ -33,7 +34,7 @@ export type UserMessage = {
 export interface EventProps {
     client: Client
     log: LogMethod
-    msgHist: { role: string, content: string }[]
+    msgHist: Queue<UserMessage>
     tokens: Tokens,
     ollama: Ollama
 }
@@ -63,7 +64,7 @@ export function event<T extends EventKeys>(key: T, callback: EventCallback<T>):
 export function registerEvents(
     client: Client,
     events: Event[],
-    msgHist: UserMessage[],
+    msgHist: Queue<UserMessage>,
     tokens: Tokens,
     ollama: Ollama
 ): void {
@@ -4,7 +4,9 @@ export interface Configuration {
     readonly name: string
     options: {
         'message-stream'?: boolean,
-        'message-style'?: boolean
+        'message-style'?: boolean,
+        'toggle-chat'?: boolean,
+        'history-capacity'?: number
     }
 }

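Taken together with the new slash commands, a configuration value matching this interface would look roughly like the sketch below (the `name` value, concrete numbers, and import path are illustrative):

```ts
import { Configuration } from './utils/jsonHandler.js' // path illustrative

// Sketch: a Configuration covering the new option fields.
const exampleConfig: Configuration = {
    name: 'config.json',
    options: {
        'message-stream': false,
        'message-style': true,     // chooses embedded vs normal replies in messageCreate
        'toggle-chat': true,       // written via the /toggle-chat command
        'history-capacity': 5      // read by messageCreate to resize the history queue
    }
}

console.log(exampleConfig.options['history-capacity']) // 5
```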
@@ -1,6 +1,7 @@
 import { EmbedBuilder, Message } from 'discord.js'
 import { ChatResponse, Ollama } from 'ollama'
 import { UserMessage } from './events.js'
+import { Queue } from '../queues/queue.js'

 /**
  * Method to send replies as normal text on discord like any other user
@@ -15,7 +16,7 @@ export async function embedMessage(
         channel: string,
         model: string
     },
-    msgHist: UserMessage[]
+    msgHist: Queue<UserMessage>
 ) {
     // bot response
     let response: ChatResponse
@@ -33,7 +34,7 @@ export async function embedMessage(
     // Attempt to query model for message
     response = await ollama.chat({
         model: tokens.model,
-        messages: msgHist,
+        messages: msgHist.getItems(),
         options: {
             num_thread: 8, // remove if optimization needed further
             mirostat: 1,
@@ -1,6 +1,7 @@
 import { Message } from 'discord.js'
 import { ChatResponse, Ollama } from 'ollama'
 import { UserMessage } from './events.js'
+import { Queue } from '../queues/queue.js'

 /**
  * Method to send replies as normal text on discord like any other user
@@ -15,7 +16,7 @@ export async function normalMessage(
         channel: string,
         model: string
     },
-    msgHist: UserMessage[]
+    msgHist: Queue<UserMessage>
 ) {
     // bot's response
     let response: ChatResponse
@@ -25,7 +26,7 @@ export async function normalMessage(
     // Attempt to query model for message
     response = await ollama.chat({
         model: tokens.model,
-        messages: msgHist,
+        messages: msgHist.getItems(),
         options: {
             num_thread: 8, // remove if optimization needed further
             mirostat: 1,