Mirror of https://github.com/kevinthedang/discord-ollama.git (synced 2025-12-12 11:56:06 -05:00)
Auto-Generate Config (#29)
* fix: name in job
* add: auto create config.json on missing
* update: readme goals
* add: clarify instructions on fail chat
* update: reduced redundancy in package file
.github/workflows/build-test.yml (vendored, 2 changed lines)
@@ -75,7 +75,7 @@ jobs:
           (docker images | grep -q 'discord/bot' && docker images | grep -qE 'ollama/ollama') || exit 1

-      - name: Check Images Exist
+      - name: Check Containers Exist
         run: |
           (docker ps | grep -q 'ollama' && docker ps | grep -q 'discord') || exit 1
README.md

@@ -1,5 +1,5 @@
 # Discord Ollama Integration [](https://creativecommons.org/licenses/by-nc/4.0/) [](https://github.com/kevinthedang/discord-ollama/releases/latest)
-Ollama is an AI model management tool that allows users to install and use custom large language models locally. The goal is to create a discord bot that will utilize Ollama and chat with it on a Discord!
+Ollama is an AI model management tool that allows users to install and use custom large language models locally. The goal is to create a discord bot that will utilize Ollama and chat with it on a Discord server! Also, allow others to create their own models personalized for their own servers!

 ## Environment Setup
 * Clone this repo using `git clone https://github.com/kevinthedang/discord-ollama.git` or just use [GitHub Desktop](https://desktop.github.com/) to clone the repo.
package-lock.json (generated, 4 changed lines)
@@ -1,12 +1,12 @@
 {
   "name": "discord-ollama",
-  "version": "0.3.2",
+  "version": "0.3.3",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "discord-ollama",
-      "version": "0.3.2",
+      "version": "0.3.3",
       "license": "ISC",
       "dependencies": {
         "axios": "^1.6.2",
package.json

@@ -1,6 +1,6 @@
 {
   "name": "discord-ollama",
-  "version": "0.3.2",
+  "version": "0.3.3",
   "description": "Ollama Integration into discord",
   "main": "build/index.js",
   "exports": "./build/index.js",
@@ -10,14 +10,14 @@
     "build": "tsc",
     "prod": "node .",
     "client": "npm run build && npm run prod",
-    "clean": "docker compose down && docker rmi $(docker images | grep 0.2.0 | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
+    "clean": "docker compose down && docker rmi $(docker images | grep $(node -p \"require('./package.json').version\") | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
     "start": "docker compose build --no-cache && docker compose up -d",
     "docker:start": "npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama",
     "docker:start-cpu": "npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama-cpu",
     "docker:clean": "docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
     "docker:network": "docker network create --subnet=172.18.0.0/16 ollama-net",
-    "docker:build": "docker build --no-cache -t discord/bot:0.3.2 .",
-    "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 discord/bot:0.3.2",
+    "docker:build": "docker build --no-cache -t discord/bot:$(node -p \"require('./package.json').version\") .",
+    "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 discord/bot:$(node -p \"require('./package.json').version\")",
     "docker:ollama": "docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest",
     "docker:ollama-cpu": "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest"
   },
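The repeated $(node -p "require('./package.json').version") substitution is what removes the hard-coded image tags: each npm script shells out to Node to read the current version, so the Docker tag always tracks package.json instead of drifting like the old grep for 0.2.0. A minimal TypeScript sketch of the same lookup; the standalone script and its variable names are illustrative only and not part of this commit:

// Illustrative only: reads the version the same way the npm scripts do,
// then builds the image tag that docker:build / docker:client would use.
import { readFileSync } from 'node:fs'

const pkg = JSON.parse(readFileSync('./package.json', 'utf-8'))
const imageTag = `discord/bot:${pkg.version}`   // e.g. discord/bot:0.3.3

console.log(imageTag)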
MessageCreate event handler

@@ -1,6 +1,6 @@
 import { ChatResponse } from 'ollama'
 import { embedMessage, event, Events, normalMessage } from '../utils/index.js'
-import { Configuration, getConfig } from '../utils/jsonHandler.js'
+import { Configuration, getConfig, openFile } from '../utils/jsonHandler.js'

 /**
  * Max Message length for free users is 2000 characters (bot or not).
@@ -54,6 +54,7 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
             })
         } catch (error: any) {
             msgHist.pop() // remove message because of failure
-            message.reply(`**Response generation failed.**\n\nReason: ${error.message}\n\nPlease use any config slash command.`)
+            openFile('config.json', 'message-style', true)
+            message.reply(`**Response generation failed.**\n\n**Reason:** *${error.message}*\n\nCreating \`config.json\` with \`message-style\` set as \`true\` for embedded messages.\nPlease try chatting again.`)
         }
 })
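This hunk is the auto-generate behaviour the commit title refers to: when response generation throws, the handler now writes a default config.json with message-style set to true (embedded replies) and asks the user to retry, instead of pointing them at the config slash commands. The real openFile comes from ../utils/jsonHandler.js and its implementation is not shown in this diff; the following is only a rough sketch of what such a helper could do, under that assumption:

// Hypothetical sketch only -- the real openFile lives in ../utils/jsonHandler.js
// and is not part of this commit's diff.
import { existsSync, writeFileSync } from 'node:fs'

function openFileSketch(filename: string, key: string, value: boolean): void {
    // create the config with a single default key when the file is missing,
    // so the next chat attempt has a message style to work with
    if (!existsSync(filename)) {
        writeFileSync(filename, JSON.stringify({ [key]: value }, null, 2))
    }
}

openFileSketch('config.json', 'message-style', true)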