Compare commits


1 Commit

Author: Kevin Dang
SHA1: 6b903cff5e
Date: 2024-03-30 22:02:49 -07:00

Auto-Generate Config (#29)

* fix: name in job
* add: auto create config.json on missing
* update: readme goals
* add: clarify instructions on fail chat
* update: reduced redundancy in package file
5 changed files with 11 additions and 10 deletions

View File

@@ -75,7 +75,7 @@ jobs:
           (docker images | grep -q 'discord/bot' && docker images | grep -qE 'ollama/ollama') || exit 1
-      - name: Check Images Exist
+      - name: Check Containers Exist
         run: |
           (docker ps | grep -q 'ollama' && docker ps | grep -q 'discord') || exit 1

View File

@@ -1,5 +1,5 @@
 # Discord Ollama Integration [![License: CC BY-NC 4.0](https://img.shields.io/badge/License-CC_BY--NC_4.0-darkgreen.svg)](https://creativecommons.org/licenses/by-nc/4.0/) [![Release Badge](https://img.shields.io/github/v/release/kevinthedang/discord-ollama?logo=github)](https://github.com/kevinthedang/discord-ollama/releases/latest)
-Ollama is an AI model management tool that allows users to install and use custom large language models locally. The goal is to create a discord bot that will utilize Ollama and chat with it on a Discord!
+Ollama is an AI model management tool that allows users to install and use custom large language models locally. The goal is to create a discord bot that will utilize Ollama and chat with it on a Discord server! Also, allow others to create their own models personalized for their own servers!
 
 ## Environment Setup
 * Clone this repo using `git clone https://github.com/kevinthedang/discord-ollama.git` or just use [GitHub Desktop](https://desktop.github.com/) to clone the repo.

package-lock.json (generated): 4 changed lines
View File

@@ -1,12 +1,12 @@
 {
   "name": "discord-ollama",
-  "version": "0.3.2",
+  "version": "0.3.3",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "discord-ollama",
-      "version": "0.3.2",
+      "version": "0.3.3",
       "license": "ISC",
       "dependencies": {
         "axios": "^1.6.2",

View File

@@ -1,6 +1,6 @@
 {
   "name": "discord-ollama",
-  "version": "0.3.2",
+  "version": "0.3.3",
   "description": "Ollama Integration into discord",
   "main": "build/index.js",
   "exports": "./build/index.js",
@@ -10,14 +10,14 @@
     "build": "tsc",
     "prod": "node .",
     "client": "npm run build && npm run prod",
-    "clean": "docker compose down && docker rmi $(docker images | grep 0.2.0 | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
+    "clean": "docker compose down && docker rmi $(docker images | grep $(node -p \"require('./package.json').version\") | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
     "start": "docker compose build --no-cache && docker compose up -d",
     "docker:start": "npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama",
     "docker:start-cpu": "npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama-cpu",
     "docker:clean": "docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
     "docker:network": "docker network create --subnet=172.18.0.0/16 ollama-net",
-    "docker:build": "docker build --no-cache -t discord/bot:0.3.2 .",
-    "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 discord/bot:0.3.2",
+    "docker:build": "docker build --no-cache -t discord/bot:$(node -p \"require('./package.json').version\") .",
+    "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 discord/bot:$(node -p \"require('./package.json').version\")",
     "docker:ollama": "docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest",
     "docker:ollama-cpu": "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest"
   },
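The script rework above removes the hard-coded image tags: `clean`, `docker:build`, and `docker:client` now derive the tag from `package.json` via `node -p "require('./package.json').version"`, so a version bump no longer has to be repeated across scripts. A minimal sketch of the same lookup done from project code, assuming it runs from the repository root (the file name is hypothetical, not part of this commit):

```ts
// version-tag.ts (hypothetical helper, not part of this commit)
// Mirrors the npm scripts' `node -p "require('./package.json').version"` lookup.
import { readFileSync } from 'node:fs'

const { version } = JSON.parse(readFileSync('./package.json', 'utf-8')) as { version: string }

// Build the same image tag that docker:build and docker:client now use.
console.log(`discord/bot:${version}`)
```

At this release the script would print `discord/bot:0.3.3`, matching the tag produced by the updated npm scripts.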

View File

@@ -1,6 +1,6 @@
 import { ChatResponse } from 'ollama'
 import { embedMessage, event, Events, normalMessage } from '../utils/index.js'
-import { Configuration, getConfig } from '../utils/jsonHandler.js'
+import { Configuration, getConfig, openFile } from '../utils/jsonHandler.js'
 
 /**
  * Max Message length for free users is 2000 characters (bot or not).
@@ -54,6 +54,7 @@ export default event(Events.MessageCreate, async ({ log, msgHist, tokens, ollama
         })
     } catch (error: any) {
         msgHist.pop() // remove message because of failure
-        message.reply(`**Response generation failed.**\n\nReason: ${error.message}\n\nPlease use any config slash command.`)
+        openFile('config.json', 'message-style', true)
+        message.reply(`**Response generation failed.**\n\n**Reason:** *${error.message}*\n\nCreating \`config.json\` with \`message-style\` set as \`true\` for embedded messages.\nPlease try chatting again.`)
     }
 })
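The catch block now does more than report the failure: before replying, it calls `openFile('config.json', 'message-style', true)` so a missing `config.json` is created with `message-style` set to `true` (embedded messages), and the reply asks the user to simply try chatting again. The real `openFile` lives in `utils/jsonHandler.js` and is not shown in this diff; the sketch below is only a guess at what a create-or-update helper of that shape could look like (names and defaults are assumptions):

```ts
// Hypothetical sketch only; the actual openFile in utils/jsonHandler.js
// may use different names, defaults, or async I/O.
import { existsSync, readFileSync, writeFileSync } from 'node:fs'

export function openFile(filename: string, key: string, value: unknown): void {
    // Load the existing config if present, otherwise start from an empty object.
    const config: Record<string, unknown> = existsSync(filename)
        ? JSON.parse(readFileSync(filename, 'utf-8'))
        : {}

    // Apply the requested setting (e.g. 'message-style' -> true) and persist it.
    config[key] = value
    writeFileSync(filename, JSON.stringify(config, null, 2))
}

// Usage matching the new catch block:
// openFile('config.json', 'message-style', true)
```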