4 Commits

Author            SHA1        Message                                            Date
Kevin Dang        32137dacb0  Add: Filter out think tags in bot message (#190)   2025-10-04 20:17:26 -07:00
Kevin Dang        c00ea5de98  Additional Channel Awareness (#186)                2025-07-31 19:04:51 -07:00
Kevin Dang        b27cdfc162  Update Documentation with New Features (#185)      2025-06-22 20:43:50 -07:00
Jonathan Smoley   1074fe2270  Removed Redis Dependency (#184)                    2025-06-20 17:04:56 -07:00
23 changed files with 157 additions and 288 deletions

View File

@@ -13,7 +13,3 @@ DISCORD_IP = IP_ADDRESS
# subnet address, ex. 172.18.0.0 as we use /16.
SUBNET_ADDRESS = ADDRESS
# redis port and ip, default redis port is 6379
REDIS_IP = IP_ADDRESS
REDIS_PORT = PORT

View File

@@ -34,8 +34,6 @@ jobs:
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
# set -e ensures if nohup fails, this section fails
- name: Startup Discord Bot Client
@@ -63,8 +61,6 @@ jobs:
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
- name: Setup Docker Network and Images
run: |
@@ -72,8 +68,8 @@ jobs:
- name: Check Images Exist
run: |
(docker images | grep -q 'kevinthedang/discord-ollama' && docker images | grep -qE 'ollama/ollama' | docker images | grep -qE 'redis') || exit 1
(docker images | grep -q 'kevinthedang/discord-ollama' && docker images | grep -qE 'ollama/ollama') || exit 1
- name: Check Containers Exist
run: |
(docker ps | grep -q 'ollama' && docker ps | grep -q 'discord' && docker ps | grep -q 'redis') || exit 1
(docker ps | grep -q 'ollama' && docker ps | grep -q 'discord') || exit 1

View File

@@ -31,8 +31,6 @@ jobs:
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
- name: Collect Code Coverage
run: |

View File

@@ -24,8 +24,6 @@ jobs:
echo MODEL = ${{ secrets.MODEL }} >> .env
echo DISCORD_IP = ${{ secrets.DISCORD_IP }} >> .env
echo SUBNET_ADDRESS = ${{ secrets.SUBNET_ADDRESS }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
- name: Check if directory exists and delete it
run: |
@@ -59,7 +57,6 @@ jobs:
npm install
IMAGE="kevinthedang/discord-ollama"
REDIS="redis"
OLLAMA="ollama/ollama"
if docker images | grep -q $IMAGE; then
@@ -75,19 +72,6 @@ jobs:
echo "Old $IMAGE Image Removed" echo "Old $IMAGE Image Removed"
fi fi
if docker images | grep -q $REDIS; then
IMAGE_ID=$(docker images -q $REDIS)
CONTAINER_IDS=$(docker ps -q --filter "ancestor=$IMAGE_ID")
if [ ! -z "$CONTAINER_IDS" ]; then
# Stop and remove the running containers
docker stop $CONTAINER_IDS
echo "Stopped and removed the containers using the image $REDIS"
fi
docker rmi $IMAGE_ID
echo "Old $REDIS Image Removed"
fi
if docker images | grep -q $OLLAMA; then
IMAGE_ID=$(docker images -q $OLLAMA)
CONTAINER_IDS=$(docker ps -q --filter "ancestor=$IMAGE_ID")
@@ -117,14 +101,6 @@ jobs:
--ip ${{ secrets.OLLAMA_IP }} \
ollama/ollama:latest
docker run --rm -d \
-v redis:/root/.redis \
-p ${{ secrets.REDIS_PORT }}:${{ secrets.REDIS_PORT }} \
--name redis \
--network ollama-net \
--ip ${{ secrets.REDIS_IP }} \
redis:latest
docker run --rm -d \
-v discord:/src/app \
--name discord \

View File

@@ -42,8 +42,6 @@ jobs:
echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
echo OLLAMA_PORT = ${{ secrets.OLLAMA_PORT }} >> .env
echo MODEL = ${{ secrets.MODEL }} >> .env
echo REDIS_IP = ${{ secrets.REDIS_IP }} >> .env
echo REDIS_PORT = ${{ secrets.REDIS_PORT }} >> .env
- name: Test Application
run: |

View File

@@ -14,19 +14,28 @@
Ollama is an AI model management tool that allows users to install and use custom large language models locally.
The project aims to:
* [x] Create a Discord bot that will utilize Ollama and chat to chat with users!
* [x] User Preferences on Chat
* [x] User and Server Preferences
* [x] Message Persistance on Channels and Threads
* [x] Message Persistance
* [x] Threads
* [x] Channels
* [x] Containerization with Docker
* [x] Slash Commands Compatible
* [ ] Summary Command
* [ ] Model Info Command
* [ ] List Models Command
* [x] Pull Model Command
* [x] Switch Model Command
* [x] Delete Model Command
* [x] Create Thread Command
* [x] Create Private Thread Command
* [x] Message Stream Command
* [x] Change Message History Size Command
* [x] Clear Channel History Command (User Only)
* [x] Administrator Role Compatible
* [x] Generated Token Length Handling for >2000
* [x] Token Length Handling of any message size
* [x] User vs. Server Preferences
* [x] Multi-User Chat Generation - This was built in from Ollama `v0.2.1+`
* [ ] Redis Caching
* [ ] Ollama Tool Support Implementation
* [x] Administrator Role Compatible
* [ ] Enhanced Channel Context Awareness
* [x] Multi-User Chat Generation (Multiple users chatting at the same time) - This was built in from Ollama `v0.2.1+`
* [ ] Improved User Replied Triggers
* [x] Automatic and Manual model pulling through the Discord client
Further, Ollama provides the functionality to utilize custom models or provide context for the top-layer of any model available through the Ollama model library.
* [Customize a model](https://github.com/ollama/ollama#customize-a-model)
@@ -54,8 +63,6 @@ These are guides to the features and capabilities of this app.
* This project requires the use of npm version `10.9.0` or above.
* [Ollama](https://ollama.com/)
* [Ollama Docker Image](https://hub.docker.com/r/ollama/ollama)
* [Redis](https://redis.io/)
* [Redis Docker Image](https://hub.docker.com/_/redis)
* [Discord.js Docs](https://discord.js.org/docs/packages/discord.js/main)
* [Setting up Docker (Ubuntu 20.04)](https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-20-04)
* [Setting up Nvidia Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html)

View File

@@ -7,14 +7,12 @@ services:
build: ./ # find docker file in designated path
container_name: discord
restart: always # rebuild container always
image: kevinthedang/discord-ollama:0.8.5
image: kevinthedang/discord-ollama:0.8.7
environment:
CLIENT_TOKEN: ${CLIENT_TOKEN}
OLLAMA_IP: ${OLLAMA_IP}
OLLAMA_PORT: ${OLLAMA_PORT}
MODEL: ${MODEL}
REDIS_IP: ${REDIS_IP}
REDIS_PORT: ${REDIS_PORT}
networks:
ollama-net:
ipv4_address: ${DISCORD_IP}
@@ -37,19 +35,6 @@ services:
ports:
- ${OLLAMA_PORT}:${OLLAMA_PORT}
# setup redis container
redis:
image: redis:latest
container_name: redis
restart: always
networks:
ollama-net:
ipv4_address: ${REDIS_IP}
volumes:
- redis:/root/.redis
ports:
- ${REDIS_PORT}:${REDIS_PORT}
# create a network that supports giving addresses withing a specific subnet
networks:
ollama-net:
@@ -62,4 +47,3 @@ networks:
volumes:
ollama:
discord:
redis:

View File

@@ -43,13 +43,11 @@ sudo systemctl restart docker
* [GitHub repository](https://github.com/NVIDIA/nvidia-container-toolkit?tab=readme-ov-file) for Nvidia Container Toolkit
## To Run (with Docker and Docker Compose)
* With the inclusion of subnets in the `docker-compose.yml`, you will need to set the `SUBNET_ADDRESS`, `OLLAMA_IP`, `OLLAMA_PORT`, `REDIS_IP`, `REDIS_PORT`, and `DISCORD_IP`. Here are some default values if you don't care:
* With the inclusion of subnets in the `docker-compose.yml`, you will need to set the `SUBNET_ADDRESS`, `OLLAMA_IP`, `OLLAMA_PORT`, and `DISCORD_IP`. Here are some default values if you don't care:
* `SUBNET_ADDRESS = 172.18.0.0`
* `OLLAMA_IP = 172.18.0.2`
* `OLLAMA_PORT = 11434`
* `DISCORD_IP = 172.18.0.3`
* `REDIS_IP = 172.18.0.4`
* `REDIS_PORT = 6379`
* Don't understand any of this? watch a Networking video to understand subnetting.
* You also need all environment variables shown in [`.env.sample`](../.env.sample)
* Otherwise, there is no need to install any npm packages for this, you just need to run `npm run start` to pull the containers and spin them up.
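For reference, a minimal `.env` assembled from the defaults above and the variables referenced in `docker-compose.yml` could look like the lines below. This is a sketch only: `YOUR_DISCORD_BOT_TOKEN` is a placeholder, and `MODEL = llama3.2` assumes the default model read in `keys.ts`.

CLIENT_TOKEN = YOUR_DISCORD_BOT_TOKEN
MODEL = llama3.2
OLLAMA_IP = 172.18.0.2
OLLAMA_PORT = 11434
DISCORD_IP = 172.18.0.3
SUBNET_ADDRESS = 172.18.0.0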

package-lock.json (generated)
View File

@@ -1,18 +1,17 @@
{
"name": "discord-ollama",
"version": "0.8.5",
"version": "0.8.7",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "discord-ollama",
"version": "0.8.5",
"version": "0.8.7",
"license": "ISC",
"dependencies": {
"discord.js": "^14.20.0",
"dotenv": "^16.5.0",
"ollama": "^0.5.15",
"ollama": "^0.5.15"
"redis": "^4.7.0"
},
"devDependencies": {
"@types/node": "^22.13.14",
@@ -769,65 +768,6 @@
"node": ">=14"
}
},
"node_modules/@redis/bloom": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz",
"integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==",
"license": "MIT",
"peerDependencies": {
"@redis/client": "^1.0.0"
}
},
"node_modules/@redis/client": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/@redis/client/-/client-1.6.0.tgz",
"integrity": "sha512-aR0uffYI700OEEH4gYnitAnv3vzVGXCFvYfdpu/CJKvk4pHfLPEy/JSZyrpQ+15WhXe1yJRXLtfQ84s4mEXnPg==",
"license": "MIT",
"dependencies": {
"cluster-key-slot": "1.1.2",
"generic-pool": "3.9.0",
"yallist": "4.0.0"
},
"engines": {
"node": ">=14"
}
},
"node_modules/@redis/graph": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz",
"integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==",
"license": "MIT",
"peerDependencies": {
"@redis/client": "^1.0.0"
}
},
"node_modules/@redis/json": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.7.tgz",
"integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==",
"license": "MIT",
"peerDependencies": {
"@redis/client": "^1.0.0"
}
},
"node_modules/@redis/search": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@redis/search/-/search-1.2.0.tgz",
"integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==",
"license": "MIT",
"peerDependencies": {
"@redis/client": "^1.0.0"
}
},
"node_modules/@redis/time-series": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.1.0.tgz",
"integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==",
"license": "MIT",
"peerDependencies": {
"@redis/client": "^1.0.0"
}
},
"node_modules/@rollup/rollup-android-arm-eabi": { "node_modules/@rollup/rollup-android-arm-eabi": {
"version": "4.37.0", "version": "4.37.0",
"resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz", "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.37.0.tgz",
@@ -1473,15 +1413,6 @@
"node": ">= 16" "node": ">= 16"
} }
}, },
"node_modules/cluster-key-slot": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz",
"integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==",
"license": "Apache-2.0",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/color-convert": { "node_modules/color-convert": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
@@ -1730,15 +1661,6 @@
"node": "^8.16.0 || ^10.6.0 || >=11.0.0" "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
} }
}, },
"node_modules/generic-pool": {
"version": "3.9.0",
"resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz",
"integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==",
"license": "MIT",
"engines": {
"node": ">= 4"
}
},
"node_modules/get-tsconfig": { "node_modules/get-tsconfig": {
"version": "4.10.0", "version": "4.10.0",
"resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.0.tgz", "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.10.0.tgz",
@@ -2102,23 +2024,6 @@
"node": "^10 || ^12 || >=14" "node": "^10 || ^12 || >=14"
} }
}, },
"node_modules/redis": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/redis/-/redis-4.7.0.tgz",
"integrity": "sha512-zvmkHEAdGMn+hMRXuMBtu4Vo5P6rHQjLoHftu+lBqq8ZTA3RCVC/WzD790bkKKiNFp7d5/9PcSD19fJyyRvOdQ==",
"license": "MIT",
"workspaces": [
"./packages/*"
],
"dependencies": {
"@redis/bloom": "1.2.0",
"@redis/client": "1.6.0",
"@redis/graph": "1.1.1",
"@redis/json": "1.0.7",
"@redis/search": "1.2.0",
"@redis/time-series": "1.1.0"
}
},
"node_modules/resolve-pkg-maps": { "node_modules/resolve-pkg-maps": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz", "resolved": "https://registry.npmjs.org/resolve-pkg-maps/-/resolve-pkg-maps-1.0.0.tgz",
@@ -2867,12 +2772,6 @@
} }
} }
}, },
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
"license": "ISC"
},
"node_modules/yn": { "node_modules/yn": {
"version": "3.1.1", "version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",

View File

@@ -1,6 +1,6 @@
{
"name": "discord-ollama",
"version": "0.8.5",
"version": "0.8.7",
"description": "Ollama Integration into discord",
"main": "build/index.js",
"exports": "./build/index.js",
@@ -13,24 +13,22 @@
"client": "npm run build && npm run prod", "client": "npm run build && npm run prod",
"clean": "docker compose down && docker rmi $(docker images | grep kevinthedang | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)", "clean": "docker compose down && docker rmi $(docker images | grep kevinthedang | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
"start": "docker compose build --no-cache && docker compose up -d", "start": "docker compose build --no-cache && docker compose up -d",
"docker:clean": "docker rm -f discord && docker rm -f ollama && docker rm -f redis && docker network prune -f && docker rmi $(docker images | grep kevinthedang | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)", "docker:clean": "docker rm -f discord && docker rm -f ollama && docker network prune -f && docker rmi $(docker images | grep kevinthedang | tr -s ' ' | cut -d ' ' -f 3) && docker rmi $(docker images --filter \"dangling=true\" -q --no-trunc)",
"docker:network": "docker network create --subnet=172.18.0.0/16 ollama-net", "docker:network": "docker network create --subnet=172.18.0.0/16 ollama-net",
"docker:build": "docker build --no-cache -t kevinthedang/discord-ollama:$(node -p \"require('./package.json').version\") .", "docker:build": "docker build --no-cache -t kevinthedang/discord-ollama:$(node -p \"require('./package.json').version\") .",
"docker:build-latest": "docker build --no-cache -t kevinthedang/discord-ollama:latest .", "docker:build-latest": "docker build --no-cache -t kevinthedang/discord-ollama:latest .",
"docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 kevinthedang/discord-ollama:$(node -p \"require('./package.json').version\")", "docker:client": "docker run -d -v discord:/src/app --name discord --network ollama-net --ip 172.18.0.3 kevinthedang/discord-ollama:$(node -p \"require('./package.json').version\")",
"docker:redis": "docker run -d -v redis:/root/.redis -p 6379:6379 --name redis --network ollama-net --ip 172.18.0.4 redis:latest",
"docker:ollama": "docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest", "docker:ollama": "docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest",
"docker:ollama-cpu": "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest", "docker:ollama-cpu": "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama --network ollama-net --ip 172.18.0.2 ollama/ollama:latest",
"docker:start": "docker network prune -f && npm run docker:network && npm run docker:build && npm run docker:redis && npm run docker:client && npm run docker:ollama", "docker:start": "docker network prune -f && npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama",
"docker:start-cpu": "docker network prune -f && npm run docker:network && npm run docker:build && npm run docker:redis && npm run docker:client && npm run docker:ollama-cpu" "docker:start-cpu": "docker network prune -f && npm run docker:network && npm run docker:build && npm run docker:client && npm run docker:ollama-cpu"
}, },
"author": "Kevin Dang", "author": "Kevin Dang",
"license": "ISC", "license": "ISC",
"dependencies": { "dependencies": {
"discord.js": "^14.20.0", "discord.js": "^14.20.0",
"dotenv": "^16.5.0", "dotenv": "^16.5.0",
"ollama": "^0.5.15", "ollama": "^0.5.15"
"redis": "^4.7.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^22.13.14", "@types/node": "^22.13.14",
@@ -45,4 +43,4 @@
"npm": ">=10.9.0", "npm": ">=10.9.0",
"node": ">=22.12.0" "node": ">=22.12.0"
} }
} }

View File

@@ -1,7 +1,6 @@
import { Client, GatewayIntentBits } from 'discord.js'
import { Ollama } from 'ollama'
import { createClient } from 'redis'
import { Queue } from './queues/queue.js'
import { Queue } from './components/index.js'
import { UserMessage, registerEvents } from './utils/index.js'
import Events from './events/index.js'
import Keys from './keys.js'
@@ -16,11 +15,6 @@ const client = new Client({
]
})
// initialize connection to redis
const redis = createClient({
url: `redis://${Keys.redisHost}:${Keys.redisPort}`,
})
// initialize connection to ollama container
export const ollama = new Ollama({
host: `http://${Keys.ipAddress}:${Keys.portAddress}`,
@@ -29,20 +23,11 @@ export const ollama = new Ollama({
// Create Queue managed by Events
const messageHistory: Queue<UserMessage> = new Queue<UserMessage>
// register all events
// Create Channel History Queue managed by Events
registerEvents(client, Events, messageHistory, ollama, Keys.defaultModel)
const channelMessageHistory: Queue<UserMessage> = new Queue<UserMessage>
// Try to connect to redis
// register all events
await redis.connect()
registerEvents(client, Events, messageHistory, channelMessageHistory, ollama, Keys.defaultModel)
.then(response => {
console.log('[Redis] Successfully Connected')
})
.catch(error => {
console.error('[Redis] Connection Error. See error below:\n', error)
console.warn('[Redis] Failed to connect to Redis Database, using local system')
// TODO: create boolean flag that will probably be used in messageCreate.ts if redis database is down
// When implementing this boolean flag, move connection to database BEFORE the registerEvents method
})
// Try to log in the client
await client.login(Keys.clientToken)

View File

@@ -1,46 +0,0 @@
/**
* @class Logger
* @description A class to handle logging messages
* @method log
*/
export class Logger {
private logPrefix: string = ''
private type: string = 'log'
private constructPrefix(component?: string, method?: string): string {
let prefix = this.type.toUpperCase()
if (component) {
prefix += ` [${component}`
if (method) prefix += `: ${method}`
prefix += ']'
}
return prefix
}
public bind(component?: string, method?: string): CallableFunction {
let tempPrefix = this.constructPrefix(component, method)
if (tempPrefix !== this.logPrefix) this.logPrefix = tempPrefix
switch (this.type) {
case 'warn':
return console.warn.bind(console, this.logPrefix)
case 'error':
return console.error.bind(console, this.logPrefix)
case 'log':
default:
return console.log.bind(console, this.logPrefix)
}
}
public log(type: string, message: unknown, component?: string, method?: string): void {
if (type && type !== this.type) this.type = type
let log = this.bind(component, method)
log(message)
}
}

View File

@@ -1,2 +0,0 @@
export * from './queue.js'
export * from './binder.js'

View File

@@ -1,6 +1,6 @@
import { TextChannel } from 'discord.js'
import { event, Events, normalMessage, UserMessage, clean, addToChannelContext } from '../utils/index.js'
import {
event, Events, normalMessage, UserMessage, clean,
getChannelInfo, getServerConfig, getUserConfig, openChannelInfo,
openConfig, UserConfig, getAttachmentData, getTextFileAttachmentData
} from '../utils/index.js'
@@ -11,7 +11,7 @@ import {
*
* @param message the message received from the channel
*/
export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client, defaultModel }, message) => {
export default event(Events.MessageCreate, async ({ log, msgHist, channelHistory, ollama, client, defaultModel }, message) => {
const clientId = client.user!!.id
let cleanedMessage = clean(message.content, clientId)
log(`Message \"${cleanedMessage}\" from ${message.author.tag} in channel/thread ${message.channelId}.`)
@@ -19,6 +19,61 @@ export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client
// Do not respond if bot talks in the chat
if (message.author.username === message.client.user.username) return
// Save User Chat even if not for the bot
let channelContextHistory: UserMessage[] = await new Promise((resolve) => {
getChannelInfo(`${message.channelId}-context.json`, (channelInfo) => {
if (channelInfo?.messages)
resolve(channelInfo.messages)
else {
log(`Channel/Thread ${message.channel}-context does not exist. File will be created shortly...`)
resolve([])
}
})
})
if (channelContextHistory.length === 0) {
channelContextHistory = await new Promise((resolve) => {
addToChannelContext(message.channelId,
message.channel as TextChannel
)
getChannelInfo(`${message.channelId}-context.json`, (channelInfo) => {
if (channelInfo?.messages)
resolve(channelInfo.messages)
else {
log(`Channel/Thread ${message.channel}-context does not exist. File will be created shortly...`)
}
})
})
}
// Set Channel History Queue
channelHistory.setQueue(channelContextHistory)
// get message attachment if exists
const attachment = message.attachments.first()
let messageAttachment: string[] = []
if (attachment && attachment.name?.endsWith(".txt"))
cleanedMessage += ' ' + await getTextFileAttachmentData(attachment)
else if (attachment)
messageAttachment = await getAttachmentData(attachment)
while (channelHistory.size() >= channelHistory.capacity) channelHistory.dequeue()
// push user response to channel history
console.log
channelHistory.enqueue({
role: 'user',
content: cleanedMessage,
images: messageAttachment || []
})
// Store in Channel Context
addToChannelContext(message.channelId,
message.channel as TextChannel,
channelHistory.getItems()
)
// Only respond if message mentions the bot
if (!message.mentions.has(clientId)) return
@@ -139,15 +194,6 @@ export default event(Events.MessageCreate, async ({ log, msgHist, ollama, client
if (!userConfig)
throw new Error(`Failed to initialize User Preference for **${message.author.username}**.\n\nIt's likely you do not have a model set. Please use the \`switch-model\` command to do that.`)
// get message attachment if exists
const attachment = message.attachments.first()
let messageAttachment: string[] = []
if (attachment && attachment.name?.endsWith(".txt"))
cleanedMessage += await getTextFileAttachmentData(attachment)
else if (attachment)
messageAttachment = await getAttachmentData(attachment)
const model: string = userConfig.options['switch-model']
// set up new queue

View File

@@ -4,9 +4,7 @@ export const Keys = {
clientToken: getEnvVar('CLIENT_TOKEN'),
ipAddress: getEnvVar('OLLAMA_IP', '127.0.0.1'), // default ollama ip if none
portAddress: getEnvVar('OLLAMA_PORT', '11434'), // default ollama port if none
defaultModel: getEnvVar('MODEL', 'llama3.2'),
defaultModel: getEnvVar('MODEL', 'llama3.2')
redisHost: getEnvVar('REDIS_IP', '172.18.0.4'), // default redis host if none
redisPort: parseInt(getEnvVar('REDIS_PORT', '6379')) // default redis port if none
} as const // readonly keys
export default Keys

View File

@@ -1,6 +1,6 @@
import type { ClientEvents, Awaitable, Client } from 'discord.js'
import { Ollama } from 'ollama'
import { Queue } from '../components/index.js'
import { Queue } from '../queues/queue.js'
// Export events through here to reduce amount of imports
export { Events } from 'discord.js'
@@ -37,6 +37,7 @@ export interface EventProps {
client: Client,
log: LogMethod,
msgHist: Queue<UserMessage>,
channelHistory: Queue<UserMessage>,
ollama: Ollama,
defaultModel: String
}
@@ -78,6 +79,7 @@ export function registerEvents(
client: Client,
events: Event[],
msgHist: Queue<UserMessage>,
channelHistory: Queue<UserMessage>,
ollama: Ollama,
defaultModel: String
): void {
@@ -88,7 +90,7 @@ export function registerEvents(
// Handle Errors, call callback, log errors as needed
try {
callback({ client, log, msgHist, ollama, defaultModel }, ...args)
callback({ client, log, msgHist, channelHistory, ollama, defaultModel }, ...args)
} catch (error) {
log('[Uncaught Error]', error)
}
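The `Queue<UserMessage>` passed through `EventProps` and `registerEvents` now comes from `./queues/queue.js`, but the class itself is not part of this diff. A minimal sketch consistent with how it is used in these hunks (`enqueue`, `dequeue`, `size()`, `getItems()`, `setQueue()`, and a public `capacity`) might look like the following; the default capacity value is an assumption, not the project's actual setting.

// Sketch of a bounded FIFO queue matching the usage shown in this diff.
export class Queue<T> {
    private items: T[] = []

    constructor(public capacity: number = 5) { }   // default capacity assumed for illustration

    enqueue(item: T): void {
        this.items.push(item)
    }

    dequeue(): T | undefined {
        return this.items.shift()
    }

    size(): number {
        return this.items.length
    }

    getItems(): T[] {
        return [...this.items]
    }

    setQueue(items: T[]): void {
        this.items = [...items]
    }
}

// Example wiring, mirroring client.ts above:
// const channelMessageHistory: Queue<UserMessage> = new Queue<UserMessage>()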

View File

@@ -56,6 +56,39 @@ export async function clearChannelInfo(filename: string, channel: TextChannel, u
return cleanedHistory
}
export async function addToChannelContext(filename: string, channel : TextChannel | ThreadChannel, messages: UserMessage[] = []): Promise<void> {
const fullFileName = `data/${filename}-context.json`
if (fs.existsSync(fullFileName)) {
fs.readFile(fullFileName, 'utf8', (error, data) => {
if (error)
console.log(`[Error: addToChannelContext] Incorrect file format`)
else {
const object = JSON.parse(data)
if (object['messages'].length === 0)
object['messages'] = messages as []
else if (object['messages'].length !== 0 && messages.length !== 0)
object['messages'] = messages as []
fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
}
})
} else { // channel context does not exist, create it
const object: Configuration = JSON.parse(
`{
\"id\": \"${channel?.id}\",
\"name\": \"${channel?.name}\",
\"messages\": []
}`
)
const directory = path.dirname(fullFileName)
if (!fs.existsSync(directory))
fs.mkdirSync(directory, { recursive: true })
fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
console.log(`[Util: addToChannelContext] Created '${fullFileName}' in working directory`)
}
}
/**
* Method to open the channel history
*
@@ -64,7 +97,7 @@ export async function clearChannelInfo(filename: string, channel: TextChannel, u
* @param user the user's name
* @param messages their messages
*/
export async function openChannelInfo(this: any, filename: string, channel: TextChannel | ThreadChannel, user: string, messages: UserMessage[] = []): Promise<void> {
export async function openChannelInfo(filename: string, channel: TextChannel | ThreadChannel, user: string, messages: UserMessage[] = []): Promise<void> {
const fullFileName = `data/${filename}-${user}.json`
if (fs.existsSync(fullFileName)) {
fs.readFile(fullFileName, 'utf8', (error, data) => {
@@ -95,7 +128,7 @@ export async function openChannelInfo(this: any, filename: string, channel: Text
// only creating it, no need to add anything
fs.writeFileSync(fullFileName, JSON.stringify(object, null, 2))
console.log(`[Util: ${this.name}] Created '${fullFileName}' in working directory`)
console.log(`[Util: openChannelInfo] Created '${fullFileName}' in working directory`)
}
}
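The `addToChannelContext` helper added above persists per-channel context as JSON under `data/`. A hedged sketch of the file shape it writes, plus a hypothetical reader for it (not the project's `getChannelInfo`), could look like:

import fs from 'node:fs'

// Shape inferred from the JSON template written by addToChannelContext above.
interface ChannelContext {
    id: string
    name: string
    messages: { role: string, content: string, images?: string[] }[]
}

// Hypothetical helper: read a channel's context file back, if it exists.
function readChannelContext(channelId: string): ChannelContext | undefined {
    const fullFileName = `data/${channelId}-context.json`
    if (!fs.existsSync(fullFileName)) return undefined
    return JSON.parse(fs.readFileSync(fullFileName, 'utf8')) as ChannelContext
}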

View File

@@ -10,7 +10,7 @@ import path from 'path'
* @param value new value to assign
*/
// add type of change (server, user)
export function openConfig(this: any, filename: string, key: string, value: any) {
export function openConfig(filename: string, key: string, value: any) {
const fullFileName = `data/${filename}`
// check if the file exists, if not then make the config file
@@ -41,7 +41,7 @@ export function openConfig(this: any, filename: string, key: string, value: any)
fs.mkdirSync(directory, { recursive: true })
fs.writeFileSync(`data/${filename}`, JSON.stringify(object, null, 2))
console.log(`[Util: ${this.name}] Created '${filename}' in working directory`)
console.log(`[Util: openConfig] Created '${filename}' in working directory`)
} }
} }

View File

@@ -1,5 +1,6 @@
import { ChatResponse, AbortableAsyncIterator } from "ollama"
import { ChatResponse } from "ollama"
import { ChatParams } from "../index.js"
import { AbortableAsyncIterator } from "ollama/src/utils.js"
/**
* Method to query the Ollama client for async generation

View File

@@ -1,7 +1,8 @@
import { Message, SendableChannels } from 'discord.js'
import { ChatResponse, Ollama, AbortableAsyncIterator } from 'ollama'
import { ChatResponse, Ollama } from 'ollama'
import { ChatParams, UserMessage, streamResponse, blockResponse } from './index.js'
import { Queue } from '../components/index.js'
import { Queue } from '../queues/queue.js'
import { AbortableAsyncIterator } from 'ollama/src/utils.js'
/**
* Method to send replies as normal text on discord like any other user
@@ -10,7 +11,6 @@ import { Queue } from '../components/index.js'
* @param msgHist message history between user and model
*/
export async function normalMessage(
this: any,
message: Message,
ollama: Ollama,
model: string,
@@ -56,6 +56,10 @@ export async function normalMessage(
response = await blockResponse(params)
result = response.message.content
// check if there is a <think>...</think> sequence from the bot.
if (hasThinking(result))
result = result.replace(/<think>[\s\S]*?<\/think>/g, '').trim()
// check if message length > discord max for normal messages
if (result.length > 2000) {
sentMessage.edit(result.slice(0, 2000))
@@ -73,14 +77,19 @@ export async function normalMessage(
sentMessage.edit(result)
}
} catch (error: any) {
console.log(`[Util: ${this.name}] Error creating message: ${error.message}`)
console.log(`[Util: messageNormal] Error creating message: ${error.message}`)
if (error.message.includes('try pulling it first'))
if (error.message.includes('fetch failed'))
sentMessage.edit(`**Response generation failed.**\n\nReason: You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`)
error.message = 'Missing ollama service on machine'
else
else if (error.message.includes('try pulling it first'))
sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
error.message = `You do not have the ${model} downloaded. Ask an admin to pull it using the \`pull-model\` command.`
sentMessage.edit(`**Response generation failed.**\n\nReason: ${error.message}`)
}
})
// return the string representation of ollama query response
return result
}
function hasThinking(message: string): boolean {
return /<think>[\s\S]*?<\/think>/i.test(message)
}
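The `<think>` filtering added above is easy to check in isolation; a small sketch applying the same regex to a made-up response:

// Same pattern as hasThinking/replace above, applied to a sample string.
const sample = '<think>internal reasoning the user should not see</think>The final answer.'

const hasThinking = (message: string): boolean => /<think>[\s\S]*?<\/think>/i.test(message)

const visible = hasThinking(sample)
    ? sample.replace(/<think>[\s\S]*?<\/think>/g, '').trim()
    : sample

console.log(visible) // -> 'The final answer.'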

View File

@@ -5,16 +5,9 @@ import { describe, expect, it, vi } from 'vitest'
import commands from '../src/commands/index.js'
/**
* Mocking redis found in client.ts because of the commands
* Mocking client.ts because of the commands
*/
vi.mock('../src/client.js', () => ({
vi.mock('../src/client.js', () => ({}))
redis: {
createClient: vi.fn(),
connect: vi.fn(),
get: vi.fn(),
set: vi.fn()
}
}))
/**
* Commands test suite, tests the commands object

View File

@@ -1,5 +1,5 @@
import { describe, expect, it } from 'vitest'
import { Queue } from '../src/components/index.js'
import { Queue } from '../src/queues/queue.js'
/**
* Queue test suite, tests the Queue class