Mirror of https://github.com/kevinthedang/discord-ollama.git

Commit: Removed GUILD_ID references (#109)
.github/workflows/build.yml (vendored, 2 lines changed)

@@ -31,7 +31,6 @@ jobs:
 run: |
   touch .env
   echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
-  echo GUILD_ID = ${{ secrets.GUILD_ID }} >> .env
   echo MODEL = ${{ secrets.MODEL }} >> .env
   echo CLIENT_UID = ${{ secrets.CLIENT_UID }} >> .env
   echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
@@ -60,7 +59,6 @@ jobs:
 run: |
   touch .env
   echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
-  echo GUILD_ID = ${{ secrets.GUILD_ID }} >> .env
   echo MODEL = ${{ secrets.MODEL }} >> .env
   echo CLIENT_UID = ${{ secrets.CLIENT_UID }} >> .env
   echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
.github/workflows/release.yml (vendored, 1 line changed)

@@ -24,7 +24,6 @@ jobs:
 run: |
   touch .env
   echo CLIENT_TOKEN = NOT_REAL_TOKEN >> .env
-  echo GUILD_ID = 123456 >> .env
   echo MODEL = ${{ secrets.MODEL }} >> .env
   echo CLIENT_UID = ${{ secrets.CLIENT_UID }} >> .env
   echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
.github/workflows/test.yml (vendored, 1 line changed)

@@ -39,7 +39,6 @@ jobs:
 run: |
   touch .env
   echo CLIENT_TOKEN = ${{ secrets.BOT_TOKEN }} >> .env
-  echo GUILD_ID = ${{ secrets.GUILD_ID }} >> .env
   echo MODEL = ${{ secrets.MODEL }} >> .env
   echo CLIENT_UID = ${{ secrets.CLIENT_UID }} >> .env
   echo OLLAMA_IP = ${{ secrets.OLLAMA_IP }} >> .env
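
Taken together, the three workflow steps above now build the .env without a GUILD_ID entry. A minimal sketch of the file those steps would produce, with placeholders standing in for the repository secrets (the values shown are illustrative only, not taken from the commit):

    CLIENT_TOKEN = <value of secrets.BOT_TOKEN>
    MODEL = <value of secrets.MODEL>
    CLIENT_UID = <value of secrets.CLIENT_UID>
    OLLAMA_IP = <value of secrets.OLLAMA_IP>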
@@ -11,7 +11,6 @@ services:
 image: kevinthedang/discord-ollama:0.5.9
 environment:
   CLIENT_TOKEN: ${CLIENT_TOKEN}
-  GUILD_ID: ${GUILD_ID}
   MODEL: ${MODEL}
   CLIENT_UID: ${CLIENT_UID}
   OLLAMA_IP: ${OLLAMA_IP}
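
The hunk above appears to come from the project's compose configuration (the file name is not shown in this excerpt); after the change the service no longer expects GUILD_ID. A minimal sketch of supplying the remaining variables through a local .env that Docker Compose substitutes into the ${...} references — the file contents and values here are placeholders/assumptions, not part of the commit:

    # Sketch only: write a local .env for compose variable substitution
    cat > .env <<'EOF'
    CLIENT_TOKEN=your-discord-bot-token
    MODEL=llama3
    CLIENT_UID=000000000000000000
    OLLAMA_IP=127.0.0.1
    OLLAMA_PORT=11434
    EOF

    docker compose up -d   # compose reads .env in the project directory for ${VAR} substitution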
@@ -14,7 +14,7 @@
 ## To Run Locally (without Docker)
 * Run `npm install` to install the npm packages.
 * Ensure that your [.env](../.env.sample) file's `OLLAMA_IP` is `127.0.0.1` to work properly.
-* You only need your `CLIENT_TOKEN`, `GUILD_ID`, `MODEL`, `CLIENT_UID`, `OLLAMA_IP`, `OLLAMA_PORT`.
+* You only need your `CLIENT_TOKEN`, `MODEL`, `CLIENT_UID`, `OLLAMA_IP`, `OLLAMA_PORT`.
 * The ollama ip and port should just use it's defaults by nature. If not, utilize `OLLAMA_IP = 127.0.0.1` and `OLLAMA_PORT = 11434`.
 * Now, you can run the bot by running `npm run client` which will build and run the decompiled typescript and run the setup for ollama.
 * **IMPORTANT**: This must be ran in the wsl/Linux instance to work properly! Using Command Prompt/Powershell/Git Bash/etc. will not work on Windows (at least in my experience).
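
For the local (non-Docker) path described in the last hunk, the updated docs boil down to roughly the following sketch, assuming a .env like the one in the compose example above sits in the project root (variable names come from the docs; everything else is illustrative):

    npm install        # install the npm packages
    npm run client     # build the TypeScript and start the bot; per the docs, run this inside WSL/Linux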