import { ChatResponse } from "ollama"

import { ChatParams } from "./index.js"
/**
 * Method to query the Ollama client for an asynchronously streamed response
 * @param params parameters to query the client
 * @returns AsyncGenerator yielding ChatResponse chunks from the Ollama client
 */
export async function streamResponse(params: ChatParams): Promise<AsyncGenerator<ChatResponse, any, unknown>> {
    return await params.ollama.chat({
        model: params.model,
        messages: params.msgHist,
        options: {
            num_thread: 8, // remove if further optimization is needed
            mirostat: 1,
            mirostat_tau: 2.0,
            top_k: 70
        },
        stream: true
    })
}
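
// Usage sketch (not part of this module): consuming the generator returned by
// streamResponse. The ChatParams fields mirror how they are read above; the
// client, model name, and message history are hypothetical placeholders.
//
// const stream = await streamResponse({ ollama: client, model: "llama3", msgHist: history })
// for await (const chunk of stream) {
//     process.stdout.write(chunk.message.content)
// }
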
/**
 * Method to query the Ollama client for a block response
 * @param params parameters to query the client
 * @returns ChatResponse generated by the Ollama client
 */
export async function blockResponse(params: ChatParams): Promise<ChatResponse> {
    return await params.ollama.chat({
        model: params.model,
        messages: params.msgHist,
        options: {
            num_thread: 8, // remove if further optimization is needed
            mirostat: 1,
            mirostat_tau: 2.0,
            top_k: 70
        },
        stream: false
    })
}
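
// Usage sketch (not part of this module): blockResponse resolves once the full
// reply has been generated, so it can be awaited directly. The ChatParams
// values are hypothetical placeholders, as above.
//
// const reply = await blockResponse({ ollama: client, model: "llama3", msgHist: history })
// console.log(reply.message.content)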