add ollama config options type
types.d.ts
@@ -41,6 +41,7 @@ export interface OllamaRequest {
   * Whether to stream responses from the API, or have it sent all as one payload.
   */
  stream?: boolean = false; // stream response vs get response in one full message
  options?: OllamaConfigOptions;
}

export interface OllamaResponse {
@@ -68,8 +69,32 @@ export interface Mention {
  username: string;
}

export interface WSEvent {
  event: "update" | "status.update" | "notification";
  payload: string;
  stream: "user" | "direct";
}

export interface OllamaConfigOptions {
  num_keep?: number;
  seed?: number;
  num_predict?: number;
  top_k?: number;
  top_p?: number;
  min_p?: number;
  typical_p?: number;
  repeat_last_n?: number;
  temperature?: number;
  repeat_penalty?: number;
  presence_penalty?: number;
  frequency_penalty?: number;
  mirostat?: number;
  mirostat_tau?: number;
  mirostat_eta?: number;
  penalize_newline?: boolean;
  stop?: string[];
  numa?: boolean;
  num_ctx?: number;
  num_batch?: number;
  num_gpu?: number;
  main_gpu?: number;
  low_vram?: boolean;
  vocab_only?: boolean;
  use_mmap?: boolean;
  use_mlock?: boolean;
  num_thread?: number;
}
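The new options field lets a caller tune generation settings per request; the field names mirror Ollama's runtime parameters (sampling, context size, hardware knobs). A minimal sketch of building such an object follows -- the specific values are illustrative choices, not Ollama defaults:

// Sketch only: an example OllamaConfigOptions object with a conservative
// sampling setup. Every field name comes from the interface above.
const options: OllamaConfigOptions = {
  temperature: 0.7,  // sampling temperature; higher tends to give more varied output
  top_p: 0.9,        // nucleus-sampling probability cutoff
  num_ctx: 4096,     // context window size in tokens
  num_predict: 256,  // cap on the number of generated tokens
  seed: 42,          // fixed seed for reproducible output
  stop: ["\n\n"],    // stop generation at a blank line
};

// The object rides along on a request via the new OllamaRequest.options field,
// e.g. { ...otherRequestFields, stream: false, options }.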