diff --git a/docs-preview-960/options.html b/docs-preview-960/options.html
index d5fc63df..d7e24942 100644
--- a/docs-preview-960/options.html
+++ b/docs-preview-960/options.html
@@ -899,6 +899,64 @@ null or string
 
 Default: null
 
 Declared by:
 
+<nvf/modules/plugins/assistant/avante/avante-nvim.nix>
+
+vim.assistant.avante-nvim.setupOpts.providers
+
+Define settings for builtin and custom providers.
+
+Type: null or (attribute set)
+
+Default: null
+
+Example:
+  openai = {
+    endpoint = "https://api.openai.com/v1";
+    model = "gpt-4o"; # your desired model
+    timeout = 30000; # Timeout in milliseconds, increase this for reasoning models
+    extra_request_body = {
+      temperature = 0;
+      max_completion_tokens = 8192; # Increase this to include reasoning tokens (for reasoning models)
+      reasoning_effort = "medium"; # low|medium|high, only used for reasoning models
+    };
+  };
+  ollama = {
+    endpoint = "http://127.0.0.1:11434";
+    timeout = 30000; # Timeout in milliseconds
+    extra_request_body = {
+      options = {
+        temperature = 0.75;
+        num_ctx = 20480;
+        keep_alive = "5m";
+      };
+    };
+  };
+  groq = {
+    __inherited_from = "openai";
+    api_key_name = "GROQ_API_KEY";
+    endpoint = "https://api.groq.com/openai/v1/";
+    model = "llama-3.3-70b-versatile";
+    disable_tools = true;
+    extra_request_body = {
+      temperature = 1;
+      max_tokens = 32768; # remember to increase this value, otherwise it will stop generating halfway
+    };
+  };
+
+

Declared by:
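
For orientation, a minimal sketch of how the new option might be set from an nvf configuration. The `enable` toggle and the `setupOpts.provider` selector are assumptions about the surrounding module; only `setupOpts.providers` and the openai values come from the example above.

# Hypothetical nvf configuration fragment, not part of this diff.
# `enable` and `setupOpts.provider` are assumed names; the openai
# settings mirror the documented example.
{
  vim.assistant.avante-nvim = {
    enable = true;
    setupOpts = {
      provider = "openai"; # assumed option selecting the active provider
      providers = {
        openai = {
          endpoint = "https://api.openai.com/v1";
          model = "gpt-4o";
          timeout = 30000; # milliseconds
          extra_request_body = {
            temperature = 0;
            max_completion_tokens = 8192;
          };
        };
      };
    };
  };
}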

@@ -947,48 +1005,6 @@ signed integer

 Default: 600
 
 Declared by:
 
-<nvf/modules/plugins/assistant/avante/avante-nvim.nix>
-
-vim.assistant.avante-nvim.setupOpts.vendors
-
-Define your custom providers.
-
-Type: null or (attribute set)
-
-Default: null
-
-Example:
-ollama = {
-  __inherited_from = "openai";
-  api_key_name = "";
-  endpoint = "http://127.0.0.1:11434/v1";
-  model = "qwen2.5-coder:7b";
-  max_tokens = 4096;
-  disable_tools = true;
-};
-ollama_ds = {
-  __inherited_from = "openai";
-  api_key_name = "";
-  endpoint = "http://127.0.0.1:11434/v1";
-  model = "deepseek-r1:7b";
-  max_tokens = 4096;
-  disable_tools = true;
-};
-
-
 
 Declared by:
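
With `setupOpts.vendors` removed by this hunk, a custom provider formerly declared there would now be declared under `setupOpts.providers`, reusing the `__inherited_from` mechanism shown in the new groq example. A hedged migration sketch based on the removed ollama entry; moving `max_tokens` under `extra_request_body` follows the new example's pattern and is an assumption, not a documented requirement.

# Before this change the custom provider lived under setupOpts.vendors:
#   vim.assistant.avante-nvim.setupOpts.vendors.ollama = { ... };
# Under the new option the same attribute set moves to setupOpts.providers.
{
  vim.assistant.avante-nvim.setupOpts.providers = {
    ollama = {
      __inherited_from = "openai";
      api_key_name = "";
      endpoint = "http://127.0.0.1:11434/v1";
      model = "qwen2.5-coder:7b";
      disable_tools = true;
      extra_request_body = {
        max_tokens = 4096; # previously a top-level vendor attribute
      };
    };
  };
}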