reasoning effort control

Mirror of https://github.com/coalaura/whiskr.git, last synced 2025-09-08 08:39:53 +00:00.
.github/chat.png (vendored): binary file not shown. Size: 196 KiB before, 127 KiB after.
@@ -17,10 +17,10 @@ whiskr is a private, self-hosted web chat interface for interacting with AI mode
 - Tags indicate if a model supports **tools**, **vision**, or **reasoning**
 - Search field with fuzzy matching to quickly find models
 - Models are listed newest -> oldest
+- Reasoning effort control
 
 ## TODO
 
-- Reasoning effort control
 - Retry button for assistant messages
 - Import and export of chats
 - Web search tool
chat.go (27 lines changed)
@@ -16,10 +16,16 @@ type Message struct {
 	Text string `json:"text"`
 }
 
+type Reasoning struct {
+	Effort string `json:"effort"`
+	Tokens int    `json:"tokens"`
+}
+
 type Request struct {
 	Prompt      string    `json:"prompt"`
 	Model       string    `json:"model"`
 	Temperature float64   `json:"temperature"`
+	Reasoning   Reasoning `json:"reasoning"`
 	Messages    []Message `json:"messages"`
 }
 
@@ -39,6 +45,21 @@ func (r *Request) Parse() (*openrouter.ChatCompletionRequest, error) {
 
 	request.Temperature = float32(r.Temperature)
 
+	if model.Reasoning {
+		request.Reasoning = &openrouter.ChatCompletionReasoning{}
+
+		switch r.Reasoning.Effort {
+		case "high", "medium", "low":
+			request.Reasoning.Effort = &r.Reasoning.Effort
+		default:
+			if r.Reasoning.Tokens <= 0 || r.Reasoning.Tokens > 1024*1024 {
+				return nil, fmt.Errorf("invalid reasoning tokens (1-1048576): %d", r.Reasoning.Tokens)
+			}
+
+			request.Reasoning.MaxTokens = &r.Reasoning.Tokens
+		}
+	}
+
 	prompt, err := BuildPrompt(r.Prompt, model)
 	if err != nil {
 		return nil, err
@@ -61,12 +82,6 @@ func (r *Request) Parse() (*openrouter.ChatCompletionRequest, error) {
 		})
 	}
 
-	h := "high"
-
-	request.Reasoning = &openrouter.ChatCompletionReasoning{
-		Effort: &h,
-	}
-
 	return &request, nil
 }
 
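Taken together, the new Reasoning struct and the Parse changes mean a request either names an effort level ("high", "medium" or "low") or, when the effort is left empty, supplies an explicit token budget between 1 and 1048576; anything else is rejected. Below is a minimal standalone sketch of that branch logic; the chatReasoning type is a local stand-in for openrouter.ChatCompletionReasoning (an assumption, modelling only the two pointer fields used above).

package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors the Reasoning struct added in chat.go above.
type Reasoning struct {
	Effort string `json:"effort"`
	Tokens int    `json:"tokens"`
}

// Local stand-in for openrouter.ChatCompletionReasoning; only the two
// pointer fields used by the diff are modelled here (an assumption).
type chatReasoning struct {
	Effort    *string
	MaxTokens *int
}

// resolve applies the same switch as the new block in Request.Parse.
func resolve(r Reasoning) (*chatReasoning, error) {
	out := &chatReasoning{}

	switch r.Effort {
	case "high", "medium", "low":
		out.Effort = &r.Effort
	default:
		if r.Tokens <= 0 || r.Tokens > 1024*1024 {
			return nil, fmt.Errorf("invalid reasoning tokens (1-1048576): %d", r.Tokens)
		}

		out.MaxTokens = &r.Tokens
	}

	return out, nil
}

func main() {
	// Two example payloads in the shape the frontend sends.
	for _, raw := range []string{
		`{"effort":"medium","tokens":0}`,
		`{"effort":"","tokens":2048}`,
	} {
		var r Reasoning

		if err := json.Unmarshal([]byte(raw), &r); err != nil {
			panic(err)
		}

		resolved, err := resolve(r)
		if err != nil {
			fmt.Println(raw, "->", err)
			continue
		}

		if resolved.Effort != nil {
			fmt.Println(raw, "-> effort:", *resolved.Effort)
		} else {
			fmt.Println(raw, "-> max tokens:", *resolved.MaxTokens)
		}
	}
}

Running it takes the effort branch for the first payload and the max-tokens branch for the second, which is exactly the split the removed hard-coded "high" effort at the end of Parse used to paper over.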
models.go (21 lines changed)
@@ -13,6 +13,8 @@ type Model struct {
 	Name        string   `json:"name"`
 	Description string   `json:"description"`
 	Tags        []string `json:"tags,omitempty"`
+
+	Reasoning bool `json:"-"`
 }
 
 var ModelMap = make(map[string]*Model)
@@ -38,11 +40,15 @@ func LoadModels() ([]*Model, error) {
 			name = name[index+2:]
 		}
 
+		tags, reasoning := GetModelTags(model)
+
 		m := &Model{
 			ID:          model.ID,
 			Name:        name,
 			Description: model.Description,
-			Tags:        GetModelTags(model),
+			Tags:        tags,
+
+			Reasoning: reasoning,
 		}
 
 		models[index] = m
@@ -53,10 +59,17 @@ func LoadModels() ([]*Model, error) {
 	return models, nil
 }
 
-func GetModelTags(model openrouter.Model) []string {
-	var tags []string
+func GetModelTags(model openrouter.Model) ([]string, bool) {
+	var (
+		reasoning bool
+		tags      []string
+	)
 
 	for _, parameter := range model.SupportedParameters {
+		if parameter == "reasoning" {
+			reasoning = true
+		}
+
 		if parameter == "reasoning" || parameter == "tools" {
 			tags = append(tags, parameter)
 		}
@@ -70,5 +83,5 @@ func GetModelTags(model openrouter.Model) []string {
 
 	sort.Strings(tags)
 
-	return tags
+	return tags, reasoning
 }
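GetModelTags now returns both the display tags and a flag for whether the model accepts the reasoning parameter, so LoadModels can keep that flag on the Model without exposing it in the JSON (json:"-"). A small self-contained sketch of the scan as it appears in these hunks; the plain string slice stands in for openrouter.Model.SupportedParameters, and the parameter list is made up for illustration.

package main

import (
	"fmt"
	"sort"
)

// getModelTags mirrors the part of GetModelTags visible in the hunks above:
// it collects the UI tags and separately reports reasoning support.
func getModelTags(supportedParameters []string) ([]string, bool) {
	var (
		reasoning bool
		tags      []string
	)

	for _, parameter := range supportedParameters {
		if parameter == "reasoning" {
			reasoning = true
		}

		if parameter == "reasoning" || parameter == "tools" {
			tags = append(tags, parameter)
		}
	}

	sort.Strings(tags)

	return tags, reasoning
}

func main() {
	// Hypothetical supported_parameters list, for illustration only.
	tags, reasoning := getModelTags([]string{"temperature", "tools", "reasoning"})

	fmt.Println(tags, reasoning) // [reasoning tools] true
}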
@@ -87,6 +87,10 @@ body {
 	pointer-events: none !important;
 }
 
+.none {
+	display: none !important;
+}
+
 #messages {
 	display: flex;
 	flex-direction: column;
@@ -432,9 +436,10 @@ select {
 	padding: 2px 4px;
 }
 
+#reasoning-tokens,
 #temperature {
 	appearance: textfield;
-	width: 50px;
+	width: 48px;
 	padding: 2px 4px;
 	text-align: right;
 }
@@ -455,6 +460,14 @@ label[for="temperature"] {
 	background-image: url(icons/temperature.svg);
 }
 
+label[for="reasoning-effort"] {
+	background-image: url(icons/reasoning.svg);
+}
+
+label[for="reasoning-tokens"] {
+	background-image: url(icons/amount.svg);
+}
+
 #bottom {
 	top: -38px;
 	left: 50%;
static/css/icons/amount.svg (new file, 7 lines, 806 B)
@@ -0,0 +1,7 @@
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
+<!-- Uploaded to: SVG Repo, www.svgrepo.com, Transformed by: SVG Repo Mixer Tools -->
+<svg width="800px" height="800px" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
+
+<g id="SVGRepo_bgCarrier" stroke-width="0"/>
+
+<g id="SVGRepo_tracerCarrier" stroke-linecap="round" stroke-linejoin="round"/>
@@ -23,12 +23,12 @@
 
 	<textarea id="message" placeholder="Type something..." autocomplete="off"></textarea>
 
-	<button id="add" title="Add message"></button>
-	<button id="send" title="Add message and start completion"></button>
+	<button id="add" title="Add message to chat"></button>
+	<button id="send" title="Add message to chat and start completion"></button>
 
 	<div class="options">
 		<div class="option">
-			<label for="role" title="Role"></label>
+			<label for="role" title="Message role"></label>
 			<select id="role">
 				<option value="user" selected>User</option>
 				<option value="assistant">Assistant</option>
@@ -40,7 +40,7 @@
 			<select id="model" data-searchable></select>
 		</div>
 		<div class="option">
-			<label for="prompt" title="Prompt"></label>
+			<label for="prompt" title="Main system prompt"></label>
 			<select id="prompt">
 				<option value="" selected>No Prompt</option>
 				<option value="normal">Assistant</option>
@@ -50,6 +50,19 @@
 			<label for="temperature" title="Temperature (0 - 1)"></label>
 			<input id="temperature" type="number" min="0" max="1" step="0.05" value="0.85" />
 		</div>
+		<div class="option none">
+			<label for="reasoning-effort" title="Reasoning Effort"></label>
+			<select id="reasoning-effort">
+				<option value="low">Low</option>
+				<option value="medium" selected>Medium</option>
+				<option value="high">High</option>
+				<option value="">Custom</option>
+			</select>
+		</div>
+		<div class="option none">
+			<label for="reasoning-tokens" title="Maximum amount of reasoning tokens"></label>
+			<input id="reasoning-tokens" type="number" min="1" max="1048576" step="1" value="1024" />
+		</div>
 		<div class="option">
 			<button id="scrolling" title="Turn on auto-scrolling"></button>
 		</div>
@@ -7,12 +7,15 @@
 		$model = document.getElementById("model"),
 		$prompt = document.getElementById("prompt"),
 		$temperature = document.getElementById("temperature"),
+		$reasoningEffort = document.getElementById("reasoning-effort"),
+		$reasoningTokens = document.getElementById("reasoning-tokens"),
 		$add = document.getElementById("add"),
 		$send = document.getElementById("send"),
 		$scrolling = document.getElementById("scrolling"),
 		$clear = document.getElementById("clear");
 
-	const messages = [];
+	const messages = [],
+		models = {};
 
 	let autoScrolling = false,
 		interacted = false;
@@ -434,9 +437,9 @@
 	}
 
 	async function loadModels() {
-		const models = await json("/-/models");
+		const modelList = await json("/-/models");
 
-		if (!models) {
+		if (!modelList) {
 			alert("Failed to load models.");
 
 			return [];
@@ -444,7 +447,7 @@
 
 		$model.innerHTML = "";
 
-		for (const model of models) {
+		for (const model of modelList) {
 			const el = document.createElement("option");
 
 			el.value = model.id;
@@ -454,18 +457,22 @@
 			el.dataset.tags = (model.tags || []).join(",");
 
 			$model.appendChild(el);
+
+			models[model.id] = model;
 		}
 
 		dropdown($model);
 
-		return models;
+		return modelList;
 	}
 
-	function restore(models) {
+	function restore(modelList) {
 		$role.value = loadValue("role", "user");
-		$model.value = loadValue("model", models[0].id);
+		$model.value = loadValue("model", modelList[0].id);
 		$prompt.value = loadValue("prompt", "normal");
 		$temperature.value = loadValue("temperature", 0.85);
+		$reasoningEffort.value = loadValue("reasoning-effort", "medium");
+		$reasoningTokens.value = loadValue("reasoning-tokens", 1024);
 
 		if (loadValue("scrolling")) {
 			$scrolling.click();
@@ -512,7 +519,21 @@
 	});
 
 	$model.addEventListener("change", () => {
-		storeValue("model", $model.value);
+		const model = $model.value,
+			data = model ? models[model] : null;
+
+		storeValue("model", model);
+
+		if (data?.tags?.includes("reasoning")) {
+			$reasoningEffort.parentNode.classList.remove("none");
+			$reasoningTokens.parentNode.classList.toggle(
+				"none",
+				!!$reasoningEffort.value,
+			);
+		} else {
+			$reasoningEffort.parentNode.classList.add("none");
+			$reasoningTokens.parentNode.classList.add("none");
+		}
 	});
 
 	$prompt.addEventListener("change", () => {
@@ -523,6 +544,18 @@
 		storeValue("temperature", $temperature.value);
 	});
 
+	$reasoningEffort.addEventListener("change", () => {
+		const effort = $reasoningEffort.value;
+
+		storeValue("reasoning-effort", effort);
+
+		$reasoningTokens.parentNode.classList.toggle("none", !!effort);
+	});
+
+	$reasoningTokens.addEventListener("change", () => {
+		storeValue("reasoning-tokens", $reasoningTokens.value);
+	});
+
 	$message.addEventListener("input", () => {
 		storeValue("message", $message.value);
 	});
@@ -570,12 +603,26 @@
 			return;
 		}
 
+		if (!$temperature.value) {
+			$temperature.value = 0.85;
+		}
+
 		const temperature = parseFloat($temperature.value);
 
 		if (Number.isNaN(temperature) || temperature < 0 || temperature > 1) {
 			return;
 		}
 
+		const effort = $reasoningEffort.value,
+			tokens = parseInt($reasoningTokens.value);
+
+		if (
+			!effort &&
+			(Number.isNaN(tokens) || tokens <= 0 || tokens > 1024 * 1024)
+		) {
+			return;
+		}
+
 		pushMessage();
 
 		controller = new AbortController();
@@ -586,6 +633,10 @@
 			prompt: $prompt.value,
 			model: $model.value,
 			temperature: temperature,
+			reasoning: {
+				effort: effort,
+				tokens: tokens || 0,
+			},
 			messages: messages.map((message) => message.getData()),
 		};
 
@@ -644,6 +695,7 @@
 
 	dropdown($role);
 	dropdown($prompt);
+	dropdown($reasoningEffort);
 
 	loadModels().then(restore);
 })();
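With the frontend changes above, every completion request now carries a reasoning object next to prompt, model, temperature and messages. A rough sketch of the resulting wire format, decoded with local copies of the structs from chat.go; only the fields touched by this commit are included, and the model ID and message text are invented for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

// Local copies of the chat.go structs, limited to the fields this commit touches.
type Message struct {
	Text string `json:"text"`
}

type Reasoning struct {
	Effort string `json:"effort"`
	Tokens int    `json:"tokens"`
}

type Request struct {
	Prompt      string    `json:"prompt"`
	Model       string    `json:"model"`
	Temperature float64   `json:"temperature"`
	Reasoning   Reasoning `json:"reasoning"`
	Messages    []Message `json:"messages"`
}

func main() {
	// Example body as built by the send handler; "Custom" effort is the empty
	// string, so the token budget applies. Model ID and text are invented.
	body := `{
		"prompt": "normal",
		"model": "openai/o3-mini",
		"temperature": 0.85,
		"reasoning": {"effort": "", "tokens": 2048},
		"messages": [{"text": "Hello!"}]
	}`

	var req Request

	if err := json.Unmarshal([]byte(body), &req); err != nil {
		panic(err)
	}

	// With an empty effort, Parse would take the max-tokens branch (2048 here).
	fmt.Printf("%+v\n", req.Reasoning)
}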