Remove unreliable Ollama models from AI prompt, fix message overflow
- Remove Ollama model options (cold-start timeouts via CF tunnel).
- Keep only Gemini Flash/Pro, which are reliable cloud APIs.
- Fix messages overflowing the shape: `min-height: 0` on the flex scroll container, `word-break` on messages, `max-width` on pre blocks.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
7dc4e9b3e9
commit
a4cd977d17
|
|
@ -60,6 +60,7 @@ const styles = css`
|
|||
|
||||
.messages {
|
||||
flex: 1;
|
||||
min-height: 0;
|
||||
overflow-y: auto;
|
||||
padding: 12px;
|
||||
display: flex;
|
||||
|
|
@ -73,6 +74,8 @@ const styles = css`
|
|||
border-radius: 12px;
|
||||
font-size: 13px;
|
||||
line-height: 1.5;
|
||||
overflow-wrap: break-word;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.message.user {
|
||||
|
|
@ -300,6 +303,7 @@ const styles = css`
|
|||
overflow-x: auto;
|
||||
font-size: 12px;
|
||||
margin: 8px 0;
|
||||
max-width: 100%;
|
||||
}
|
||||
|
||||
code {
|
||||
|
|
@ -440,16 +444,8 @@ export class FolkPrompt extends FolkShape {
|
|||
<div class="input-area">
|
||||
<div class="input-controls">
|
||||
<select class="model-select">
|
||||
<optgroup label="Gemini">
|
||||
<option value="gemini-flash">Gemini 2.5 Flash</option>
|
||||
<option value="gemini-pro">Gemini 2.5 Pro</option>
|
||||
</optgroup>
|
||||
<optgroup label="Local (Ollama)">
|
||||
<option value="llama3.2">Llama 3.2 (3B)</option>
|
||||
<option value="llama3.1">Llama 3.1 (8B)</option>
|
||||
<option value="qwen2.5-coder">Qwen Coder (7B)</option>
|
||||
<option value="mistral-small">Mistral Small (24B)</option>
|
||||
</optgroup>
|
||||
<option value="gemini-flash">Gemini 2.5 Flash</option>
|
||||
<option value="gemini-pro">Gemini 2.5 Pro</option>
|
||||
</select>
|
||||
<button class="tools-btn" title="Enable canvas tools — AI can create maps, notes, embeds, and more">Tools</button>
|
||||
</div>
|
||||
|
|
|
|||
Loading…
Reference in New Issue