Fixing a few AI-related bugs
This commit is contained in:
Sylvie Crowe 2024-07-29 22:35:21 -07:00 committed by GitHub
parent b27d032704
commit 923850313f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 106 additions and 35 deletions

View File

@ -145,6 +145,7 @@ tasks:
- "cmd/generate/*.go" - "cmd/generate/*.go"
- "cmd/generatewshclient/*.go" - "cmd/generatewshclient/*.go"
- "pkg/service/**/*.go" - "pkg/service/**/*.go"
- "pkg/wconfig/**/*.go"
- "pkg/wstore/*.go" - "pkg/wstore/*.go"
- "pkg/wshrpc/**/*.go" - "pkg/wshrpc/**/*.go"
- "pkg/tsgen/**/*.go" - "pkg/tsgen/**/*.go"

View File

@ -13,6 +13,9 @@ class BlockServiceType {
SaveTerminalState(arg2: string, arg3: string, arg4: string, arg5: number): Promise<void> { SaveTerminalState(arg2: string, arg3: string, arg4: string, arg5: number): Promise<void> {
return WOS.callBackendService("block", "SaveTerminalState", Array.from(arguments)) return WOS.callBackendService("block", "SaveTerminalState", Array.from(arguments))
} }
// Persists the AI chat history for a block by delegating to the backend
// "block" service (Go BlockService.SaveWaveAiData). Generated wrapper:
// arg2 is the blockId, arg3 is the full prompt history to save.
SaveWaveAiData(arg2: string, arg3: OpenAIPromptMessageType[]): Promise<void> {
    return WOS.callBackendService("block", "SaveWaveAiData", Array.from(arguments))
}
} }
export const BlockService = new BlockServiceType(); export const BlockService = new BlockServiceType();

View File

@ -3,7 +3,8 @@
import { Markdown } from "@/app/element/markdown"; import { Markdown } from "@/app/element/markdown";
import { TypingIndicator } from "@/app/element/typingindicator"; import { TypingIndicator } from "@/app/element/typingindicator";
import { WOS, atoms, globalStore } from "@/store/global"; import { WOS, atoms, fetchWaveFile, globalStore } from "@/store/global";
import * as services from "@/store/services";
import { WshServer } from "@/store/wshserver"; import { WshServer } from "@/store/wshserver";
import * as jotai from "jotai"; import * as jotai from "jotai";
import type { OverlayScrollbars } from "overlayscrollbars"; import type { OverlayScrollbars } from "overlayscrollbars";
@ -59,11 +60,7 @@ export class WaveAiModel implements ViewModel {
return "sparkles"; // should not be hardcoded return "sparkles"; // should not be hardcoded
}); });
this.viewName = jotai.atom("Wave Ai"); this.viewName = jotai.atom("Wave Ai");
this.messagesAtom = jotai.atom( this.messagesAtom = jotai.atom([]);
globalStore
.get(this.blockAtom)
.meta?.history?.map((prompt: OpenAIPromptMessageType) => promptToMsg(prompt)) ?? []
);
this.addMessageAtom = jotai.atom(null, (get, set, message: ChatMessageType) => { this.addMessageAtom = jotai.atom(null, (get, set, message: ChatMessageType) => {
const messages = get(this.messagesAtom); const messages = get(this.messagesAtom);
@ -105,14 +102,35 @@ export class WaveAiModel implements ViewModel {
}, 100); }, 100);
}, 1500); }, 1500);
}); });
this.viewText = jotai.atom((get) => {
const viewTextChildren: HeaderElem[] = [
{
elemtype: "text",
text: get(atoms.settingsConfigAtom).ai.model,
},
];
return viewTextChildren;
});
}
// Loads the persisted AI chat history from the block's "aidata" file and
// replaces the contents of messagesAtom with the corresponding chat messages.
async populateMessages(): Promise<void> {
    const prompts = await this.fetchAiData();
    const chatMessages = prompts.map((prompt) => promptToMsg(prompt));
    globalStore.set(this.messagesAtom, chatMessages);
}
// Fetches the persisted AI chat history for this block from its "aidata"
// wave file. Returns an empty history when the file is missing or empty.
async fetchAiData(): Promise<Array<OpenAIPromptMessageType>> {
    // fileInfo from fetchWaveFile is not needed here; destructure only data
    const { data } = await fetchWaveFile(this.blockId, "aidata");
    if (!data) {
        return [];
    }
    // NOTE(review): JSON.parse output is trusted as OpenAIPromptMessageType[];
    // the file is only written by SaveWaveAiData, but a schema check would be safer.
    const history: Array<OpenAIPromptMessageType> = JSON.parse(new TextDecoder().decode(data));
    return history;
}
useWaveAi() { useWaveAi() {
const [messages] = jotai.useAtom(this.messagesAtom); const [messages] = jotai.useAtom(this.messagesAtom);
const [, addMessage] = jotai.useAtom(this.addMessageAtom); const [, addMessage] = jotai.useAtom(this.addMessageAtom);
const [, simulateResponse] = jotai.useAtom(this.simulateAssistantResponseAtom); const [, simulateResponse] = jotai.useAtom(this.simulateAssistantResponseAtom);
const block = jotai.useAtomValue(this.blockAtom);
const metadata = block.meta;
const clientId = jotai.useAtomValue(atoms.clientId); const clientId = jotai.useAtomValue(atoms.clientId);
const blockId = this.blockId; const blockId = this.blockId;
@ -125,27 +143,27 @@ export class WaveAiModel implements ViewModel {
}; };
addMessage(newMessage); addMessage(newMessage);
// send message to backend and get response // send message to backend and get response
const settings = globalStore.get(atoms.settingsConfigAtom);
const opts: OpenAIOptsType = { const opts: OpenAIOptsType = {
model: "gpt-4o-mini", model: settings.ai.model,
apitoken: metadata?.apitoken as string, apitoken: settings.ai.apitoken,
maxtokens: 1000, maxtokens: settings.ai.maxtokens,
timeout: 10, timeout: settings.ai.timeoutms / 1000,
baseurl: metadata?.baseurl as string, baseurl: settings.ai.baseurl,
}; };
const newPrompt: OpenAIPromptMessageType = { const newPrompt: OpenAIPromptMessageType = {
role: "user", role: "user",
content: text, content: text,
name: (metadata?.name as string) || "user", name: globalStore.get(atoms.settingsConfigAtom).ai.name,
}; };
const updatedHistory: Array<OpenAIPromptMessageType> = metadata?.history || [];
updatedHistory.push(newPrompt);
const beMsg: OpenAiStreamRequest = {
clientid: clientId,
opts: opts,
prompt: updatedHistory,
};
const aiGen = WshServer.StreamWaveAiCommand(beMsg);
let temp = async () => { let temp = async () => {
const history = await this.fetchAiData();
const beMsg: OpenAiStreamRequest = {
clientid: clientId,
opts: opts,
prompt: [...history, newPrompt],
};
const aiGen = WshServer.StreamWaveAiCommand(beMsg);
let fullMsg = ""; let fullMsg = "";
for await (const msg of aiGen) { for await (const msg of aiGen) {
fullMsg += msg.text ?? ""; fullMsg += msg.text ?? "";
@ -161,11 +179,11 @@ export class WaveAiModel implements ViewModel {
role: "assistant", role: "assistant",
content: fullMsg, content: fullMsg,
}; };
updatedHistory.push(responsePrompt); const writeToHistory = services.BlockService.SaveWaveAiData(blockId, [
const writeToHistory = WshServer.SetMetaCommand({ ...history,
oref: WOS.makeORef("block", blockId), newPrompt,
meta: { ...metadata, history: updatedHistory }, responsePrompt,
}); ]);
const typeResponse = simulateResponse(response); const typeResponse = simulateResponse(response);
Promise.all([writeToHistory, typeResponse]); Promise.all([writeToHistory, typeResponse]);
}; };
@ -388,6 +406,11 @@ const WaveAi = ({ model }: { model: WaveAiModel }) => {
const termFontSize: number = 14; const termFontSize: number = 14;
// a weird workaround to initialize asynchronously
useEffect(() => {
model.populateMessages();
}, []);
useEffect(() => { useEffect(() => {
return () => { return () => {
if (submitTimeoutRef.current) { if (submitTimeoutRef.current) {

View File

@ -5,6 +5,16 @@
declare global { declare global {
// wconfig.AiConfigType
// AI provider settings; mirrors the Go wconfig.AiConfigType struct.
type AiConfigType = {
    baseurl: string; // OpenAI-compatible API base URL
    apitoken: string; // API token for the AI provider
    name: string; // sent as the "name" on user prompt messages
    model: string; // model identifier (default "gpt-4o-mini")
    maxtokens: number; // max tokens for a completion (default 1000)
    timeoutms: number; // request timeout in milliseconds (default 10000)
};
// wconfig.AutoUpdateOpts // wconfig.AutoUpdateOpts
type AutoUpdateOpts = { type AutoUpdateOpts = {
enabled: boolean; enabled: boolean;
@ -266,6 +276,7 @@ declare global {
type SettingsConfigType = { type SettingsConfigType = {
mimetypes: {[key: string]: MimeTypeConfigType}; mimetypes: {[key: string]: MimeTypeConfigType};
term: TerminalConfigType; term: TerminalConfigType;
ai: AiConfigType;
widgets: WidgetsConfigType[]; widgets: WidgetsConfigType[];
blockheader: BlockHeaderOpts; blockheader: BlockHeaderOpts;
autoupdate: AutoUpdateOpts; autoupdate: AutoUpdateOpts;

View File

@ -5,12 +5,14 @@ package blockservice
import ( import (
"context" "context"
"encoding/json"
"fmt" "fmt"
"time" "time"
"github.com/wavetermdev/thenextwave/pkg/blockcontroller" "github.com/wavetermdev/thenextwave/pkg/blockcontroller"
"github.com/wavetermdev/thenextwave/pkg/filestore" "github.com/wavetermdev/thenextwave/pkg/filestore"
"github.com/wavetermdev/thenextwave/pkg/tsgen/tsgenmeta" "github.com/wavetermdev/thenextwave/pkg/tsgen/tsgenmeta"
"github.com/wavetermdev/thenextwave/pkg/wshrpc"
"github.com/wavetermdev/thenextwave/pkg/wstore" "github.com/wavetermdev/thenextwave/pkg/wstore"
) )
@ -58,3 +60,24 @@ func (bs *BlockService) SaveTerminalState(ctx context.Context, blockId string, s
} }
return nil return nil
} }
// SaveWaveAiData persists the AI chat history for a waveai block, serializing
// it to JSON and writing it to the block's "aidata" wave file.
// Returns an error if the block does not exist, is not a waveai block, or the
// history cannot be serialized or written.
func (bs *BlockService) SaveWaveAiData(ctx context.Context, blockId string, history []wshrpc.OpenAIPromptMessageType) error {
	block, err := wstore.DBMustGet[*wstore.Block](ctx, blockId)
	if err != nil {
		return err
	}
	// only waveai blocks carry AI chat history
	if block.View != "waveai" {
		return fmt.Errorf("invalid view type: %s", block.View)
	}
	historyBytes, err := json.Marshal(history)
	if err != nil {
		return fmt.Errorf("unable to serialize ai history: %v", err)
	}
	// ignore MakeFile error (already exists is ok)
	filestore.WFS.MakeFile(ctx, blockId, "aidata", nil, filestore.FileOptsType{})
	err = filestore.WFS.WriteFile(ctx, blockId, "aidata", historyBytes)
	if err != nil {
		// bug fix: message previously said "cannot save terminal state",
		// copy-pasted from SaveTerminalState
		return fmt.Errorf("cannot save ai data: %w", err)
	}
	return nil
}

View File

@ -8,19 +8,12 @@ import (
"path/filepath" "path/filepath"
"github.com/wavetermdev/thenextwave/pkg/wavebase" "github.com/wavetermdev/thenextwave/pkg/wavebase"
"github.com/wavetermdev/thenextwave/pkg/wshrpc"
"github.com/wavetermdev/thenextwave/pkg/wstore" "github.com/wavetermdev/thenextwave/pkg/wstore"
) )
const termThemesDir = "terminal-themes" const termThemesDir = "terminal-themes"
const settingsFile = "settings.json" const settingsFile = "settings.json"
var defaultAiMessage = wshrpc.OpenAIPromptMessageType{
Role: "assistant",
Content: `<p>Hello, how may I help you?<br>
(Cmd-Shift-Space: open/close, Ctrl+L: clear chat buffer, Up/Down: select code blocks, Enter: to copy a selected code block to the command input)</p>`,
}
var settingsAbsPath = filepath.Join(configDirAbsPath, settingsFile) var settingsAbsPath = filepath.Join(configDirAbsPath, settingsFile)
type WidgetsConfigType struct { type WidgetsConfigType struct {
@ -36,6 +29,15 @@ type TerminalConfigType struct {
FontFamily string `json:"fontfamily,omitempty"` FontFamily string `json:"fontfamily,omitempty"`
} }
// AiConfigType holds the settings for the waveai view; it is mirrored to the
// TypeScript AiConfigType declaration by the type generator.
type AiConfigType struct {
	BaseURL   string `json:"baseurl"`   // OpenAI-compatible API base URL
	ApiToken  string `json:"apitoken"`  // API token for the AI provider
	Name      string `json:"name"`      // sent as the "name" on user prompt messages (defaults to the OS username)
	Model     string `json:"model"`     // model identifier (default "gpt-4o-mini")
	MaxTokens uint32 `json:"maxtokens"` // max tokens for a completion (default 1000)
	TimeoutMs uint32 `json:"timeoutms"` // request timeout in milliseconds (default 10000); converted to seconds for OpenAI opts
}
type MimeTypeConfigType struct { type MimeTypeConfigType struct {
Icon string `json:"icon"` Icon string `json:"icon"`
Color string `json:"color"` Color string `json:"color"`
@ -90,6 +92,7 @@ type WindowSettingsType struct {
type SettingsConfigType struct { type SettingsConfigType struct {
MimeTypes map[string]MimeTypeConfigType `json:"mimetypes"` MimeTypes map[string]MimeTypeConfigType `json:"mimetypes"`
Term TerminalConfigType `json:"term"` Term TerminalConfigType `json:"term"`
Ai *AiConfigType `json:"ai"`
Widgets []WidgetsConfigType `json:"widgets"` Widgets []WidgetsConfigType `json:"widgets"`
BlockHeader BlockHeaderOpts `json:"blockheader"` BlockHeader BlockHeaderOpts `json:"blockheader"`
AutoUpdate *AutoUpdateOpts `json:"autoupdate"` AutoUpdate *AutoUpdateOpts `json:"autoupdate"`
@ -165,6 +168,14 @@ func applyDefaultSettings(settings *SettingsConfigType) {
} else { } else {
userName = currentUser.Username userName = currentUser.Username
} }
if settings.Ai == nil {
settings.Ai = &AiConfigType{
Name: userName,
Model: "gpt-4o-mini",
MaxTokens: 1000,
TimeoutMs: 10 * 1000,
}
}
defaultWidgets := []WidgetsConfigType{ defaultWidgets := []WidgetsConfigType{
{ {
Icon: "files", Icon: "files",
@ -194,7 +205,6 @@ func applyDefaultSettings(settings *SettingsConfigType) {
Label: "waveai", Label: "waveai",
BlockDef: wstore.BlockDef{ BlockDef: wstore.BlockDef{
View: "waveai", View: "waveai",
Meta: map[string]any{"name": userName, "baseurl": "", "apitoken": "", "history": []wshrpc.OpenAIPromptMessageType{defaultAiMessage}},
}, },
}, },
} }