Fixing a few AI-related bugs
Sylvie Crowe 2024-07-29 22:35:21 -07:00 committed by GitHub
parent b27d032704
commit 923850313f
6 changed files with 106 additions and 35 deletions

View File

@@ -145,6 +145,7 @@ tasks:
- "cmd/generate/*.go"
- "cmd/generatewshclient/*.go"
- "pkg/service/**/*.go"
- "pkg/wconfig/**/*.go"
- "pkg/wstore/*.go"
- "pkg/wshrpc/**/*.go"
- "pkg/tsgen/**/*.go"

View File

@@ -13,6 +13,9 @@ class BlockServiceType {
SaveTerminalState(arg2: string, arg3: string, arg4: string, arg5: number): Promise<void> {
return WOS.callBackendService("block", "SaveTerminalState", Array.from(arguments))
}
SaveWaveAiData(arg2: string, arg3: OpenAIPromptMessageType[]): Promise<void> {
return WOS.callBackendService("block", "SaveWaveAiData", Array.from(arguments))
}
}
export const BlockService = new BlockServiceType();
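
For context, the generated binding above would presumably be called from view code along these lines (a minimal sketch; persistHistory, its blockId argument, and the history contents are illustrative and not part of this commit):

import * as services from "@/store/services";

// Persist a block's chat history through the new service method (sketch).
async function persistHistory(blockId: string): Promise<void> {
    const history: OpenAIPromptMessageType[] = [
        { role: "user", content: "hello", name: "user" },
        { role: "assistant", content: "Hi, how can I help?" },
    ];
    await services.BlockService.SaveWaveAiData(blockId, history);
}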

View File

@@ -3,7 +3,8 @@
import { Markdown } from "@/app/element/markdown";
import { TypingIndicator } from "@/app/element/typingindicator";
import { WOS, atoms, globalStore } from "@/store/global";
import { WOS, atoms, fetchWaveFile, globalStore } from "@/store/global";
import * as services from "@/store/services";
import { WshServer } from "@/store/wshserver";
import * as jotai from "jotai";
import type { OverlayScrollbars } from "overlayscrollbars";
@@ -59,11 +60,7 @@ export class WaveAiModel implements ViewModel {
return "sparkles"; // should not be hardcoded
});
this.viewName = jotai.atom("Wave Ai");
this.messagesAtom = jotai.atom(
globalStore
.get(this.blockAtom)
.meta?.history?.map((prompt: OpenAIPromptMessageType) => promptToMsg(prompt)) ?? []
);
this.messagesAtom = jotai.atom([]);
this.addMessageAtom = jotai.atom(null, (get, set, message: ChatMessageType) => {
const messages = get(this.messagesAtom);
@@ -105,14 +102,35 @@
}, 100);
}, 1500);
});
this.viewText = jotai.atom((get) => {
const viewTextChildren: HeaderElem[] = [
{
elemtype: "text",
text: get(atoms.settingsConfigAtom).ai.model,
},
];
return viewTextChildren;
});
}
async populateMessages(): Promise<void> {
const history = await this.fetchAiData();
globalStore.set(this.messagesAtom, history.map(promptToMsg));
}
async fetchAiData(): Promise<Array<OpenAIPromptMessageType>> {
const { data, fileInfo } = await fetchWaveFile(this.blockId, "aidata");
if (!data) {
return [];
}
const history: Array<OpenAIPromptMessageType> = JSON.parse(new TextDecoder().decode(data));
return history;
}
useWaveAi() {
const [messages] = jotai.useAtom(this.messagesAtom);
const [, addMessage] = jotai.useAtom(this.addMessageAtom);
const [, simulateResponse] = jotai.useAtom(this.simulateAssistantResponseAtom);
const block = jotai.useAtomValue(this.blockAtom);
const metadata = block.meta;
const clientId = jotai.useAtomValue(atoms.clientId);
const blockId = this.blockId;
@@ -125,27 +143,27 @@
};
addMessage(newMessage);
// send message to backend and get response
const settings = globalStore.get(atoms.settingsConfigAtom);
const opts: OpenAIOptsType = {
model: "gpt-4o-mini",
apitoken: metadata?.apitoken as string,
maxtokens: 1000,
timeout: 10,
baseurl: metadata?.baseurl as string,
model: settings.ai.model,
apitoken: settings.ai.apitoken,
maxtokens: settings.ai.maxtokens,
timeout: settings.ai.timeoutms / 1000,
baseurl: settings.ai.baseurl,
};
const newPrompt: OpenAIPromptMessageType = {
role: "user",
content: text,
name: (metadata?.name as string) || "user",
name: globalStore.get(atoms.settingsConfigAtom).ai.name,
};
const updatedHistory: Array<OpenAIPromptMessageType> = metadata?.history || [];
updatedHistory.push(newPrompt);
const beMsg: OpenAiStreamRequest = {
clientid: clientId,
opts: opts,
prompt: updatedHistory,
};
const aiGen = WshServer.StreamWaveAiCommand(beMsg);
let temp = async () => {
const history = await this.fetchAiData();
const beMsg: OpenAiStreamRequest = {
clientid: clientId,
opts: opts,
prompt: [...history, newPrompt],
};
const aiGen = WshServer.StreamWaveAiCommand(beMsg);
let fullMsg = "";
for await (const msg of aiGen) {
fullMsg += msg.text ?? "";
@@ -161,11 +179,11 @@
role: "assistant",
content: fullMsg,
};
updatedHistory.push(responsePrompt);
const writeToHistory = WshServer.SetMetaCommand({
oref: WOS.makeORef("block", blockId),
meta: { ...metadata, history: updatedHistory },
});
const writeToHistory = services.BlockService.SaveWaveAiData(blockId, [
...history,
newPrompt,
responsePrompt,
]);
const typeResponse = simulateResponse(response);
Promise.all([writeToHistory, typeResponse]);
};
@@ -388,6 +406,11 @@ const WaveAi = ({ model }: { model: WaveAiModel }) => {
const termFontSize: number = 14;
// a weird workaround to initialize asynchronously
useEffect(() => {
model.populateMessages();
}, []);
useEffect(() => {
return () => {
if (submitTimeoutRef.current) {
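
Taken together, the changes in this file move chat history out of block metadata: on mount, populateMessages reads the block's "aidata" wavefile, and each exchange appends to that history and writes it back through the block service. Below is a condensed sketch of that round trip, using only the imports and types shown above; the exchange helper and its arguments are illustrative:

// Sketch of the per-exchange history round trip introduced above.
async function exchange(blockId: string, clientId: string, text: string): Promise<void> {
    const settings = globalStore.get(atoms.settingsConfigAtom);
    // 1. Load whatever history is already stored in the block's "aidata" file.
    const { data } = await fetchWaveFile(blockId, "aidata");
    const history: OpenAIPromptMessageType[] = data
        ? JSON.parse(new TextDecoder().decode(data))
        : [];
    // 2. Stream a completion using the settings-derived options.
    const newPrompt: OpenAIPromptMessageType = { role: "user", content: text, name: settings.ai.name };
    const req: OpenAiStreamRequest = {
        clientid: clientId,
        opts: {
            model: settings.ai.model,
            apitoken: settings.ai.apitoken,
            maxtokens: settings.ai.maxtokens,
            timeout: settings.ai.timeoutms / 1000,
            baseurl: settings.ai.baseurl,
        },
        prompt: [...history, newPrompt],
    };
    let fullMsg = "";
    for await (const msg of WshServer.StreamWaveAiCommand(req)) {
        fullMsg += msg.text ?? "";
    }
    // 3. Persist the extended history back to the block's filestore.
    await services.BlockService.SaveWaveAiData(blockId, [
        ...history,
        newPrompt,
        { role: "assistant", content: fullMsg },
    ]);
}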

View File

@@ -5,6 +5,16 @@
declare global {
// wconfig.AiConfigType
type AiConfigType = {
baseurl: string;
apitoken: string;
name: string;
model: string;
maxtokens: number;
timeoutms: number;
};
// wconfig.AutoUpdateOpts
type AutoUpdateOpts = {
enabled: boolean;
@@ -266,6 +276,7 @@ declare global {
type SettingsConfigType = {
mimetypes: {[key: string]: MimeTypeConfigType};
term: TerminalConfigType;
ai: AiConfigType;
widgets: WidgetsConfigType[];
blockheader: BlockHeaderOpts;
autoupdate: AutoUpdateOpts;

View File

@@ -5,12 +5,14 @@ package blockservice
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/wavetermdev/thenextwave/pkg/blockcontroller"
"github.com/wavetermdev/thenextwave/pkg/filestore"
"github.com/wavetermdev/thenextwave/pkg/tsgen/tsgenmeta"
"github.com/wavetermdev/thenextwave/pkg/wshrpc"
"github.com/wavetermdev/thenextwave/pkg/wstore"
)
@@ -58,3 +60,24 @@ func (bs *BlockService) SaveTerminalState(ctx context.Context, blockId string, s
}
return nil
}
func (bs *BlockService) SaveWaveAiData(ctx context.Context, blockId string, history []wshrpc.OpenAIPromptMessageType) error {
block, err := wstore.DBMustGet[*wstore.Block](ctx, blockId)
if err != nil {
return err
}
if block.View != "waveai" {
return fmt.Errorf("invalid view type: %s", block.View)
}
historyBytes, err := json.Marshal(history)
if err != nil {
return fmt.Errorf("unable to serialize ai history: %v", err)
}
// ignore MakeFile error (already exists is ok)
filestore.WFS.MakeFile(ctx, blockId, "aidata", nil, filestore.FileOptsType{})
err = filestore.WFS.WriteFile(ctx, blockId, "aidata", historyBytes)
if err != nil {
return fmt.Errorf("cannot save terminal state: %w", err)
}
return nil
}
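
Design-wise, the handler above stores the history in the block's filestore under a dedicated "aidata" file rather than in block metadata; the stored bytes are simply the JSON-serialized prompt list, so the frontend's fetchAiData can decode them directly. Roughly what the decoded contents look like after one exchange (illustrative values):

// Approximate decoded contents of a block's "aidata" file (illustrative).
const persisted: OpenAIPromptMessageType[] = [
    { role: "user", content: "hello", name: "user" },
    { role: "assistant", content: "Hi, how can I help?" },
];
// On disk this is JSON.stringify(persisted); SaveWaveAiData refuses to
// write it for blocks whose view is not "waveai".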

View File

@@ -8,19 +8,12 @@ import (
"path/filepath"
"github.com/wavetermdev/thenextwave/pkg/wavebase"
"github.com/wavetermdev/thenextwave/pkg/wshrpc"
"github.com/wavetermdev/thenextwave/pkg/wstore"
)
const termThemesDir = "terminal-themes"
const settingsFile = "settings.json"
var defaultAiMessage = wshrpc.OpenAIPromptMessageType{
Role: "assistant",
Content: `<p>Hello, how may I help you?<br>
(Cmd-Shift-Space: open/close, Ctrl+L: clear chat buffer, Up/Down: select code blocks, Enter: to copy a selected code block to the command input)</p>`,
}
var settingsAbsPath = filepath.Join(configDirAbsPath, settingsFile)
type WidgetsConfigType struct {
@@ -36,6 +29,15 @@ type TerminalConfigType struct {
FontFamily string `json:"fontfamily,omitempty"`
}
type AiConfigType struct {
BaseURL string `json:"baseurl"`
ApiToken string `json:"apitoken"`
Name string `json:"name"`
Model string `json:"model"`
MaxTokens uint32 `json:"maxtokens"`
TimeoutMs uint32 `json:"timeoutms"`
}
type MimeTypeConfigType struct {
Icon string `json:"icon"`
Color string `json:"color"`
@@ -90,6 +92,7 @@ type WindowSettingsType struct {
type SettingsConfigType struct {
MimeTypes map[string]MimeTypeConfigType `json:"mimetypes"`
Term TerminalConfigType `json:"term"`
Ai *AiConfigType `json:"ai"`
Widgets []WidgetsConfigType `json:"widgets"`
BlockHeader BlockHeaderOpts `json:"blockheader"`
AutoUpdate *AutoUpdateOpts `json:"autoupdate"`
@@ -165,6 +168,14 @@ func applyDefaultSettings(settings *SettingsConfigType) {
} else {
userName = currentUser.Username
}
if settings.Ai == nil {
settings.Ai = &AiConfigType{
Name: userName,
Model: "gpt-4o-mini",
MaxTokens: 1000,
TimeoutMs: 10 * 1000,
}
}
defaultWidgets := []WidgetsConfigType{
{
Icon: "files",
@@ -194,7 +205,6 @@ func applyDefaultSettings(settings *SettingsConfigType) {
Label: "waveai",
BlockDef: wstore.BlockDef{
View: "waveai",
Meta: map[string]any{"name": userName, "baseurl": "", "apitoken": "", "history": []wshrpc.OpenAIPromptMessageType{defaultAiMessage}},
},
},
}
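
With the defaults above applied, a fresh configuration ends up with an ai section roughly equivalent to the following, expressed here as a TypeScript literal against the AiConfigType declared earlier; the name placeholder stands in for the current OS username, and baseurl/apitoken stay empty until the user sets them:

// Effective default AI settings after applyDefaultSettings (sketch).
const defaultAiSettings: AiConfigType = {
    baseurl: "",
    apitoken: "",
    name: "jdoe", // placeholder: the Go code defaults this to the OS username
    model: "gpt-4o-mini",
    maxtokens: 1000,
    timeoutms: 10000, // 10 * 1000 in the Go defaults
};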