Mirror of https://github.com/wavetermdev/waveterm.git, synced 2024-12-22 16:48:23 +01:00
AI Sliding Window (#1151)
Only send the 30 most recent AI questions and responses to the model when making requests. This keeps the amount of chat history sent with each request from growing without bound.
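The core of the change is a window over the stored chat history. Below is a minimal TypeScript sketch of the idea, assuming the history array is ordered oldest-to-newest; the slidingWindowSize value and the slice expression mirror the diff below, while the standalone windowedHistory helper and the simplified message type are illustrative, not part of the commit:

    // Simplified message shape; the real OpenAIPromptMessageType lives in the repo.
    type OpenAIPromptMessageType = { role: "user" | "assistant"; content: string };

    // Cap on how many prior prompts/responses are sent to the model.
    const slidingWindowSize = 30;

    // Keep only the most recent slidingWindowSize entries.
    // Math.max(..., 0) guards short histories: for a 10-entry history
    // the start index is 0, so the whole array is returned unchanged.
    function windowedHistory(history: OpenAIPromptMessageType[]): OpenAIPromptMessageType[] {
        return history.slice(Math.max(history.length - slidingWindowSize, 0));
    }

Note that Array.prototype.slice would clamp a negative start index to the array bounds anyway, so the Math.max guard mainly documents the intent: never send more than the last 30 entries.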
parent e235f563f1
commit bbd530c052
@@ -24,6 +24,7 @@ interface ChatMessageType {
 }
 
 const outline = "2px solid var(--accent-color)";
+const slidingWindowSize = 30;
 
 interface ChatItemProps {
     chatItem: ChatMessageType;
@@ -233,7 +234,7 @@ export class WaveAiModel implements ViewModel {
             return [];
         }
         const history: Array<OpenAIPromptMessageType> = JSON.parse(new TextDecoder().decode(data));
-        return history;
+        return history.slice(Math.max(history.length - slidingWindowSize, 0));
     }
 
     giveFocus(): boolean {
@@ -294,19 +295,23 @@ export class WaveAiModel implements ViewModel {
                 fullMsg += msg.text ?? "";
                 globalStore.set(this.updateLastMessageAtom, msg.text ?? "", true);
                 if (this.cancel) {
-                    if (fullMsg == "") {
-                        globalStore.set(this.removeLastMessageAtom);
-                    }
                     break;
                 }
             }
-            globalStore.set(this.updateLastMessageAtom, "", false);
-            if (fullMsg != "") {
-                const responsePrompt: OpenAIPromptMessageType = {
-                    role: "assistant",
-                    content: fullMsg,
-                };
-                await BlockService.SaveWaveAiData(blockId, [...history, newPrompt, responsePrompt]);
+            if (fullMsg == "") {
+                // remove a message if empty
+                globalStore.set(this.removeLastMessageAtom);
+                // only save the author's prompt
+                await BlockService.SaveWaveAiData(blockId, [...history, newPrompt]);
+            } else {
+                const responsePrompt: OpenAIPromptMessageType = {
+                    role: "assistant",
+                    content: fullMsg,
+                };
+                //mark message as complete
+                globalStore.set(this.updateLastMessageAtom, "", false);
+                // save a complete message prompt and response
+                await BlockService.SaveWaveAiData(blockId, [...history, newPrompt, responsePrompt]);
             }
         } catch (error) {
             const updatedHist = [...history, newPrompt];