diff --git a/src/app/clientsettings/clientsettings.tsx b/src/app/clientsettings/clientsettings.tsx
index dd53cd3ad..db0e73d07 100644
--- a/src/app/clientsettings/clientsettings.tsx
+++ b/src/app/clientsettings/clientsettings.tsx
@@ -157,6 +157,12 @@ class ClientSettingsView extends React.Component<{ model: RemotesModel }, { hove
commandRtnHandler(prtn, this.errorMessage);
}
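+    // Persist the AI request timeout (entered in seconds) via the /client:set command.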
+ @boundMethod
+ inlineUpdateOpenAITimeout(newTimeout: string): void {
+ const prtn = GlobalCommandRunner.setClientOpenAISettings({ timeout: newTimeout });
+ commandRtnHandler(prtn, this.errorMessage);
+ }
+
@boundMethod
setErrorMessage(msg: string): void {
mobx.action(() => {
@@ -203,6 +209,9 @@ class ClientSettingsView extends React.Component<{ model: RemotesModel }, { hove
const maxTokensStr = String(
openAIOpts.maxtokens == null || openAIOpts.maxtokens == 0 ? 1000 : openAIOpts.maxtokens
);
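+        // openAIOpts.timeout is stored in milliseconds; display it in seconds, defaulting to 10 when unset.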
+ const aiTimeoutStr = String(
+ openAIOpts.timeout == null || openAIOpts.timeout == 0 ? 10 : openAIOpts.timeout / 1000
+ );
const curFontSize = GlobalModel.getTermFontSize();
const curFontFamily = GlobalModel.getTermFontFamily();
const curTheme = GlobalModel.getThemeSource();
@@ -342,6 +351,19 @@ class ClientSettingsView extends React.Component<{ model: RemotesModel }, { hove
/>
+
Global Hotkey
diff --git a/src/models/commandrunner.ts b/src/models/commandrunner.ts
index e4a0e8494..48fa3dc1d 100644
--- a/src/models/commandrunner.ts
+++ b/src/models/commandrunner.ts
@@ -424,6 +424,7 @@ class CommandRunner {
apitoken?: string;
maxtokens?: string;
baseurl?: string;
+ timeout?: string;
    }): Promise<CommandRtnType> {
let kwargs = {
nohist: "1",
@@ -440,6 +441,9 @@ class CommandRunner {
if (opts.baseurl != null) {
kwargs["openaibaseurl"] = opts.baseurl;
}
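+        // Forward the timeout (in seconds) as the "openaitimeout" kwarg for /client:set.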
+ if (opts.timeout != null) {
+ kwargs["openaitimeout"] = opts.timeout;
+ }
return GlobalModel.submitCommand("client", "set", null, kwargs, false);
}
diff --git a/src/types/custom.d.ts b/src/types/custom.d.ts
index 1546e0cd7..7a9db4acb 100644
--- a/src/types/custom.d.ts
+++ b/src/types/custom.d.ts
@@ -659,6 +659,7 @@ declare global {
maxtokens?: number;
maxchoices?: number;
baseurl?: string;
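+        // AI request timeout in milliseconds.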
+ timeout?: number;
};
type PlaybookType = {
diff --git a/wavesrv/pkg/cmdrunner/cmdrunner.go b/wavesrv/pkg/cmdrunner/cmdrunner.go
index 96d29e2a4..6ba5a56ee 100644
--- a/wavesrv/pkg/cmdrunner/cmdrunner.go
+++ b/wavesrv/pkg/cmdrunner/cmdrunner.go
@@ -84,9 +84,9 @@ const TermFontSizeMax = 24
const TsFormatStr = "2006-01-02 15:04:05"
-const OpenAIPacketTimeout = 10 * time.Second
+const OpenAIPacketTimeout = 10 * 1000 * time.Millisecond
const OpenAIStreamTimeout = 5 * time.Minute
-const OpenAICloudCompletionTelemetryOffErrorMsg = "To ensure responsible usage and prevent misuse, Wave AI requires telemetry to be enabled when using its free AI features.\n\nIf you prefer not to enable telemetry, you can still access Wave AI's features by providing your own OpenAI API key in the Settings menu. Please note that when using your personal API key, requests will be sent directly to the OpenAI API without being proxied through Wave's servers.\n\nIf you wish to continue using Wave AI's free features, you can easily enable telemetry by running the '/telemetry:on' command in the terminal. This will allow you to access the free AI features while helping to protect the platform from abuse."
+const OpenAICloudCompletionTelemetryOffErrorMsg = "To ensure responsible usage and prevent misuse, Wave AI requires telemetry to be enabled when using its free AI features.\n\nIf you prefer not to enable telemetry, you can still access Wave AI's features by providing your own OpenAI API key or AI Base URL in the Settings menu. Please note that when using your personal API key, requests will be sent directly to the OpenAI API or the API that you specified with the AI Base URL, without being proxied through Wave's servers.\n\nIf you wish to continue using Wave AI's free features, you can easily enable telemetry by running the '/telemetry:on' command in the terminal. This will allow you to access the free AI features while helping to protect the platform from abuse."
const (
KwArgRenderer = "renderer"
@@ -2693,8 +2693,6 @@ func getCmdInfoEngineeredPrompt(userQuery string, curLineStr string, shellType s
}
func doOpenAICmdInfoCompletion(cmd *sstore.CmdType, clientId string, opts *sstore.OpenAIOptsType, prompt []packet.OpenAIPromptMessageType, curLineStr string) {
- var hadError bool
- log.Println("had error: ", hadError)
ctx, cancelFn := context.WithTimeout(context.Background(), OpenAIStreamTimeout)
defer cancelFn()
defer func() {
@@ -2702,7 +2700,6 @@ func doOpenAICmdInfoCompletion(cmd *sstore.CmdType, clientId string, opts *sstor
if r != nil {
panicMsg := fmt.Sprintf("panic: %v", r)
log.Printf("panic in doOpenAICompletion: %s\n", panicMsg)
- hadError = true
}
}()
var ch chan *packet.OpenAIPacketType
@@ -2730,12 +2727,15 @@ func doOpenAICmdInfoCompletion(cmd *sstore.CmdType, clientId string, opts *sstor
return
}
writePacketToUpdateBus(ctx, cmd, asstMessagePk)
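+	// A positive client-configured timeout (stored in milliseconds) overrides the default 10s packet timeout.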
+ packetTimeout := OpenAIPacketTimeout
+	if opts.Timeout > 0 {
+ packetTimeout = time.Duration(opts.Timeout) * time.Millisecond
+ }
doneWaitingForPackets := false
for !doneWaitingForPackets {
select {
- case <-time.After(OpenAIPacketTimeout):
+ case <-time.After(packetTimeout):
// timeout reading from channel
- hadError = true
doneWaitingForPackets = true
asstOutputPk.Error = "timeout waiting for server response"
updateAsstResponseAndWriteToUpdateBus(ctx, cmd, asstMessagePk, asstOutputMessageID)
@@ -2743,7 +2743,6 @@ func doOpenAICmdInfoCompletion(cmd *sstore.CmdType, clientId string, opts *sstor
if ok {
// got a packet
if pk.Error != "" {
- hadError = true
asstOutputPk.Error = pk.Error
}
if pk.Model != "" && pk.Index == 0 {
@@ -2823,10 +2822,14 @@ func doOpenAIStreamCompletion(cmd *sstore.CmdType, clientId string, opts *sstore
writeErrorToPty(cmd, fmt.Sprintf("error calling OpenAI API: %v", err), outputPos)
return
}
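+	// As above, prefer the client-configured AI timeout (milliseconds) over the 10s default.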
+ packetTimeout := OpenAIPacketTimeout
+	if opts.Timeout > 0 {
+ packetTimeout = time.Duration(opts.Timeout) * time.Millisecond
+ }
doneWaitingForPackets := false
for !doneWaitingForPackets {
select {
- case <-time.After(OpenAIPacketTimeout):
+ case <-time.After(packetTimeout):
// timeout reading from channel
hadError = true
pk := openai.CreateErrorPacket(fmt.Sprintf("timeout waiting for server response"))
@@ -2895,7 +2898,7 @@ func OpenAICommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (scbus
return nil, fmt.Errorf("error retrieving client open ai options")
}
opts := clientData.OpenAIOpts
- if opts.APIToken == "" {
+ if opts.APIToken == "" && opts.BaseURL == "" {
if clientData.ClientOpts.NoTelemetry {
return nil, fmt.Errorf(OpenAICloudCompletionTelemetryOffErrorMsg)
}
@@ -5798,6 +5801,15 @@ func validateFontFamily(fontFamily string) error {
return nil
}
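+// CheckOptionAlias returns the value of the first of the given kwarg aliases that is present, or ("", false) if none are set.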
+func CheckOptionAlias(kwargs map[string]string, aliases ...string) (string, bool) {
+ for _, alias := range aliases {
+ if val, found := kwargs[alias]; found {
+ return val, found
+ }
+ }
+ return "", false
+}
+
func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (scbus.UpdatePacket, error) {
clientData, err := sstore.EnsureClientData(ctx)
if err != nil {
@@ -5870,7 +5882,7 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
}
varsUpdated = append(varsUpdated, "termtheme")
}
- if apiToken, found := pk.Kwargs["openaiapitoken"]; found {
+ if apiToken, found := CheckOptionAlias(pk.Kwargs, "openaiapitoken", "aiapitoken"); found {
err = validateOpenAIAPIToken(apiToken)
if err != nil {
return nil, err
@@ -5884,10 +5896,10 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
aiOpts.APIToken = apiToken
err = sstore.UpdateClientOpenAIOpts(ctx, *aiOpts)
if err != nil {
- return nil, fmt.Errorf("error updating client openai api token: %v", err)
+ return nil, fmt.Errorf("error updating client ai api token: %v", err)
}
}
- if aiModel, found := pk.Kwargs["openaimodel"]; found {
+ if aiModel, found := CheckOptionAlias(pk.Kwargs, "openaimodel", "aimodel"); found {
err = validateOpenAIModel(aiModel)
if err != nil {
return nil, err
@@ -5901,16 +5913,16 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
aiOpts.Model = aiModel
err = sstore.UpdateClientOpenAIOpts(ctx, *aiOpts)
if err != nil {
- return nil, fmt.Errorf("error updating client openai model: %v", err)
+ return nil, fmt.Errorf("error updating client ai model: %v", err)
}
}
- if maxTokensStr, found := pk.Kwargs["openaimaxtokens"]; found {
+ if maxTokensStr, found := CheckOptionAlias(pk.Kwargs, "openaimaxtokens", "aimaxtokens"); found {
maxTokens, err := strconv.Atoi(maxTokensStr)
if err != nil {
- return nil, fmt.Errorf("error updating client openai maxtokens, invalid number: %v", err)
+ return nil, fmt.Errorf("error updating client ai maxtokens, invalid number: %v", err)
}
if maxTokens < 0 || maxTokens > 1000000 {
- return nil, fmt.Errorf("error updating client openai maxtokens, out of range: %d", maxTokens)
+ return nil, fmt.Errorf("error updating client ai maxtokens, out of range: %d", maxTokens)
}
varsUpdated = append(varsUpdated, "openaimaxtokens")
aiOpts := clientData.OpenAIOpts
@@ -5921,16 +5933,16 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
aiOpts.MaxTokens = maxTokens
err = sstore.UpdateClientOpenAIOpts(ctx, *aiOpts)
if err != nil {
- return nil, fmt.Errorf("error updating client openai maxtokens: %v", err)
+ return nil, fmt.Errorf("error updating client ai maxtokens: %v", err)
}
}
- if maxChoicesStr, found := pk.Kwargs["openaimaxchoices"]; found {
+ if maxChoicesStr, found := CheckOptionAlias(pk.Kwargs, "openaimaxchoices", "aimaxchoices"); found {
maxChoices, err := strconv.Atoi(maxChoicesStr)
if err != nil {
- return nil, fmt.Errorf("error updating client openai maxchoices, invalid number: %v", err)
+ return nil, fmt.Errorf("error updating client ai maxchoices, invalid number: %v", err)
}
if maxChoices < 0 || maxChoices > 10 {
- return nil, fmt.Errorf("error updating client openai maxchoices, out of range: %d", maxChoices)
+ return nil, fmt.Errorf("error updating client ai maxchoices, out of range: %d", maxChoices)
}
varsUpdated = append(varsUpdated, "openaimaxchoices")
aiOpts := clientData.OpenAIOpts
@@ -5941,10 +5953,10 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
aiOpts.MaxChoices = maxChoices
err = sstore.UpdateClientOpenAIOpts(ctx, *aiOpts)
if err != nil {
- return nil, fmt.Errorf("error updating client openai maxchoices: %v", err)
+ return nil, fmt.Errorf("error updating client ai maxchoices: %v", err)
}
}
- if aiBaseURL, found := pk.Kwargs["openaibaseurl"]; found {
+ if aiBaseURL, found := CheckOptionAlias(pk.Kwargs, "openaibaseurl", "aibaseurl"); found {
aiOpts := clientData.OpenAIOpts
if aiOpts == nil {
aiOpts = &sstore.OpenAIOptsType{}
@@ -5954,7 +5966,24 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
varsUpdated = append(varsUpdated, "openaibaseurl")
err = sstore.UpdateClientOpenAIOpts(ctx, *aiOpts)
if err != nil {
- return nil, fmt.Errorf("error updating client openai base url: %v", err)
+ return nil, fmt.Errorf("error updating client ai base url: %v", err)
+ }
+ }
+ if aiTimeoutStr, found := CheckOptionAlias(pk.Kwargs, "openaitimeout", "aitimeout"); found {
+ aiTimeout, err := strconv.ParseFloat(aiTimeoutStr, 64)
+ if err != nil {
+ return nil, fmt.Errorf("error updating client ai timeout, invalid number: %v", err)
+ }
+ aiOpts := clientData.OpenAIOpts
+ if aiOpts == nil {
+ aiOpts = &sstore.OpenAIOptsType{}
+ clientData.OpenAIOpts = aiOpts
+ }
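+		// The kwarg is given in seconds; store it as milliseconds.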
+ aiOpts.Timeout = int(aiTimeout * 1000)
+ varsUpdated = append(varsUpdated, "openaitimeout")
+ err = sstore.UpdateClientOpenAIOpts(ctx, *aiOpts)
+ if err != nil {
+ return nil, fmt.Errorf("error updating client ai timeout: %v", err)
}
}
if webglStr, found := pk.Kwargs["webgl"]; found {
@@ -5968,7 +5997,7 @@ func ClientSetCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (sc
varsUpdated = append(varsUpdated, "webgl")
}
if len(varsUpdated) == 0 {
- return nil, fmt.Errorf("/client:set requires a value to set: %s", formatStrs([]string{"termfontsize", "termfontfamily", "openaiapitoken", "openaimodel", "openaibaseurl", "openaimaxtokens", "openaimaxchoices", "webgl"}, "or", false))
+ return nil, fmt.Errorf("/client:set requires a value to set: %s", formatStrs([]string{"termfontsize", "termfontfamily", "openaiapitoken", "openaimodel", "openaibaseurl", "openaimaxtokens", "openaimaxchoices", "openaitimeout", "webgl"}, "or", false))
}
clientData, err = sstore.EnsureClientData(ctx)
if err != nil {
@@ -6008,11 +6037,12 @@ func ClientShowCommand(ctx context.Context, pk *scpacket.FeCommandPacketType) (s
buf.WriteString(fmt.Sprintf(" %-15s %d\n", "termfontsize", clientData.FeOpts.TermFontSize))
buf.WriteString(fmt.Sprintf(" %-15s %s\n", "termfontfamily", clientData.FeOpts.TermFontFamily))
buf.WriteString(fmt.Sprintf(" %-15s %s\n", "termfontfamily", clientData.FeOpts.Theme))
- buf.WriteString(fmt.Sprintf(" %-15s %s\n", "openaiapitoken", clientData.OpenAIOpts.APIToken))
- buf.WriteString(fmt.Sprintf(" %-15s %s\n", "openaimodel", clientData.OpenAIOpts.Model))
- buf.WriteString(fmt.Sprintf(" %-15s %d\n", "openaimaxtokens", clientData.OpenAIOpts.MaxTokens))
- buf.WriteString(fmt.Sprintf(" %-15s %d\n", "openaimaxchoices", clientData.OpenAIOpts.MaxChoices))
- buf.WriteString(fmt.Sprintf(" %-15s %s\n", "openaibaseurl", clientData.OpenAIOpts.BaseURL))
+ buf.WriteString(fmt.Sprintf(" %-15s %s\n", "aiapitoken", clientData.OpenAIOpts.APIToken))
+ buf.WriteString(fmt.Sprintf(" %-15s %s\n", "aimodel", clientData.OpenAIOpts.Model))
+ buf.WriteString(fmt.Sprintf(" %-15s %d\n", "aimaxtokens", clientData.OpenAIOpts.MaxTokens))
+ buf.WriteString(fmt.Sprintf(" %-15s %d\n", "aimaxchoices", clientData.OpenAIOpts.MaxChoices))
+ buf.WriteString(fmt.Sprintf(" %-15s %s\n", "aibaseurl", clientData.OpenAIOpts.BaseURL))
+ buf.WriteString(fmt.Sprintf(" %-15s %ss\n", "aitimeout", strconv.FormatFloat((float64(clientData.OpenAIOpts.Timeout)/1000.0), 'f', -1, 64)))
update := scbus.MakeUpdatePacket()
update.AddUpdate(sstore.InfoMsgType{
InfoTitle: fmt.Sprintf("client info"),
diff --git a/wavesrv/pkg/sstore/sstore.go b/wavesrv/pkg/sstore/sstore.go
index c0a5b5e4b..fb5c3d641 100644
--- a/wavesrv/pkg/sstore/sstore.go
+++ b/wavesrv/pkg/sstore/sstore.go
@@ -289,6 +289,8 @@ func (cdata *ClientData) Clean() *ClientData {
Model: cdata.OpenAIOpts.Model,
MaxTokens: cdata.OpenAIOpts.MaxTokens,
MaxChoices: cdata.OpenAIOpts.MaxChoices,
+ Timeout: cdata.OpenAIOpts.Timeout,
+ BaseURL: cdata.OpenAIOpts.BaseURL,
// omit API Token
}
if cdata.OpenAIOpts.APIToken != "" {
@@ -736,6 +738,7 @@ type OpenAIOptsType struct {
BaseURL string `json:"baseurl,omitempty"`
MaxTokens int `json:"maxtokens,omitempty"`
MaxChoices int `json:"maxchoices,omitempty"`
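+	// Timeout is the AI request timeout in milliseconds.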
+ Timeout int `json:"timeout,omitempty"`
}
const (