diff --git a/build.gradle b/build.gradle
index 1811515..c1a9048 100644
--- a/build.gradle
+++ b/build.gradle
@@ -4,7 +4,7 @@ plugins {
 }
 
 group = 'it.ohalee.minecraftgpt'
-version = '1.2.5'
+version = '1.2.6'
 sourceCompatibility = 1.17
 targetCompatibility = 1.17
 
diff --git a/src/main/java/it/ohalee/minecraftgpt/OpenAI.java b/src/main/java/it/ohalee/minecraftgpt/OpenAI.java
index 061697d..801b538 100644
--- a/src/main/java/it/ohalee/minecraftgpt/OpenAI.java
+++ b/src/main/java/it/ohalee/minecraftgpt/OpenAI.java
@@ -1,6 +1,7 @@
 package it.ohalee.minecraftgpt;
 
 import com.theokanning.openai.completion.CompletionRequest;
+import com.theokanning.openai.completion.chat.ChatCompletionRequest;
 import com.theokanning.openai.service.OpenAiService;
 import org.bukkit.configuration.ConfigurationSection;
 import retrofit2.HttpException;
@@ -20,17 +21,40 @@ public class OpenAI {
 
     public static CompletableFuture getResponse(ConfigurationSection section, StringBuilder cached, String message) {
         cached.append("\nHuman:").append(message).append("\nAI:");
-        return CompletableFuture.supplyAsync(() -> service.createCompletion(CompletionRequest.builder()
-                .prompt(cached.toString())
-                .model(section.getString("model"))
-                .temperature(section.getDouble("temperature"))
-                .maxTokens(section.getInt("max-tokens"))
-                .topP(section.getDouble("top-p"))
-                .frequencyPenalty(section.getDouble("frequency-penalty"))
-                .presencePenalty(section.getDouble("presence-penalty"))
-                .stop(Arrays.asList("Human:", "AI:"))
-                .build())
-                .getChoices().get(0).getText()).exceptionally(throwable -> {
+        return CompletableFuture.supplyAsync(() -> {
+            String model = section.getString("model", "text-davinci-003");
+            int maxTokens = section.getInt("max-tokens");
+            double frequencyPenalty = section.getDouble("frequency-penalty");
+            double presencePenalty = section.getDouble("presence-penalty");
+            double topP = section.getDouble("top-p");
+            double temperature = section.getDouble("temperature");
+
+            if (model.startsWith("gpt-4") || model.startsWith("gpt-3.5")) {
+                return service.createChatCompletion(ChatCompletionRequest.builder()
+                        .model(model)
+                        .temperature(temperature)
+                        .maxTokens(maxTokens)
+                        .topP(topP)
+                        .frequencyPenalty(frequencyPenalty)
+                        .presencePenalty(presencePenalty)
+                        .stop(Arrays.asList("Human:", "AI:"))
+                        .build())
+                        .getChoices().get(0).getMessage().getContent();
+            }
+
+            return service.createCompletion(CompletionRequest.builder()
+                    .prompt(cached.toString())
+                    .model(model)
+                    .temperature(temperature)
+                    .maxTokens(maxTokens)
+                    .topP(topP)
+                    .frequencyPenalty(frequencyPenalty)
+                    .presencePenalty(presencePenalty)
+                    .stop(Arrays.asList("Human:", "AI:"))
+                    .build())
+                    .getChoices().get(0).getText();
+
+        }).exceptionally(throwable -> {
             if (throwable.getCause() instanceof HttpException e) {
                 String reason = switch (e.response().code()) {
                     case 401 -> "Invalid API key! Please check your configuration.";
diff --git a/src/main/resources/config.yml b/src/main/resources/config.yml
index 375b393..3c13d48 100644
--- a/src/main/resources/config.yml
+++ b/src/main/resources/config.yml
@@ -14,7 +14,7 @@ format:
 
 chatgpt:
   # https://platform.openai.com/docs/models/
-  model: "gpt-3.5-turbo"
+  model: "text-davinci-003"
   temperature: 0.9
   max-tokens: 150
   top-p: 1.0
diff --git a/src/main/resources/plugin.yml b/src/main/resources/plugin.yml
index ff98bc2..b88216f 100644
--- a/src/main/resources/plugin.yml
+++ b/src/main/resources/plugin.yml
@@ -1,6 +1,6 @@
 name: MinecraftGPT
 main: it.ohalee.minecraftgpt.Main
-version: 1.2.5
+version: 1.2.6
 author: ohAlee
 description: A Minecraft plugin that uses ChatGPT
 api-version: 1.16
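
Note on the new chat-completion branch in OpenAI.java: the ChatCompletionRequest is built without any messages, so the cached "Human:"/"AI:" transcript only reaches the legacy completion path via .prompt(cached.toString()). Below is a minimal sketch of how that transcript could be attached for gpt-3.5/gpt-4 models, assuming the ChatMessage and ChatMessageRole classes that ship alongside ChatCompletionRequest in the same com.theokanning.openai package; the class ChatRequestSketch and the helper buildChatMessages are hypothetical names for illustration, not part of this repository.

// Sketch only: convert the plugin's cached "Human:"/"AI:" transcript into chat
// messages and pass them to the builder (buildChatMessages is a hypothetical helper).
import com.theokanning.openai.completion.chat.ChatCompletionRequest;
import com.theokanning.openai.completion.chat.ChatMessage;
import com.theokanning.openai.completion.chat.ChatMessageRole;

import java.util.ArrayList;
import java.util.List;

public final class ChatRequestSketch {

    // Turns the cached prompt ("...\nHuman:<text>\nAI:<text>...") into ChatMessage objects.
    static List<ChatMessage> buildChatMessages(String cachedPrompt) {
        List<ChatMessage> messages = new ArrayList<>();
        for (String line : cachedPrompt.split("\n")) {
            if (line.startsWith("Human:")) {
                messages.add(new ChatMessage(ChatMessageRole.USER.value(), line.substring("Human:".length())));
            } else if (line.startsWith("AI:") && line.length() > "AI:".length()) {
                messages.add(new ChatMessage(ChatMessageRole.ASSISTANT.value(), line.substring("AI:".length())));
            }
        }
        return messages;
    }

    // Example of wiring the messages into a request like the one built in OpenAI.getResponse.
    static ChatCompletionRequest withMessages(String model, String cachedPrompt, int maxTokens) {
        return ChatCompletionRequest.builder()
                .model(model)
                .messages(buildChatMessages(cachedPrompt)) // not present in the diff above
                .maxTokens(maxTokens)
                .build();
    }
}

With something along these lines, the gpt-3.5/gpt-4 branch would receive the same conversation history that the text-davinci-003 branch already gets through .prompt(cached.toString()).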