Fixes. Version 1.2.6

This commit is contained in:
ohAlee 2023-04-23 00:57:00 +02:00
parent 3a77cf0ee7
commit 62eb8abf71
4 changed files with 38 additions and 14 deletions

View File

@@ -4,7 +4,7 @@ plugins {
 }
 
 group = 'it.ohalee.minecraftgpt'
-version = '1.2.5'
+version = '1.2.6'
 sourceCompatibility = 1.17
 targetCompatibility = 1.17

View File

@@ -1,6 +1,7 @@
 package it.ohalee.minecraftgpt;
 
 import com.theokanning.openai.completion.CompletionRequest;
+import com.theokanning.openai.completion.chat.ChatCompletionRequest;
 import com.theokanning.openai.service.OpenAiService;
 import org.bukkit.configuration.ConfigurationSection;
 import retrofit2.HttpException;
@@ -20,17 +21,40 @@ public class OpenAI {
     public static CompletableFuture<String> getResponse(ConfigurationSection section, StringBuilder cached, String message) {
         cached.append("\nHuman:").append(message).append("\nAI:");
-        return CompletableFuture.supplyAsync(() -> service.createCompletion(CompletionRequest.builder()
-                .prompt(cached.toString())
-                .model(section.getString("model"))
-                .temperature(section.getDouble("temperature"))
-                .maxTokens(section.getInt("max-tokens"))
-                .topP(section.getDouble("top-p"))
-                .frequencyPenalty(section.getDouble("frequency-penalty"))
-                .presencePenalty(section.getDouble("presence-penalty"))
-                .stop(Arrays.asList("Human:", "AI:"))
-                .build())
-                .getChoices().get(0).getText()).exceptionally(throwable -> {
+        return CompletableFuture.supplyAsync(() -> {
+            String model = section.getString("model", "text-davinci-003");
+            int maxTokens = section.getInt("max-tokens");
+            double frequencyPenalty = section.getDouble("frequency-penalty");
+            double presencePenalty = section.getDouble("presence-penalty");
+            double topP = section.getDouble("top-p");
+            double temperature = section.getDouble("temperature");
+
+            if (model.startsWith("gpt-4") || model.startsWith("gpt-3.5")) {
+                return service.createChatCompletion(ChatCompletionRequest.builder()
+                        .model(model)
+                        .temperature(temperature)
+                        .maxTokens(maxTokens)
+                        .topP(topP)
+                        .frequencyPenalty(frequencyPenalty)
+                        .presencePenalty(presencePenalty)
+                        .stop(Arrays.asList("Human:", "AI:"))
+                        .build())
+                        .getChoices().get(0).getMessage().getContent();
+            }
+            return service.createCompletion(CompletionRequest.builder()
+                    .prompt(cached.toString())
+                    .model(model)
+                    .temperature(temperature)
+                    .maxTokens(maxTokens)
+                    .topP(topP)
+                    .frequencyPenalty(frequencyPenalty)
+                    .presencePenalty(presencePenalty)
+                    .stop(Arrays.asList("Human:", "AI:"))
+                    .build())
+                    .getChoices().get(0).getText();
+        }).exceptionally(throwable -> {
             if (throwable.getCause() instanceof HttpException e) {
                 String reason = switch (e.response().code()) {
                     case 401 -> "Invalid API key! Please check your configuration.";

View File

@@ -14,7 +14,7 @@ format:
 chatgpt:
   # https://platform.openai.com/docs/models/
-  model: "gpt-3.5-turbo"
+  model: "text-davinci-003"
   temperature: 0.9
   max-tokens: 150
   top-p: 1.0

View File

@@ -1,6 +1,6 @@
 name: MinecraftGPT
 main: it.ohalee.minecraftgpt.Main
-version: 1.2.5
+version: 1.2.6
 author: ohAlee
 description: A Minecraft plugin that uses ChatGPT
 api-version: 1.16