From 6146f9e0c67b1779ac99869d8864fff2f0dc5fb5 Mon Sep 17 00:00:00 2001
From: Sar <97328936+mcraftbbs@users.noreply.github.com>
Date: Wed, 12 Feb 2025 23:36:49 +0800
Subject: [PATCH] feat(plugin) Add AI API access support and multi-language
 file functionality

- Added support for AI API, allowing the plugin to call external AI services
- Implemented multi-language file support, enabling users to choose different display languages
- Updated configuration file format to accommodate new language selection
- Fixed compatibility issues with previous versions
---
 src/main/java/com/ollamachat/AIService.java  |  58 +++++
 src/main/java/com/ollamachat/Ollamachat.java | 244 ++++++++++++++-----
 src/main/resources/config.yml                |  14 +-
 src/main/resources/lang/en.lang              |  13 +
 src/main/resources/lang/zh_cn.lang           |  13 +
 src/main/resources/plugin.yml                |  28 ++-
 6 files changed, 301 insertions(+), 69 deletions(-)
 create mode 100644 src/main/java/com/ollamachat/AIService.java
 create mode 100644 src/main/resources/lang/en.lang
 create mode 100644 src/main/resources/lang/zh_cn.lang

diff --git a/src/main/java/com/ollamachat/AIService.java b/src/main/java/com/ollamachat/AIService.java
new file mode 100644
index 0000000..1f1f2c3
--- /dev/null
+++ b/src/main/java/com/ollamachat/AIService.java
@@ -0,0 +1,58 @@
+package com.ollamachat;
+
+import com.google.gson.Gson;
+import java.net.URI;
+import java.net.http.HttpClient;
+import java.net.http.HttpRequest;
+import java.net.http.HttpResponse;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+
+public class AIService {
+
+    private final HttpClient httpClient;
+    private final Gson gson;
+
+    public AIService() {
+        this.httpClient = HttpClient.newHttpClient();
+        this.gson = new Gson();
+    }
+
+    public CompletableFuture<String> sendRequest(String apiUrl, String apiKey, String model, String prompt) {
+        return CompletableFuture.supplyAsync(() -> {
+            try {
+                Map<String, Object> requestBody = Map.of(
+                        "model", model,
+                        "prompt", prompt,
+                        "stream", false
+                );
+
+                String jsonRequest = gson.toJson(requestBody);
+
+                HttpRequest.Builder requestBuilder = HttpRequest.newBuilder()
+                        .uri(URI.create(apiUrl))
+                        .header("Content-Type", "application/json")
+                        .POST(HttpRequest.BodyPublishers.ofString(jsonRequest));
+
+                if (apiKey != null && !apiKey.isEmpty()) {
+                    requestBuilder.header("Authorization", "Bearer " + apiKey);
+                }
+
+                HttpRequest request = requestBuilder.build();
+
+                HttpResponse<String> response = httpClient.send(
+                        request,
+                        HttpResponse.BodyHandlers.ofString()
+                );
+
+                if (response.statusCode() == 200) {
+                    return response.body();
+                } else {
+                    throw new RuntimeException("AI API Error: " + response.body());
+                }
+            } catch (Exception e) {
+                throw new RuntimeException("Failed to get response from AI: " + e.getMessage(), e);
+            }
+        });
+    }
+}
diff --git a/src/main/java/com/ollamachat/Ollamachat.java b/src/main/java/com/ollamachat/Ollamachat.java
index 94f739e..3ec7668 100644
--- a/src/main/java/com/ollamachat/Ollamachat.java
+++ b/src/main/java/com/ollamachat/Ollamachat.java
@@ -5,127 +5,247 @@ import org.bukkit.Bukkit;
 import org.bukkit.command.Command;
 import org.bukkit.command.CommandSender;
 import org.bukkit.configuration.file.FileConfiguration;
+import org.bukkit.configuration.file.YamlConfiguration;
 import org.bukkit.entity.Player;
 import org.bukkit.event.EventHandler;
 import org.bukkit.event.Listener;
 import org.bukkit.event.player.AsyncPlayerChatEvent;
 import org.bukkit.plugin.java.JavaPlugin;
 
-import java.net.URI;
-import java.net.http.HttpClient;
-import java.net.http.HttpRequest;
-import java.net.http.HttpResponse;
+import java.io.File;
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
 
 public class Ollamachat extends JavaPlugin implements Listener {
-    private HttpClient httpClient;
+    private AIService aiService;
     private Gson gson;
-    private String apiUrl;
+    private String ollamaApiUrl;
+    private String ollamaModel;
+    private String triggerPrefix;
+    private int maxResponseLength;
+    private Map<String, AIConfig> otherAIConfigs;
+    private boolean ollamaEnabled;
+    private Map<String, Boolean> otherAIEnabled;
+
+    private FileConfiguration langConfig;
 
     @Override
     public void onEnable() {
         saveDefaultConfig();
         reloadConfigValues();
+        String language = getConfig().getString("language", "en");
+        loadLanguageFile(language);
 
-        httpClient = HttpClient.newHttpClient();
+        aiService = new AIService();
         gson = new Gson();
 
         getServer().getPluginManager().registerEvents(this, this);
-        getCommand("ollamareload").setExecutor(this);
+        getCommand("ollamachat").setExecutor(this);
+        getCommand("aichat").setExecutor(this);
+    }
+
+    private void updateConfig() {
+        FileConfiguration config = getConfig();
+
+        if (!config.contains("ollama-enabled")) {
+            config.set("ollama-enabled", true);
+        }
+        if (!config.contains("language")) {
+            config.set("language", "en");
+        }
+        if (!config.contains("other-ai-configs")) {
+            config.createSection("other-ai-configs");
+        }
+
+        saveConfig();
     }
 
     private void reloadConfigValues() {
+        reloadConfig();
+        updateConfig();
+
         FileConfiguration config = getConfig();
-        apiUrl = config.getString("ollama-api-url", "http://localhost:11434/api/generate");
+        ollamaApiUrl = config.getString("ollama-api-url", "http://localhost:11434/api/generate");
+        ollamaModel = config.getString("model", "llama3");
+        triggerPrefix = config.getString("trigger-prefix", "@bot ");
+        maxResponseLength = config.getInt("max-response-length", 500);
+        ollamaEnabled = config.getBoolean("ollama-enabled", true);
+
+        otherAIConfigs = new HashMap<>();
+        otherAIEnabled = new HashMap<>();
+        if (config.contains("other-ai-configs")) {
+            for (String aiName : config.getConfigurationSection("other-ai-configs").getKeys(false)) {
+                String apiUrl = config.getString("other-ai-configs." + aiName + ".api-url");
+                String apiKey = config.getString("other-ai-configs." + aiName + ".api-key");
+                String model = config.getString("other-ai-configs." + aiName + ".model");
+                boolean enabled = config.getBoolean("other-ai-configs." + aiName + ".enabled", true);
+                otherAIConfigs.put(aiName, new AIConfig(apiUrl, apiKey, model));
+                otherAIEnabled.put(aiName, enabled);
+            }
+        }
+    }
+
+    private void loadLanguageFile(String language) {
+        File langFolder = new File(getDataFolder(), "lang");
+        if (!langFolder.exists()) {
+            langFolder.mkdirs();
+        }
+
+        File langFile = new File(langFolder, language + ".lang");
+        if (!langFile.exists()) {
+            saveResource("lang/" + language + ".lang", false);
+        }
+
+        try {
+            langConfig = YamlConfiguration.loadConfiguration(langFile);
+        } catch (Exception e) {
+            getLogger().severe("Failed to load language file: " + langFile.getName());
+            e.printStackTrace();
+        }
+    }
+
+    private String getMessage(String key, Map<String, String> placeholders) {
+        String message = langConfig.getString(key, "§cMissing language key: " + key);
+        if (placeholders != null) {
+            for (Map.Entry<String, String> entry : placeholders.entrySet()) {
+                message = message.replace("{" + entry.getKey() + "}", entry.getValue());
+            }
+        }
+        return message;
     }
 
     @EventHandler
     public void onPlayerChat(AsyncPlayerChatEvent event) {
+        if (!ollamaEnabled) return;
+
         String message = event.getMessage();
         Player player = event.getPlayer();
-        String triggerPrefix = getConfig().getString("trigger-prefix", "@bot ");
 
         if (message.startsWith(triggerPrefix)) {
             event.setCancelled(true);
             String prompt = message.substring(triggerPrefix.length()).trim();
 
             if (!prompt.isEmpty()) {
-                processQueryAsync(player, prompt);
+                processOllamaQueryAsync(player, prompt);
             }
         }
     }
 
-    private void processQueryAsync(Player player, String prompt) {
+    private void processOllamaQueryAsync(Player player, String prompt) {
         CompletableFuture.runAsync(() -> {
             try {
-                Map<String, Object> requestBody = new HashMap<>();
-                requestBody.put("model", getConfig().getString("model"));
-                requestBody.put("prompt", prompt);
-                requestBody.put("stream", false);
-
-                String jsonRequest = gson.toJson(requestBody);
-
-                HttpRequest request =
HttpRequest.newBuilder() - .uri(URI.create(apiUrl)) - .header("Content-Type", "application/json") - .POST(HttpRequest.BodyPublishers.ofString(jsonRequest)) - .build(); - - HttpResponse response = httpClient.send( - request, - HttpResponse.BodyHandlers.ofString() - ); - - if (response.statusCode() == 200) { - OllamaResponse ollamaResponse = gson.fromJson( - response.body(), - OllamaResponse.class - ); - sendFormattedResponse(player, ollamaResponse.response); - } else { - sendErrorMessage(player, "Ollama API Error: " + response.body()); - } + String responseBody = aiService.sendRequest(ollamaApiUrl, null, ollamaModel, prompt).join(); + OllamaResponse ollamaResponse = gson.fromJson(responseBody, OllamaResponse.class); + sendFormattedResponse(player, ollamaResponse.response); } catch (Exception e) { getLogger().severe("Error processing Ollama request: " + e.getMessage()); - sendErrorMessage(player, "Failed to get response from AI"); + sendErrorMessage(player, getMessage("error-prefix", null) + "Failed to get response from Ollama"); + } + }); + } + + private void processOtherAIQueryAsync(Player player, String aiName, String prompt) { + if (!otherAIEnabled.getOrDefault(aiName, false)) { + sendErrorMessage(player, getMessage("error-prefix", null) + getMessage("toggle-disabled", Map.of("ai-name", aiName))); + return; + } + + CompletableFuture.runAsync(() -> { + try { + AIConfig aiConfig = otherAIConfigs.get(aiName); + String responseBody = aiService.sendRequest(aiConfig.getApiUrl(), aiConfig.getApiKey(), aiConfig.getModel(), prompt).join(); + sendFormattedResponse(player, responseBody); + } catch (Exception e) { + getLogger().severe("Error processing " + aiName + " request: " + e.getMessage()); + sendErrorMessage(player, getMessage("error-prefix", null) + "Failed to get response from " + aiName); } }); } private void sendFormattedResponse(Player player, String response) { - Bukkit.getScheduler().runTask(this, () -> { - String prefix = 
getConfig().getString("response-prefix", "[AI] "); - String formatted = prefix + response.replace("\\n", "\n"); - - for (String line : formatted.split("\n")) { - if (!line.trim().isEmpty()) { - player.sendMessage(line); - } - } - }); + if (response.length() > maxResponseLength) { + response = response.substring(0, maxResponseLength) + "..."; + } + player.sendMessage(getMessage("response-prefix", null) + response); } - private void sendErrorMessage(Player player, String message) { - Bukkit.getScheduler().runTask(this, () -> { - player.sendMessage("§c" + message); - }); + private void sendErrorMessage(Player player, String errorMessage) { + player.sendMessage(getMessage("error-prefix", null) + errorMessage); } @Override - public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) { - if (cmd.getName().equalsIgnoreCase("ollamareload")) { - reloadConfig(); - reloadConfigValues(); - sender.sendMessage("§aOllama configuration reloaded!"); + public boolean onCommand(CommandSender sender, Command command, String label, String[] args) { + if (command.getName().equalsIgnoreCase("ollamachat")) { + if (args.length > 0 && args[0].equalsIgnoreCase("reload")) { + reloadConfigValues(); + loadLanguageFile(getConfig().getString("language", "en")); + sender.sendMessage(getMessage("reload-success", null)); + return true; + } else if (args.length > 1 && args[0].equalsIgnoreCase("toggle")) { + String aiName = args[1]; + if (aiName.equalsIgnoreCase("ollama")) { + ollamaEnabled = !ollamaEnabled; + sender.sendMessage(getMessage(ollamaEnabled ? "ollama-enabled" : "ollama-disabled", null)); + } else if (otherAIConfigs.containsKey(aiName)) { + boolean newState = !otherAIEnabled.getOrDefault(aiName, false); + otherAIEnabled.put(aiName, newState); + sender.sendMessage(getMessage(newState ? 
"toggle-enabled" : "toggle-disabled", Map.of("ai-name", aiName))); + } else { + sender.sendMessage(getMessage("invalid-ai-name", Map.of("ai-list", String.join(", ", otherAIConfigs.keySet())))); + } + return true; + } + } else if (command.getName().equalsIgnoreCase("aichat")) { + if (args.length < 2) { + sender.sendMessage(getMessage("usage-aichat", null)); + return true; + } + + String aiName = args[0]; + String prompt = String.join(" ", java.util.Arrays.copyOfRange(args, 1, args.length)); + + if (otherAIConfigs.containsKey(aiName)) { + if (sender instanceof Player) { + processOtherAIQueryAsync((Player) sender, aiName, prompt); + } else { + sender.sendMessage(getMessage("player-only", null)); + } + } else { + sender.sendMessage(getMessage("invalid-ai-name", Map.of("ai-list", String.join(", ", otherAIConfigs.keySet())))); + } return true; } return false; } - private static class OllamaResponse { - String response; + private static class AIConfig { + private final String apiUrl; + private final String apiKey; + private final String model; + + public AIConfig(String apiUrl, String apiKey, String model) { + this.apiUrl = apiUrl; + this.apiKey = apiKey; + this.model = model; + } + + public String getApiUrl() { + return apiUrl; + } + + public String getApiKey() { + return apiKey; + } + + public String getModel() { + return model; + } } -} + + private static class OllamaResponse { + public String response; + } +} \ No newline at end of file diff --git a/src/main/resources/config.yml b/src/main/resources/config.yml index 8c4c7a0..d2fe8c9 100644 --- a/src/main/resources/config.yml +++ b/src/main/resources/config.yml @@ -1,10 +1,22 @@ # Ollama API ollama-api-url: "http://localhost:11434/api/generate" model: "llama3" +ollama-enabled: true # Chat trigger-prefix: "@bot " response-prefix: "§b[AI] §r" # Length -max-response-length: 500 \ No newline at end of file +max-response-length: 500 + +# Language Settings +language: "en" + +# Other AI Configurations +other-ai-configs: + 
  openai:
+    api-url: "https://api.openai.com/v1/chat/completions"
+    api-key: "your-openai-api-key"
+    model: "gpt-4"
+    enabled: true
\ No newline at end of file
diff --git a/src/main/resources/lang/en.lang b/src/main/resources/lang/en.lang
new file mode 100644
index 0000000..113e26c
--- /dev/null
+++ b/src/main/resources/lang/en.lang
@@ -0,0 +1,13 @@
+# General messages
+reload-success: "§aConfiguration reloaded."
+toggle-enabled: "§a{ai-name} is now enabled."
+toggle-disabled: "§a{ai-name} is now disabled."
+invalid-ai-name: "§cInvalid AI name. Available AIs: {ai-list}."
+usage-aichat: "§cUsage: /aichat <ai-name> <prompt>"
+player-only: "§cThis command can only be used by players."
+ollama-enabled: "§aOllama is now enabled."
+ollama-disabled: "§aOllama is now disabled."
+
+# Chat interaction
+response-prefix: "§b[AI] §r"
+error-prefix: "§c[Error] §r"
\ No newline at end of file
diff --git a/src/main/resources/lang/zh_cn.lang b/src/main/resources/lang/zh_cn.lang
new file mode 100644
index 0000000..2fff9d3
--- /dev/null
+++ b/src/main/resources/lang/zh_cn.lang
@@ -0,0 +1,13 @@
+# 通用消息
+reload-success: "§a配置已重新加载。"
+toggle-enabled: "§a{ai-name} 已启用。"
+toggle-disabled: "§a{ai-name} 已禁用。"
+invalid-ai-name: "§c无效的 AI 名称。可用的 AI: {ai-list}。"
+usage-aichat: "§c用法: /aichat <AI名称> <提示>"
+player-only: "§c该命令只能由玩家使用。"
+ollama-enabled: "§aOllama 已启用。"
+ollama-disabled: "§aOllama 已禁用。"
+
+# 聊天交互
+response-prefix: "§b[AI] §r"
+error-prefix: "§c[错误] §r"
\ No newline at end of file
diff --git a/src/main/resources/plugin.yml b/src/main/resources/plugin.yml
index 943e529..6fde17e 100644
--- a/src/main/resources/plugin.yml
+++ b/src/main/resources/plugin.yml
@@ -1,11 +1,27 @@
 name: ollamachat
-version: '1.0'
+version: '1.0.1'
 main: com.ollamachat.Ollamachat
 api-version: '1.21'
-authors: [ xwwsdd ]
+authors: [xwwsdd]
 description: A plugin used to connect Ollama with Minecraft.
-website: https://forum.sarskin.cn/
+website: https://chat.sarskin.cn/invite/iHgI6LTX
+
 commands:
-  ollamareload:
-    description: Reload plugin configuration
-    usage: /ollamareload
+  ollamachat:
+    description: Manage OllamaChat plugin (reload configuration or toggle AI)
+    usage: |
+      /ollamachat reload - Reload the plugin configuration
+      /ollamachat toggle <ai-name> - Enable or disable an AI service
+    permission: ollamachat.admin
+  aichat:
+    description: Interact with other AI services
+    usage: /aichat <ai-name> <prompt>
+    permission: ollamachat.use
+
+permissions:
+  ollamachat.admin:
+    description: Allows managing the plugin (reload and toggle AI)
+    default: op
+  ollamachat.use:
+    description: Allows using the /aichat command
+    default: true
\ No newline at end of file