feat(plugin): add AI API access support and multi-language file functionality

Add AI API access support and multi-language file functionality
- Added support for AI API, allowing the plugin to call external AI services
- Implemented multi-language file support, enabling users to choose different display languages
- Updated configuration file format to accommodate new language selection
- Fixed compatibility issues with previous versions
This commit is contained in:
Sar 2025-02-12 23:36:49 +08:00 committed by GitHub
parent 8f7529337e
commit 6146f9e0c6
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 301 additions and 69 deletions

View File

@ -0,0 +1,58 @@
package com.ollamachat;
import com.google.gson.Gson;

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.time.Duration;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
public class AIService {
private final HttpClient httpClient;
private final Gson gson;
public AIService() {
this.httpClient = HttpClient.newHttpClient();
this.gson = new Gson();
}
public CompletableFuture<String> sendRequest(String apiUrl, String apiKey, String model, String prompt) {
return CompletableFuture.supplyAsync(() -> {
try {
Map<String, Object> requestBody = Map.of(
"model", model,
"prompt", prompt,
"stream", false
);
String jsonRequest = gson.toJson(requestBody);
HttpRequest.Builder requestBuilder = HttpRequest.newBuilder()
.uri(URI.create(apiUrl))
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(jsonRequest));
if (apiKey != null && !apiKey.isEmpty()) {
requestBuilder.header("Authorization", "Bearer " + apiKey);
}
HttpRequest request = requestBuilder.build();
HttpResponse<String> response = httpClient.send(
request,
HttpResponse.BodyHandlers.ofString()
);
if (response.statusCode() == 200) {
return response.body();
} else {
throw new RuntimeException("AI API Error: " + response.body());
}
} catch (Exception e) {
throw new RuntimeException("Failed to get response from AI: " + e.getMessage(), e);
}
});
}
}

View File

@ -5,127 +5,247 @@ import org.bukkit.Bukkit;
import org.bukkit.command.Command;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.plugin.java.JavaPlugin;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
public class Ollamachat extends JavaPlugin implements Listener {
private HttpClient httpClient;
private AIService aiService;
private Gson gson;
private String apiUrl;
private String ollamaApiUrl;
private String ollamaModel;
private String triggerPrefix;
private int maxResponseLength;
private Map<String, AIConfig> otherAIConfigs;
private boolean ollamaEnabled;
private Map<String, Boolean> otherAIEnabled;
private FileConfiguration langConfig;
@Override
public void onEnable() {
saveDefaultConfig();
reloadConfigValues();
String language = getConfig().getString("language", "en");
loadLanguageFile(language);
httpClient = HttpClient.newHttpClient();
aiService = new AIService();
gson = new Gson();
getServer().getPluginManager().registerEvents(this, this);
getCommand("ollamareload").setExecutor(this);
getCommand("ollamachat").setExecutor(this);
getCommand("aichat").setExecutor(this);
}
private void updateConfig() {
FileConfiguration config = getConfig();
if (!config.contains("ollama-enabled")) {
config.set("ollama-enabled", true);
}
if (!config.contains("language")) {
config.set("language", "en");
}
if (!config.contains("other-ai-configs")) {
config.createSection("other-ai-configs");
}
saveConfig();
}
private void reloadConfigValues() {
reloadConfig();
updateConfig();
FileConfiguration config = getConfig();
apiUrl = config.getString("ollama-api-url", "http://localhost:11434/api/generate");
ollamaApiUrl = config.getString("ollama-api-url", "http://localhost:11434/api/generate");
ollamaModel = config.getString("model", "llama3");
triggerPrefix = config.getString("trigger-prefix", "@bot ");
maxResponseLength = config.getInt("max-response-length", 500);
ollamaEnabled = config.getBoolean("ollama-enabled", true);
otherAIConfigs = new HashMap<>();
otherAIEnabled = new HashMap<>();
if (config.contains("other-ai-configs")) {
for (String aiName : config.getConfigurationSection("other-ai-configs").getKeys(false)) {
String apiUrl = config.getString("other-ai-configs." + aiName + ".api-url");
String apiKey = config.getString("other-ai-configs." + aiName + ".api-key");
String model = config.getString("other-ai-configs." + aiName + ".model");
boolean enabled = config.getBoolean("other-ai-configs." + aiName + ".enabled", true);
otherAIConfigs.put(aiName, new AIConfig(apiUrl, apiKey, model));
otherAIEnabled.put(aiName, enabled);
}
}
}
private void loadLanguageFile(String language) {
File langFolder = new File(getDataFolder(), "lang");
if (!langFolder.exists()) {
langFolder.mkdirs();
}
File langFile = new File(langFolder, language + ".lang");
if (!langFile.exists()) {
saveResource("lang/" + language + ".lang", false);
}
try {
langConfig = YamlConfiguration.loadConfiguration(langFile);
} catch (Exception e) {
getLogger().severe("Failed to load language file: " + langFile.getName());
e.printStackTrace();
}
}
private String getMessage(String key, Map<String, String> placeholders) {
String message = langConfig.getString(key, "§cMissing language key: " + key);
if (placeholders != null) {
for (Map.Entry<String, String> entry : placeholders.entrySet()) {
message = message.replace("{" + entry.getKey() + "}", entry.getValue());
}
}
return message;
}
@EventHandler
public void onPlayerChat(AsyncPlayerChatEvent event) {
if (!ollamaEnabled) return;
String message = event.getMessage();
Player player = event.getPlayer();
String triggerPrefix = getConfig().getString("trigger-prefix", "@bot ");
if (message.startsWith(triggerPrefix)) {
event.setCancelled(true);
String prompt = message.substring(triggerPrefix.length()).trim();
if (!prompt.isEmpty()) {
processQueryAsync(player, prompt);
processOllamaQueryAsync(player, prompt);
}
}
}
private void processQueryAsync(Player player, String prompt) {
private void processOllamaQueryAsync(Player player, String prompt) {
CompletableFuture.runAsync(() -> {
try {
Map<String, Object> requestBody = new HashMap<>();
requestBody.put("model", getConfig().getString("model"));
requestBody.put("prompt", prompt);
requestBody.put("stream", false);
String jsonRequest = gson.toJson(requestBody);
HttpRequest request = HttpRequest.newBuilder()
.uri(URI.create(apiUrl))
.header("Content-Type", "application/json")
.POST(HttpRequest.BodyPublishers.ofString(jsonRequest))
.build();
HttpResponse<String> response = httpClient.send(
request,
HttpResponse.BodyHandlers.ofString()
);
if (response.statusCode() == 200) {
OllamaResponse ollamaResponse = gson.fromJson(
response.body(),
OllamaResponse.class
);
sendFormattedResponse(player, ollamaResponse.response);
} else {
sendErrorMessage(player, "Ollama API Error: " + response.body());
}
String responseBody = aiService.sendRequest(ollamaApiUrl, null, ollamaModel, prompt).join();
OllamaResponse ollamaResponse = gson.fromJson(responseBody, OllamaResponse.class);
sendFormattedResponse(player, ollamaResponse.response);
} catch (Exception e) {
getLogger().severe("Error processing Ollama request: " + e.getMessage());
sendErrorMessage(player, "Failed to get response from AI");
sendErrorMessage(player, getMessage("error-prefix", null) + "Failed to get response from Ollama");
}
});
}
private void processOtherAIQueryAsync(Player player, String aiName, String prompt) {
if (!otherAIEnabled.getOrDefault(aiName, false)) {
sendErrorMessage(player, getMessage("error-prefix", null) + getMessage("toggle-disabled", Map.of("ai-name", aiName)));
return;
}
CompletableFuture.runAsync(() -> {
try {
AIConfig aiConfig = otherAIConfigs.get(aiName);
String responseBody = aiService.sendRequest(aiConfig.getApiUrl(), aiConfig.getApiKey(), aiConfig.getModel(), prompt).join();
sendFormattedResponse(player, responseBody);
} catch (Exception e) {
getLogger().severe("Error processing " + aiName + " request: " + e.getMessage());
sendErrorMessage(player, getMessage("error-prefix", null) + "Failed to get response from " + aiName);
}
});
}
private void sendFormattedResponse(Player player, String response) {
Bukkit.getScheduler().runTask(this, () -> {
String prefix = getConfig().getString("response-prefix", "[AI] ");
String formatted = prefix + response.replace("\\n", "\n");
for (String line : formatted.split("\n")) {
if (!line.trim().isEmpty()) {
player.sendMessage(line);
}
}
});
if (response.length() > maxResponseLength) {
response = response.substring(0, maxResponseLength) + "...";
}
player.sendMessage(getMessage("response-prefix", null) + response);
}
private void sendErrorMessage(Player player, String message) {
Bukkit.getScheduler().runTask(this, () -> {
player.sendMessage("§c" + message);
});
private void sendErrorMessage(Player player, String errorMessage) {
player.sendMessage(getMessage("error-prefix", null) + errorMessage);
}
@Override
public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
if (cmd.getName().equalsIgnoreCase("ollamareload")) {
reloadConfig();
reloadConfigValues();
sender.sendMessage("§aOllama configuration reloaded!");
public boolean onCommand(CommandSender sender, Command command, String label, String[] args) {
if (command.getName().equalsIgnoreCase("ollamachat")) {
if (args.length > 0 && args[0].equalsIgnoreCase("reload")) {
reloadConfigValues();
loadLanguageFile(getConfig().getString("language", "en"));
sender.sendMessage(getMessage("reload-success", null));
return true;
} else if (args.length > 1 && args[0].equalsIgnoreCase("toggle")) {
String aiName = args[1];
if (aiName.equalsIgnoreCase("ollama")) {
ollamaEnabled = !ollamaEnabled;
sender.sendMessage(getMessage(ollamaEnabled ? "ollama-enabled" : "ollama-disabled", null));
} else if (otherAIConfigs.containsKey(aiName)) {
boolean newState = !otherAIEnabled.getOrDefault(aiName, false);
otherAIEnabled.put(aiName, newState);
sender.sendMessage(getMessage(newState ? "toggle-enabled" : "toggle-disabled", Map.of("ai-name", aiName)));
} else {
sender.sendMessage(getMessage("invalid-ai-name", Map.of("ai-list", String.join(", ", otherAIConfigs.keySet()))));
}
return true;
}
} else if (command.getName().equalsIgnoreCase("aichat")) {
if (args.length < 2) {
sender.sendMessage(getMessage("usage-aichat", null));
return true;
}
String aiName = args[0];
String prompt = String.join(" ", java.util.Arrays.copyOfRange(args, 1, args.length));
if (otherAIConfigs.containsKey(aiName)) {
if (sender instanceof Player) {
processOtherAIQueryAsync((Player) sender, aiName, prompt);
} else {
sender.sendMessage(getMessage("player-only", null));
}
} else {
sender.sendMessage(getMessage("invalid-ai-name", Map.of("ai-list", String.join(", ", otherAIConfigs.keySet()))));
}
return true;
}
return false;
}
private static class OllamaResponse {
String response;
private static class AIConfig {
private final String apiUrl;
private final String apiKey;
private final String model;
public AIConfig(String apiUrl, String apiKey, String model) {
this.apiUrl = apiUrl;
this.apiKey = apiKey;
this.model = model;
}
public String getApiUrl() {
return apiUrl;
}
public String getApiKey() {
return apiKey;
}
public String getModel() {
return model;
}
}
}
private static class OllamaResponse {
public String response;
}
}

View File

@ -1,10 +1,22 @@
# Ollama API
ollama-api-url: "http://localhost:11434/api/generate"
model: "llama3"
ollama-enabled: true
# Chat
trigger-prefix: "@bot "
response-prefix: "§b[AI] §r"
# Length
max-response-length: 500
# Language Settings
language: "en"
# Other AI Configurations
other-ai-configs:
  openai:
    api-url: "https://api.openai.com/v1/chat/completions"
    api-key: "your-openai-api-key"
    model: "gpt-4"
    enabled: true

View File

@ -0,0 +1,13 @@
# General messages
reload-success: "§aConfiguration reloaded."
toggle-enabled: "§a{ai-name} is now enabled."
toggle-disabled: "§a{ai-name} is now disabled."
invalid-ai-name: "§cInvalid AI name. Available AIs: {ai-list}."
usage-aichat: "§cUsage: /aichat <ai-name> <prompt>"
player-only: "§cThis command can only be used by players."
ollama-enabled: "§aOllama is now enabled."
ollama-disabled: "§aOllama is now disabled."
# Chat interaction
response-prefix: "§b[AI] §r"
error-prefix: "§c[Error] §r"

View File

@ -0,0 +1,13 @@
# 通用消息
reload-success: "§a配置已重新加载。"
toggle-enabled: "§a{ai-name} 已启用。"
toggle-disabled: "§a{ai-name} 已禁用。"
invalid-ai-name: "§c无效的 AI 名称。可用的 AI: {ai-list}。"
usage-aichat: "§c用法: /aichat <ai名称> <提示>"
player-only: "§c该命令只能由玩家使用。"
ollama-enabled: "§aOllama 已启用。"
ollama-disabled: "§aOllama 已禁用。"
# 聊天交互
response-prefix: "§b[AI] §r"
error-prefix: "§c[错误] §r"

View File

@ -1,11 +1,27 @@
name: ollamachat
version: '1.0.1'
main: com.ollamachat.Ollamachat
api-version: '1.21'
authors: [xwwsdd]
description: A plugin used to connect Ollama with Minecraft.
website: https://chat.sarskin.cn/invite/iHgI6LTX
commands:
  ollamachat:
    description: Manage OllamaChat plugin (reload configuration or toggle AI)
    usage: |
      /ollamachat reload - Reload the plugin configuration
      /ollamachat toggle <ai-name> - Enable or disable an AI service
    permission: ollamachat.admin
  aichat:
    description: Interact with other AI services
    usage: /aichat <ai-name> <prompt>
    permission: ollamachat.use
permissions:
  ollamachat.admin:
    description: Allows managing the plugin (reload and toggle AI)
    default: op
  ollamachat.use:
    description: Allows using the /aichat command
    default: true