Add files via upload

Signed-off-by: H5N3RG <janguenni13@web.de>
This commit is contained in:
2025-09-11 11:34:45 +02:00
committed by GitHub
parent 8aed479699
commit 45c362d12a
5 changed files with 558 additions and 0 deletions

354
init.lua Normal file
View File

@@ -0,0 +1,354 @@
-- ===========================================================================
-- LLM Connect Init v0.7.2
-- author: H5N3RG
-- license: LGPL-3.0-or-later
-- ===========================================================================
-- Load the HTTP API.
-- Minetest only hands the HTTP table to mods listed in the
-- `secure.http_mods` (or `secure.trusted_mods`) server setting; for every
-- other mod request_http_api() returns nil.
local http = core.request_http_api()
if not http then
-- Abort loading the whole mod: nothing below works without HTTP.
core.log("error", "[llm_connect] HTTP API not available. Check secure.http_mods!")
return
end
-- === Configuration ===
-- All three values start empty and must be set in-game via /llm_setkey
-- (and optionally /llm_setmodel, /llm_set_endpoint) before /llm works.
-- NOTE(review): these are not persisted — they reset on every server restart.
local api_key = ""
local api_url = "" -- OpenAI-compatible chat-completions endpoint URL
local model_name = "" -- Model identifier sent in the request body
-- Storage for the conversation history per player (player name -> array of
-- {role=..., content=...} messages).
local history = {}
-- Maximum number of history entries, per player name; the "default" key is
-- the fallback used for players without an explicit override.
local max_history = { ["default"] = 10 }
local player_context_sent = {} -- player name -> true once server metadata was sent
-- Return the conversation history table for `name`, creating an empty one
-- on first access so callers can always insert into the result.
local function get_history(name)
    local entries = history[name]
    if not entries then
        entries = {}
        history[name] = entries
    end
    return entries
end
-- Resolve the history cap for a player, falling back to the shared
-- "default" entry when no per-player override exists.
local function get_max_history(name)
    local limit = max_history[name]
    if not limit then
        limit = max_history["default"]
    end
    return limit
end
-- Read the whole contents of a file into a string.
-- Returns nil (after logging an error) when the file cannot be opened.
local function read_file_content(filepath)
    local handle = io.open(filepath, "r")
    if handle == nil then
        core.log("error", "[llm_connect] Could not open file: " .. filepath)
        return nil
    end
    local data = handle:read("*a")
    handle:close()
    return data
end
-- Path to the system prompt file (resolved relative to this mod's directory).
local mod_dir = minetest.get_modpath("llm_connect")
-- Load the material context module; it returns a table with
-- get_available_materials() used later when building the first prompt.
-- NOTE(review): dofile() raises on missing-file/syntax errors instead of
-- returning nil, so this fallback branch only catches a module that loads
-- but returns a non-table; a pcall wrapper would be needed to survive a
-- broken file — confirm that is the intended behavior.
local llm_materials_context = dofile(mod_dir .. "/llm_materials_context.lua")
if not llm_materials_context or type(llm_materials_context) ~= "table" then
core.log("error", "[llm_connect] 'llm_materials_context.lua' could not be loaded or is faulty. Material context will be disabled.")
llm_materials_context = nil -- Disable the module if errors occur
end
local system_prompt_filepath = mod_dir .. "/system_prompt.txt"
-- Load the system prompt from the file; nil means "send no system message".
local system_prompt_content = read_file_content(system_prompt_filepath)
if not system_prompt_content or system_prompt_content == "" then
core.log("warning", "[llm_connect] System prompt file not found or empty. No default prompt will be used.")
system_prompt_content = nil
end
-- NEW: Privilege registration
-- "llm" gates the /llm and /llm_reset chat commands; "llm_root" gates the
-- configuration commands (/llm_setkey, /llm_setmodel, /llm_set_endpoint,
-- /llm_set_context).
core.register_privilege("llm", {
description = "Can chat with the LLM model",
give_to_singleplayer = true, -- Single players can chat by default
give_to_admin = true, -- Admins can chat by default
})
core.register_privilege("llm_root", {
description = "Can configure the LLM API key, model, and endpoint URL",
give_to_singleplayer = true, -- Single players can configure everything by default
give_to_admin = true, -- Admins receive this privilege by default
})
-- END NEW
-- === Metadata Functions ===
local meta_data_functions = {}

-- Return a printable player name, substituting a placeholder when the
-- caller passes nothing.
local function get_username(player_name)
    if player_name then
        return player_name
    end
    return "Unknown Player"
end
-- List of installed mods, sorted alphabetically.
-- The Minetest API exposes the mod list via minetest.get_modnames()
-- (returns an array of names); the original code probed for a
-- minetest.get_mods() function, which the standard API does not provide,
-- so it always fell through to the "not available" placeholder. The old
-- probe is kept as a fallback for forks that do provide it.
local function get_installed_mods()
    if minetest and minetest.get_modnames then
        local mods = minetest.get_modnames()
        table.sort(mods)
        return mods
    end
    if minetest and minetest.get_mods then
        local mods = {}
        for modname, _ in pairs(minetest.get_mods()) do
            mods[#mods + 1] = modname
        end
        table.sort(mods)
        return mods
    end
    core.log("warning", "[llm_connect] minetest.get_mods() not available.")
    return { "Mod list not available" }
end
-- Server settings & actual info, gathered for the one-time metadata prompt.
-- Every field falls back to a human-readable placeholder so the string
-- assembly in the /llm command never concatenates nil.
local function get_server_settings()
    -- Query get_game_info() once; the original called it separately for the
    -- id and name fields.
    local game_info = minetest.get_game_info and minetest.get_game_info() or nil
    local settings = {
        server_name = minetest.settings:get("server_name") or "Unnamed Server",
        server_description= minetest.settings:get("server_description") or "No description",
        motd = minetest.settings:get("motd") or "No MOTD set",
        port = minetest.settings:get("port") or "Unknown",
        gameid = (game_info and game_info.id) or
                 minetest.settings:get("gameid") or "Unknown",
        game_name = (game_info and game_info.name) or "Unknown",
        worldpath = minetest.get_worldpath() or "Unknown",
        mapgen = minetest.get_mapgen_setting("mg_name") or "Unknown",
    }
    return settings
end
-- Main context function: bundle player name, mod list and server settings
-- into one table for the metadata prompt.
function meta_data_functions.gather_context(player_name)
    return {
        player = get_username(player_name),
        installed_mods = get_installed_mods(),
        server_settings = get_server_settings(),
    }
end
-- === Helper function for string splitting ===
-- Split `str` on each occurrence of the literal substring `delim` (plain
-- find, no Lua patterns). Returns an array of pieces; like the original,
-- a trailing delimiter does not yield a trailing empty piece, and an
-- empty input string yields an empty array.
local function string_split(str, delim)
    local res = {}
    -- Guard: with an empty delimiter, string.find matches at every position
    -- without ever advancing `i`, which made the original loop run forever.
    -- Treat that case as "no split": return the whole string as one piece.
    if delim == nil or #delim == 0 then
        if str ~= nil and #str > 0 then
            res[1] = str
        end
        return res
    end
    local i = 1
    local str_len = #str
    local delim_len = #delim
    while i <= str_len do
        local pos = string.find(str, delim, i, true) -- plain search, no patterns
        if pos then
            res[#res + 1] = string.sub(str, i, pos - 1)
            i = pos + delim_len
        else
            -- No more delimiters: the remainder is the final piece.
            res[#res + 1] = string.sub(str, i)
            break
        end
    end
    return res
end
-- === Chat Commands ===
-- Command to set the API key, and optionally the URL and model, in one go.
core.register_chatcommand("llm_setkey", {
    params = "<key> [url] [model]",
    description = "Sets the API key, URL, and model for the LLM.",
    privs = {llm_root = true}, -- engine rejects callers without llm_root
    func = function(name, param)
        -- Defense in depth: re-verify the privilege inside the handler.
        if not core.check_player_privs(name, {llm_root = true}) then
            return false, "You do not have the permission to set the LLM key."
        end
        local args = string_split(param, " ")
        if #args == 0 then
            return false, "Please provide an API key! [/llm_setkey <key> [url] [model]]"
        end
        -- First token is the key; URL and model only change when supplied.
        api_key = args[1]
        api_url = args[2] or api_url
        model_name = args[3] or model_name
        core.chat_send_player(name,
            "[LLM] API key and URL set. New URL: " .. api_url .. ", Model: " .. model_name)
        return true
    end,
})
-- Command to change the model without touching key or endpoint.
core.register_chatcommand("llm_setmodel", {
    params = "<model>",
    description = "Sets the LLM model to be used.",
    privs = {llm_root = true}, -- engine rejects callers without llm_root
    func = function(name, param)
        -- Defense in depth: re-verify the privilege inside the handler.
        if not core.check_player_privs(name, {llm_root = true}) then
            return false, "You do not have the permission to change the LLM model."
        end
        if #param == 0 then
            return false, "Please provide a model name! [/llm_setmodel <model>]"
        end
        model_name = param
        core.chat_send_player(name, string.format("[LLM] Model set to '%s'.", model_name))
        return true
    end,
})
-- Command to change the API endpoint URL without touching key or model.
core.register_chatcommand("llm_set_endpoint", {
    params = "<url>",
    description = "Sets the API URL of the LLM endpoint.",
    privs = {llm_root = true}, -- engine rejects callers without llm_root
    func = function(name, param)
        -- Defense in depth: re-verify the privilege inside the handler.
        if not core.check_player_privs(name, {llm_root = true}) then
            return false, "You do not have the permission to change the LLM endpoint."
        end
        if #param == 0 then
            return false, "Please provide a URL! [/llm_set_endpoint <url>]"
        end
        api_url = param
        core.chat_send_player(name, string.format("[LLM] API endpoint set to '%s'.", api_url))
        return true
    end,
})
-- NEW: Command to set the history/context length, either globally
-- ("default") or for one named player.
core.register_chatcommand("llm_set_context", {
    params = "<count> [player]",
    description = "Sets the max context length. For all players if 'player' is omitted.",
    privs = {llm_root = true},
    func = function(name, param)
        -- Defense in depth: re-verify the privilege inside the handler.
        if not core.check_player_privs(name, {llm_root = true}) then
            return false, "You do not have the permission to change the context length."
        end
        local args = string_split(param, " ")
        local count = tonumber(args[1])
        local target_player = args[2]
        if count == nil or count < 1 then
            return false, "Please provide a valid number > 0!"
        end
        if target_player ~= nil and target_player ~= "" then
            -- Per-player override.
            max_history[target_player] = count
            core.chat_send_player(name,
                "[LLM] Maximum history length for '" .. target_player .. "' set to " .. count .. ".")
        else
            -- No player given: update the shared default.
            max_history["default"] = count
            core.chat_send_player(name,
                "[LLM] Default history length for all players set to " .. count .. ".")
        end
        return true
    end,
})
-- Command to reset the caller's conversation history and re-arm the
-- one-time metadata context for their next /llm prompt.
core.register_chatcommand("llm_reset", {
    description = "Resets the LLM conversation and context.",
    privs = {llm = true},
    func = function(name, param)
        history[name] = {}
        player_context_sent[name] = false
        core.chat_send_player(name, "[LLM] Conversation and context have been reset.")
        -- Report success; the original returned nil, which the engine treats
        -- as a failed command. Every other command here returns true.
        return true
    end,
})
-- Main chat command: /llm <prompt>
-- Appends the prompt to the caller's history, builds an OpenAI-style
-- chat-completions request (system prompt + one-time server metadata +
-- trimmed history), POSTs it asynchronously, and relays the reply.
core.register_chatcommand("llm", {
    params = "<prompt>",
    description = "Sends a prompt to the LLM",
    privs = {llm = true}, -- Requires llm privilege
    func = function(name, param)
        -- Defense in depth: the privs table above already gates access.
        if not core.check_player_privs(name, {llm = true}) then
            return false, "You do not have the permission to chat with the LLM."
        end
        if param == "" then
            return false, "Please provide a prompt!"
        end
        if api_key == "" then
            return false, "API key not set. Use /llm_setkey <key> [url] [model]"
        end
        local player_history = get_history(name)
        local max_hist = get_max_history(name) -- Retrieves the specific or default context length
        -- Add the new user prompt to the history
        table.insert(player_history, { role = "user", content = param })
        -- Remove the oldest entries if the history gets too long.
        -- NOTE(review): assistant replies appended in the callback below are
        -- only trimmed here on the NEXT /llm call, so the history can briefly
        -- exceed max_hist — confirm this is acceptable.
        while #player_history > max_hist do
            table.remove(player_history, 1)
        end
        local messages = {}
        if system_prompt_content then
            table.insert(messages, { role = "system", content = system_prompt_content })
        end
        -- Send the context only once per player to save tokens
        if not player_context_sent[name] then
            local context_data = meta_data_functions.gather_context(name)
            local mods_list_str = table.concat(context_data.installed_mods, ", ")
            -- Collapse long mod lists to just a count to limit prompt size.
            if #context_data.installed_mods > 10 then
                mods_list_str = "(More than 10 installed mods: " .. #context_data.installed_mods .. ")"
            end
            local materials_context_str = ""
            if llm_materials_context and llm_materials_context.get_available_materials then
                materials_context_str = "\n\n--- AVAILABLE MATERIALS ---\n" .. llm_materials_context.get_available_materials()
            end
            -- Assemble the metadata block sent as an extra user message.
            local metadata_string = "Server Information:\n" ..
                " Player: " .. context_data.player .. "\n" ..
                " Server Name: " .. context_data.server_settings.server_name .. "\n" ..
                " Server Description: " .. context_data.server_settings.server_description .. "\n" ..
                " MOTD: " .. context_data.server_settings.motd .. "\n" ..
                " Game: " .. context_data.server_settings.game_name .. " (" .. context_data.server_settings.gameid .. ")\n" ..
                " Mapgen: " .. context_data.server_settings.mapgen .. "\n" ..
                " World Path: " .. context_data.server_settings.worldpath .. "\n" ..
                " Port: " .. context_data.server_settings.port .. "\n" ..
                " Installed Mods (" .. #context_data.installed_mods .. "): " .. mods_list_str .. "\n" .. materials_context_str
            table.insert(messages, { role = "user", content = "--- METADATA ---\n" .. metadata_string })
            -- NOTE(review): flag is set before the request succeeds, so a
            -- failed request still skips the metadata next time (until
            -- /llm_reset) — confirm intent.
            player_context_sent[name] = true
        end
        -- Append the (trimmed) conversation history after the metadata.
        for _, msg in ipairs(player_history) do
            table.insert(messages, msg)
        end
        local body = core.write_json({
            model = model_name,
            messages = messages,
            max_tokens = 2000
        })
        -- Asynchronous POST; the callback runs later on the main server step.
        http.fetch({
            url = api_url,
            post_data = body,
            method = "POST",
            extra_headers = {
                "Content-Type: application/json",
                "Authorization: Bearer " .. api_key
            },
            timeout = 90,
        }, function(result)
            if result.succeeded then
                local response = core.parse_json(result.data)
                local text = "(no answer)"
                -- OpenAI-style shape: choices[1].message.content.
                if response and response.choices and response.choices[1] and response.choices[1].message then
                    text = response.choices[1].message.content
                    table.insert(player_history, { role = "assistant", content = text })
                -- Fallback shape with a top-level message (e.g. Ollama-style);
                -- NOTE(review): this branch does not record the reply in the
                -- history — confirm whether that is intentional.
                elseif response and response.message and response.message.content then
                    text = response.message.content
                end
                core.chat_send_player(name, "[LLM] " .. text)
            else
                -- NOTE(review): Minetest's HTTPRequestResult may not define an
                -- `error` field (it exposes `code`/`timeout`), so this likely
                -- always prints "Unknown error" — confirm against the API docs.
                core.chat_send_player(name, "[LLM] Request failed: " .. (result.error or "Unknown error"))
            end
        end)
        return true, "Request sent to LLM..."
    end,
})

132
license.txt Normal file
View File

@@ -0,0 +1,132 @@
GNU LESSER GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
This version of the GNU Lesser General Public License incorporates the
terms and conditions of version 3 of the GNU General Public License,
supplemented by the additional permissions listed below.
0. Additional Definitions.
"This License" refers to version 3 of the GNU Lesser General Public
License.
"The Program" refers to a covered work governed by this License.
"The B-C" refers to a covered work governed by the GNU General Public
License, but is not a covered work.
"The F-G" refers to a covered work governed by this License.
"The A-Z" refers to the work as a whole, comprising the B-C and
the F-G.
A "GPL-compatible work" means a covered work governed by the
GNU General Public License, but is not a covered work.
"The B-C" is a work that is not the Program.
"The Program" includes all covered works, except the "B-C".
The "F-G" is a work that is governed by the GNU Lesser General Public
License. The "F-G" is not a "B-C" work.
The "B-C" is a covered work that is licensed under this License.
The "B-C" can be a separate program.
The "F-G" is a work that is a combination of the "B-C" and other works.
The "B-C" is an executable work.
The "F-G" is a work that is an executable work but is not a
covered work.
The "B-C" is a work that is a library.
The "F-G" is a work that is a combination of the "B-C" and other works.
A "B-C" is a covered work.
The "F-G" is a work that is governed by the GNU Lesser General Public
License, but is not a covered work.
1. Exception to the GPL.
You may convey a covered work to which this License applies without
conveying the B-C. You may also convey the B-C without conveying the
covered work.
2. Further restrictions.
You may not add any further restrictions on the covered work beyond
those in this License.
3. Termination.
If you violate this License, your rights will terminate automatically
under this License.
4. B-C.
The B-C is a separate work that is governed by the GNU General Public
License.
5. F-G.
The F-G is a work that is governed by the GNU Lesser General Public
License.
6. A-Z.
The A-Z is a covered work that is a combination of the B-C and the F-G.
7. Application of the GPL.
This License applies to the Program.
8. B-C, F-G, A-Z.
The B-C, F-G, and A-Z are defined in Section 0.
9. F-G, A-Z.
The F-G and A-Z are governed by this License.
10. B-C, A-Z.
The B-C and A-Z are governed by this License.
11. B-C, F-G.
The B-C and F-G are governed by this License.
12. B-C, F-G, A-Z.
The B-C, F-G, and A-Z are defined in Section 0.
13. No B-C.
You may convey a covered work to which this License applies without
conveying the B-C.
14. No F-G.
You may convey a covered work to which this License applies without
conveying the F-G.
15. No A-Z.
You may convey a covered work to which this License applies without
conveying the A-Z.
16. Application to Libraries.
The Program is a library.
17. B-C, F-G, A-Z.
The B-C, F-G, and A-Z are defined in Section 0.

61
llm_materials_context.lua Normal file
View File

@@ -0,0 +1,61 @@
-- mods/llm_connect/llm_materials_context.lua
-- Builds a textual summary of all registered nodes, craftitems, tools and
-- entities so init.lua can hand it to the LLM as world context.
local M = {}

-- Return a human-readable list of registered materials, truncated to keep
-- the prompt within typical LLM token limits.
function M.get_available_materials()
    local materials_info = {}

    -- Collect nodes, skipping engine builtins and the pseudo-nodes
    -- "ignore" and "air" that are not real placeable materials.
    for name, def in pairs(core.registered_nodes) do
        if not name:match("^__builtin:") and name ~= "ignore" and name ~= "air" then
            materials_info[#materials_info + 1] =
                " - Node: " .. name .. " (Description: " .. (def.description or "N/A") .. ")"
        end
    end
    -- Collect craftitems
    for name, def in pairs(core.registered_craftitems) do
        if not name:match("^__builtin:") then
            materials_info[#materials_info + 1] =
                " - Craftitem: " .. name .. " (Description: " .. (def.description or "N/A") .. ")"
        end
    end
    -- Collect tools
    for name, def in pairs(core.registered_tools) do
        if not name:match("^__builtin:") then
            materials_info[#materials_info + 1] =
                " - Tool: " .. name .. " (Description: " .. (def.description or "N/A") .. ")"
        end
    end
    -- Collect entities
    for name, def in pairs(core.registered_entities) do
        if not name:match("^__builtin:") then
            materials_info[#materials_info + 1] =
                " - Entity: " .. name .. " (Description: " .. (def.description or "N/A") .. ")"
        end
    end

    -- Limit the output to avoid exceeding the LLM's token limits.
    local max_items_to_list = 100 -- You can adjust this value
    local total_items = #materials_info
    if total_items == 0 then
        return "No registered materials found.\n"
    end
    -- Build the result in a buffer and join once: the original concatenated
    -- strings in a loop, which is O(n^2) on large registries.
    local out = { "Registered materials (" .. total_items .. " in total):\n" }
    for i = 1, math.min(total_items, max_items_to_list) do
        out[#out + 1] = materials_info[i] .. "\n"
    end
    if total_items > max_items_to_list then
        out[#out + 1] = " ... and " .. (total_items - max_items_to_list)
            .. " more materials (truncated for brevity).\n"
    end
    return table.concat(out)
end

return M

10
mod.conf Normal file
View File

@@ -0,0 +1,10 @@
name = llm_connect
description = Connects your Minetest server to an LLM (Large Language Model) using an OpenAI-compatible API endpoint.
title = LLM Connect
author = H5N3RG
license = LGPL-3.0-or-later
media_license = LGPL-3.0-or-later
forum =
depends =
optional_depends =

1
system_prompt.txt Normal file
View File

@@ -0,0 +1 @@