Update init.lua

Signed-off-by: H5N3RG <janguenni13@web.de>
This commit is contained in:
2025-09-12 22:55:22 +02:00
committed by GitHub
parent 19d7562b98
commit 219b068742

View File

@@ -1,8 +1,9 @@
-- =========================================================================== -- ===========================================================================
-- LLM Connect Init v0.7.5 -- LLM Connect Init v0.7.6
-- author: H5N3RG -- author: H5N3RG
-- license: LGPL-3.0-or-later -- license: LGPL-3.0-or-later
-- Fix: max_tokens type handling, fully configurable, robust JSON -- Fix: max_tokens type handling, fully configurable, robust JSON
-- Enhancement: Dynamic metadata handling, player name in prompts
-- =========================================================================== -- ===========================================================================
local core = core local core = core
@@ -29,7 +30,7 @@ end
-- Storage for conversation history per player -- Storage for conversation history per player
local history = {} local history = {}
local max_history = { ["default"] = 10 } local max_history = { ["default"] = 10 }
local player_context_sent = {} local metadata_cache = {} -- Cache for metadata to detect changes
-- Helper functions -- Helper functions
local function get_history(name) local function get_history(name)
@@ -74,7 +75,7 @@ local function read_file_content(filepath)
return content return content
end end
local system_prompt_content = read_file_content(mod_dir .. "/system_prompt.txt") local system_prompt_content = read_file_content(mod_dir .. "/system_prompt.txt") or ""
-- === Privileges === -- === Privileges ===
core.register_privilege("llm", { description = "Can chat with the LLM model", give_to_singleplayer=true, give_to_admin=true }) core.register_privilege("llm", { description = "Can chat with the LLM model", give_to_singleplayer=true, give_to_admin=true })
@@ -112,9 +113,23 @@ function meta_data_functions.gather_context(player_name)
context.player = get_username(player_name) context.player = get_username(player_name)
context.installed_mods = get_installed_mods() context.installed_mods = get_installed_mods()
context.server_settings = get_server_settings() context.server_settings = get_server_settings()
-- Add dynamic player data (e.g., position)
local player = core.get_player_by_name(player_name)
if player then
local pos = player:get_pos()
context.player_position = string.format("x=%.2f, y=%.2f, z=%.2f", pos.x, pos.y, pos.z)
else
context.player_position = "Unknown"
end
return context return context
end end
-- Compute a stable fingerprint of the per-player metadata so callers can
-- detect when it has changed and only then re-send it to the model.
-- Fields are joined with an explicit "\0" separator: plain concatenation
-- would let adjacent values run together and collide (e.g. player "ab" +
-- server "c" hashing identically to player "a" + server "bc").
-- @param context table as returned by meta_data_functions.gather_context:
--        .player (string), .server_settings.server_name (string),
--        .server_settings.worldpath (string), .installed_mods (string list)
-- @return string: hex SHA-1 digest of the joined fields (via core.sha1)
local function compute_metadata_hash(context)
    -- NOTE(review): installed_mods is assumed to be a stable, ordered
    -- sequence; if its order varies between calls the hash would change
    -- spuriously — confirm against get_installed_mods().
    local parts = {
        context.player,
        context.server_settings.server_name,
        context.server_settings.worldpath,
        table.concat(context.installed_mods, ","),
    }
    return core.sha1(table.concat(parts, "\0"))
end
-- === Chat Commands === -- === Chat Commands ===
core.register_chatcommand("llm_setkey", { core.register_chatcommand("llm_setkey", {
params = "<key> [url] [model]", params = "<key> [url] [model]",
@@ -200,8 +215,8 @@ core.register_chatcommand("llm_reset", {
privs = {llm=true}, privs = {llm=true},
func = function(name) func = function(name)
history[name] = {} history[name] = {}
player_context_sent[name] = false metadata_cache[name] = nil -- Reset metadata cache
core.chat_send_player(name,"[LLM] Conversation reset.") core.chat_send_player(name,"[LLM] Conversation and metadata reset.")
end, end,
}) })
@@ -219,36 +234,44 @@ core.register_chatcommand("llm", {
local player_history = get_history(name) local player_history = get_history(name)
local max_hist = get_max_history(name) local max_hist = get_max_history(name)
table.insert(player_history,{role="user",content=param}) -- Add player name to prompt for clarity
local user_prompt = "Player " .. name .. ": " .. param
table.insert(player_history,{role="user",content=user_prompt})
while #player_history>max_hist do table.remove(player_history,1) end while #player_history>max_hist do table.remove(player_history,1) end
local messages = {} -- Gather and cache metadata
if system_prompt_content then local context_data = meta_data_functions.gather_context(name)
table.insert(messages,{role="system",content=system_prompt_content}) local current_metadata_hash = compute_metadata_hash(context_data)
end local needs_metadata_update = not metadata_cache[name] or metadata_cache[name].hash ~= current_metadata_hash
if not player_context_sent[name] then local messages = {}
local context_data = meta_data_functions.gather_context(name) -- Build dynamic system prompt with metadata
local dynamic_system_prompt = system_prompt_content
if needs_metadata_update then
local mods_list_str = table.concat(context_data.installed_mods,", ") local mods_list_str = table.concat(context_data.installed_mods,", ")
if #context_data.installed_mods>10 then mods_list_str="(More than 10 installed mods: "..#context_data.installed_mods..")" end if #context_data.installed_mods>10 then mods_list_str="(More than 10 installed mods: "..#context_data.installed_mods..")" end
local materials_context_str = "" local materials_context_str = ""
if llm_materials_context and llm_materials_context.get_available_materials then if llm_materials_context and llm_materials_context.get_available_materials then
materials_context_str = "\n\n--- AVAILABLE MATERIALS ---\n" .. llm_materials_context.get_available_materials() materials_context_str = "\n\n--- AVAILABLE MATERIALS ---\n" .. llm_materials_context.get_available_materials()
end end
local metadata_string = "Server Information:\n" .. local metadata_string = "\n\n--- METADATA ---\n" ..
" Player: " .. context_data.player .. "\n" .. "Player: " .. context_data.player .. "\n" ..
" Server Name: " .. context_data.server_settings.server_name .. "\n" .. "Player Position: " .. context_data.player_position .. "\n" ..
" Server Description: " .. context_data.server_settings.server_description .. "\n" .. "Server Name: " .. context_data.server_settings.server_name .. "\n" ..
" MOTD: " .. context_data.server_settings.motd .. "\n" .. "Server Description: " .. context_data.server_settings.server_description .. "\n" ..
" Game: " .. context_data.server_settings.game_name .. " (" .. context_data.server_settings.gameid .. ")\n" .. "MOTD: " .. context_data.server_settings.motd .. "\n" ..
" Mapgen: " .. context_data.server_settings.mapgen .. "\n" .. "Game: " .. context_data.server_settings.game_name .. " (" .. context_data.server_settings.gameid .. ")\n" ..
" World Path: " .. context_data.server_settings.worldpath .. "\n" .. "Mapgen: " .. context_data.server_settings.mapgen .. "\n" ..
" Port: " .. context_data.server_settings.port .. "\n" .. "World Path: " .. context_data.server_settings.worldpath .. "\n" ..
" Installed Mods (" .. #context_data.installed_mods .. "): " .. mods_list_str .. "\n" .. materials_context_str "Port: " .. context_data.server_settings.port .. "\n" ..
table.insert(messages,{role="user",content="--- METADATA ---\n"..metadata_string}) "Installed Mods (" .. #context_data.installed_mods .. "): " .. mods_list_str .. "\n" .. materials_context_str
player_context_sent[name] = true dynamic_system_prompt = system_prompt_content .. metadata_string
metadata_cache[name] = { hash = current_metadata_hash, metadata = metadata_string }
else
dynamic_system_prompt = system_prompt_content .. metadata_cache[name].metadata
end end
table.insert(messages,{role="system",content=dynamic_system_prompt})
for _,msg in ipairs(player_history) do table.insert(messages,msg) end for _,msg in ipairs(player_history) do table.insert(messages,msg) end
-- === max_tokens handling with final JSON fix === -- === max_tokens handling with final JSON fix ===
@@ -256,7 +279,7 @@ core.register_chatcommand("llm", {
if max_tokens_type == "integer" then if max_tokens_type == "integer" then
max_tokens_value = math.floor(max_tokens_value) max_tokens_value = math.floor(max_tokens_value)
else else
max_tokens_value = tonumber(max_tokens_value) -- float, no +0.0 max_tokens_value = tonumber(max_tokens_value)
end end
local body = core.write_json({ model=model_name, messages=messages, max_tokens=max_tokens_value }) local body = core.write_json({ model=model_name, messages=messages, max_tokens=max_tokens_value })