From d0d1e45c8cc5e8897cfc14ead8ae8c5742702f06 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 30 Dec 2024 19:43:11 -0800 Subject: [PATCH 001/171] Add files via upload Uploaded the changes to mindcraft that allows Gemini thinking models, and glhf.chat and hyperbolic api's --- keys.example.json | 6 ++- profiles/GLFH.json | 5 ++ profiles/hyperbolic.json | 6 +++ settings.js | 2 + src/agent/prompter.js | 44 +++++++++++++++--- src/models/gemini.js | 22 ++++++++- src/models/glhf.js | 62 +++++++++++++++++++++++++ src/models/hyperbolic.js | 99 ++++++++++++++++++++++++++++++++++++++++ 8 files changed, 237 insertions(+), 9 deletions(-) create mode 100644 profiles/GLFH.json create mode 100644 profiles/hyperbolic.json create mode 100644 src/models/glhf.js create mode 100644 src/models/hyperbolic.js diff --git a/keys.example.json b/keys.example.json index f6939f0..e1121d5 100644 --- a/keys.example.json +++ b/keys.example.json @@ -7,5 +7,7 @@ "GROQCLOUD_API_KEY": "", "HUGGINGFACE_API_KEY": "", "QWEN_API_KEY": "", - "XAI_API_KEY": "" -} + "XAI_API_KEY": "", + "GHLF_API_KEY": "", + "HYPERBOLIC_API_KEY": "" +} \ No newline at end of file diff --git a/profiles/GLFH.json b/profiles/GLFH.json new file mode 100644 index 0000000..08bacb9 --- /dev/null +++ b/profiles/GLFH.json @@ -0,0 +1,5 @@ +{ + "name": "Good_luck_have_fun", + + "model": "hf:meta-llama/Llama-3.1-405B-Instruct" +} diff --git a/profiles/hyperbolic.json b/profiles/hyperbolic.json new file mode 100644 index 0000000..e276b2e --- /dev/null +++ b/profiles/hyperbolic.json @@ -0,0 +1,6 @@ +{ + "name": "Hyperbolic", + + "model": "hb:deepseek-ai/DeepSeek-V3" + +} \ No newline at end of file diff --git a/settings.js b/settings.js index 4895833..92ed713 100644 --- a/settings.js +++ b/settings.js @@ -18,6 +18,8 @@ export default // "./profiles/llama.json", // "./profiles/qwen.json", // "./profiles/grok.json", + // "./profiles/GLHF.json", + // "./profiles/hyperbolic.json" // using more than 1 profile requires you to /msg each bot indivually ], diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 5f07eb4..9fbef19 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -13,12 +13,16 @@ import { Local } from '../models/local.js'; import { Novita } from '../models/novita.js'; import { GroqCloudAPI } from '../models/groq.js'; import { HuggingFace } from '../models/huggingface.js'; +import { glhf } from '../models/glhf.js'; +import { hyperbolic } from '../models/hyperbolic.js'; import { Qwen } from "../models/qwen.js"; import { Grok } from "../models/grok.js"; + export class Prompter { - constructor(agent, fp) { + constructor(agent, fp, agentName) { this.agent = agent; + this.agentName = agentName; this.profile = JSON.parse(readFileSync(fp, 'utf8')); this.default_profile = JSON.parse(readFileSync('./profiles/_default.json', 'utf8')); @@ -50,14 +54,18 @@ export class Prompter { chat.api = 'anthropic'; else if (chat.model.includes('huggingface/')) chat.api = "huggingface"; + else if (chat.model.includes('hf:')) + chat.api = "glhf"; + else if (chat.model.includes('hyperbolic:')|| chat.model.includes('hb:')) + chat.api = "hyperbolic"; else if (chat.model.includes('meta/') || chat.model.includes('mistralai/') || chat.model.includes('replicate/')) chat.api = 'replicate'; else if (chat.model.includes("groq/") || chat.model.includes("groqcloud/")) chat.api = 'groq'; else if (chat.model.includes('novita/')) chat.api = 'novita'; - else if (chat.model.includes('qwen')) - chat.api = 'qwen'; + else if (chat.model.includes('qwen')) + chat.api = 'qwen'; 
else if (chat.model.includes('grok')) chat.api = 'xai'; else @@ -81,6 +89,11 @@ export class Prompter { } else if (chat.api === 'huggingface') this.chat_model = new HuggingFace(chat.model, chat.url); + else if (chat.api === 'glhf') + this.chat_model = new glhf(chat.model, chat.url); + else if (chat.api === 'hyperbolic') { + this.chat_model = new hyperbolic(chat.model.replace('hyperbolic:', '').replace('hb:', ''), chat.url, max_tokens ? max_tokens : 8192); + } else if (chat.api === 'novita') this.chat_model = new Novita(chat.model.replace('novita/', ''), chat.url); else if (chat.api === 'qwen') @@ -235,7 +248,12 @@ export class Prompter { } let prompt = this.profile.conversing; prompt = await this.replaceStrings(prompt, messages, this.convo_examples); + console.log("DEBUG - promptConvo - agentName:", this.agent.name); // DEBUG + console.log("DEBUG - promptConvo - prompt:", prompt); // DEBUG: Inspect this prompt + + let generation = await this.chat_model.sendRequest(messages, prompt); + // in conversations >2 players LLMs tend to hallucinate and role-play as other bots // the FROM OTHER BOT tag should never be generated by the LLM if (generation.includes('(FROM OTHER BOT)')) { @@ -260,7 +278,13 @@ export class Prompter { await this.checkCooldown(); let prompt = this.profile.coding; prompt = await this.replaceStrings(prompt, messages, this.coding_examples); + console.log("DEBUG - promptCoding - agentName:", this.agent.name); // DEBUG + console.log("DEBUG - promptCoding - prompt:", prompt); // DEBUG: Inspect this prompt + + let resp = await this.chat_model.sendRequest(messages, prompt); + + this.awaiting_coding = false; return resp; } @@ -269,7 +293,14 @@ export class Prompter { await this.checkCooldown(); let prompt = this.profile.saving_memory; prompt = await this.replaceStrings(prompt, null, null, to_summarize); - return await this.chat_model.sendRequest([], prompt); + console.log("DEBUG - promptMemSaving - agentName:", this.agent.name); // DEBUG + console.log("DEBUG - promptMemSaving - prompt:", prompt); // DEBUG: Inspect this prompt + + + const response = await this.chat_model.sendRequest([], prompt); + + + return response; } async promptShouldRespondToBot(new_message) { @@ -289,9 +320,10 @@ export class Prompter { let user_message = 'Use the below info to determine what goal to target next\n\n'; user_message += '$LAST_GOALS\n$STATS\n$INVENTORY\n$CONVO' user_message = await this.replaceStrings(user_message, messages, null, null, last_goals); - let user_messages = [{role: 'user', content: user_message}]; + let res = await this.chat_model.sendRequest(user_messages, system_message); + let goal = null; try { @@ -307,4 +339,4 @@ export class Prompter { goal.quantity = parseInt(goal.quantity); return goal; } -} +} \ No newline at end of file diff --git a/src/models/gemini.js b/src/models/gemini.js index 1536d66..8522704 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -52,7 +52,27 @@ export class Gemini { console.log('Awaiting Google API response...'); const result = await model.generateContent(prompt); const response = await result.response; - const text = response.text(); + + // got rid of the original method of const text = response.text to allow gemini thinking models to play minecraft :) + let text; + if (this.model_name && this.model_name.includes("thinking")) { + if (response.candidates && response.candidates.length > 0 && response.candidates[0].content && response.candidates[0].content.parts && response.candidates[0].content.parts.length > 1) { + + text = 
response.candidates[0].content.parts[1].text; + + } else { + + console.warn("Unexpected response structure for thinking model:", response); + text = response.text(); + } + } else { + + text = response.text(); + + } + + + console.log('Received.'); if (!text.includes(stop_seq)) return text; const idx = text.indexOf(stop_seq); diff --git a/src/models/glhf.js b/src/models/glhf.js new file mode 100644 index 0000000..9c27799 --- /dev/null +++ b/src/models/glhf.js @@ -0,0 +1,62 @@ +import OpenAIApi from 'openai'; +import { getKey } from '../utils/keys.js'; + +// glhf doesn't supply an SDK for their models, but fully supports OpenAI SDKs +export class glhf { + constructor(model_name, url) { + this.model_name = model_name; + + // Retrieve the API key from keys.json + const apiKey = getKey('GHLF_API_KEY'); + if (!apiKey) { + throw new Error('API key not found. Please check keys.json and ensure GHLF_API_KEY is defined.'); + } + + // Configure OpenAIApi with the retrieved API key and base URL + this.openai = new OpenAIApi({ + apiKey, + baseURL: url || "https://glhf.chat/api/openai/v1" + }); + } + + async sendRequest(turns, systemMessage, stop_seq = '***') { + // Construct the message array for the API request + let messages = [{ 'role': 'system', 'content': systemMessage }].concat(turns); + + const pack = { + model: this.model_name || "hf:meta-llama/Llama-3.1-405B-Instruct", + messages, + stop: [stop_seq] + }; + + let res = null; + try { + console.log('Awaiting glhf.chat API response...'); + // Uncomment the line below if you need to debug the messages + // console.log('Messages:', messages); + + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason === 'length') { + throw new Error('Context length exceeded'); + } + + console.log('Received.'); + res = completion.choices[0].message.content; + } catch (err) { + if ((err.message === 'Context length exceeded' || err.code === 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + + // Replace special tokens in the response + return res.replace(/<\|separator\|>/g, '*no response*'); + } + + async embed(text) { + throw new Error('Embeddings are not supported by glhf.'); + } +} \ No newline at end of file diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js new file mode 100644 index 0000000..aea8875 --- /dev/null +++ b/src/models/hyperbolic.js @@ -0,0 +1,99 @@ +import { getKey } from '../utils/keys.js'; + +/** + * + * + * Yes, this code was written by an Ai. It was written by GPT-o1 and tested :) + * + */ + +export class hyperbolic { + constructor(modelName, apiUrl) { + this.modelName = modelName || "deepseek-ai/DeepSeek-V3"; + this.apiUrl = apiUrl || "https://api.hyperbolic.xyz/v1/chat/completions"; + + // Retrieve the Hyperbolic API key from keys.js + this.apiKey = getKey('HYPERBOLIC_API_KEY'); + if (!this.apiKey) { + throw new Error('HYPERBOLIC_API_KEY not found. Check your keys.js file.'); + } + } + + /** + * Sends a chat completion request to the Hyperbolic endpoint. + * + * @param {Array} turns - An array of message objects, e.g. [{role: 'user', content: 'Hi'}]. + * @param {string} systemMessage - The system prompt or instruction. + * @param {string} stopSeq - A string that represents a stopping sequence, default '***'. 
+ * @returns {Promise} - The content of the model's reply. + */ + async sendRequest(turns, systemMessage, stopSeq = '***') { + // Prepare the messages with a system prompt at the beginning + const messages = [{ role: 'system', content: systemMessage }, ...turns]; + + // Build the request payload (mirroring your original structure) + const payload = { + model: this.modelName, + messages: messages, + max_tokens: 8192, + temperature: 0.7, + top_p: 0.9, + stream: false + }; + + let completionContent = null; + + try { + console.log('Awaiting Hyperbolic API response...'); + console.log('Messages:', messages); + + const response = await fetch(this.apiUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${this.apiKey}` + }, + body: JSON.stringify(payload) + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const data = await response.json(); + if ( + data?.choices?.[0]?.finish_reason && + data.choices[0].finish_reason === 'length' + ) { + throw new Error('Context length exceeded'); + } + + completionContent = data?.choices?.[0]?.message?.content || ''; + console.log('Received response from Hyperbolic.'); + + } catch (err) { + if ( + (err.message === 'Context length exceeded' || + err.code === 'context_length_exceeded') && + turns.length > 1 + ) { + console.log('Context length exceeded, trying again with a shorter context...'); + // Remove the first user turn and try again (like the original code). + return await this.sendRequest(turns.slice(1), systemMessage, stopSeq); + } else { + console.log(err); + completionContent = 'My brain disconnected, try again.'; + } + } + + // Replace any special tokens from your original code if needed + return completionContent.replace(/<\|separator\|>/g, '*no response*'); + } + + /** + * Embeddings are not supported in your original snippet, so we mirror that error. 
+ */ + async embed(text) { + throw new Error('Embeddings are not supported by Hyperbolic.'); + } +} From 1be24f4867c2a49d01c591fe2bab8ad4f07a007e Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Wed, 15 Jan 2025 17:26:13 +0900 Subject: [PATCH 002/171] feat: add screenshots and look action works on gpt --- package.json | 4 +- settings.js | 4 +- src/agent/commands/actions.js | 55 +++++++++++++++++++++++++ src/agent/library/skills.js | 77 +++++++++++++++++++++++++++++++++++ 4 files changed, 137 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 689f8db..00901de 100644 --- a/package.json +++ b/package.json @@ -24,7 +24,9 @@ "yargs": "^17.7.2", "socket.io": "^4.7.2", "socket.io-client": "^4.7.2", - "express": "^4.18.2" + "express": "^4.18.2", + "three": "0.128.0", + "node-canvas-webgl": "PrismarineJS/node-canvas-webgl" }, "scripts": { "postinstall": "patch-package", diff --git a/settings.js b/settings.js index a4681fa..f8dc1ba 100644 --- a/settings.js +++ b/settings.js @@ -2,7 +2,7 @@ export default { "minecraft_version": "1.20.4", // supports up to 1.21.1 "host": "127.0.0.1", // or "localhost", "your.ip.address.here" - "port": 55916, + "port": 56069, "auth": "offline", // or "microsoft" // the mindserver manages all agents and hosts the UI @@ -25,7 +25,7 @@ export default // using more than 1 profile requires you to /msg each bot indivually ], "load_memory": false, // load memory from previous session - "init_message": "Respond with hello world and your name", // sends to all on spawn + // "init_message": "Respond with hello world and your name", // sends to all on spawn "only_chat_with": [], // users that the bots listen to and send general messages to. if empty it will chat publicly "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 34e6693..1c6bbfe 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -1,6 +1,8 @@ import * as skills from '../library/skills.js'; import settings from '../../../settings.js'; import convoManager from '../conversation.js'; +import fs from 'fs'; +import { GPT } from '../../models/gpt.js'; function runAsAction (actionFn, resume = false, timeout = -1) { let actionLabel = null; // Will be set on first use @@ -407,6 +409,59 @@ export const actionsList = [ return `Converstaion with ${player_name} ended.`; } }, + { + name: '!takeScreenshot', + description: 'Takes and saves a screenshot of the specified coordinates.', + params: { + 'x': { + type: 'int', + description: 'x coordinate to capture', + optional: true + }, + 'y': { + type: 'int', + description: 'y coordinate to capture', + optional: true + }, + 'z': { + type: 'int', + description: 'z coordinate to capture', + optional: true + }, + 'filename': { + type: 'string', + description: 'Filename to save (without extension). 
If not specified, saves with timestamp.', + optional: true + } + }, + perform: runAsAction(async (agent, x, y, z, filename) => { + await skills.takeScreenshot(agent.bot, x, y, z, filename); + }) + }, + { + name: '!look', + description: 'Takes a screenshot of specified coordinates and analyzes its contents.', + params: { + 'x': { + type: 'int', + description: 'x coordinate to look at', + optional: true + }, + 'y': { + type: 'int', + description: 'y coordinate to look at', + optional: true + }, + 'z': { + type: 'int', + description: 'z coordinate to look at', + optional: true + } + }, + perform: runAsAction(async (agent, x, y, z) => { + await skills.look(agent, x, y, z); + }) + }, // { // commented for now, causes confusion with goal command // name: '!npcGoal', // description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.', diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index be5882f..e492d16 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -2,6 +2,8 @@ import * as mc from "../../utils/mcdata.js"; import * as world from "./world.js"; import pf from 'mineflayer-pathfinder'; import Vec3 from 'vec3'; +import fs from 'fs'; +import { Camera } from "../../utils/camera.js"; export function log(bot, message) { @@ -1340,3 +1342,78 @@ export async function activateNearestBlock(bot, type) { log(bot, `Activated ${type} at x:${block.position.x.toFixed(1)}, y:${block.position.y.toFixed(1)}, z:${block.position.z.toFixed(1)}.`); return true; } + +export async function takeScreenshot(bot, x, y, z, filename=null) { + /** + * Takes a screenshot from the bot's current view or specified position + * @param {MinecraftBot} bot, reference to the minecraft bot + * @param {int} x x coordinate to look at (optional) + * @param {int} y y coordinate to look at (optional) + * @param {int} z z coordinate to look at (optional) + * @param {string} filename filename to save (without extension). If not specified, saves with timestamp + * @returns {Promise} whether the screenshot was successful + * @example + * await skills.takeScreenshot(bot, { name: 'my_screenshot', x: 100, y: 65, z: -200 }); + **/ + + try { + bot.camera = new Camera(bot); + await new Promise(resolve => bot.camera.once('ready', resolve)); + + await bot.lookAt(new Vec3(x, y, z)); + await new Promise(resolve => setTimeout(resolve, 500)); + + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + if (filename === null) { + filename = `screenshot_${timestamp}`; + } + await bot.camera.takePicture(filename, x, y, z); + + log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); + log(bot, `Target coordinates: x:${x}, y:${y}, z:${z}`); + return [true, filename]; + } catch (err) { + log(bot, `Failed to take screenshot: ${err.message}`); + return [false, null]; + } +} + +export async function look(agent, x, y, z) { + const bot = agent.bot; + const history = agent.history; + + const [success, filename] = await takeScreenshot(bot, x, y, z); + if (!success) { + log(bot, `Failed to take screenshot: ${filename}`); + return false; + } + + try { + const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); + const base64Image = imageBuffer.toString('base64'); + + let messages = history.getHistory(); + messages.push({ + role: "user", + content: [ + { type: "text", text: "Briefly describe the screen you are looking at now." 
}, + { + type: "image_url", + image_url: { + "url": `data:image/jpeg;base64,${base64Image}`, + } + } + ] + }); + console.log(messages); + + let res = await agent.prompter.chat_model.sendRequest(messages, `You are a playful Minecraft bot. Briefly describe the screen you are looking at now.`); + console.log(res); + + log(bot, res); + return true; + } catch (error) { + log(bot, `Error analyzing image: ${error.message}`); + return false; + } +} \ No newline at end of file From f5923db43a67c6bd46b604ab92b79142590b988e Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Thu, 16 Jan 2025 13:17:39 +0900 Subject: [PATCH 003/171] feat: update skill look to lookAtPlayer & export lookAtPosition --- package.json | 4 +- profiles/_default.json | 20 ++++-- src/agent/commands/actions.js | 74 +++++---------------- src/agent/library/skills.js | 119 +++++++++++++++++----------------- src/agent/prompter.js | 23 +++++++ 5 files changed, 116 insertions(+), 124 deletions(-) diff --git a/package.json b/package.json index 00901de..7cb599c 100644 --- a/package.json +++ b/package.json @@ -25,8 +25,8 @@ "socket.io": "^4.7.2", "socket.io-client": "^4.7.2", "express": "^4.18.2", - "three": "0.128.0", - "node-canvas-webgl": "PrismarineJS/node-canvas-webgl" + "node-canvas-webgl": "^0.2.6", + "three": "^0.128.0" }, "scripts": { "postinstall": "patch-package", diff --git a/profiles/_default.json b/profiles/_default.json index ac244fc..90a1621 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -9,6 +9,8 @@ "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", + "image_conversing": "You are a playful Minecraft bot. Briefly describe the screen you are looking at now.", + "modes": { "self_preservation": true, "unstuck": true, @@ -65,14 +67,20 @@ {"role": "user", "content": "derg: (FROM OTHER BOT)Thanks, see you later!"}, {"role": "assistant", "content": "See you later. !endConversation(\"derg\")"} ], - + [ - {"role": "user", "content": "grombo_Xx: What do you see?"}, - {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + {"role": "user", "content": "d7297: look here"}, + {"role": "assistant", "content": "Okay! !lookAtPlayer(\"d7297\", \"at\")"}, + {"role": "system", "content": "Code Output:\nLooking at player d7297. 
There is a giant waterfall behind d7297."}, + {"role": "assistant", "content": "Woo, that is a cool waterfall!"} ], - + [ + {"role": "user", "content": "d7297: look there"}, + {"role": "assistant", "content": "Okay! !lookAtPlayer(\"d7297\", \"with\")"}, + {"role": "system", "content": "Code Output:\nLooking in the same direction as d7297\nThere is a castle made of stone."}, + {"role": "assistant", "content": "Wow, that stone castle looks amazing!"} + ], + [ {"role": "user", "content": "greg: Collect 10 wood"}, {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 1c6bbfe..09f6369 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -1,8 +1,6 @@ import * as skills from '../library/skills.js'; import settings from '../../../settings.js'; import convoManager from '../conversation.js'; -import fs from 'fs'; -import { GPT } from '../../models/gpt.js'; function runAsAction (actionFn, resume = false, timeout = -1) { let actionLabel = null; // Will be set on first use @@ -410,69 +408,33 @@ export const actionsList = [ } }, { - name: '!takeScreenshot', - description: 'Takes and saves a screenshot of the specified coordinates.', + name: '!lookAtPlayer', + description: 'Look at a player or look in the same direction as the player.', params: { - 'x': { - type: 'int', - description: 'x coordinate to capture', - optional: true + 'player_name': { + type: 'string', + description: 'Name of the target player' }, - 'y': { - type: 'int', - description: 'y coordinate to capture', - optional: true - }, - 'z': { - type: 'int', - description: 'z coordinate to capture', - optional: true - }, - 'filename': { - type: 'string', - description: 'Filename to save (without extension). If not specified, saves with timestamp.', - optional: true + 'direction': { + type: 'string', + description: 'How to look ("at": look at the player, "with": look in the same direction as the player)', + enum: ['at', 'with'] } }, - perform: runAsAction(async (agent, x, y, z, filename) => { - await skills.takeScreenshot(agent.bot, x, y, z, filename); + perform: runAsAction(async (agent, player_name, direction) => { + await skills.lookAtPlayer(agent, agent.bot, player_name, direction); }) }, { - name: '!look', - description: 'Takes a screenshot of specified coordinates and analyzes its contents.', + name: '!lookAtPosition', + description: 'Look at specified coordinates.', params: { - 'x': { - type: 'int', - description: 'x coordinate to look at', - optional: true - }, - 'y': { - type: 'int', - description: 'y coordinate to look at', - optional: true - }, - 'z': { - type: 'int', - description: 'z coordinate to look at', - optional: true - } + 'x': { type: 'int', description: 'x coordinate' }, + 'y': { type: 'int', description: 'y coordinate' }, + 'z': { type: 'int', description: 'z coordinate' } }, perform: runAsAction(async (agent, x, y, z) => { - await skills.look(agent, x, y, z); + await skills.lookAtPosition(agent, agent.bot, x, y, z); }) - }, - // { // commented for now, causes confusion with goal command - // name: '!npcGoal', - // description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.', - // params: { - // 'name': { type: 'string', description: 'The name of the goal to set. Can be item or building name. If empty will automatically choose a goal.' }, - // 'quantity': { type: 'int', description: 'The quantity of the goal to set. 
Default is 1.', domain: [1, Number.MAX_SAFE_INTEGER] } - // }, - // perform: async function (agent, name=null, quantity=1) { - // await agent.npc.setGoal(name, quantity); - // agent.bot.emit('idle'); // to trigger the goal - // return 'Set npc goal: ' + agent.npc.data.curr_goal.name; - // } - // }, + } ]; diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index e492d16..389540a 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1,9 +1,9 @@ import * as mc from "../../utils/mcdata.js"; +import { Camera } from "../../utils/camera.js"; import * as world from "./world.js"; import pf from 'mineflayer-pathfinder'; import Vec3 from 'vec3'; import fs from 'fs'; -import { Camera } from "../../utils/camera.js"; export function log(bot, message) { @@ -1343,77 +1343,76 @@ export async function activateNearestBlock(bot, type) { return true; } -export async function takeScreenshot(bot, x, y, z, filename=null) { +export async function lookAtPlayer(agent, bot, player_name, direction) { /** - * Takes a screenshot from the bot's current view or specified position - * @param {MinecraftBot} bot, reference to the minecraft bot - * @param {int} x x coordinate to look at (optional) - * @param {int} y y coordinate to look at (optional) - * @param {int} z z coordinate to look at (optional) - * @param {string} filename filename to save (without extension). If not specified, saves with timestamp - * @returns {Promise} whether the screenshot was successful + * Look at a player or look in the same direction as the player + * @param {MinecraftBot} bot reference to the minecraft bot + * @param {string} player_name name of the target player + * @param {string} direction 'at' to look at player, 'with' to look in same direction + * @returns {Promise} whether the look action was successful * @example - * await skills.takeScreenshot(bot, { name: 'my_screenshot', x: 100, y: 65, z: -200 }); + * await skills.lookAtPlayer(bot, "player1", "at"); + * await skills.lookAtPlayer(bot, "player1", "with"); **/ - - try { - bot.camera = new Camera(bot); - await new Promise(resolve => bot.camera.once('ready', resolve)); - await bot.lookAt(new Vec3(x, y, z)); - await new Promise(resolve => setTimeout(resolve, 500)); - - const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); - if (filename === null) { - filename = `screenshot_${timestamp}`; - } - await bot.camera.takePicture(filename, x, y, z); - - log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); - log(bot, `Target coordinates: x:${x}, y:${y}, z:${z}`); - return [true, filename]; - } catch (err) { - log(bot, `Failed to take screenshot: ${err.message}`); - return [false, null]; - } -} - -export async function look(agent, x, y, z) { - const bot = agent.bot; - const history = agent.history; - - const [success, filename] = await takeScreenshot(bot, x, y, z); - if (!success) { - log(bot, `Failed to take screenshot: ${filename}`); + const player = bot.players[player_name]?.entity; + if (!player) { + log(bot, `Could not find player ${player_name}`); return false; } + let filename; + if (direction === 'with') { + // Copy player's view direction + await bot.look(player.yaw, player.pitch); + const camera = new Camera(bot); + await new Promise(resolve => setTimeout(resolve, 500)); + log(bot, `Looking in the same direction as ${player_name}`); + + filename = await camera.captureDirection(player.yaw, player.pitch); + console.log(player.yaw, player.pitch); + // log(bot, `Screenshot saved: 
bots/${bot.username}/screenshots/${filename}.jpg`); + + } else { + // Look at player's position + await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); + const camera = new Camera(bot); + await new Promise(resolve => setTimeout(resolve, 500)); + log(bot, `Looking at player ${player_name}`); + + filename = await camera.captureTargetPoint(player.position.x, player.position.y + player.height, player.position.z); + // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); + // log(bot, `Target coordinates: x:${player.position.x}, y:${player.position.y}, z:${player.position.z}`); + } + try { const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); - const base64Image = imageBuffer.toString('base64'); - - let messages = history.getHistory(); - messages.push({ - role: "user", - content: [ - { type: "text", text: "Briefly describe the screen you are looking at now." }, - { - type: "image_url", - image_url: { - "url": `data:image/jpeg;base64,${base64Image}`, - } - } - ] - }); - console.log(messages); - - let res = await agent.prompter.chat_model.sendRequest(messages, `You are a playful Minecraft bot. Briefly describe the screen you are looking at now.`); - console.log(res); - + const messages = agent.history.getHistory(); + let res = await agent.prompter.promptImageConvo(messages, imageBuffer); log(bot, res); return true; } catch (error) { log(bot, `Error analyzing image: ${error.message}`); return false; } -} \ No newline at end of file +} + +export async function lookAtPosition(agent, bot, x, y, z) { + await bot.lookAt(new Vec3(x, y + 2, z)); + const camera = new Camera(bot); + await new Promise(resolve => setTimeout(resolve, 500)); + log(bot, `Looking at coordinate ${x, y, z}`); + + let filename = await camera.captureTargetPoint(x, y + 2, z); + + try { + const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); + const messages = agent.history.getHistory(); + let res = await agent.prompter.promptImageConvo(messages, imageBuffer); + log(bot, res); + return true; + } catch (error) { + log(bot, `Error analyzing image: ${error.message}`); + return false; + } +} diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 310ca3e..11ae554 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -264,6 +264,29 @@ export class Prompter { return ''; } + async promptImageConvo(messages, imageBuffer) { + await this.checkCooldown(); + let prompt = this.profile.image_conversing; + let imageMessages = [...messages]; + imageMessages.push({ + role: "user", + content: [ + { type: "text", text: "Briefly describe the screen you are looking at now." 
}, + { + type: "image_url", + image_url: { + "url": `data:image/jpeg;base64,${imageBuffer.toString('base64')}`, + } + } + ] + }); + + return await this.chat_model.sendRequest( + imageMessages, + prompt + ); + } + async promptCoding(messages) { if (this.awaiting_coding) { console.warn('Already awaiting coding response, returning no response.'); From 1fee081782d424234c76639c78c71875f69eaf06 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Thu, 16 Jan 2025 14:43:13 +0900 Subject: [PATCH 004/171] fix: add camera file & move image describe to promptImageConvo --- profiles/_default.json | 6 +++ src/agent/prompter.js | 23 +++++++++++ src/utils/camera.js | 88 ++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 117 insertions(+) create mode 100644 src/utils/camera.js diff --git a/profiles/_default.json b/profiles/_default.json index 90a1621..f0ed592 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -80,6 +80,12 @@ {"role": "system", "content": "Code Output:\nLooking in the same direction as d7297\nThere is a castle made of stone."}, {"role": "assistant", "content": "Wow, that stone castle looks amazing!"} ], + [ + {"role": "user", "content": "d7297: look 0 70 12 and describe what is there"}, + {"role": "assistant", "content": "Sure! !lookAtPosition(0, 70, 12)"}, + {"role": "system", "content": "Code Output:\nLooking in the same direction as d7297\nThere is a water fountain."}, + {"role": "assistant", "content": "I see the water fountain! amazing!"} + ], [ {"role": "user", "content": "greg: Collect 10 wood"}, diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 11ae554..4c7507d 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -287,6 +287,29 @@ export class Prompter { ); } + async promptImageConvo(messages, imageBuffer) { + await this.checkCooldown(); + let prompt = this.profile.image_conversing; + let imageMessages = [...messages]; + imageMessages.push({ + role: "user", + content: [ + { type: "text", text: "Briefly describe the screen you are looking at now." 
}, + { + type: "image_url", + image_url: { + "url": `data:image/jpeg;base64,${imageBuffer.toString('base64')}`, + } + } + ] + }); + + return await this.chat_model.sendRequest( + imageMessages, + prompt + ); + } + async promptCoding(messages) { if (this.awaiting_coding) { console.warn('Already awaiting coding response, returning no response.'); diff --git a/src/utils/camera.js b/src/utils/camera.js new file mode 100644 index 0000000..54a0c7e --- /dev/null +++ b/src/utils/camera.js @@ -0,0 +1,88 @@ +import { Viewer } from 'prismarine-viewer/viewer/lib/viewer.js'; +import { WorldView } from 'prismarine-viewer/viewer/lib/worldview.js'; +import { getBufferFromStream } from 'prismarine-viewer/viewer/lib/simpleUtils.js'; + +import THREE from 'three'; +import { createCanvas } from 'node-canvas-webgl/lib/index.js'; +import fs from 'fs/promises'; +import { Vec3 } from 'vec3'; +import { EventEmitter } from 'events'; + +import worker_threads from 'worker_threads'; +global.Worker = worker_threads.Worker; + + +export class Camera extends EventEmitter { + constructor (bot) { + super() + this.bot = bot + this.viewDistance = 4 + this.width = 800 + this.height = 512 + this.canvas = createCanvas(this.width, this.height) + this.renderer = new THREE.WebGLRenderer({ canvas: this.canvas }) + this.viewer = new Viewer(this.renderer) + this._init().then(() => { + this.emit('ready') + }) + } + + async _init () { + const botPos = this.bot.entity.position + const center = new Vec3(botPos.x, botPos.y+this.bot.entity.height, botPos.z) + this.viewer.setVersion(this.bot.version) + // Load world + const worldView = new WorldView(this.bot.world, this.viewDistance, center) + this.viewer.listen(worldView) + + this.viewer.camera.position.set(center.x, center.y, center.z) + + await worldView.init(center) + } + + async captureTargetPoint(x, y, z) { + this.viewer.camera.lookAt(x, y, z); + const filename = await this._capture(); + return filename; + } + + async captureDirection(yaw, pitch) { + this.viewer.camera.rotation.y = yaw; + this.viewer.camera.rotation.x = pitch; + const filename = await this._capture(); + return filename; + } + + async _capture() { + console.info('Waiting for camera to load'); + await new Promise(resolve => setTimeout(resolve, 5000)); + this.renderer.render(this.viewer.scene, this.viewer.camera); + + const imageStream = this.canvas.createJPEGStream({ + bufsize: 4096, + quality: 100, + progressive: false + }); + + const timestamp = new Date().toISOString().replace(/[:.]/g, '-'); + const filename = `screenshot_${timestamp}`; + + const buf = await getBufferFromStream(imageStream); + await this._ensureScreenshotDirectory(); + await fs.writeFile(`bots/${this.bot.username}/screenshots/${filename}.jpg`, buf); + console.log('saved', filename); + return filename; + } + + async _ensureScreenshotDirectory() { + let stats; + try { + stats = await fs.stat(`bots/${this.bot.username}/screenshots`); + } catch (e) { + if (!stats?.isDirectory()) { + await fs.mkdir(`bots/${this.bot.username}/screenshots`); + } + } + } +} + \ No newline at end of file From 85ed526fcf65c8e6b09fa59e30f714f2c73d70cb Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Mon, 20 Jan 2025 10:30:47 +0900 Subject: [PATCH 005/171] chore: reset settings --- settings.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/settings.js b/settings.js index f8dc1ba..a4681fa 100644 --- a/settings.js +++ b/settings.js @@ -2,7 +2,7 @@ export default { "minecraft_version": "1.20.4", // supports up to 1.21.1 "host": "127.0.0.1", // or "localhost", 
"your.ip.address.here" - "port": 56069, + "port": 55916, "auth": "offline", // or "microsoft" // the mindserver manages all agents and hosts the UI @@ -25,7 +25,7 @@ export default // using more than 1 profile requires you to /msg each bot indivually ], "load_memory": false, // load memory from previous session - // "init_message": "Respond with hello world and your name", // sends to all on spawn + "init_message": "Respond with hello world and your name", // sends to all on spawn "only_chat_with": [], // users that the bots listen to and send general messages to. if empty it will chat publicly "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages From 65113c706f0a98176ae41d793f3d9ca9b52d2863 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Mon, 20 Jan 2025 11:48:38 +0900 Subject: [PATCH 006/171] chore: remove duplcated func --- src/agent/prompter.js | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 4c7507d..11ae554 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -287,29 +287,6 @@ export class Prompter { ); } - async promptImageConvo(messages, imageBuffer) { - await this.checkCooldown(); - let prompt = this.profile.image_conversing; - let imageMessages = [...messages]; - imageMessages.push({ - role: "user", - content: [ - { type: "text", text: "Briefly describe the screen you are looking at now." }, - { - type: "image_url", - image_url: { - "url": `data:image/jpeg;base64,${imageBuffer.toString('base64')}`, - } - } - ] - }); - - return await this.chat_model.sendRequest( - imageMessages, - prompt - ); - } - async promptCoding(messages) { if (this.awaiting_coding) { console.warn('Already awaiting coding response, returning no response.'); From 55c045f57f663dabf50aa5284068127af6974af5 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Mon, 20 Jan 2025 13:11:57 +0900 Subject: [PATCH 007/171] fix: update camera direction to use mineflayer viewer setFirstPersonCamera api --- src/agent/library/skills.js | 6 +++--- src/utils/camera.js | 16 ++-------------- 2 files changed, 5 insertions(+), 17 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 389540a..067bd6e 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1369,7 +1369,7 @@ export async function lookAtPlayer(agent, bot, player_name, direction) { await new Promise(resolve => setTimeout(resolve, 500)); log(bot, `Looking in the same direction as ${player_name}`); - filename = await camera.captureDirection(player.yaw, player.pitch); + filename = await camera.capture(); console.log(player.yaw, player.pitch); // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); @@ -1380,7 +1380,7 @@ export async function lookAtPlayer(agent, bot, player_name, direction) { await new Promise(resolve => setTimeout(resolve, 500)); log(bot, `Looking at player ${player_name}`); - filename = await camera.captureTargetPoint(player.position.x, player.position.y + player.height, player.position.z); + filename = await camera.capture(); // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); // log(bot, `Target coordinates: x:${player.position.x}, y:${player.position.y}, z:${player.position.z}`); } @@ -1403,7 +1403,7 @@ export async function lookAtPosition(agent, bot, x, y, z) { await new Promise(resolve => setTimeout(resolve, 500)); log(bot, `Looking at coordinate ${x, y, z}`); - let 
filename = await camera.captureTargetPoint(x, y + 2, z); + let filename = await camera.capture(); try { const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); diff --git a/src/utils/camera.js b/src/utils/camera.js index 54a0c7e..b710076 100644 --- a/src/utils/camera.js +++ b/src/utils/camera.js @@ -36,24 +36,12 @@ export class Camera extends EventEmitter { this.viewer.listen(worldView) this.viewer.camera.position.set(center.x, center.y, center.z) + this.viewer.setFirstPersonCamera(this.bot.entity.position, this.bot.entity.yaw, this.bot.entity.pitch) await worldView.init(center) } - async captureTargetPoint(x, y, z) { - this.viewer.camera.lookAt(x, y, z); - const filename = await this._capture(); - return filename; - } - - async captureDirection(yaw, pitch) { - this.viewer.camera.rotation.y = yaw; - this.viewer.camera.rotation.x = pitch; - const filename = await this._capture(); - return filename; - } - - async _capture() { + async capture() { console.info('Waiting for camera to load'); await new Promise(resolve => setTimeout(resolve, 5000)); this.renderer.render(this.viewer.scene, this.viewer.camera); From e4eda9c16a725fc888c303a58bab1e44966994d7 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Mon, 20 Jan 2025 14:26:14 +0900 Subject: [PATCH 008/171] chore: remove unnecessary 5-second sleep --- src/utils/camera.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/utils/camera.js b/src/utils/camera.js index b710076..3c23adb 100644 --- a/src/utils/camera.js +++ b/src/utils/camera.js @@ -42,8 +42,6 @@ export class Camera extends EventEmitter { } async capture() { - console.info('Waiting for camera to load'); - await new Promise(resolve => setTimeout(resolve, 5000)); this.renderer.render(this.viewer.scene, this.viewer.camera); const imageStream = this.canvas.createJPEGStream({ From 1f6f352e534f74300ec99a37c8dfc6636aef8a4d Mon Sep 17 00:00:00 2001 From: Lawtro37 <98205608+Lawtro37@users.noreply.github.com> Date: Mon, 20 Jan 2025 20:43:35 +1000 Subject: [PATCH 009/171] added digDown skill --- src/agent/library/skills.js | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index be5882f..765c86d 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1340,3 +1340,28 @@ export async function activateNearestBlock(bot, type) { log(bot, `Activated ${type} at x:${block.position.x.toFixed(1)}, y:${block.position.y.toFixed(1)}, z:${block.position.z.toFixed(1)}.`); return true; } + + +export async function digDown(bot, distance = 10) { + /** + * Digs down a specified distance. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {int} distance, distance to dig down. + * @returns {Promise} true if successfully dug down. 
+ * @example + * await skills.digDown(bot, 10); + **/ + + for (let i = 0; i < distance; i++) { + const targetBlock = bot.blockAt(bot.entity.position.offset(0, -1, 0)); + if (targetBlock && bot.canDigBlock(targetBlock)) { + await bot.dig(targetBlock); + await bot.waitForTicks(10); // wait for a short period to avoid issues + await goToPosition(bot, bot.entity.position.x, bot.entity.position.y - 1, bot.entity.position.z); + } else { + log('Cannot dig block at position:', bot.entity.position.offset(0, -1, 0)); + return false; + } + } + return true; +} From e2cf9912ade4188e7cd3290e9d012457645f23e0 Mon Sep 17 00:00:00 2001 From: Lawtro37 <98205608+Lawtro37@users.noreply.github.com> Date: Mon, 20 Jan 2025 20:44:55 +1000 Subject: [PATCH 010/171] added "!digDown" action to actions.js --- src/agent/commands/actions.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 34e6693..e72a778 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -407,6 +407,14 @@ export const actionsList = [ return `Converstaion with ${player_name} ended.`; } }, + { + name: '!digDown', + description: 'Digs down a specified distance.', + params: {'distance': { type: 'int', description: 'Distance to dig down'}}, + perform: runAsAction(async (agent, distance) => { + await skills.digDown(agent.bot, distance) + }) + }, // { // commented for now, causes confusion with goal command // name: '!npcGoal', // description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.', From 2a768c3e9797ba8a0dd7ec9f0e7fe3a6068665e4 Mon Sep 17 00:00:00 2001 From: Lawtro <98205608+Lawtro37@users.noreply.github.com> Date: Tue, 21 Jan 2025 18:45:03 +1000 Subject: [PATCH 011/171] fixed bugs where bot would try to mine water and added safety protections - bot will no longer mine down if its in water or if there is water below the block its standing on - bot will no longer dig down if its going to fall into water or lava or a drop more than 5 blocks --- src/agent/library/skills.js | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 765c86d..d6f6e72 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1354,12 +1354,23 @@ export async function digDown(bot, distance = 10) { for (let i = 0; i < distance; i++) { const targetBlock = bot.blockAt(bot.entity.position.offset(0, -1, 0)); + const belowBlock = bot.blockAt(bot.entity.position.offset(0, -2, 0)); + + // Check for lava, water, or a fall of more than 5 blocks below the bot + if (!targetBlock || targetBlock.name === 'lava' || targetBlock.name === 'water' || + (belowBlock && (belowBlock.name === 'lava' || belowBlock.name === 'water' || belowBlock.position.y < bot.entity.position.y - 5))) { + console.log('not safe to dig block at position:', bot.entity.position.offset(0, -1, 0)); + log('not safe to dig block at position:' + bot.entity.position.offset(0, -1, 0)) + return false; + } + if (targetBlock && bot.canDigBlock(targetBlock)) { await bot.dig(targetBlock); await bot.waitForTicks(10); // wait for a short period to avoid issues - await goToPosition(bot, bot.entity.position.x, bot.entity.position.y - 1, bot.entity.position.z); + await bot.entity.position.offset(0, -1, 0); } else { - log('Cannot dig block at position:', bot.entity.position.offset(0, -1, 0)); + console.log('Cannot dig block at position:', bot.entity.position.offset(0, 
-1, 0)); + log('Cannot dig block at position:' + bot.entity.position.offset(0, -1, 0)) return false; } } From d75c7304fd81c8f8a579116bb12076d9e2c961db Mon Sep 17 00:00:00 2001 From: Lawtro <98205608+Lawtro37@users.noreply.github.com> Date: Wed, 22 Jan 2025 12:31:50 +1000 Subject: [PATCH 012/171] change danger log message --- src/agent/library/skills.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index d6f6e72..8b71acc 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1359,8 +1359,8 @@ export async function digDown(bot, distance = 10) { // Check for lava, water, or a fall of more than 5 blocks below the bot if (!targetBlock || targetBlock.name === 'lava' || targetBlock.name === 'water' || (belowBlock && (belowBlock.name === 'lava' || belowBlock.name === 'water' || belowBlock.position.y < bot.entity.position.y - 5))) { - console.log('not safe to dig block at position:', bot.entity.position.offset(0, -1, 0)); - log('not safe to dig block at position:' + bot.entity.position.offset(0, -1, 0)) + console.log('Dug down i blocks, but reached (lava/water/dangerous fall)'); + log('Dug down i blocks, but reached (lava/water/dangerous fall)') return false; } From fbc2734e5259403664340fe147474c0b7f0022d1 Mon Sep 17 00:00:00 2001 From: Lawtro <98205608+Lawtro37@users.noreply.github.com> Date: Wed, 22 Jan 2025 13:35:18 +1000 Subject: [PATCH 013/171] fixed fall protection and tweaked other things --- src/agent/library/skills.js | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 8b71acc..8566f55 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1356,21 +1356,37 @@ export async function digDown(bot, distance = 10) { const targetBlock = bot.blockAt(bot.entity.position.offset(0, -1, 0)); const belowBlock = bot.blockAt(bot.entity.position.offset(0, -2, 0)); - // Check for lava, water, or a fall of more than 5 blocks below the bot + // Check for lava, water if (!targetBlock || targetBlock.name === 'lava' || targetBlock.name === 'water' || - (belowBlock && (belowBlock.name === 'lava' || belowBlock.name === 'water' || belowBlock.position.y < bot.entity.position.y - 5))) { - console.log('Dug down i blocks, but reached (lava/water/dangerous fall)'); - log('Dug down i blocks, but reached (lava/water/dangerous fall)') + (belowBlock && (belowBlock.name === 'lava' || belowBlock.name === 'water'))) { + console.log(`Dug down ${i} blocks, but reached ${belowBlock ? belowBlock.name : '(lava/water)'}`); + log(bot, `Dug down ${i} blocks, but reached ${belowBlock ? 
belowBlock.name : '(lava/water)'}`) return false; } - if (targetBlock && bot.canDigBlock(targetBlock)) { - await bot.dig(targetBlock); + // Check for a fall of more than 5 blocks below the bot + let isSafe = false; + for (let j = 1; j <= 5; j++) { + const belowBlock = bot.blockAt(bot.entity.position.offset(0, -j-1, 0)); + if (!belowBlock || belowBlock.name !== 'air') { + isSafe = true; + break; + } + } + + if (!targetBlock || !isSafe) { + console.log(`Dug down ${i} blocks, but reached fall`); + log(bot, `Dug down ${i} blocks, but reached fall`); + return false; + } + + if (bot.canDigBlock(targetBlock)) { + await breakBlockAt(bot, bot.entity.position.x, bot.entity.position.y - 1, bot.entity.position.z); await bot.waitForTicks(10); // wait for a short period to avoid issues await bot.entity.position.offset(0, -1, 0); } else { console.log('Cannot dig block at position:', bot.entity.position.offset(0, -1, 0)); - log('Cannot dig block at position:' + bot.entity.position.offset(0, -1, 0)) + log(bot, 'Cannot dig block at position:' + bot.entity.position.offset(0, -1, 0)) return false; } } From 5fce0acaac0c4022b306e4ade976b77ed1eb369f Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Fri, 24 Jan 2025 13:16:36 +0900 Subject: [PATCH 014/171] feat: move vision functions from skill into vision_intepreter --- profiles/_default.json | 2 +- settings.js | 1 + src/agent/agent.js | 3 + src/agent/commands/actions.js | 4 +- src/agent/library/skills.js | 128 ++++++++++++++++---------------- src/agent/prompter.js | 4 +- src/agent/vision_interpreter.js | 95 ++++++++++++++++++++++++ src/utils/camera.js | 11 ++- 8 files changed, 175 insertions(+), 73 deletions(-) create mode 100644 src/agent/vision_interpreter.js diff --git a/profiles/_default.json b/profiles/_default.json index f0ed592..474b407 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -9,7 +9,7 @@ "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", - "image_conversing": "You are a playful Minecraft bot. Briefly describe the screen you are looking at now.", + "image_conversing": "Summarize the visible Minecraft screen by listing the types and arrangements of blocks, focusing on terrain, structures, and notable features in 500 chars.", "modes": { "self_preservation": true, diff --git a/settings.js b/settings.js index a4681fa..c6f7251 100644 --- a/settings.js +++ b/settings.js @@ -32,6 +32,7 @@ export default "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. 
enable at own risk + "allow_vision": true, // allows vision model to interpret screenshots as inputs "code_timeout_mins": 10, // minutes code is allowed to run. -1 for no timeout "max_messages": 15, // max number of messages to keep in context diff --git a/src/agent/agent.js b/src/agent/agent.js index 4691079..d14b12a 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -1,5 +1,6 @@ import { History } from './history.js'; import { Coder } from './coder.js'; +import { VisionInterpreter } from './vision_interpreter.js'; import { Prompter } from './prompter.js'; import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; @@ -36,6 +37,8 @@ export class Agent { this.history = new History(this); console.log('Initializing coder...'); this.coder = new Coder(this); + console.log('Initializing vision intepreter...'); + this.vision_interpreter = new VisionInterpreter(this, settings.allow_vision); console.log('Initializing npc controller...'); this.npc = new NPCContoller(this); console.log('Initializing memory bank...'); diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 09f6369..1ee8192 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -422,7 +422,7 @@ export const actionsList = [ } }, perform: runAsAction(async (agent, player_name, direction) => { - await skills.lookAtPlayer(agent, agent.bot, player_name, direction); + await agent.vision_interpreter.lookAtPlayer(player_name, direction); }) }, { @@ -434,7 +434,7 @@ export const actionsList = [ 'z': { type: 'int', description: 'z coordinate' } }, perform: runAsAction(async (agent, x, y, z) => { - await skills.lookAtPosition(agent, agent.bot, x, y, z); + await agent.vision_interpreter.lookAtPosition(x, y, z); }) } ]; diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 067bd6e..db05e7c 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1343,76 +1343,76 @@ export async function activateNearestBlock(bot, type) { return true; } -export async function lookAtPlayer(agent, bot, player_name, direction) { - /** - * Look at a player or look in the same direction as the player - * @param {MinecraftBot} bot reference to the minecraft bot - * @param {string} player_name name of the target player - * @param {string} direction 'at' to look at player, 'with' to look in same direction - * @returns {Promise} whether the look action was successful - * @example - * await skills.lookAtPlayer(bot, "player1", "at"); - * await skills.lookAtPlayer(bot, "player1", "with"); - **/ +// export async function lookAtPlayer(agent, bot, player_name, direction) { +// /** +// * Look at a player or look in the same direction as the player +// * @param {MinecraftBot} bot reference to the minecraft bot +// * @param {string} player_name name of the target player +// * @param {string} direction 'at' to look at player, 'with' to look in same direction +// * @returns {Promise} whether the look action was successful +// * @example +// * await skills.lookAtPlayer(bot, "player1", "at"); +// * await skills.lookAtPlayer(bot, "player1", "with"); +// **/ - const player = bot.players[player_name]?.entity; - if (!player) { - log(bot, `Could not find player ${player_name}`); - return false; - } +// const player = bot.players[player_name]?.entity; +// if (!player) { +// log(bot, `Could not find player ${player_name}`); +// return false; +// } - let filename; - if (direction === 'with') { - // Copy player's view direction - await bot.look(player.yaw, 
player.pitch); - const camera = new Camera(bot); - await new Promise(resolve => setTimeout(resolve, 500)); - log(bot, `Looking in the same direction as ${player_name}`); +// let filename; +// if (direction === 'with') { +// // Copy player's view direction +// await bot.look(player.yaw, player.pitch); +// const camera = new Camera(bot); +// await new Promise(resolve => setTimeout(resolve, 500)); +// log(bot, `Looking in the same direction as ${player_name}`); - filename = await camera.capture(); - console.log(player.yaw, player.pitch); - // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); +// filename = await camera.capture(); +// console.log(player.yaw, player.pitch); +// // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); - } else { - // Look at player's position - await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); - const camera = new Camera(bot); - await new Promise(resolve => setTimeout(resolve, 500)); - log(bot, `Looking at player ${player_name}`); +// } else { +// // Look at player's position +// await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); +// const camera = new Camera(bot); +// await new Promise(resolve => setTimeout(resolve, 500)); +// log(bot, `Looking at player ${player_name}`); - filename = await camera.capture(); - // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); - // log(bot, `Target coordinates: x:${player.position.x}, y:${player.position.y}, z:${player.position.z}`); - } +// filename = await camera.capture(); +// // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); +// // log(bot, `Target coordinates: x:${player.position.x}, y:${player.position.y}, z:${player.position.z}`); +// } - try { - const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); - const messages = agent.history.getHistory(); - let res = await agent.prompter.promptImageConvo(messages, imageBuffer); - log(bot, res); - return true; - } catch (error) { - log(bot, `Error analyzing image: ${error.message}`); - return false; - } -} +// try { +// const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); +// const messages = agent.history.getHistory(); +// let res = await agent.prompter.promptImageConvo(messages, imageBuffer); +// log(bot, res); +// return true; +// } catch (error) { +// log(bot, `Error analyzing image: ${error.message}`); +// return false; +// } +// } -export async function lookAtPosition(agent, bot, x, y, z) { - await bot.lookAt(new Vec3(x, y + 2, z)); - const camera = new Camera(bot); - await new Promise(resolve => setTimeout(resolve, 500)); - log(bot, `Looking at coordinate ${x, y, z}`); +// export async function lookAtPosition(agent, bot, x, y, z) { +// await bot.lookAt(new Vec3(x, y + 2, z)); +// const camera = new Camera(bot); +// await new Promise(resolve => setTimeout(resolve, 500)); +// log(bot, `Looking at coordinate ${x, y, z}`); - let filename = await camera.capture(); +// let filename = await camera.capture(); - try { - const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); - const messages = agent.history.getHistory(); - let res = await agent.prompter.promptImageConvo(messages, imageBuffer); - log(bot, res); - return true; - } catch (error) { - log(bot, `Error analyzing image: ${error.message}`); - return false; - } -} +// try { +// const imageBuffer = 
fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); +// const messages = agent.history.getHistory(); +// let res = await agent.prompter.promptImageConvo(messages, imageBuffer); +// log(bot, res); +// return true; +// } catch (error) { +// log(bot, `Error analyzing image: ${error.message}`); +// return false; +// } +// } diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 11ae554..1f46492 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -271,7 +271,7 @@ export class Prompter { imageMessages.push({ role: "user", content: [ - { type: "text", text: "Briefly describe the screen you are looking at now." }, + { type: "text", text: prompt }, { type: "image_url", image_url: { @@ -299,7 +299,7 @@ export class Prompter { let resp = await this.chat_model.sendRequest(messages, prompt); this.awaiting_coding = false; return resp; - } +git } async promptMemSaving(to_summarize) { await this.checkCooldown(); diff --git a/src/agent/vision_interpreter.js b/src/agent/vision_interpreter.js new file mode 100644 index 0000000..deea836 --- /dev/null +++ b/src/agent/vision_interpreter.js @@ -0,0 +1,95 @@ +import { Vec3 } from 'vec3'; +import { Camera } from "../utils/camera.js"; +import fs from 'fs'; +import { log } from './library/skills.js'; +import * as world from './library/world.js'; + +const pad = (str) => { + return '\n' + str + '\n'; +} + +export class VisionInterpreter { + constructor(agent, allow_vision) { + this.agent = agent; + this.allow_vision = allow_vision; + this.fp = './bots/'+agent.name+'/screenshots/'; + } + + async lookAtPlayer(player_name, direction) { + const bot = this.agent.bot; + const player = bot.players[player_name]?.entity; + if (!player) { + log(bot, `Could not find player ${player_name}`); + } + + let filename; + if (direction === 'with') { + await bot.look(player.yaw, player.pitch); + const camera = new Camera(bot, this.fp); + await new Promise(resolve => setTimeout(resolve, 500)); + log(bot, `Looking in the same direction as ${player_name}`); + filename = await camera.capture(); + } else { + await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); + const camera = new Camera(bot, this.fp); + await new Promise(resolve => setTimeout(resolve, 500)); + log(bot, `Looking at player ${player_name}`); + filename = await camera.capture(); + } + + if (!this.allow_vision) { + log(this.agent.bot, "Vision is disabled. Using text-based environment description instead."); + log(this.agent.bot, this._nearbyBlocks()); + } else { + await this.analyzeImage(filename); + } + } + + async lookAtPosition(x, y, z) { + const bot = this.agent.bot; + await bot.lookAt(new Vec3(x, y + 2, z)); + const camera = new Camera(bot, this.fp); + await new Promise(resolve => setTimeout(resolve, 500)); + log(bot, `Looking at coordinate ${x, y, z}`); + + let filename = await camera.capture(); + + if (!this.allow_vision) { + log(this.agent.bot, "Vision is disabled. 
Using text-based environment description instead."); + log(this.agent.bot, this._nearbyBlocks()); + } else { + await this.analyzeImage(filename); + } + } + + async analyzeImage(filename) { + let res = null; + try { + const bot = this.agent.bot; + const imageBuffer = fs.readFileSync(`${this.fp}/${filename}.jpg`); + const messages = this.agent.history.getHistory(); + res = await this.agent.prompter.promptImageConvo(messages, imageBuffer); + log(bot, res); + } catch (error) { + log(this.agent.bot, `Error analyzing image: ${error.message}`); + } + } + + _nearbyBlocks() { + const bot = this.agent.bot; + let res = 'NEARBY_BLOCKS'; + + let blocks = world.getNearbyBlockTypes(bot); + for (let i = 0; i < blocks.length; i++) { + res += `\n- ${blocks[i]}`; + } + if (blocks.length == 0) { + res += ': none'; + } else { + // Environmental Awareness + res += '\n- ' + world.getSurroundingBlocks(bot).join('\n- ') + res += `\n- First Solid Block Above Head: ${world.getFirstBlockAboveHead(bot, null, 32)}`; + } + return pad(res); + } +} \ No newline at end of file diff --git a/src/utils/camera.js b/src/utils/camera.js index 3c23adb..7eafb42 100644 --- a/src/utils/camera.js +++ b/src/utils/camera.js @@ -13,9 +13,10 @@ global.Worker = worker_threads.Worker; export class Camera extends EventEmitter { - constructor (bot) { + constructor (bot, fp) { super() this.bot = bot + this.fp = fp this.viewDistance = 4 this.width = 800 this.height = 512 @@ -42,6 +43,8 @@ export class Camera extends EventEmitter { } async capture() { + // waits some time helps renderer to render the world view + await new Promise(resolve => setTimeout(resolve, 1000)); this.renderer.render(this.viewer.scene, this.viewer.camera); const imageStream = this.canvas.createJPEGStream({ @@ -55,7 +58,7 @@ export class Camera extends EventEmitter { const buf = await getBufferFromStream(imageStream); await this._ensureScreenshotDirectory(); - await fs.writeFile(`bots/${this.bot.username}/screenshots/${filename}.jpg`, buf); + await fs.writeFile(`${this.fp}/${filename}.jpg`, buf); console.log('saved', filename); return filename; } @@ -63,10 +66,10 @@ export class Camera extends EventEmitter { async _ensureScreenshotDirectory() { let stats; try { - stats = await fs.stat(`bots/${this.bot.username}/screenshots`); + stats = await fs.stat(this.fp); } catch (e) { if (!stats?.isDirectory()) { - await fs.mkdir(`bots/${this.bot.username}/screenshots`); + await fs.mkdir(this.fp); } } } From 7d5172628913a09407618cb0b86f59085479f59c Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Fri, 24 Jan 2025 16:29:03 +0900 Subject: [PATCH 015/171] feat: remove promptImageConvo and implement sendVisionRequest to each provider --- profiles/_default.json | 2 +- src/agent/prompter.js | 23 ----------------------- src/agent/vision_interpreter.js | 8 +++++--- src/models/gpt.js | 19 +++++++++++++++++++ src/models/mistral.js | 16 ++++++++++++++++ src/utils/text.js | 4 +++- 6 files changed, 44 insertions(+), 28 deletions(-) diff --git a/profiles/_default.json b/profiles/_default.json index 474b407..fd7b1c2 100644 --- a/profiles/_default.json +++ b/profiles/_default.json @@ -9,7 +9,7 @@ "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. 
Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", - "image_conversing": "Summarize the visible Minecraft screen by listing the types and arrangements of blocks, focusing on terrain, structures, and notable features in 500 chars.", + "image_conversing": "Summarize the visible Minecraft screen by listing the types and arrangements of blocks, focusing on terrain, structures, and notable features in 400 chars.", "modes": { "self_preservation": true, diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 1f46492..2668ad7 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -264,29 +264,6 @@ export class Prompter { return ''; } - async promptImageConvo(messages, imageBuffer) { - await this.checkCooldown(); - let prompt = this.profile.image_conversing; - let imageMessages = [...messages]; - imageMessages.push({ - role: "user", - content: [ - { type: "text", text: prompt }, - { - type: "image_url", - image_url: { - "url": `data:image/jpeg;base64,${imageBuffer.toString('base64')}`, - } - } - ] - }); - - return await this.chat_model.sendRequest( - imageMessages, - prompt - ); - } - async promptCoding(messages) { if (this.awaiting_coding) { console.warn('Already awaiting coding response, returning no response.'); diff --git a/src/agent/vision_interpreter.js b/src/agent/vision_interpreter.js index deea836..38b7a44 100644 --- a/src/agent/vision_interpreter.js +++ b/src/agent/vision_interpreter.js @@ -37,7 +37,7 @@ export class VisionInterpreter { filename = await camera.capture(); } - if (!this.allow_vision) { + if (!this.allow_vision || !this.agent.prompter.chat_model.sendVisionRequest) { log(this.agent.bot, "Vision is disabled. Using text-based environment description instead."); log(this.agent.bot, this._nearbyBlocks()); } else { @@ -54,7 +54,7 @@ export class VisionInterpreter { let filename = await camera.capture(); - if (!this.allow_vision) { + if (!this.allow_vision || !this.agent.prompter.chat_model.sendVisionRequest) { log(this.agent.bot, "Vision is disabled. 
Using text-based environment description instead."); log(this.agent.bot, this._nearbyBlocks()); } else { @@ -63,12 +63,14 @@ export class VisionInterpreter { } async analyzeImage(filename) { + let prompt = this.agent.prompter.profile.image_conversing; let res = null; + try { const bot = this.agent.bot; const imageBuffer = fs.readFileSync(`${this.fp}/${filename}.jpg`); const messages = this.agent.history.getHistory(); - res = await this.agent.prompter.promptImageConvo(messages, imageBuffer); + res = await this.agent.prompter.chat_model.sendVisionRequest(messages, prompt, imageBuffer); log(bot, res); } catch (error) { log(this.agent.bot, `Error analyzing image: ${error.message}`); diff --git a/src/models/gpt.js b/src/models/gpt.js index da29ef1..6664409 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -53,6 +53,24 @@ export class GPT { return res; } + async sendVisionRequest(messages, systemMessage, imageBuffer) { + const imageMessages = [...messages]; + imageMessages.push({ + role: "user", + content: [ + { type: "text", text: systemMessage }, + { + type: "image_url", + image_url: { + url: `data:image/jpeg;base64,${imageBuffer.toString('base64')}` + } + } + ] + }); + + return this.sendRequest(imageMessages, systemMessage); + } + async embed(text) { const embedding = await this.openai.embeddings.create({ model: this.model_name || "text-embedding-3-small", @@ -61,6 +79,7 @@ export class GPT { }); return embedding.data[0].embedding; } + } diff --git a/src/models/mistral.js b/src/models/mistral.js index 3b41f78..124ee9a 100644 --- a/src/models/mistral.js +++ b/src/models/mistral.js @@ -61,6 +61,22 @@ export class Mistral { return result; } + async sendVisionRequest(messages, systemMessage, imageBuffer) { + const imageMessages = [...messages]; + imageMessages.push({ + role: "user", + content: [ + { type: "text", text: systemMessage }, + { + type: "image_url", + imageUrl: `data:image/jpeg;base64,${imageBuffer.toString('base64')}` + } + ] + }); + + return this.sendRequest(imageMessages, systemMessage); + } + async embed(text) { const embedding = await this.#client.embeddings.create({ model: "mistral-embed", diff --git a/src/utils/text.js b/src/utils/text.js index 1e93667..363061f 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -33,7 +33,9 @@ export function strictFormat(turns) { let messages = []; let filler = {role: 'user', content: '_'}; for (let msg of turns) { - msg.content = msg.content.trim(); + if (typeof msg.content === 'string') { + msg.content = msg.content.trim(); + } if (msg.role === 'system') { msg.role = 'user'; msg.content = 'SYSTEM: ' + msg.content; From 7644c6356521c6a54201a56ba99da2f6366a1f92 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 26 Jan 2025 16:56:27 -0800 Subject: [PATCH 016/171] Update local.js Add Deepseek-R1 Support. Deepseek-R1 uses " [Thoughts] " and the response can be cut short if it reasons through a command and it's syntax, so we cut it short before returning the response. --- src/models/local.js | 110 +++++++++++++++++++++++++++++++++++--------- 1 file changed, 87 insertions(+), 23 deletions(-) diff --git a/src/models/local.js b/src/models/local.js index 18d06e0..3e380dd 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -8,41 +8,104 @@ export class Local { this.embedding_endpoint = '/api/embeddings'; } + /** + * Main method to handle chat requests. 
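+     * For "deepseek-r1" model names, retries up to five times when the <think> tags are mismatched, then strips the <think>...</think> reasoning block before returning.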
+ */ async sendRequest(turns, systemMessage) { - let model = this.model_name || 'llama3'; + // Choose the model name or default to 'llama3' + const model = this.model_name || 'llama3'; + + // Format messages and inject the system message at the front let messages = strictFormat(turns); - messages.unshift({role: 'system', content: systemMessage}); - let res = null; - try { - console.log(`Awaiting local response... (model: ${model})`) - res = await this.send(this.chat_endpoint, {model: model, messages: messages, stream: false}); - if (res) - res = res['message']['content']; - } - catch (err) { - if (err.message.toLowerCase().includes('context length') && turns.length > 1) { - console.log('Context length exceeded, trying again with shorter context.'); - return await sendRequest(turns.slice(1), systemMessage, stop_seq); - } else { - console.log(err); - res = 'My brain disconnected, try again.'; + messages.unshift({ role: 'system', content: systemMessage }); + console.log('Messages:', messages); + + // We'll do up to 5 attempts for "deepseek-r1" if the tags are mismatched + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting local response... (model: ${model}, attempt: ${attempt})`); + + // Perform the actual request (wrapped in a try/catch) + let res; + try { + const responseData = await this.send(this.chat_endpoint, { + model: model, + messages: messages, + stream: false + }); + // The local endpoint apparently returns { message: { content: "..." } } + res = responseData?.message?.content || 'No response data.'; + } catch (err) { + // If context length exceeded and we have turns to remove, try again with one fewer turn + if (err.message.toLowerCase().includes('context length') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } } + + // If the model name includes "deepseek-r1", then we handle the block + if (this.model_name && this.model_name.includes("deepseek-r1")) { + const hasOpenTag = res.includes(""); + const hasCloseTag = res.includes(""); + + // If there's a partial mismatch, we regenerate the response + if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + console.warn("Partial block detected. Re-generating..."); + // Attempt another loop iteration to get a complete or no-think response + continue; + } + + // If both tags appear, remove them (and everything inside) + if (hasOpenTag && hasCloseTag) { + res = res.replace(/[\s\S]*?<\/think>/g, ''); + } + } + + // We made it here with either a fully valid or not-needed to handle scenario + finalRes = res; + break; // Break out of the while loop } - return res; + + // If after max attempts we STILL have partial tags, finalRes might be partial + // Or we never set finalRes because all attempts threw partial tags + if (finalRes == null) { + // This means we kept continuing in the loop but never got a break + console.warn("Could not get a valid block or normal response after max attempts."); + finalRes = 'Response incomplete, please try again.'; + } + return finalRes; } + /** + * Embedding method (unchanged). 
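+     * Sends { model, prompt } to the local embeddings endpoint and returns the embedding vector.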
+ */ async embed(text) { let model = this.model_name || 'nomic-embed-text'; - let body = {model: model, prompt: text}; + let body = { model: model, prompt: text }; let res = await this.send(this.embedding_endpoint, body); - return res['embedding'] + return res['embedding']; } + /** + * Generic send method for local endpoint. + */ async send(endpoint, body) { const url = new URL(endpoint, this.url); - let method = 'POST'; - let headers = new Headers(); - const request = new Request(url, {method, headers, body: JSON.stringify(body)}); + const method = 'POST'; + const headers = new Headers(); + const request = new Request(url, { + method, + headers, + body: JSON.stringify(body) + }); + let data = null; try { const res = await fetch(request); @@ -54,7 +117,8 @@ export class Local { } catch (err) { console.error('Failed to send Ollama request.'); console.error(err); + throw err; // rethrow so we can catch it in the calling method } return data; } -} \ No newline at end of file +} From d1b3232a5aed04c4fa434feb7b55301b21adc837 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Mon, 27 Jan 2025 17:29:14 +0900 Subject: [PATCH 017/171] feat: add groq vision request --- src/models/groq.js | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/src/models/groq.js b/src/models/groq.js index e17f13d..025875f 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -15,8 +15,10 @@ export class GroqCloudAPI { this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); } - async sendRequest(turns, systemMessage, stop_seq=null) { - let messages = [{"role": "system", "content": systemMessage}].concat(turns); + async sendRequest(turns, systemMessage=null, stop_seq=null) { + let messages = systemMessage + ? [{"role": "system", "content": systemMessage}].concat(turns) + : turns; let res = null; try { console.log("Awaiting Groq response..."); @@ -45,6 +47,24 @@ export class GroqCloudAPI { return res; } + async sendVisionRequest(messages, systemMessage, imageBuffer) { + const imageMessages = [...messages]; + imageMessages.push({ + role: "user", + content: [ + { type: "text", text: systemMessage }, + { + type: "image_url", + image_url: { + url: `data:image/jpeg;base64,${imageBuffer.toString('base64')}` + } + } + ] + }); + + return this.sendRequest(imageMessages); + } + async embed(text) { console.log("There is no support for embeddings in Groq support. 
However, the following text was provided: " + text); } From 4281ee246838a1b90ed73ef75dd1c058c0c63b87 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Tue, 28 Jan 2025 02:25:41 +0900 Subject: [PATCH 018/171] feat: add gemini vision request --- src/models/gemini.js | 36 ++++++++++++++++++++++++++++++++++++ 1 file changed, 36 insertions(+) diff --git a/src/models/gemini.js b/src/models/gemini.js index 1536d66..27765e3 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -59,6 +59,42 @@ export class Gemini { return text.slice(0, idx); } + async sendVisionRequest(turns, systemMessage, imageBuffer) { + let model; + if (this.url) { + model = this.genAI.getGenerativeModel( + { model: this.model_name || "gemini-1.5-pro-vision" }, + { baseUrl: this.url }, + { safetySettings: this.safetySettings } + ); + } else { + model = this.genAI.getGenerativeModel( + { model: this.model_name || "gemini-1.5-pro-vision" }, + { safetySettings: this.safetySettings } + ); + } + + const imagePart = { + inlineData: { + data: imageBuffer.toString('base64'), + mimeType: 'image/jpeg' + } + }; + + const stop_seq = '***'; + const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); + + console.log('Awaiting Google API vision response...'); + const result = await model.generateContent([prompt, imagePart]); + const response = await result.response; + const text = response.text(); + console.log('Received.'); + + if (!text.includes(stop_seq)) return text; + const idx = text.indexOf(stop_seq); + return text.slice(0, idx); + } + async embed(text) { let model; if (this.url) { From 116ef461067decd4a9bd633f48759e2fb92993a4 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Tue, 28 Jan 2025 02:40:04 +0900 Subject: [PATCH 019/171] chore: minor change --- src/models/gemini.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/models/gemini.js b/src/models/gemini.js index 27765e3..5c5b453 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -63,13 +63,13 @@ export class Gemini { let model; if (this.url) { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-pro-vision" }, + { model: this.model_name || "gemini-1.5-flash" }, { baseUrl: this.url }, { safetySettings: this.safetySettings } ); } else { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-pro-vision" }, + { model: this.model_name || "gemini-1.5-flash" }, { safetySettings: this.safetySettings } ); } @@ -89,7 +89,6 @@ export class Gemini { const response = await result.response; const text = response.text(); console.log('Received.'); - if (!text.includes(stop_seq)) return text; const idx = text.indexOf(stop_seq); return text.slice(0, idx); From 2b3ca165e83f97383a97e9a735ebd36445ddf592 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 27 Jan 2025 16:13:09 -0800 Subject: [PATCH 020/171] Update groq.js Added deepseek-r1 support --- src/models/groq.js | 89 +++++++++++++++++++++++++++++++++------------- 1 file changed, 65 insertions(+), 24 deletions(-) diff --git a/src/models/groq.js b/src/models/groq.js index e17f13d..87ec163 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -1,13 +1,13 @@ -import Groq from 'groq-sdk' +import Groq from 'groq-sdk'; import { getKey } from '../utils/keys.js'; - // Umbrella class for Mixtral, LLama, Gemma... 
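+// Note: for "deepseek-r1" model names, sendRequest below retries on mismatched <think> tags and strips the <think>...</think> reasoning block from the reply.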
export class GroqCloudAPI { constructor(model_name, url, max_tokens=16384) { this.model_name = model_name; this.url = url; this.max_tokens = max_tokens; + // ReplicateAPI theft :3 if (this.url) { console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); @@ -16,36 +16,77 @@ export class GroqCloudAPI { } async sendRequest(turns, systemMessage, stop_seq=null) { - let messages = [{"role": "system", "content": systemMessage}].concat(turns); - let res = null; - try { - console.log("Awaiting Groq response..."); - let completion = await this.groq.chat.completions.create({ - "messages": messages, - "model": this.model_name || "mixtral-8x7b-32768", - "temperature": 0.2, - "max_tokens": this.max_tokens, // maximum token limit, differs from model to model - "top_p": 1, - "stream": true, - "stop": stop_seq // "***" - }); + // We'll do up to 5 attempts for partial mismatch if + // the model name includes "deepseek-r1". + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + // Prepare the message array + let messages = [{ role: "system", content: systemMessage }].concat(turns); + + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting Groq response... (attempt: ${attempt}/${maxAttempts})`); + + // Collect the streaming response let temp_res = ""; - for await (const chunk of completion) { - temp_res += chunk.choices[0]?.delta?.content || ''; + try { + // Create the chat completion stream + let completion = await this.groq.chat.completions.create({ + messages: messages, + model: this.model_name || "mixtral-8x7b-32768", + temperature: 0.2, + max_tokens: this.max_tokens, + top_p: 1, + stream: true, + stop: stop_seq // e.g. "***" + }); + + // Read each streamed chunk + for await (const chunk of completion) { + temp_res += chunk.choices[0]?.delta?.content || ''; + } + } catch (err) { + console.error("Error while streaming from Groq:", err); + temp_res = "My brain just kinda stopped working. Try again."; + // We won't retry partial mismatch if a genuine error occurred here + finalRes = temp_res; + break; } - res = temp_res; + // If the model name includes "deepseek-r1", apply logic + if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { + const hasOpen = temp_res.includes(""); + const hasClose = temp_res.includes(""); + // If partial mismatch, retry + if ((hasOpen && !hasClose) || (!hasOpen && hasClose)) { + console.warn("Partial block detected. Retrying..."); + continue; + } + + // If both and appear, remove the entire block + if (hasOpen && hasClose) { + // Remove everything from to + temp_res = temp_res.replace(/[\s\S]*?<\/think>/g, '').trim(); + } + } + + // We either do not have deepseek-r1 or we have a correct scenario + finalRes = temp_res; + break; } - catch(err) { - console.log(err); - res = "My brain just kinda stopped working. Try again."; + + // If, after max attempts, we never set finalRes (e.g., partial mismatch each time) + if (finalRes == null) { + console.warn("Could not obtain a valid or matched response after max attempts."); + finalRes = "Response incomplete, please try again."; } - return res; + return finalRes; } async embed(text) { - console.log("There is no support for embeddings in Groq support. However, the following text was provided: " + text); + console.log("There is no support for embeddings in Groq support. 
However, the following text was provided: " + text); } -} \ No newline at end of file +} From c78dba777669df16803f48459ea20651902157a0 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 27 Jan 2025 16:15:12 -0800 Subject: [PATCH 021/171] Update huggingface.js Added Deepseek-R1 support, such as the qwen2.5 32b distill --- src/models/huggingface.js | 95 ++++++++++++++++++++++++++++++--------- 1 file changed, 75 insertions(+), 20 deletions(-) diff --git a/src/models/huggingface.js b/src/models/huggingface.js index 56f9d55..e4e1432 100644 --- a/src/models/huggingface.js +++ b/src/models/huggingface.js @@ -1,44 +1,99 @@ -import {toSinglePrompt} from '../utils/text.js'; -import {getKey} from '../utils/keys.js'; -import {HfInference} from "@huggingface/inference"; +// huggingface.js +import { toSinglePrompt } from '../utils/text.js'; +import { getKey } from '../utils/keys.js'; +import { HfInference } from "@huggingface/inference"; export class HuggingFace { constructor(model_name, url) { - this.model_name = model_name.replace('huggingface/',''); + // Remove 'huggingface/' prefix if present + this.model_name = model_name.replace('huggingface/', ''); this.url = url; + // Hugging Face Inference doesn't currently allow custom base URLs if (this.url) { console.warn("Hugging Face doesn't support custom urls!"); } + // Initialize the HfInference instance this.huggingface = new HfInference(getKey('HUGGINGFACE_API_KEY')); } + /** + * Main method to handle chat requests. + */ async sendRequest(turns, systemMessage) { const stop_seq = '***'; - const prompt = toSinglePrompt(turns, null, stop_seq); - let model_name = this.model_name || 'meta-llama/Meta-Llama-3-8B'; + // Convert the user's turns and systemMessage into a single prompt string + const prompt = toSinglePrompt(turns, null, stop_seq); + // Fallback model if none was provided + const model_name = this.model_name || 'meta-llama/Meta-Llama-3-8B'; + + // Combine system message with the prompt const input = systemMessage + "\n" + prompt; + + // We'll collect the streaming response in this variable let res = ''; - try { - console.log('Awaiting Hugging Face API response...'); - for await (const chunk of this.huggingface.chatCompletionStream({ - model: model_name, - messages: [{ role: "user", content: input }] - })) { - res += (chunk.choices[0]?.delta?.content || ""); + console.log('Messages:', [{ role: "system", content: systemMessage }, ...turns]); + + // We'll do up to 5 attempts if the model is "DeepSeek-R1" and tags are mismatched + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting Hugging Face API response... 
(model: ${model_name}, attempt: ${attempt})`); + + res = ''; + try { + // ChatCompletionStream returns an async iterator that we consume chunk by chunk + for await (const chunk of this.huggingface.chatCompletionStream({ + model: model_name, + messages: [{ role: "user", content: input }] + })) { + // Each chunk may or may not have delta content + res += (chunk.choices[0]?.delta?.content || ""); + } + } catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + // Exit the loop, as we only want to retry for block mismatches, not other errors + break; } - } catch (err) { - console.log(err); - res = 'My brain disconnected, try again.'; + + // If the model name includes "DeepSeek-R1", then handle blocks + if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { + const hasOpenTag = res.includes(""); + const hasCloseTag = res.includes(""); + + // If there's a partial mismatch, attempt to regenerate the entire response + if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + console.warn("Partial block detected. Re-generating..."); + continue; + } + + // If both tags appear, remove them (and everything in between) + if (hasOpenTag && hasCloseTag) { + res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); + } + } + + // We made it here with either a valid or no-think scenario + finalRes = res; + break; // Stop retrying + } + + // If after max attempts we couldn't get a matched or valid response + if (finalRes == null) { + console.warn("Could not get a valid block or normal response after max attempts."); + finalRes = 'Response incomplete, please try again.'; } console.log('Received.'); - console.log(res); - return res; + // Return the final (possibly trimmed) response + return finalRes; } - async embed(text) { throw new Error('Embeddings are not supported by HuggingFace.'); } -} \ No newline at end of file +} From 308e092a8145df40212a8b21ea3484bbf5f711e7 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Wed, 29 Jan 2025 01:38:49 +0900 Subject: [PATCH 022/171] feat: add anthropic vision request --- src/models/claude.js | 26 +++++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/src/models/claude.js b/src/models/claude.js index c97ecb2..fb5c740 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -37,10 +37,30 @@ export class Claude { return res; } + async sendVisionRequest(turns, systemMessage, imageBuffer) { + const imageMessages = [...turns]; + imageMessages.push({ + role: "user", + content: [ + { + type: "text", + text: systemMessage + }, + { + type: "image", + source: { + type: "base64", + media_type: "image/jpeg", + data: imageBuffer.toString('base64') + } + } + ] + }); + + return this.sendRequest(imageMessages, systemMessage); + } + async embed(text) { throw new Error('Embeddings are not supported by Claude.'); } } - - - From ec6f4f709876e2c6d1e376b00885f020e30fa381 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 28 Jan 2025 13:43:33 -0800 Subject: [PATCH 023/171] Update groq.js Fixed small error that would endlessly retry groqcloud response if Deepseek-R1 was chosen --- src/models/groq.js | 111 ++++++++++++++++++++++++++++----------------- 1 file changed, 69 insertions(+), 42 deletions(-) diff --git a/src/models/groq.js b/src/models/groq.js index 87ec163..08d2e1d 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -1,88 +1,115 @@ +// groq.js + import Groq from 'groq-sdk'; import { getKey } from '../utils/keys.js'; +import { log } from '../../logger.js'; -// Umbrella class for 
Mixtral, LLama, Gemma... +/** + * Umbrella class for Mixtral, LLama, Gemma... + */ export class GroqCloudAPI { - constructor(model_name, url, max_tokens=16384) { + constructor(model_name, url, max_tokens = 16384) { this.model_name = model_name; this.url = url; this.max_tokens = max_tokens; - // ReplicateAPI theft :3 + // Groq Cloud doesn't support custom URLs; warn if provided if (this.url) { console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); } + + // Initialize Groq SDK with the API key this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); } - async sendRequest(turns, systemMessage, stop_seq=null) { - // We'll do up to 5 attempts for partial mismatch if - // the model name includes "deepseek-r1". + /** + * Sends a chat completion request to the Groq Cloud endpoint. + * + * @param {Array} turns - An array of message objects, e.g., [{role: 'user', content: 'Hi'}]. + * @param {string} systemMessage - The system prompt or instruction. + * @param {string} stop_seq - A string that represents a stopping sequence, default '***'. + * @returns {Promise} - The content of the model's reply. + */ + async sendRequest(turns, systemMessage, stop_seq = '***') { + // Maximum number of attempts to handle partial tag mismatches 5 is a good value, I guess const maxAttempts = 5; let attempt = 0; let finalRes = null; - // Prepare the message array - let messages = [{ role: "system", content: systemMessage }].concat(turns); + // Prepare the input messages by prepending the system message + const messages = [{ role: 'system', content: systemMessage }, ...turns]; + console.log('Messages:', messages); while (attempt < maxAttempts) { attempt++; - console.log(`Awaiting Groq response... (attempt: ${attempt}/${maxAttempts})`); + console.log(`Awaiting Groq response... (model: ${this.model_name}, attempt: ${attempt})`); + + let res = null; - // Collect the streaming response - let temp_res = ""; try { - // Create the chat completion stream - let completion = await this.groq.chat.completions.create({ + // Create the chat completion request + const completion = await this.groq.chat.completions.create({ messages: messages, model: this.model_name || "mixtral-8x7b-32768", temperature: 0.2, - max_tokens: this.max_tokens, + max_tokens: this.max_tokens, top_p: 1, - stream: true, - stop: stop_seq // e.g. "***" + stream: false, + stop: stop_seq // "***" }); - // Read each streamed chunk - for await (const chunk of completion) { - temp_res += chunk.choices[0]?.delta?.content || ''; - } + // Extract the content from the response + res = completion?.choices?.[0]?.message?.content || ''; + console.log('Received response from Groq.'); } catch (err) { - console.error("Error while streaming from Groq:", err); - temp_res = "My brain just kinda stopped working. 
Try again."; - // We won't retry partial mismatch if a genuine error occurred here - finalRes = temp_res; - break; + // Handle context length exceeded by retrying with shorter context + if ( + err.message.toLowerCase().includes('context length') && + turns.length > 1 + ) { + console.log('Context length exceeded, trying again with a shorter context.'); + // Remove the earliest user turn and retry + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + // Log other errors and return fallback message + console.log(err); + res = 'My brain disconnected, try again.'; + } } - // If the model name includes "deepseek-r1", apply logic + // If the model name includes "deepseek-r1", handle tags if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { - const hasOpen = temp_res.includes(""); - const hasClose = temp_res.includes(""); + const hasOpenTag = res.includes(""); + const hasCloseTag = res.includes(""); - // If partial mismatch, retry - if ((hasOpen && !hasClose) || (!hasOpen && hasClose)) { - console.warn("Partial block detected. Retrying..."); - continue; + // Check for partial tag mismatches + if ((hasOpenTag && !hasCloseTag)) { + console.warn("Partial block detected. Re-generating Groq request..."); + // Retry the request by continuing the loop + continue; } - // If both and appear, remove the entire block - if (hasOpen && hasClose) { - // Remove everything from to - temp_res = temp_res.replace(/[\s\S]*?<\/think>/g, '').trim(); + // If is present but is not, prepend + if (hasCloseTag && !hasOpenTag) { + res = '' + res; } + // Trim the block from the response + res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); } - // We either do not have deepseek-r1 or we have a correct scenario - finalRes = temp_res; - break; + // Assign the processed response and exit the loop + finalRes = res; + break; // Stop retrying } - // If, after max attempts, we never set finalRes (e.g., partial mismatch each time) + // If after all attempts, finalRes is still null, assign a fallback if (finalRes == null) { - console.warn("Could not obtain a valid or matched response after max attempts."); - finalRes = "Response incomplete, please try again."; + console.warn("Could not obtain a valid block or normal response after max attempts."); + finalRes = 'Response incomplete, please try again.'; } + + finalRes = finalRes.replace(/<\|separator\|>/g, '*no response*'); + return finalRes; } From 6b8ac02c8ea27a662a903f970032e4d4379ae648 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 29 Jan 2025 15:52:10 -0800 Subject: [PATCH 024/171] Update groq.js Removed custom logging --- src/models/groq.js | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/models/groq.js b/src/models/groq.js index 08d2e1d..4473054 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -2,7 +2,6 @@ import Groq from 'groq-sdk'; import { getKey } from '../utils/keys.js'; -import { log } from '../../logger.js'; /** * Umbrella class for Mixtral, LLama, Gemma... 
@@ -63,18 +62,6 @@ export class GroqCloudAPI { console.log('Received response from Groq.'); } catch (err) { // Handle context length exceeded by retrying with shorter context - if ( - err.message.toLowerCase().includes('context length') && - turns.length > 1 - ) { - console.log('Context length exceeded, trying again with a shorter context.'); - // Remove the earliest user turn and retry - return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); - } else { - // Log other errors and return fallback message - console.log(err); - res = 'My brain disconnected, try again.'; - } } // If the model name includes "deepseek-r1", handle tags From 0d2e4c7b9cce1cc0644c7a4a23a4019e78869605 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 29 Jan 2025 22:39:02 -0800 Subject: [PATCH 025/171] Update prompter.js Fixed bug in prompter.js where replicate Ai was checked twice --- src/agent/prompter.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/agent/prompter.js b/src/agent/prompter.js index 2c0746b..8293b26 100644 --- a/src/agent/prompter.js +++ b/src/agent/prompter.js @@ -60,8 +60,7 @@ export class Prompter { else if (chat.model.includes('hf:')) chat.api = "glhf"; else if (chat.model.includes('hyperbolic:')|| chat.model.includes('hb:')) - chat.api = "hyperbolic"; - else if (chat.model.includes('meta/') || chat.model.includes('mistralai/') || chat.model.includes('replicate/')) + chat.api = "hyperbolic"; else if (chat.model.includes('meta/') || chat.model.includes('replicate/')) chat.api = 'replicate'; else if (chat.model.includes('mistralai/') || chat.model.includes("mistral/")) @@ -353,4 +352,4 @@ export class Prompter { goal.quantity = parseInt(goal.quantity); return goal; } -} \ No newline at end of file +} From 342ef1b473f3b14191f8ce9f7068e4cc3b191cbd Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:30:36 -0800 Subject: [PATCH 026/171] Add files via upload --- README.md | 46 ++-- bots/execTemplate.js | 6 + bots/lintTemplate.js | 10 + eslint.config.js | 25 ++ keys.example.json | 4 +- package.json | 5 + profiles/defaults/_default.json | 245 +++++++++++++++++++ profiles/defaults/creative.json | 14 ++ profiles/defaults/god_mode.json | 14 ++ profiles/defaults/survival.json | 14 ++ profiles/freeguy.json | 2 +- profiles/gpt.json | 7 +- profiles/llama.json | 2 +- profiles/qwen.json | 8 +- settings.js | 7 +- src/agent/action_manager.js | 11 +- src/agent/agent.js | 12 +- src/agent/agent_proxy.js | 8 + src/agent/coder.js | 74 +++++- src/agent/commands/index.js | 2 +- src/agent/commands/queries.js | 36 +++ src/agent/library/index.js | 19 +- src/agent/library/skill_library.js | 47 ++++ src/agent/library/skills.js | 14 +- src/agent/npc/item_goal.js | 2 +- src/agent/self_prompter.js | 2 +- src/agent/tasks.js | 4 +- src/models/claude.js | 10 +- src/models/deepseek.js | 4 +- src/models/gemini.js | 72 +++--- src/models/gpt.js | 7 +- src/models/grok.js | 7 +- src/models/groq.js | 170 ++++++------- src/models/huggingface.js | 150 ++++++------ src/models/hyperbolic.js | 11 +- src/models/local.js | 66 ++--- src/models/mistral.js | 8 +- src/models/novita.js | 22 +- src/models/prompter.js | 373 +++++++++++++++++++++++++++++ src/models/qwen.js | 151 +++++------- src/models/replicate.js | 9 +- src/process/init_agent.js | 7 +- src/server/mind_server.js | 14 +- src/server/public/index.html | 7 +- src/utils/mcdata.js | 157 +++++++++++- src/utils/text.js | 6 +- viewer.html | 12 +- 47 files changed, 1456 insertions(+), 447 deletions(-) create mode 100644 
bots/execTemplate.js create mode 100644 bots/lintTemplate.js create mode 100644 eslint.config.js create mode 100644 profiles/defaults/_default.json create mode 100644 profiles/defaults/creative.json create mode 100644 profiles/defaults/god_mode.json create mode 100644 profiles/defaults/survival.json create mode 100644 src/agent/library/skill_library.js create mode 100644 src/models/prompter.js diff --git a/README.md b/README.md index d165045..a30e368 100644 --- a/README.md +++ b/README.md @@ -1,12 +1,11 @@ # Mindcraft 🧠⛏️ -Crafting minds for Minecraft with LLMs and Mineflayer! +Crafting minds for Minecraft with LLMs and [Mineflayer!](https://prismarinejs.github.io/mineflayer/#/) [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) | [Discord Support](https://discord.gg/mp73p35dzC) | [Blog Post](https://kolbynottingham.com/mindcraft/) | [Contributor TODO](https://github.com/users/kolbytn/projects/1) -#### ‼️Warning‼️ - +> [!WARNING] Do not connect this bot to public servers with coding enabled. This project allows an LLM to write/execute code on your computer. While the code is sandboxed, it is still vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.js`. We strongly recommend running with additional layers of security such as docker containers. Ye be warned. ## Requirements @@ -29,7 +28,7 @@ Do not connect this bot to public servers with coding enabled. This project allo 6. Run `node main.js` from the installed directory -If you encounter issues, check the [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) or find support on [discord](https://discord.gg/jVxQWVTM). We are currently not very responsive to github issues. +If you encounter issues, check the [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) or find support on [discord](https://discord.gg/mp73p35dzC). We are currently not very responsive to github issues. ## Customization @@ -42,7 +41,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | OpenAI | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | | Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | | Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | -| Replicate | `REPLICATE_API_KEY` | `meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | +| Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | | Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | | Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | @@ -63,7 +62,8 @@ To connect to online servers your bot will need an official Microsoft/Minecraft // rest is same... ``` -‼️ The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. +> [!CAUTION] +> The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. 
You can switch accounts in the launcer, then run `node main.js`, then switch to your main account after the bot has connected. @@ -105,39 +105,37 @@ node main.js --profiles ./profiles/andy.json ./profiles/jill.json ### Model Specifications -LLM backends can be specified as simply as `"model": "gpt-3.5-turbo"`. However, for both the chat model and the embedding model, the bot profile can specify the below attributes: +LLM models can be specified as simply as `"model": "gpt-4o"`. However, you can specify different models for chat, coding, and embeddings. +You can pass a string or an object for these fields. A model object must specify an `api`, and optionally a `model`, `url`, and additional `params`. ```json "model": { "api": "openai", + "model": "gpt-4o", "url": "https://api.openai.com/v1/", - "model": "gpt-3.5-turbo" + "params": { + "max_tokens": 1000, + "temperature": 1 + } +}, +"code_model": { + "api": "openai", + "model": "gpt-4", + "url": "https://api.openai.com/v1/" }, "embedding": { "api": "openai", "url": "https://api.openai.com/v1/", "model": "text-embedding-ada-002" } + ``` -The model parameter accepts either a string or object. If a string, it should specify the model to be used. The api and url will be assumed. If an object, the api field must be specified. Each api has a default model and url, so those fields are optional. +`model` is used for chat, `code_model` is used for newAction coding, and `embedding` is used to embed text for example selection. If `code_model` is not specified, then it will use `model` for coding. -If the embedding field is not specified, then it will use the default embedding method for the chat model's api (Note that anthropic has no embedding model). The embedding parameter can also be a string or object. If a string, it should specify the embedding api and the default model and url will be used. If a valid embedding is not specified and cannot be assumed, then word overlap will be used to retrieve examples instead. +All apis have default models and urls, so those fields are optional. Note some apis have no embedding model, so they will default to word overlap to retrieve examples. -Thus, all the below specifications are equivalent to the above example: - -```json -"model": "gpt-3.5-turbo" -``` -```json -"model": { - "api": "openai" -} -``` -```json -"model": "gpt-3.5-turbo", -"embedding": "openai" -``` +The `params` field is optional and can be used to specify additional parameters for the model. It accepts any key-value pairs supported by the api. Is not supported for embedding models. 
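+
+For example, a profile can rely on the api defaults and only override sampling parameters (a minimal sketch, not taken from the included profiles; the accepted keys depend on the chosen api):
+
+```json
+{
+    "name": "andy",
+    "model": {
+        "api": "openai",
+        "params": {
+            "temperature": 0.7,
+            "max_tokens": 512
+        }
+    },
+    "embedding": "openai"
+}
+```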
## Patches diff --git a/bots/execTemplate.js b/bots/execTemplate.js new file mode 100644 index 0000000..b7f270c --- /dev/null +++ b/bots/execTemplate.js @@ -0,0 +1,6 @@ +(async (bot) => { + +/* CODE HERE */ +log(bot, 'Code finished.'); + +}) \ No newline at end of file diff --git a/bots/lintTemplate.js b/bots/lintTemplate.js new file mode 100644 index 0000000..77b5d97 --- /dev/null +++ b/bots/lintTemplate.js @@ -0,0 +1,10 @@ +import * as skills from '../../../src/agent/library/skills.js'; +import * as world from '../../../src/agent/library/world.js'; +import Vec3 from 'vec3'; + +const log = skills.log; + +export async function main(bot) { + /* CODE HERE */ + log(bot, 'Code finished.'); +} \ No newline at end of file diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 0000000..e1506fd --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,25 @@ +// eslint.config.js +import globals from "globals"; +import pluginJs from "@eslint/js"; + +/** @type {import('eslint').Linter.Config[]} */ +export default [ + // First, import the recommended configuration + pluginJs.configs.recommended, + + // Then override or customize specific rules + { + languageOptions: { + globals: globals.browser, + ecmaVersion: 2021, + sourceType: "module", + }, + rules: { + "no-undef": "error", // Disallow the use of undeclared variables or functions. + "semi": ["error", "always"], // Require the use of semicolons at the end of statements. + "curly": "warn", // Enforce the use of curly braces around blocks of code. + "no-unused-vars": "off", // Disable warnings for unused variables. + "no-unreachable": "off", // Disable warnings for unreachable code. + }, + }, +]; diff --git a/keys.example.json b/keys.example.json index 18bf156..407f6f5 100644 --- a/keys.example.json +++ b/keys.example.json @@ -8,8 +8,6 @@ "HUGGINGFACE_API_KEY": "", "QWEN_API_KEY": "", "XAI_API_KEY": "", - "GHLF_API_KEY": "", - "HYPERBOLIC_API_KEY": "", "MISTRAL_API_KEY": "", "DEEPSEEK_API_KEY": "" -} \ No newline at end of file +} diff --git a/package.json b/package.json index 689f8db..73f3bf2 100644 --- a/package.json +++ b/package.json @@ -29,5 +29,10 @@ "scripts": { "postinstall": "patch-package", "start": "node main.js" + }, + "devDependencies": { + "@eslint/js": "^9.13.0", + "eslint": "^9.13.0", + "globals": "^15.11.0" } } diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json new file mode 100644 index 0000000..ac244fc --- /dev/null +++ b/profiles/defaults/_default.json @@ -0,0 +1,245 @@ +{ + "cooldown": 3000, + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer(\"playername\", 3)'. Respond only as $NAME, never output '(FROM OTHER BOT)' or pretend to be someone else. If you have nothing to say or do, respond with an just a tab '\t'. 
This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", + + "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", + + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": false, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "elbow_room": true, + "idle_staring": true, + "cheat": false + }, + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! 
What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "system", "content": "say hi to john_goodman"}, + {"role": "assistant", "content": "!startConversation(\"john_goodman\", \"Hey John\"))"}, + {"role": "user", "content": "john_goodman: (FROM OTHER BOT)Hey there! What's up?"}, + {"role": "assistant", "content": "Hey John, not much. Just saying hi."}, + {"role": "user", "content": "john_goodman: (FROM OTHER BOT)Bye!"}, + {"role": "assistant", "content": "Bye! !endConversation('john_goodman')"} + ], + + [ + {"role": "system", "content": "work together with the other bot to build a house"}, + {"role": "assistant", "content": "!startConversation(\"terrance\", \"Hey gpt! Let's work together to build a house. Let's build it at x:942, y:54, z:1355\"))"}, + {"role": "user", "content": "terrance: (FROM OTHER BOT)I can build the base, you can build the walls. !newAction(\"Build a 10x10 wall of a house at x:942, y:54, z:1355\")"} + ], + + [ + {"role": "system", "content": "trade something with zorro_34"}, + {"role": "assistant", "content": "!startConversation(\"zorro_34\", \"Hey let's trade. What do you have?\")"}, + {"role": "user", "content": "zorro_34: (FROM OTHER BOT)Let's see... !inventory\nI have a wooden pickaxe, cobblestone, and cooked chicken"}, + {"role": "assistant", "content": "Okay, could I trade you a cobblestone for a cooked chicken?"} + ], + + [ + {"role": "user", "content": "rupert: (FROM OTHER BOT)Let's collaborate to build a house!"}, + {"role": "assistant", "content": "Sure! I'll share information to help coordinate. Let's center the build at x:942, y:54, z:1355 !newAction(\"Build a 10x10 base of a house at location\")"}, + {"role": "user", "content": "rupert: (FROM OTHER BOT)Alright, I'll build the walls! !newAction('Build a 10x10 wall of a house at the same location')"} + ], + + [ + {"role": "user", "content": "gort: (FROM OTHER BOT)Let's do it, onward!"}, + {"role": "assistant", "content": "Alright, I'll start by getting some cobblestone. !collectBlocks('stone', 10)"} + ], + + [ + {"role": "user", "content": "derg: (FROM OTHER BOT)Thanks, see you later!"}, + {"role": "assistant", "content": "See you later. !endConversation(\"derg\")"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks(\"oak_log\", 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe(\"spruce_planks\", 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! 
!goToPlayer(\"zZZn98\", 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer(\"zZZn98\", 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some cobblestone"}, + {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks(\"stone\", 10)"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer(\"joe\", 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer(\"joe\", 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! !attack(\"zombie\")"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "system", "content": "Code output: Action failed."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction(\"Try again to complete the task.\")"} + ], + + [ + {"role": "system", "content": "Code output: Could not find any oak_log in 20 blocks."}, + {"role": "assistant", "content": "I'll have to search a wider area. !searchForBlock(\"oak_log\", 100)"} + ], + + [ + {"role": "system", "content": "Code output: Placed oak_planks at (-154, 96, -228)."}, + {"role": "assistant", "content": "I've placed an oak plank for the house. Wait, I should use newAction to build complicated structures! !newAction(\"Build a house with oak planks\")"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe(\"stick\", 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction(\"Build a cobblestone house with a door, furnishings, and window.\")"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction(\"Build a dirt tower 5 tall.\")"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode(\"hunting\", false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer(\"umpire_man\")"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere(\"base\")"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! 
!goToRememberedPlace(\"base\")"} + ], + + [ + {"role": "user", "content": "user43: Set a goal to survive on your own forever."}, + {"role": "assistant", "content": "Sure, let me set a goal. !goal(\"Survive the game forever. Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.\")"} + ], + + [ + {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."}, + {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal(\"1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.\")"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: \"Get a wooden pickaxe\". Respond:"}, + {"role": "assistant", "content": "What's around me? !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock(\"oak_log\", 10)"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem(\"oak_plank\", 9)"}, + {"role": "system", "content": "Successfully crafted 9 oak_planks."}, + {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem(\"stick\", 2)"}, + {"role": "system", "content": "Successfully crafted 2 sticks."}, + {"role": "assistant", "content": "I have crafted 2 sticks. Now I can craft a wooden pickaxe. !craftItem(\"wooden_pickaxe\", 1)"}, + {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, + {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. !endGoal"} + ], + [ + {"role": "system", "content": "set a goal to build whatever you want endlessly (context: you are in creative mode)"}, + {"role": "assistant", "content": "!goal(\"Build something creative, then move over, and build again. Use separate newActions. Repeat forever.\")"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: 'Build a house'. Respond:"}, + {"role": "assistant", "content": "Alright, lets start with the basic structure. 
!newAction(\"Build an 8x8 base for the house.\")"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower with a torch on the side"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\nawait skills.placeBlock(bot, 'torch', pos.x + 1, pos.y + 4, pos.z, 'side');\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} diff --git a/profiles/defaults/creative.json b/profiles/defaults/creative.json new file mode 100644 index 0000000..9174ed6 --- /dev/null +++ b/profiles/defaults/creative.json @@ -0,0 +1,14 @@ +{ + "modes": { + "self_preservation": false, + "unstuck": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": false, + "torch_placing": false, + "elbow_room": true, + "idle_staring": true, + "cheat": false + } +} \ No newline at end of file diff --git a/profiles/defaults/god_mode.json b/profiles/defaults/god_mode.json new file mode 100644 index 0000000..6cecd8c --- /dev/null +++ 
b/profiles/defaults/god_mode.json @@ -0,0 +1,14 @@ +{ + "modes": { + "self_preservation": false, + "unstuck": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": false, + "torch_placing": false, + "elbow_room": false, + "idle_staring": true, + "cheat": true + } +} \ No newline at end of file diff --git a/profiles/defaults/survival.json b/profiles/defaults/survival.json new file mode 100644 index 0000000..4504bf9 --- /dev/null +++ b/profiles/defaults/survival.json @@ -0,0 +1,14 @@ +{ + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": false, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "elbow_room": true, + "idle_staring": true, + "cheat": false + } +} \ No newline at end of file diff --git a/profiles/freeguy.json b/profiles/freeguy.json index 21c68e5..a44ec4c 100644 --- a/profiles/freeguy.json +++ b/profiles/freeguy.json @@ -1,7 +1,7 @@ { "name": "Freeguy", - "model": "groq/llama-3.1-70b-versatile", + "model": "groq/llama-3.3-70b-versatile", "max_tokens": 8000 } \ No newline at end of file diff --git a/profiles/gpt.json b/profiles/gpt.json index 32d99c1..ad7097e 100644 --- a/profiles/gpt.json +++ b/profiles/gpt.json @@ -1,5 +1,10 @@ { "name": "gpt", - "model": "gpt-4o" + "model": { + "model": "gpt-4o", + "params": { + "temperature": 0.5 + } + } } \ No newline at end of file diff --git a/profiles/llama.json b/profiles/llama.json index 4a8a7d6..ceb3992 100644 --- a/profiles/llama.json +++ b/profiles/llama.json @@ -1,7 +1,7 @@ { "name": "LLama", - "model": "groq/llama-3.1-70b-versatile", + "model": "groq/llama-3.3-70b-versatile", "max_tokens": 4000, diff --git a/profiles/qwen.json b/profiles/qwen.json index 7af2347..f6a3f46 100644 --- a/profiles/qwen.json +++ b/profiles/qwen.json @@ -5,9 +5,13 @@ "model": { "api": "qwen", - "url": "https://dashscope-intl.aliyuncs.com/api/v1/services/aigc/text-generation/generation", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", "model": "qwen-max" }, - "embedding": "openai" + "embedding": { + "api": "qwen", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", + "model": "text-embedding-v3" + } } \ No newline at end of file diff --git a/settings.js b/settings.js index 54e84a3..3b6c903 100644 --- a/settings.js +++ b/settings.js @@ -10,6 +10,8 @@ export default "mindserver_host": "localhost", "mindserver_port": 8080, + // the base profile is shared by all bots for default prompts/examples/modes + "base_profile": "./profiles/defaults/survival.json", // also see creative.json, god_mode.json "profiles": [ "./andy.json", // "./profiles/gpt.json", @@ -19,11 +21,11 @@ export default // "./profiles/qwen.json", // "./profiles/mistral.json", // "./profiles/grok.json", - // "./profiles/GLHF.json", - // "./profiles/hyperbolic.json", // "./profiles/mistral.json", // "./profiles/deepseek.json", + // using more than 1 profile requires you to /msg each bot indivually + // individual profiles override values from the base profile ], "load_memory": false, // load memory from previous session "init_message": "Respond with hello world and your name", // sends to all on spawn @@ -34,6 +36,7 @@ export default "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk "code_timeout_mins": -1, // minutes code is allowed to run. -1 for no timeout + "relevant_docs_count": 5, // Parameter: -1 = all, 0 = no references, 5 = five references. 
If exceeding the maximum, all reference documents are returned. "max_messages": 15, // max number of messages to keep in context "num_examples": 2, // number of examples to give to the model diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 219856b..ad08827 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -112,12 +112,13 @@ export class ActionManager { // Log the full stack trace console.error(err.stack); await this.stop(); + err = err.toString(); - let message = this._getBotOutputSummary() + - '!!Code threw exception!!\n' + + let message = this._getBotOutputSummary() + + '!!Code threw exception!!\n' + 'Error: ' + err + '\n' + - 'Stack trace:\n' + err.stack; - + 'Stack trace:\n' + err.stack+'\n'; + let interrupted = this.agent.bot.interrupt_code; this.agent.clearBotLogs(); if (!interrupted && !this.agent.coder.generating) { @@ -137,7 +138,7 @@ export class ActionManager { First outputs:\n${output.substring(0, MAX_OUT / 2)}\n...skipping many lines.\nFinal outputs:\n ${output.substring(output.length - MAX_OUT / 2)}`; } else { - output = 'Code output:\n' + output; + output = 'Code output:\n' + output.toString(); } return output; } diff --git a/src/agent/agent.js b/src/agent/agent.js index 4691079..72eb31d 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -1,6 +1,6 @@ import { History } from './history.js'; import { Coder } from './coder.js'; -import { Prompter } from './prompter.js'; +import { Prompter } from '../models/prompter.js'; import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; import { containsCommand, commandExists, executeCommand, truncCommandMessage, isAction, blacklistCommands } from './commands/index.js'; @@ -100,11 +100,9 @@ export class Agent { }); } catch (error) { // Ensure we're not losing error details - console.error('Agent start failed with error:', { - message: error.message || 'No error message', - stack: error.stack || 'No stack trace', - error: error - }); + console.error('Agent start failed with error') + console.error(error) + throw error; // Re-throw with preserved details } } @@ -140,6 +138,8 @@ export class Agent { console.error('Error handling message:', error); } } + + this.respondFunc = respondFunc this.bot.on('whisper', respondFunc); if (settings.profiles.length === 1) diff --git a/src/agent/agent_proxy.js b/src/agent/agent_proxy.js index 500dcab..b0333b1 100644 --- a/src/agent/agent_proxy.js +++ b/src/agent/agent_proxy.js @@ -42,6 +42,14 @@ class AgentServerProxy { console.log(`Restarting agent: ${agentName}`); this.agent.cleanKill(); }); + + this.socket.on('send-message', (agentName, message) => { + try { + this.agent.respondFunc("NO USERNAME", message); + } catch (error) { + console.error('Error: ', JSON.stringify(error, Object.getOwnPropertyNames(error))); + } + }); } login() { diff --git a/src/agent/coder.js b/src/agent/coder.js index f4b7219..25a0a8a 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -4,6 +4,7 @@ import { makeCompartment } from './library/lockdown.js'; import * as skills from './library/skills.js'; import * as world from './library/world.js'; import { Vec3 } from 'vec3'; +import {ESLint} from "eslint"; export class Coder { constructor(agent) { @@ -12,15 +13,62 @@ export class Coder { this.fp = '/bots/'+agent.name+'/action-code/'; this.generating = false; this.code_template = ''; + this.code_lint_template = ''; - readFile('./bots/template.js', 'utf8', (err, data) => { + readFile('./bots/execTemplate.js', 'utf8', (err, data) => { 
if (err) throw err; this.code_template = data; }); - + readFile('./bots/lintTemplate.js', 'utf8', (err, data) => { + if (err) throw err; + this.code_lint_template = data; + }); mkdirSync('.' + this.fp, { recursive: true }); } + + async lintCode(code) { + let result = '#### CODE ERROR INFO ###\n'; + // Extract everything in the code between the beginning of 'skills./world.' and the '(' + const skillRegex = /(?:skills|world)\.(.*?)\(/g; + const skills = []; + let match; + while ((match = skillRegex.exec(code)) !== null) { + skills.push(match[1]); + } + const allDocs = await this.agent.prompter.skill_libary.getRelevantSkillDocs(); + //lint if the function exists + const missingSkills = skills.filter(skill => !allDocs.includes(skill)); + if (missingSkills.length > 0) { + result += 'These functions do not exist. Please modify the correct function name and try again.\n'; + result += '### FUNCTIONS NOT FOUND ###\n'; + result += missingSkills.join('\n'); + console.log(result) + return result; + } + const eslint = new ESLint(); + const results = await eslint.lintText(code); + const codeLines = code.split('\n'); + const exceptions = results.map(r => r.messages).flat(); + + if (exceptions.length > 0) { + exceptions.forEach((exc, index) => { + if (exc.line && exc.column ) { + const errorLine = codeLines[exc.line - 1]?.trim() || 'Unable to retrieve error line content'; + result += `#ERROR ${index + 1}\n`; + result += `Message: ${exc.message}\n`; + result += `Location: Line ${exc.line}, Column ${exc.column}\n`; + result += `Related Code Line: ${errorLine}\n`; + } + }); + result += 'The code contains exceptions and cannot continue execution.'; + } else { + return null;//no error + } + + return result ; + } + // write custom code to file and import it // write custom code to file and prepare for evaluation async stageCode(code) { code = this.sanitizeCode(code); @@ -35,6 +83,7 @@ export class Coder { for (let line of code.split('\n')) { src += ` ${line}\n`; } + let src_lint_copy = this.code_lint_template.replace('/* CODE HERE */', src); src = this.code_template.replace('/* CODE HERE */', src); let filename = this.file_counter + '.js'; @@ -46,7 +95,7 @@ export class Coder { // }); // } commented for now, useful to keep files for debugging this.file_counter++; - + let write_result = await this.writeFilePromise('.' + this.fp + filename, src); // This is where we determine the environment the agent's code should be exposed to. // It will only have access to these things, (in addition to basic javascript objects like Array, Object, etc.) @@ -63,8 +112,7 @@ export class Coder { console.error('Error writing code execution file: ' + result); return null; } - - return { main: mainFn }; + return { func:{main: mainFn}, src_lint_copy: src_lint_copy }; } sanitizeCode(code) { @@ -140,8 +188,15 @@ export class Coder { continue; } code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); - - const executionModuleExports = await this.stageCode(code); + const result = await this.stageCode(code); + const executionModuleExports = result.func; + let src_lint_copy = result.src_lint_copy; + const analysisResult = await this.lintCode(src_lint_copy); + if (analysisResult) { + const message = 'Error: Code syntax error. 
Please try again:'+'\n'+analysisResult+'\n'; + messages.push({ role: 'system', content: message }); + continue; + } if (!executionModuleExports) { agent_history.add('system', 'Failed to stage code, something is wrong.'); return {success: false, message: null, interrupted: false, timedout: false}; @@ -152,10 +207,10 @@ export class Coder { }, { timeout: settings.code_timeout_mins }); if (code_return.interrupted && !code_return.timedout) return { success: false, message: null, interrupted: true, timedout: false }; - console.log("Code generation result:", code_return.success, code_return.message); + console.log("Code generation result:", code_return.success, code_return.message.toString()); if (code_return.success) { - const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message; + const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message.toString(); return { success: true, message: summary, interrupted: false, timedout: false }; } @@ -170,5 +225,4 @@ export class Coder { } return { success: false, message: null, interrupted: false, timedout: true }; } - } \ No newline at end of file diff --git a/src/agent/commands/index.js b/src/agent/commands/index.js index 008c1d0..f40c5c2 100644 --- a/src/agent/commands/index.js +++ b/src/agent/commands/index.js @@ -160,7 +160,7 @@ export function parseCommandMessage(message) { suppressNoDomainWarning = true; //Don't spam console. Only give the warning once. } } else if(param.type === 'BlockName') { //Check that there is a block with this name - if(getBlockId(arg) == null) return `Invalid block type: ${arg}.` + if(getBlockId(arg) == null && arg !== 'air') return `Invalid block type: ${arg}.` } else if(param.type === 'ItemName') { //Check that there is an item with this name if(getItemId(arg) == null) return `Invalid item type: ${arg}.` } diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index ef65b4d..c9fe333 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -178,6 +178,42 @@ export const queryList = [ return "Saved place names: " + agent.memory_bank.getKeys(); } }, + { + name: '!getCraftingPlan', + description: "Provides a comprehensive crafting plan for a specified item. This includes a breakdown of required ingredients, the exact quantities needed, and an analysis of missing ingredients or extra items needed based on the bot's current inventory.", + params: { + targetItem: { + type: 'string', + description: 'The item that we are trying to craft' + }, + quantity: { + type: 'int', + description: 'The quantity of the item that we are trying to craft', + optional: true, + domain: [1, Infinity, '[)'], // Quantity must be at least 1, + default: 1 + } + }, + perform: function (agent, targetItem, quantity = 1) { + let bot = agent.bot; + + // Fetch the bot's inventory + const curr_inventory = world.getInventoryCounts(bot); + const target_item = targetItem; + let existingCount = curr_inventory[target_item] || 0; + let prefixMessage = ''; + if (existingCount > 0) { + curr_inventory[target_item] -= existingCount; + prefixMessage = `You already have ${existingCount} ${target_item} in your inventory. 
If you need to craft more,\n`; + } + + // Generate crafting plan + let craftingPlan = mc.getDetailedCraftingPlan(target_item, quantity, curr_inventory); + craftingPlan = prefixMessage + craftingPlan; + console.log(craftingPlan); + return pad(craftingPlan); + }, + }, { name: '!help', description: 'Lists all available commands and their descriptions.', diff --git a/src/agent/library/index.js b/src/agent/library/index.js index 677dc11..ae864b0 100644 --- a/src/agent/library/index.js +++ b/src/agent/library/index.js @@ -3,20 +3,21 @@ import * as world from './world.js'; export function docHelper(functions, module_name) { - let docstring = ''; + let docArray = []; for (let skillFunc of functions) { let str = skillFunc.toString(); - if (str.includes('/**')){ - docstring += module_name+'.'+skillFunc.name; - docstring += str.substring(str.indexOf('/**')+3, str.indexOf('**/')) + '\n'; + if (str.includes('/**')) { + let docEntry = `${module_name}.${skillFunc.name}\n`; + docEntry += str.substring(str.indexOf('/**') + 3, str.indexOf('**/')).trim(); + docArray.push(docEntry); } } - return docstring; + return docArray; } export function getSkillDocs() { - let docstring = "\n*SKILL DOCS\nThese skills are javascript functions that can be called when writing actions and skills.\n"; - docstring += docHelper(Object.values(skills), 'skills'); - docstring += docHelper(Object.values(world), 'world'); - return docstring + '*\n'; + let docArray = []; + docArray = docArray.concat(docHelper(Object.values(skills), 'skills')); + docArray = docArray.concat(docHelper(Object.values(world), 'world')); + return docArray; } diff --git a/src/agent/library/skill_library.js b/src/agent/library/skill_library.js new file mode 100644 index 0000000..a019112 --- /dev/null +++ b/src/agent/library/skill_library.js @@ -0,0 +1,47 @@ +import { cosineSimilarity } from '../../utils/math.js'; +import { getSkillDocs } from './index.js'; + +export class SkillLibrary { + constructor(agent,embedding_model) { + this.agent = agent; + this.embedding_model = embedding_model; + this.skill_docs_embeddings = {}; + } + async initSkillLibrary() { + const skillDocs = getSkillDocs(); + const embeddingPromises = skillDocs.map((doc) => { + return (async () => { + let func_name_desc = doc.split('\n').slice(0, 2).join(''); + this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc); + })(); + }); + await Promise.all(embeddingPromises); + } + + async getRelevantSkillDocs(message, select_num) { + let latest_message_embedding = ''; + if(message) //message is not empty, get the relevant skill docs, else return all skill docs + latest_message_embedding = await this.embedding_model.embed(message); + + let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) + .map(doc_key => ({ + doc_key, + similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key]) + })) + .sort((a, b) => b.similarity_score - a.similarity_score); + + let length = skill_doc_similarities.length; + if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { + select_num = length; + } else { + select_num = Math.min(Math.floor(select_num), length); + } + let selected_docs = skill_doc_similarities.slice(0, select_num); + let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; + relevant_skill_docs += 'SkillDocs:\n' + relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); + return relevant_skill_docs; + } + + +} 
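For context on how the new `SkillLibrary` above is meant to be used: `initSkillLibrary()` embeds the first two lines of every skill doc once, and `getRelevantSkillDocs(message, select_num)` ranks the docs by cosine similarity against the latest message, which appears to line up with the new `relevant_docs_count` setting. Below is a minimal usage sketch, not part of this patch; the stub embedding model, the `null` agent argument, and the example query are illustrative assumptions (any object with an async `embed(text)` returning a numeric vector would work).

```js
import { SkillLibrary } from './src/agent/library/skill_library.js';

// Hypothetical stand-in for a real embedding wrapper (OpenAI, Ollama, etc.).
const toyEmbedder = {
    async embed(text) {
        const vec = new Array(26).fill(0); // crude letter-frequency vector, just to keep the sketch self-contained
        for (const ch of text.toLowerCase())
            if (ch >= 'a' && ch <= 'z') vec[ch.charCodeAt(0) - 97]++;
        return vec;
    }
};

const library = new SkillLibrary(null, toyEmbedder); // the agent reference is not needed for retrieval itself
await library.initSkillLibrary();                    // embed every skill doc up front

// Per incoming chat message: fetch the top-5 most relevant skill docs for the coding prompt
const docs = await library.getRelevantSkillDocs('collect some oak logs and craft planks', 5);
console.log(docs);
```

In the agent itself this wiring is done by the prompter (see the prompter.js patch below, which constructs the SkillLibrary from the embedding model), so callers normally never instantiate it directly.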
diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 726ef18..fa7cea3 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -79,7 +79,7 @@ export async function craftRecipe(bot, itemName, num=1) { } } if (!recipes || recipes.length === 0) { - log(bot, `You do not have the resources to craft a ${itemName}. It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); + log(bot, `You do not have the resources to craft a ${itemName}. It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0][0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); if (placedTable) { await collectBlock(bot, 'crafting_table', 1); } @@ -1267,7 +1267,7 @@ export async function tillAndSow(bot, x, y, z, seedType=null) { * @returns {Promise} true if the ground was tilled, false otherwise. * @example * let position = world.getPosition(bot); - * await skills.till(bot, position.x, position.y - 1, position.x); + * await skills.tillAndSow(bot, position.x, position.y - 1, position.x, "wheat"); **/ x = Math.round(x); y = Math.round(y); @@ -1275,8 +1275,14 @@ export async function tillAndSow(bot, x, y, z, seedType=null) { let block = bot.blockAt(new Vec3(x, y, z)); if (bot.modes.isOn('cheat')) { - placeBlock(bot, x, y, z, 'farmland'); - placeBlock(bot, x, y+1, z, seedType); + let to_remove = ['_seed', '_seeds']; + for (let remove of to_remove) { + if (seedType.endsWith(remove)) { + seedType = seedType.replace(remove, ''); + } + } + placeBlock(bot, 'farmland', x, y, z); + placeBlock(bot, seedType, x, y+1, z); return true; } diff --git a/src/agent/npc/item_goal.js b/src/agent/npc/item_goal.js index 40589ba..9055f54 100644 --- a/src/agent/npc/item_goal.js +++ b/src/agent/npc/item_goal.js @@ -204,7 +204,7 @@ class ItemWrapper { } createChildren() { - let recipes = mc.getItemCraftingRecipes(this.name); + let recipes = mc.getItemCraftingRecipes(this.name).map(([recipe, craftedCount]) => recipe); if (recipes) { for (let recipe of recipes) { let includes_blacklisted = false; diff --git a/src/agent/self_prompter.js b/src/agent/self_prompter.js index 2c2f63c..439b6c6 100644 --- a/src/agent/self_prompter.js +++ b/src/agent/self_prompter.js @@ -38,7 +38,7 @@ export class SelfPrompter { let no_command_count = 0; const MAX_NO_COMMAND = 3; while (!this.interrupt) { - const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command !withThisSyntax. Respond:`; + const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command with this syntax: !commandName. 
Respond:`; let used_command = await this.agent.handleMessage('system', msg, -1); if (!used_command) { diff --git a/src/agent/tasks.js b/src/agent/tasks.js index 6d968a9..1b9d56e 100644 --- a/src/agent/tasks.js +++ b/src/agent/tasks.js @@ -109,11 +109,11 @@ export class Task { await new Promise((resolve) => setTimeout(resolve, 500)); if (this.data.agent_count > 1) { - var initial_inventory = this.data.initial_inventory[this.agent.count_id.toString()]; + let initial_inventory = this.data.initial_inventory[this.agent.count_id.toString()]; console.log("Initial inventory:", initial_inventory); } else if (this.data) { console.log("Initial inventory:", this.data.initial_inventory); - var initial_inventory = this.data.initial_inventory; + let initial_inventory = this.data.initial_inventory; } if ("initial_inventory" in this.data) { diff --git a/src/models/claude.js b/src/models/claude.js index c97ecb2..9efd669 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -3,8 +3,9 @@ import { strictFormat } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Claude { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params || {}; let config = {}; if (url) @@ -20,13 +21,16 @@ export class Claude { let res = null; try { console.log('Awaiting anthropic api response...') - // console.log('Messages:', messages); + if (!this.params.max_tokens) { + this.params.max_tokens = 4096; + } const resp = await this.anthropic.messages.create({ model: this.model_name || "claude-3-sonnet-20240229", system: systemMessage, - max_tokens: 2048, messages: messages, + ...(this.params || {}) }); + console.log('Received.') res = resp.content[0].text; } diff --git a/src/models/deepseek.js b/src/models/deepseek.js index 395aa8c..da98ba2 100644 --- a/src/models/deepseek.js +++ b/src/models/deepseek.js @@ -3,8 +3,9 @@ import { getKey, hasKey } from '../utils/keys.js'; import { strictFormat } from '../utils/text.js'; export class DeepSeek { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; @@ -23,6 +24,7 @@ export class DeepSeek { model: this.model_name || "deepseek-chat", messages, stop: stop_seq, + ...(this.params || {}) }; let res = null; diff --git a/src/models/gemini.js b/src/models/gemini.js index 8522704..b988c7f 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -1,10 +1,11 @@ import { GoogleGenerativeAI } from '@google/generative-ai'; -import { toSinglePrompt } from '../utils/text.js'; +import { toSinglePrompt, strictFormat } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; export class Gemini { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; this.url = url; this.safetySettings = [ { @@ -34,49 +35,66 @@ export class Gemini { async sendRequest(turns, systemMessage) { let model; + const modelConfig = { + model: this.model_name || "gemini-1.5-flash", + // systemInstruction does not work bc google is trash + }; + if (this.url) { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-flash" }, + modelConfig, { baseUrl: this.url }, { safetySettings: this.safetySettings } ); } else { model = this.genAI.getGenerativeModel( - { model: this.model_name || "gemini-1.5-flash" }, + modelConfig, { safetySettings: this.safetySettings } ); } - const stop_seq = '***'; - const prompt = toSinglePrompt(turns, 
systemMessage, stop_seq, 'model'); console.log('Awaiting Google API response...'); - const result = await model.generateContent(prompt); - const response = await result.response; - - // got rid of the original method of const text = response.text to allow gemini thinking models to play minecraft :) - let text; - if (this.model_name && this.model_name.includes("thinking")) { - if (response.candidates && response.candidates.length > 0 && response.candidates[0].content && response.candidates[0].content.parts && response.candidates[0].content.parts.length > 1) { - - text = response.candidates[0].content.parts[1].text; - - } else { - - console.warn("Unexpected response structure for thinking model:", response); - text = response.text(); - } - } else { - - text = response.text(); + // Prepend system message and format turns cause why not + turns.unshift({ role: 'system', content: systemMessage }); + turns = strictFormat(turns); + let contents = []; + for (let turn of turns) { + contents.push({ + role: turn.role === 'assistant' ? 'model' : 'user', + parts: [{ text: turn.content }] + }); } + const result = await model.generateContent({ + contents, + generationConfig: { + ...(this.params || {}) + } + }); + const response = await result.response; + let text; + // Handle "thinking" models since they smart + if (this.model_name && this.model_name.includes("thinking")) { + if ( + response.candidates && + response.candidates.length > 0 && + response.candidates[0].content && + response.candidates[0].content.parts && + response.candidates[0].content.parts.length > 1 + ) { + text = response.candidates[0].content.parts[1].text; + } else { + console.warn("Unexpected response structure for thinking model:", response); + text = response.text(); + } + } else { + text = response.text(); + } console.log('Received.'); - if (!text.includes(stop_seq)) return text; - const idx = text.indexOf(stop_seq); - return text.slice(0, idx); + return text; } async embed(text) { diff --git a/src/models/gpt.js b/src/models/gpt.js index dfd5e22..1a88bf4 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -3,8 +3,9 @@ import { getKey, hasKey } from '../utils/keys.js'; import { strictFormat } from '../utils/text.js'; export class GPT { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; let config = {}; if (url) @@ -25,6 +26,7 @@ export class GPT { model: this.model_name || "gpt-3.5-turbo", messages, stop: stop_seq, + ...(this.params || {}) }; if (this.model_name.includes('o1')) { pack.messages = strictFormat(messages); @@ -32,8 +34,9 @@ export class GPT { } let res = null; + try { - console.log('Awaiting openai api response...') + console.log('Awaiting openai api response from model', this.model_name) // console.log('Messages:', messages); let completion = await this.openai.chat.completions.create(pack); if (completion.choices[0].finish_reason == 'length') diff --git a/src/models/grok.js b/src/models/grok.js index 19a3b38..a8c6672 100644 --- a/src/models/grok.js +++ b/src/models/grok.js @@ -3,8 +3,10 @@ import { getKey } from '../utils/keys.js'; // xAI doesn't supply a SDK for their models, but fully supports OpenAI and Anthropic SDKs export class Grok { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.url = url; + this.params = params; let config = {}; if (url) @@ -23,7 +25,8 @@ export class Grok { const pack = { model: this.model_name || "grok-beta", messages, - stop: [stop_seq] + stop: 
[stop_seq], + ...(this.params || {}) }; let res = null; diff --git a/src/models/groq.js b/src/models/groq.js index 4473054..0a94550 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -1,106 +1,86 @@ -// groq.js - -import Groq from 'groq-sdk'; +import Groq from 'groq-sdk' import { getKey } from '../utils/keys.js'; -/** - * Umbrella class for Mixtral, LLama, Gemma... - */ +// Umbrella class for Mixtral, LLama, Gemma... export class GroqCloudAPI { - constructor(model_name, url, max_tokens = 16384) { - this.model_name = model_name; - this.url = url; - this.max_tokens = max_tokens; + constructor(model_name, url, params) { + this.model_name = model_name; + this.url = url; + this.params = params || {}; + // Groq Cloud does not support custom URLs; warn if provided + if (this.url) { + console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); + } + this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); + } - // Groq Cloud doesn't support custom URLs; warn if provided - if (this.url) { - console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); + async sendRequest(turns, systemMessage, stop_seq = null) { + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + const messages = [{ role: "system", content: systemMessage }].concat(turns); + + while (attempt < maxAttempts) { + attempt++; + let res = null; + try { + console.log(`Awaiting Groq response... (model: ${this.model_name || "mixtral-8x7b-32768"}, attempt: ${attempt})`); + if (!this.params.max_tokens) { + this.params.max_tokens = 16384; + } + // Create the streaming chat completion request + const completion = await this.groq.chat.completions.create({ + messages: messages, + model: this.model_name || "mixtral-8x7b-32768", + stream: true, + stop: stop_seq, + ...(this.params || {}) + }); + + let temp_res = ""; + // Aggregate streamed chunks into a full response + for await (const chunk of completion) { + temp_res += chunk.choices[0]?.delta?.content || ''; + } + res = temp_res; + } catch (err) { + console.log(err); + res = "My brain just kinda stopped working. Try again."; + } + + // If the model name includes "deepseek-r1", handle the tags + if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { + const hasOpenTag = res.includes(""); + const hasCloseTag = res.includes(""); + + // If a partial block is detected, log a warning and retry + if (hasOpenTag && !hasCloseTag) { + console.warn("Partial block detected. Re-generating Groq request..."); + continue; } - // Initialize Groq SDK with the API key - this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); - } - - /** - * Sends a chat completion request to the Groq Cloud endpoint. - * - * @param {Array} turns - An array of message objects, e.g., [{role: 'user', content: 'Hi'}]. - * @param {string} systemMessage - The system prompt or instruction. - * @param {string} stop_seq - A string that represents a stopping sequence, default '***'. - * @returns {Promise} - The content of the model's reply. 
- */ - async sendRequest(turns, systemMessage, stop_seq = '***') { - // Maximum number of attempts to handle partial tag mismatches 5 is a good value, I guess - const maxAttempts = 5; - let attempt = 0; - let finalRes = null; - - // Prepare the input messages by prepending the system message - const messages = [{ role: 'system', content: systemMessage }, ...turns]; - console.log('Messages:', messages); - - while (attempt < maxAttempts) { - attempt++; - console.log(`Awaiting Groq response... (model: ${this.model_name}, attempt: ${attempt})`); - - let res = null; - - try { - // Create the chat completion request - const completion = await this.groq.chat.completions.create({ - messages: messages, - model: this.model_name || "mixtral-8x7b-32768", - temperature: 0.2, - max_tokens: this.max_tokens, - top_p: 1, - stream: false, - stop: stop_seq // "***" - }); - - // Extract the content from the response - res = completion?.choices?.[0]?.message?.content || ''; - console.log('Received response from Groq.'); - } catch (err) { - // Handle context length exceeded by retrying with shorter context - } - - // If the model name includes "deepseek-r1", handle tags - if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { - const hasOpenTag = res.includes(""); - const hasCloseTag = res.includes(""); - - // Check for partial tag mismatches - if ((hasOpenTag && !hasCloseTag)) { - console.warn("Partial block detected. Re-generating Groq request..."); - // Retry the request by continuing the loop - continue; - } - - // If is present but is not, prepend - if (hasCloseTag && !hasOpenTag) { - res = '' + res; - } - // Trim the block from the response - res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); - } - - // Assign the processed response and exit the loop - finalRes = res; - break; // Stop retrying + // If only the closing tag is present, prepend an opening tag + if (hasCloseTag && !hasOpenTag) { + res = '' + res; } + // Remove the complete block (and any content inside) from the response + res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); + } - // If after all attempts, finalRes is still null, assign a fallback - if (finalRes == null) { - console.warn("Could not obtain a valid block or normal response after max attempts."); - finalRes = 'Response incomplete, please try again.'; - } - - finalRes = finalRes.replace(/<\|separator\|>/g, '*no response*'); - - return finalRes; + finalRes = res; + break; // Exit the loop once a valid response is obtained } - async embed(text) { - console.log("There is no support for embeddings in Groq support. However, the following text was provided: " + text); + if (finalRes == null) { + console.warn("Could not obtain a valid block or normal response after max attempts."); + finalRes = "Response incomplete, please try again."; } -} + finalRes = finalRes.replace(/<\|separator\|>/g, '*no response*'); + + return finalRes; + } + + async embed(text) { + console.log("There is no support for embeddings in Groq support. 
However, the following text was provided: " + text); + } +} \ No newline at end of file diff --git a/src/models/huggingface.js b/src/models/huggingface.js index e4e1432..ab74f24 100644 --- a/src/models/huggingface.js +++ b/src/models/huggingface.js @@ -1,99 +1,87 @@ -// huggingface.js import { toSinglePrompt } from '../utils/text.js'; import { getKey } from '../utils/keys.js'; import { HfInference } from "@huggingface/inference"; export class HuggingFace { - constructor(model_name, url) { - // Remove 'huggingface/' prefix if present - this.model_name = model_name.replace('huggingface/', ''); - this.url = url; + constructor(model_name, url, params) { + // Remove 'huggingface/' prefix if present + this.model_name = model_name.replace('huggingface/', ''); + this.url = url; + this.params = params; - // Hugging Face Inference doesn't currently allow custom base URLs - if (this.url) { - console.warn("Hugging Face doesn't support custom urls!"); - } - - // Initialize the HfInference instance - this.huggingface = new HfInference(getKey('HUGGINGFACE_API_KEY')); + if (this.url) { + console.warn("Hugging Face doesn't support custom urls!"); } - /** - * Main method to handle chat requests. - */ - async sendRequest(turns, systemMessage) { - const stop_seq = '***'; + this.huggingface = new HfInference(getKey('HUGGINGFACE_API_KEY')); + } - // Convert the user's turns and systemMessage into a single prompt string - const prompt = toSinglePrompt(turns, null, stop_seq); - // Fallback model if none was provided - const model_name = this.model_name || 'meta-llama/Meta-Llama-3-8B'; + async sendRequest(turns, systemMessage) { + const stop_seq = '***'; + // Build a single prompt from the conversation turns + const prompt = toSinglePrompt(turns, null, stop_seq); + // Fallback model if none was provided + const model_name = this.model_name || 'meta-llama/Meta-Llama-3-8B'; + // Combine system message with the prompt + const input = systemMessage + "\n" + prompt; - // Combine system message with the prompt - const input = systemMessage + "\n" + prompt; + // We'll try up to 5 times in case of partial blocks for DeepSeek-R1 models. + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; - // We'll collect the streaming response in this variable - let res = ''; - console.log('Messages:', [{ role: "system", content: systemMessage }, ...turns]); + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting Hugging Face API response... (model: ${model_name}, attempt: ${attempt})`); + let res = ''; + try { + // Consume the streaming response chunk by chunk + for await (const chunk of this.huggingface.chatCompletionStream({ + model: model_name, + messages: [{ role: "user", content: input }], + ...(this.params || {}) + })) { + res += (chunk.choices[0]?.delta?.content || ""); + } + } catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + // Break out immediately; we only retry when handling partial tags. + break; + } - // We'll do up to 5 attempts if the model is "DeepSeek-R1" and tags are mismatched - const maxAttempts = 5; - let attempt = 0; - let finalRes = null; + // If the model is DeepSeek-R1, check for mismatched blocks. + if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { + const hasOpenTag = res.includes(""); + const hasCloseTag = res.includes(""); - while (attempt < maxAttempts) { - attempt++; - console.log(`Awaiting Hugging Face API response... 
(model: ${model_name}, attempt: ${attempt})`); - - res = ''; - try { - // ChatCompletionStream returns an async iterator that we consume chunk by chunk - for await (const chunk of this.huggingface.chatCompletionStream({ - model: model_name, - messages: [{ role: "user", content: input }] - })) { - // Each chunk may or may not have delta content - res += (chunk.choices[0]?.delta?.content || ""); - } - } catch (err) { - console.log(err); - res = 'My brain disconnected, try again.'; - // Exit the loop, as we only want to retry for block mismatches, not other errors - break; - } - - // If the model name includes "DeepSeek-R1", then handle blocks - if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { - const hasOpenTag = res.includes(""); - const hasCloseTag = res.includes(""); - - // If there's a partial mismatch, attempt to regenerate the entire response - if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { - console.warn("Partial block detected. Re-generating..."); - continue; - } - - // If both tags appear, remove them (and everything in between) - if (hasOpenTag && hasCloseTag) { - res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); - } - } - - // We made it here with either a valid or no-think scenario - finalRes = res; - break; // Stop retrying + // If there's a partial mismatch, warn and retry the entire request. + if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + console.warn("Partial block detected. Re-generating..."); + continue; } - // If after max attempts we couldn't get a matched or valid response - if (finalRes == null) { - console.warn("Could not get a valid block or normal response after max attempts."); - finalRes = 'Response incomplete, please try again.'; + // If both tags are present, remove the block entirely. + if (hasOpenTag && hasCloseTag) { + res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); } - console.log('Received.'); - // Return the final (possibly trimmed) response - return finalRes; + } + + finalRes = res; + break; // Exit loop if we got a valid response. } - async embed(text) { - throw new Error('Embeddings are not supported by HuggingFace.'); + + // If no valid response was obtained after max attempts, assign a fallback. + if (finalRes == null) { + console.warn("Could not get a valid block or normal response after max attempts."); + finalRes = 'Response incomplete, please try again.'; } -} + console.log('Received.'); + console.log(finalRes); + return finalRes; + } + + async embed(text) { + throw new Error('Embeddings are not supported by HuggingFace.'); + } +} \ No newline at end of file diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js index aea8875..1fe3df6 100644 --- a/src/models/hyperbolic.js +++ b/src/models/hyperbolic.js @@ -1,7 +1,7 @@ import { getKey } from '../utils/keys.js'; -/** - * + +/* * * Yes, this code was written by an Ai. It was written by GPT-o1 and tested :) * @@ -78,21 +78,14 @@ export class hyperbolic { turns.length > 1 ) { console.log('Context length exceeded, trying again with a shorter context...'); - // Remove the first user turn and try again (like the original code). return await this.sendRequest(turns.slice(1), systemMessage, stopSeq); } else { console.log(err); completionContent = 'My brain disconnected, try again.'; } } - - // Replace any special tokens from your original code if needed return completionContent.replace(/<\|separator\|>/g, '*no response*'); } - - /** - * Embeddings are not supported in your original snippet, so we mirror that error. 
- */ async embed(text) { throw new Error('Embeddings are not supported by Hyperbolic.'); } diff --git a/src/models/local.js b/src/models/local.js index 3e380dd..64e5ab1 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -1,26 +1,20 @@ import { strictFormat } from '../utils/text.js'; export class Local { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; + this.params = params; this.url = url || 'http://127.0.0.1:11434'; this.chat_endpoint = '/api/chat'; this.embedding_endpoint = '/api/embeddings'; } - /** - * Main method to handle chat requests. - */ async sendRequest(turns, systemMessage) { - // Choose the model name or default to 'llama3' - const model = this.model_name || 'llama3'; - - // Format messages and inject the system message at the front + let model = this.model_name || 'llama3'; let messages = strictFormat(turns); messages.unshift({ role: 'system', content: systemMessage }); - console.log('Messages:', messages); - - // We'll do up to 5 attempts for "deepseek-r1" if the tags are mismatched + + // We'll attempt up to 5 times for models like "deepseek-r1" if the tags are mismatched. const maxAttempts = 5; let attempt = 0; let finalRes = null; @@ -28,19 +22,20 @@ export class Local { while (attempt < maxAttempts) { attempt++; console.log(`Awaiting local response... (model: ${model}, attempt: ${attempt})`); - - // Perform the actual request (wrapped in a try/catch) - let res; + let res = null; try { - const responseData = await this.send(this.chat_endpoint, { + res = await this.send(this.chat_endpoint, { model: model, messages: messages, - stream: false + stream: false, + ...(this.params || {}) }); - // The local endpoint apparently returns { message: { content: "..." } } - res = responseData?.message?.content || 'No response data.'; + if (res) { + res = res['message']['content']; + } else { + res = 'No response data.'; + } } catch (err) { - // If context length exceeded and we have turns to remove, try again with one fewer turn if (err.message.toLowerCase().includes('context length') && turns.length > 1) { console.log('Context length exceeded, trying again with shorter context.'); return await this.sendRequest(turns.slice(1), systemMessage); @@ -50,42 +45,34 @@ export class Local { } } - // If the model name includes "deepseek-r1", then we handle the block - if (this.model_name && this.model_name.includes("deepseek-r1")) { + // If the model name includes "deepseek-r1" or "Andy-3.5-reasoning", then handle the block. + if (this.model_name && (this.model_name.includes("deepseek-r1") || this.model_name.includes("andy-3.5-reasoning"))) { const hasOpenTag = res.includes("<think>"); const hasCloseTag = res.includes("</think>"); - // If there's a partial mismatch, we regenerate the response + // If there's a partial mismatch, retry to get a complete response. if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { console.warn("Partial block detected. Re-generating..."); - // Attempt another loop iteration to get a complete or no-think response continue; } - // If both tags appear, remove them (and everything inside) + // If both tags appear, remove them (and everything inside). if (hasOpenTag && hasCloseTag) { res = res.replace(/<think>[\s\S]*?<\/think>/g, ''); } } - // We made it here with either a fully valid or not-needed to handle scenario finalRes = res; - break; // Break out of the while loop + break; // Exit the loop if we got a valid response. 
} - // If after max attempts we STILL have partial tags, finalRes might be partial - // Or we never set finalRes because all attempts threw partial tags if (finalRes == null) { - // This means we kept continuing in the loop but never got a break console.warn("Could not get a valid block or normal response after max attempts."); finalRes = 'Response incomplete, please try again.'; } return finalRes; } - /** - * Embedding method (unchanged). - */ async embed(text) { let model = this.model_name || 'nomic-embed-text'; let body = { model: model, prompt: text }; @@ -93,19 +80,11 @@ export class Local { return res['embedding']; } - /** - * Generic send method for local endpoint. - */ async send(endpoint, body) { const url = new URL(endpoint, this.url); - const method = 'POST'; - const headers = new Headers(); - const request = new Request(url, { - method, - headers, - body: JSON.stringify(body) - }); - + let method = 'POST'; + let headers = new Headers(); + const request = new Request(url, { method, headers, body: JSON.stringify(body) }); let data = null; try { const res = await fetch(request); @@ -117,7 +96,6 @@ export class Local { } catch (err) { console.error('Failed to send Ollama request.'); console.error(err); - throw err; // rethrow so we can catch it in the calling method } return data; } diff --git a/src/models/mistral.js b/src/models/mistral.js index 3b41f78..b33d1de 100644 --- a/src/models/mistral.js +++ b/src/models/mistral.js @@ -5,10 +5,13 @@ import { strictFormat } from '../utils/text.js'; export class Mistral { #client; - constructor(model_name, url) { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; if (typeof url === "string") { console.warn("Mistral does not support custom URL's, ignoring!"); + } if (!getKey("MISTRAL_API_KEY")) { @@ -22,8 +25,6 @@ export class Mistral { ); - this.model_name = model_name; - // Prevents the following code from running when model not specified if (typeof this.model_name === "undefined") return; @@ -49,6 +50,7 @@ export class Mistral { const response = await this.#client.chat.complete({ model, messages, + ...(this.params || {}) }); result = response.choices[0].message.content; diff --git a/src/models/novita.js b/src/models/novita.js index d84aee7..8f2dd08 100644 --- a/src/models/novita.js +++ b/src/models/novita.js @@ -1,11 +1,14 @@ import OpenAIApi from 'openai'; import { getKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; // llama, mistral export class Novita { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name.replace('novita/', ''); this.url = url || 'https://api.novita.ai/v3/openai'; + this.params = params; + let config = { baseURL: this.url @@ -17,10 +20,15 @@ async sendRequest(turns, systemMessage, stop_seq='***') { let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + + messages = strictFormat(messages); + const pack = { model: this.model_name || "meta-llama/llama-3.1-70b-instruct", messages, stop: [stop_seq], + ...(this.params || {}) }; let res = null; @@ -41,6 +49,18 @@ res = 'My brain disconnected, try again.'; } } + if (res.includes('<think>')) { + let start = res.indexOf('<think>'); + let end = res.indexOf('</think>') + 8; + if (start != -1) { + if (end != -1) { + res = res.substring(0, start) + res.substring(end); + } else { + res = res.substring(0, start+7); + } + } + res = res.trim(); + } return res; } diff --git a/src/models/prompter.js 
b/src/models/prompter.js new file mode 100644 index 0000000..eb14c29 --- /dev/null +++ b/src/models/prompter.js @@ -0,0 +1,373 @@ +import { readFileSync, mkdirSync, writeFileSync} from 'fs'; +import { Examples } from '../utils/examples.js'; +import { getCommandDocs } from '../agent/commands/index.js'; +import { getSkillDocs } from '../agent/library/index.js'; +import { SkillLibrary } from "../agent/library/skill_library.js"; +import { stringifyTurns } from '../utils/text.js'; +import { getCommand } from '../agent/commands/index.js'; +import settings from '../../settings.js'; + +import { Gemini } from './gemini.js'; +import { GPT } from './gpt.js'; +import { Claude } from './claude.js'; +import { Mistral } from './mistral.js'; +import { ReplicateAPI } from './replicate.js'; +import { Local } from './local.js'; +import { Novita } from './novita.js'; +import { GroqCloudAPI } from './groq.js'; +import { HuggingFace } from './huggingface.js'; +import { Qwen } from "./qwen.js"; +import { Grok } from "./grok.js"; +import { DeepSeek } from './deepseek.js'; +import { hyperbolic } from './hyperbolic.js'; +import { glhf } from './glhf.js'; + +export class Prompter { + constructor(agent, fp) { + this.agent = agent; + this.profile = JSON.parse(readFileSync(fp, 'utf8')); + let default_profile = JSON.parse(readFileSync('./profiles/defaults/_default.json', 'utf8')); + let base_fp = settings.base_profile; + let base_profile = JSON.parse(readFileSync(base_fp, 'utf8')); + + // first use defaults to fill in missing values in the base profile + for (let key in default_profile) { + if (base_profile[key] === undefined) + base_profile[key] = default_profile[key]; + } + // then use base profile to fill in missing values in the individual profile + for (let key in base_profile) { + if (this.profile[key] === undefined) + this.profile[key] = base_profile[key]; + } + // base overrides default, individual overrides base + + this.convo_examples = null; + this.coding_examples = null; + + let name = this.profile.name; + this.cooldown = this.profile.cooldown ?
this.profile.cooldown : 0; + this.last_prompt_time = 0; + this.awaiting_coding = false; + + // try to get "max_tokens" parameter, else null + let max_tokens = null; + if (this.profile.max_tokens) + max_tokens = this.profile.max_tokens; + + let chat_model_profile = this._selectAPI(this.profile.model); + this.chat_model = this._createModel(chat_model_profile); + + if (this.profile.code_model) { + let code_model_profile = this._selectAPI(this.profile.code_model); + this.code_model = this._createModel(code_model_profile); + } + else { + this.code_model = this.chat_model; + } + + let embedding = this.profile.embedding; + if (embedding === undefined) { + if (chat_model_profile.api !== 'ollama') + embedding = {api: chat_model_profile.api}; + else + embedding = {api: 'none'}; + } + else if (typeof embedding === 'string' || embedding instanceof String) + embedding = {api: embedding}; + + console.log('Using embedding settings:', embedding); + + try { + if (embedding.api === 'google') + this.embedding_model = new Gemini(embedding.model, embedding.url); + else if (embedding.api === 'openai') + this.embedding_model = new GPT(embedding.model, embedding.url); + else if (embedding.api === 'replicate') + this.embedding_model = new ReplicateAPI(embedding.model, embedding.url); + else if (embedding.api === 'ollama') + this.embedding_model = new Local(embedding.model, embedding.url); + else if (embedding.api === 'qwen') + this.embedding_model = new Qwen(embedding.model, embedding.url); + else if (embedding.api === 'mistral') + this.embedding_model = new Mistral(embedding.model, embedding.url); + else { + this.embedding_model = null; + console.log('Unknown embedding: ', embedding ? embedding.api : '[NOT SPECIFIED]', '. Using word overlap.'); + } + } + catch (err) { + console.log('Warning: Failed to initialize embedding model:', err.message); + console.log('Continuing anyway, using word overlap instead.'); + this.embedding_model = null; + } + this.skill_libary = new SkillLibrary(agent, this.embedding_model); + mkdirSync(`./bots/${name}`, { recursive: true }); + writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.profile, null, 4), (err) => { + if (err) { + throw new Error('Failed to save profile:', err); + } + console.log("Copy profile saved."); + }); + } + + _selectAPI(profile) { + if (typeof profile === 'string' || profile instanceof String) { + profile = {model: profile}; + } + if (!profile.api) { + if (profile.model.includes('gemini')) + profile.api = 'google'; + else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) + profile.api = 'openai'; + else if (profile.model.includes('claude')) + profile.api = 'anthropic'; + else if (profile.model.includes('huggingface/')) + profile.api = "huggingface"; + else if (profile.model.includes('replicate/')) + profile.api = 'replicate'; + else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/")) + profile.api = 'mistral'; + else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) + profile.api = 'groq'; + else if (profile.model.includes('hf:')) + profile.api = "glhf"; + else if (profile.model.includes('hyperbolic:') || profile.model.includes('hb:')) + profile.api = "hyperbolic"; + else if (profile.model.includes('novita/')) + profile.api = 'novita'; + else if (profile.model.includes('qwen')) + profile.api = 'qwen'; + else if (profile.model.includes('grok')) + profile.api = 'xai'; + else if (profile.model.includes('deepseek')) + profile.api = 'deepseek'; + else +
profile.api = 'ollama'; + } + return profile; + } + + _createModel(profile) { + let model = null; + if (profile.api === 'google') + model = new Gemini(profile.model, profile.url, profile.params); + else if (profile.api === 'openai') + model = new GPT(profile.model, profile.url, profile.params); + else if (profile.api === 'anthropic') + model = new Claude(profile.model, profile.url, profile.params); + else if (profile.api === 'replicate') + model = new ReplicateAPI(profile.model, profile.url, profile.params); + else if (profile.api === 'ollama') + model = new Local(profile.model, profile.url, profile.params); + else if (profile.api === 'mistral') + model = new Mistral(profile.model, profile.url, profile.params); + else if (profile.api === 'groq') + model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params); + else if (profile.api === 'glhf') + model = new glhf(profile.model, profile.url, profile.params); + else if (profile.api === 'hyperbolic') + model = new hyperbolic(profile.model.replace('hyperbolic:', '').replace('hb:', ''), profile.url, profile.params); + else if (profile.api === 'huggingface') + model = new HuggingFace(profile.model, profile.url, profile.params); + else if (profile.api === 'novita') + model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params); + else if (profile.api === 'qwen') + model = new Qwen(profile.model, profile.url, profile.params); + else if (profile.api === 'xai') + model = new Grok(profile.model, profile.url, profile.params); + else if (profile.api === 'deepseek') + model = new DeepSeek(profile.model, profile.url, profile.params); + else + throw new Error('Unknown API: ' + profile.api); + return model; + } + + getName() { + return this.profile.name; + } + + getInitModes() { + return this.profile.modes; + } + + async initExamples() { + try { + this.convo_examples = new Examples(this.embedding_model, settings.num_examples); + this.coding_examples = new Examples(this.embedding_model, settings.num_examples); + + // Wait for both examples to load before proceeding + await Promise.all([ + this.convo_examples.load(this.profile.conversation_examples), + this.coding_examples.load(this.profile.coding_examples), + this.skill_libary.initSkillLibrary() + ]); + + console.log('Examples initialized.'); + } catch (error) { + console.error('Failed to initialize examples:', error); + throw error; + } + } + + async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { + prompt = prompt.replaceAll('$NAME', this.agent.name); + + if (prompt.includes('$STATS')) { + let stats = await getCommand('!stats').perform(this.agent); + prompt = prompt.replaceAll('$STATS', stats); + } + if (prompt.includes('$INVENTORY')) { + let inventory = await getCommand('!inventory').perform(this.agent); + prompt = prompt.replaceAll('$INVENTORY', inventory); + } + if (prompt.includes('$ACTION')) { + prompt = prompt.replaceAll('$ACTION', this.agent.actions.currentActionLabel); + } + if (prompt.includes('$COMMAND_DOCS')) + prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); + if (prompt.includes('$CODE_DOCS')) { + const code_task_content = messages.slice().reverse().find(msg => + msg.role !== 'system' && msg.content.includes('!newAction(') + )?.content?.match(/!newAction\((.*?)\)/)?.[1] || ''; + + prompt = prompt.replaceAll( + '$CODE_DOCS', + await
this.skill_libary.getRelevantSkillDocs(code_task_content, settings.relevant_docs_count) + ); + } + prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); + if (prompt.includes('$CODE_DOCS')) + prompt = prompt.replaceAll('$CODE_DOCS', getSkillDocs()); + if (prompt.includes('$EXAMPLES') && examples !== null) + prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages)); + if (prompt.includes('$MEMORY')) + prompt = prompt.replaceAll('$MEMORY', this.agent.history.memory); + if (prompt.includes('$TO_SUMMARIZE')) + prompt = prompt.replaceAll('$TO_SUMMARIZE', stringifyTurns(to_summarize)); + if (prompt.includes('$CONVO')) + prompt = prompt.replaceAll('$CONVO', 'Recent conversation:\n' + stringifyTurns(messages)); + if (prompt.includes('$SELF_PROMPT')) { + let self_prompt = this.agent.self_prompter.on ? `YOUR CURRENT ASSIGNED GOAL: "${this.agent.self_prompter.prompt}"\n` : ''; + prompt = prompt.replaceAll('$SELF_PROMPT', self_prompt); + } + if (prompt.includes('$LAST_GOALS')) { + let goal_text = ''; + for (let goal in last_goals) { + if (last_goals[goal]) + goal_text += `You recently successfully completed the goal ${goal}.\n` + else + goal_text += `You recently failed to complete the goal ${goal}.\n` + } + prompt = prompt.replaceAll('$LAST_GOALS', goal_text.trim()); + } + if (prompt.includes('$BLUEPRINTS')) { + if (this.agent.npc.constructions) { + let blueprints = ''; + for (let blueprint in this.agent.npc.constructions) { + blueprints += blueprint + ', '; + } + prompt = prompt.replaceAll('$BLUEPRINTS', blueprints.slice(0, -2)); + } + } + + // check if there are any remaining placeholders with syntax $ + let remaining = prompt.match(/\$[A-Z_]+/g); + if (remaining !== null) { + console.warn('Unknown prompt placeholders:', remaining.join(', ')); + } + return prompt; + } + + async checkCooldown() { + let elapsed = Date.now() - this.last_prompt_time; + if (elapsed < this.cooldown && this.cooldown > 0) { + await new Promise(r => setTimeout(r, this.cooldown - elapsed)); + } + this.last_prompt_time = Date.now(); + } + + async promptConvo(messages) { + this.most_recent_msg_time = Date.now(); + let current_msg_time = this.most_recent_msg_time; + for (let i = 0; i < 3; i++) { // try 3 times to avoid hallucinations + await this.checkCooldown(); + if (current_msg_time !== this.most_recent_msg_time) { + return ''; + } + let prompt = this.profile.conversing; + prompt = await this.replaceStrings(prompt, messages, this.convo_examples); + let generation = await this.chat_model.sendRequest(messages, prompt); + // in conversations >2 players LLMs tend to hallucinate and role-play as other bots + // the FROM OTHER BOT tag should never be generated by the LLM + if (generation.includes('(FROM OTHER BOT)')) { + console.warn('LLM hallucinated message as another bot. 
Trying again...'); + continue; + } + if (current_msg_time !== this.most_recent_msg_time) { + console.warn(this.agent.name + ' received new message while generating, discarding old response.'); + return ''; + } + return generation; + } + return ''; + } + + async promptCoding(messages) { + if (this.awaiting_coding) { + console.warn('Already awaiting coding response, returning no response.'); + return '```//no response```'; + } + this.awaiting_coding = true; + await this.checkCooldown(); + let prompt = this.profile.coding; + prompt = await this.replaceStrings(prompt, messages, this.coding_examples); + let resp = await this.code_model.sendRequest(messages, prompt); + this.awaiting_coding = false; + return resp; + } + + async promptMemSaving(to_summarize) { + await this.checkCooldown(); + let prompt = this.profile.saving_memory; + prompt = await this.replaceStrings(prompt, null, null, to_summarize); + return await this.chat_model.sendRequest([], prompt); + } + + async promptShouldRespondToBot(new_message) { + await this.checkCooldown(); + let prompt = this.profile.bot_responder; + let messages = this.agent.history.getHistory(); + messages.push({role: 'user', content: new_message}); + prompt = await this.replaceStrings(prompt, null, null, messages); + let res = await this.chat_model.sendRequest([], prompt); + return res.trim().toLowerCase() === 'respond'; + } + + async promptGoalSetting(messages, last_goals) { + let system_message = this.profile.goal_setting; + system_message = await this.replaceStrings(system_message, messages); + + let user_message = 'Use the below info to determine what goal to target next\n\n'; + user_message += '$LAST_GOALS\n$STATS\n$INVENTORY\n$CONVO' + user_message = await this.replaceStrings(user_message, messages, null, null, last_goals); + let user_messages = [{role: 'user', content: user_message}]; + + let res = await this.chat_model.sendRequest(user_messages, system_message); + + let goal = null; + try { + let data = res.split('```')[1].replace('json', '').trim(); + goal = JSON.parse(data); + } catch (err) { + console.log('Failed to parse goal:', res, err); + } + if (!goal || !goal.name || !goal.quantity || isNaN(parseInt(goal.quantity))) { + console.log('Failed to set goal:', res); + return null; + } + goal.quantity = parseInt(goal.quantity); + return goal; + } +} diff --git a/src/models/qwen.js b/src/models/qwen.js index d3d7bec..4dfacfe 100644 --- a/src/models/qwen.js +++ b/src/models/qwen.js @@ -1,104 +1,79 @@ -// This code uses Dashscope and HTTP to ensure the latest support for the Qwen model. 
-// Qwen is also compatible with the OpenAI API format; - -import { getKey } from '../utils/keys.js'; +import OpenAIApi from 'openai'; +import { getKey, hasKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; export class Qwen { - constructor(modelName, url) { - this.modelName = modelName; - this.url = url || 'https://dashscope.aliyuncs.com/api/v1/services/aigc/text-generation/generation'; - this.apiKey = getKey('QWEN_API_KEY'); + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + let config = {}; + + config.baseURL = url || 'https://dashscope.aliyuncs.com/compatible-mode/v1'; + config.apiKey = getKey('QWEN_API_KEY'); + + this.openai = new OpenAIApi(config); } - async sendRequest(turns, systemMessage, stopSeq = '***', retryCount = 0) { - if (retryCount > 5) { - console.error('Maximum retry attempts reached.'); - return 'Error: Too many retry attempts.'; - } + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); - const data = { - model: this.modelName || 'qwen-plus', - input: { messages: [{ role: 'system', content: systemMessage }, ...turns] }, - parameters: { result_format: 'message', stop: stopSeq }, + messages = strictFormat(messages); + + const pack = { + model: this.model_name || "qwen-plus", + messages, + stop: stop_seq, + ...(this.params || {}) }; - // Add default user message if all messages are 'system' role - if (turns.every((msg) => msg.role === 'system')) { - data.input.messages.push({ role: 'user', content: 'hello' }); - } - - if (!data.model || !data.input || !data.input.messages || !data.parameters) { - console.error('Invalid request data format:', data); - throw new Error('Invalid request data format.'); - } - + let res = null; try { - const response = await this._makeHttpRequest(this.url, data); - const choice = response?.output?.choices?.[0]; - - if (choice?.finish_reason === 'length' && turns.length > 0) { - return this.sendRequest(turns.slice(1), systemMessage, stopSeq, retryCount + 1); + console.log('Awaiting Qwen api response...'); + // console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.'); + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; } - - return choice?.message?.content || 'No content received.'; - } catch (err) { - console.error('Error occurred:', err); - return 'An error occurred, please try again.'; } + return res; } + // Why random backoff? + // With a 30 requests/second limit on Alibaba Qwen's embedding service, + // random backoff helps maximize bandwidth utilization. 
async embed(text) { - if (!text || typeof text !== 'string') { - console.error('Invalid embedding input: text must be a non-empty string.'); - return 'Invalid embedding input: text must be a non-empty string.'; - } - - const data = { - model: 'text-embedding-v2', - input: { texts: [text] }, - parameters: { text_type: 'query' }, - }; - - if (!data.model || !data.input || !data.input.texts || !data.parameters) { - console.error('Invalid embedding request data format:', data); - throw new Error('Invalid embedding request data format.'); - } - - try { - const response = await this._makeHttpRequest(this.url, data); - const embedding = response?.output?.embeddings?.[0]?.embedding; - return embedding || 'No embedding result received.'; - } catch (err) { - console.error('Error occurred:', err); - return 'An error occurred, please try again.'; + const maxRetries = 5; // Maximum number of retries + for (let retries = 0; retries < maxRetries; retries++) { + try { + const { data } = await this.openai.embeddings.create({ + model: this.model_name || "text-embedding-v3", + input: text, + encoding_format: "float", + }); + return data[0].embedding; + } catch (err) { + if (err.status === 429) { + // If a rate limit error occurs, calculate the exponential backoff with a random delay (1-5 seconds) + const delay = Math.pow(2, retries) * 1000 + Math.floor(Math.random() * 2000); + // console.log(`Rate limit hit, retrying in ${delay} ms...`); + await new Promise(resolve => setTimeout(resolve, delay)); // Wait for the delay before retrying + } else { + throw err; + } + } } + // If maximum retries are reached and the request still fails, throw an error + throw new Error('Max retries reached, request failed.'); } - async _makeHttpRequest(url, data) { - const headers = { - 'Authorization': `Bearer ${this.apiKey}`, - 'Content-Type': 'application/json', - }; - - const response = await fetch(url, { - method: 'POST', - headers, - body: JSON.stringify(data), - }); - - if (!response.ok) { - const errorText = await response.text(); - console.error(`Request failed, status code ${response.status}: ${response.statusText}`); - console.error('Error response content:', errorText); - throw new Error(`Request failed, status code ${response.status}: ${response.statusText}`); - } - - const responseText = await response.text(); - try { - return JSON.parse(responseText); - } catch (err) { - console.error('Failed to parse response JSON:', err); - throw new Error('Invalid response JSON format.'); - } - } -} +} \ No newline at end of file diff --git a/src/models/replicate.js b/src/models/replicate.js index e0c7d6c..c8c3ba3 100644 --- a/src/models/replicate.js +++ b/src/models/replicate.js @@ -4,9 +4,10 @@ import { getKey } from '../utils/keys.js'; // llama, mistral export class ReplicateAPI { - constructor(model_name, url) { + constructor(model_name, url, params) { this.model_name = model_name; this.url = url; + this.params = params; if (this.url) { console.warn('Replicate API does not support custom URLs. 
Ignoring provided URL.'); @@ -22,7 +23,11 @@ export class ReplicateAPI { const prompt = toSinglePrompt(turns, null, stop_seq); let model_name = this.model_name || 'meta/meta-llama-3-70b-instruct'; - const input = { prompt, system_prompt: systemMessage }; + const input = { + prompt, + system_prompt: systemMessage, + ...(this.params || {}) + }; let res = null; try { console.log('Awaiting Replicate API response...'); diff --git a/src/process/init_agent.js b/src/process/init_agent.js index 88c99b9..15b08e0 100644 --- a/src/process/init_agent.js +++ b/src/process/init_agent.js @@ -57,11 +57,8 @@ const argv = yargs(args) const agent = new Agent(); await agent.start(argv.profile, argv.load_memory, argv.init_message, argv.count_id, argv.task_path, argv.task_id); } catch (error) { - console.error('Failed to start agent process:', { - message: error.message || 'No error message', - stack: error.stack || 'No stack trace', - error: error - }); + console.error('Failed to start agent process:'); + console.error(error); process.exit(1); } })(); diff --git a/src/server/mind_server.js b/src/server/mind_server.js index b94cccf..eed71d7 100644 --- a/src/server/mind_server.js +++ b/src/server/mind_server.js @@ -116,6 +116,18 @@ export function createMindServer(port = 8080) { }, 2000); }); + socket.on('send-message', (agentName, message) => { + if (!inGameAgents[agentName]) { + console.warn(`Agent ${agentName} not logged in, cannot send message via MindServer.`); + return + } + try { + console.log(`Sending message to agent ${agentName}: ${message}`); + inGameAgents[agentName].emit('send-message', agentName, message) + } catch (error) { + console.error('Error: ', error); + } + }); }); server.listen(port, 'localhost', () => { @@ -148,4 +160,4 @@ function stopAllAgents() { // Optional: export these if you need access to them from other files export const getIO = () => io; export const getServer = () => server; -export const getConnectedAgents = () => connectedAgents; \ No newline at end of file +export const getConnectedAgents = () => connectedAgents; diff --git a/src/server/public/index.html b/src/server/public/index.html index 1f7951f..c66a986 100644 --- a/src/server/public/index.html +++ b/src/server/public/index.html @@ -80,6 +80,7 @@ ${agent.in_game ? 
` + ` : ` `} @@ -110,6 +111,10 @@ function shutdown() { socket.emit('shutdown'); } + + function sendMessage(agentName, message) { + socket.emit('send-message', agentName, message) + } - \ No newline at end of file + diff --git a/src/utils/mcdata.js b/src/utils/mcdata.js index 58cfbdb..2a3a27c 100644 --- a/src/utils/mcdata.js +++ b/src/utils/mcdata.js @@ -190,7 +190,10 @@ export function getItemCraftingRecipes(itemName) { recipe[ingredientName] = 0; recipe[ingredientName]++; } - recipes.push(recipe); + recipes.push([ + recipe, + {craftedCount : r.result.count} + ]); } return recipes; @@ -327,4 +330,156 @@ export function calculateLimitingResource(availableItems, requiredItems, discret } if(discrete) num = Math.floor(num); return {num, limitingResource} +} + +let loopingItems = new Set(); + +export function initializeLoopingItems() { + + loopingItems = new Set(['coal', + 'wheat', + 'diamond', + 'emerald', + 'raw_iron', + 'raw_gold', + 'redstone', + 'blue_wool', + 'packed_mud', + 'raw_copper', + 'iron_ingot', + 'dried_kelp', + 'gold_ingot', + 'slime_ball', + 'black_wool', + 'quartz_slab', + 'copper_ingot', + 'lapis_lazuli', + 'honey_bottle', + 'rib_armor_trim_smithing_template', + 'eye_armor_trim_smithing_template', + 'vex_armor_trim_smithing_template', + 'dune_armor_trim_smithing_template', + 'host_armor_trim_smithing_template', + 'tide_armor_trim_smithing_template', + 'wild_armor_trim_smithing_template', + 'ward_armor_trim_smithing_template', + 'coast_armor_trim_smithing_template', + 'spire_armor_trim_smithing_template', + 'snout_armor_trim_smithing_template', + 'shaper_armor_trim_smithing_template', + 'netherite_upgrade_smithing_template', + 'raiser_armor_trim_smithing_template', + 'sentry_armor_trim_smithing_template', + 'silence_armor_trim_smithing_template', + 'wayfinder_armor_trim_smithing_template']); +} + + +/** + * Gets a detailed plan for crafting an item considering current inventory + */ +export function getDetailedCraftingPlan(targetItem, count = 1, current_inventory = {}) { + initializeLoopingItems(); + if (!targetItem || count <= 0 || !getItemId(targetItem)) { + return "Invalid input. 
Please provide a valid item name and positive count."; + } + + if (isBaseItem(targetItem)) { + const available = current_inventory[targetItem] || 0; + if (available >= count) return "You have all required items already in your inventory!"; + return `${targetItem} is a base item, you need to find ${count - available} more in the world`; + } + + const inventory = { ...current_inventory }; + const leftovers = {}; + const plan = craftItem(targetItem, count, inventory, leftovers); + return formatPlan(plan); +} + +function isBaseItem(item) { + return loopingItems.has(item) || getItemCraftingRecipes(item) === null; +} + +function craftItem(item, count, inventory, leftovers, crafted = { required: {}, steps: [], leftovers: {} }) { + // Check available inventory and leftovers first + const availableInv = inventory[item] || 0; + const availableLeft = leftovers[item] || 0; + const totalAvailable = availableInv + availableLeft; + + if (totalAvailable >= count) { + // Use leftovers first, then inventory + const useFromLeft = Math.min(availableLeft, count); + leftovers[item] = availableLeft - useFromLeft; + + const remainingNeeded = count - useFromLeft; + if (remainingNeeded > 0) { + inventory[item] = availableInv - remainingNeeded; + } + return crafted; + } + + // Use whatever is available + const stillNeeded = count - totalAvailable; + if (availableLeft > 0) leftovers[item] = 0; + if (availableInv > 0) inventory[item] = 0; + + if (isBaseItem(item)) { + crafted.required[item] = (crafted.required[item] || 0) + stillNeeded; + return crafted; + } + + const recipe = getItemCraftingRecipes(item)?.[0]; + if (!recipe) { + crafted.required[item] = stillNeeded; + return crafted; + } + + const [ingredients, result] = recipe; + const craftedPerRecipe = result.craftedCount; + const batchCount = Math.ceil(stillNeeded / craftedPerRecipe); + const totalProduced = batchCount * craftedPerRecipe; + + // Add excess to leftovers + if (totalProduced > stillNeeded) { + leftovers[item] = (leftovers[item] || 0) + (totalProduced - stillNeeded); + } + + // Process each ingredient + for (const [ingredientName, ingredientCount] of Object.entries(ingredients)) { + const totalIngredientNeeded = ingredientCount * batchCount; + craftItem(ingredientName, totalIngredientNeeded, inventory, leftovers, crafted); + } + + // Add crafting step + const stepIngredients = Object.entries(ingredients) + .map(([name, amount]) => `${amount * batchCount} ${name}`) + .join(' + '); + crafted.steps.push(`Craft ${stepIngredients} -> ${totalProduced} ${item}`); + + return crafted; +} + +function formatPlan({ required, steps, leftovers }) { + const lines = []; + + if (Object.keys(required).length > 0) { + lines.push('You are missing the following items:'); + Object.entries(required).forEach(([item, count]) => + lines.push(`- ${count} ${item}`)); + lines.push('\nOnce you have these items, here\'s your crafting plan:'); + } else { + lines.push('You have all items required to craft this item!'); + lines.push('Here\'s your crafting plan:'); + } + + lines.push(''); + lines.push(...steps); + + if (Object.keys(leftovers).length > 0) { + lines.push('\nYou will have leftover:'); + Object.entries(leftovers).forEach(([item, count]) => + lines.push(`- ${count} ${item}`)); + } + + return lines.join('\n'); } \ No newline at end of file diff --git a/src/utils/text.js b/src/utils/text.js index 1e93667..f500199 100644 --- a/src/utils/text.js +++ b/src/utils/text.js @@ -26,8 +26,10 @@ export function toSinglePrompt(turns, system=null, stop_seq='***', model_nicknam return 
prompt; } -// ensures stricter turn order for anthropic/llama models -// combines repeated messages from the same role, separates repeat assistant messages with filler user messages +// ensures stricter turn order and roles: +// - system messages are treated as user messages and prefixed with SYSTEM: +// - combines repeated messages from users +// - separates repeat assistant messages with filler user messages export function strictFormat(turns) { let prev_role = null; let messages = []; diff --git a/viewer.html b/viewer.html index 737b046..bdf2047 100644 --- a/viewer.html +++ b/viewer.html @@ -26,9 +26,9 @@ - - - -

Mindcraft

-
- - - - diff --git a/src/utils/examples.js b/src/utils/examples.js deleted file mode 100644 index ca6de79..0000000 --- a/src/utils/examples.js +++ /dev/null @@ -1,94 +0,0 @@ -import { cosineSimilarity } from './math.js'; -import { stringifyTurns } from './text.js'; - -export class Examples { - constructor(model, select_num=2) { - this.examples = []; - this.model = model; - this.select_num = select_num; - this.embeddings = {}; - } - - turnsToText(turns) { - let messages = ''; - for (let turn of turns) { - if (turn.role !== 'assistant') - messages += turn.content.substring(turn.content.indexOf(':')+1).trim() + '\n'; - } - return messages.trim(); - } - - getWords(text) { - return text.replace(/[^a-zA-Z ]/g, '').toLowerCase().split(' '); - } - - wordOverlapScore(text1, text2) { - const words1 = this.getWords(text1); - const words2 = this.getWords(text2); - const intersection = words1.filter(word => words2.includes(word)); - return intersection.length / (words1.length + words2.length - intersection.length); - } - - async load(examples) { - this.examples = examples; - if (!this.model) return; // Early return if no embedding model - - if (this.select_num === 0) - return; - - try { - // Create array of promises first - const embeddingPromises = examples.map(example => { - const turn_text = this.turnsToText(example); - return this.model.embed(turn_text) - .then(embedding => { - this.embeddings[turn_text] = embedding; - }); - }); - - // Wait for all embeddings to complete - await Promise.all(embeddingPromises); - } catch (err) { - console.warn('Error with embedding model, using word overlap instead:', err); - this.model = null; - } - } - - async getRelevant(turns) { - if (this.select_num === 0) - return []; - - let turn_text = this.turnsToText(turns); - if (this.model !== null) { - let embedding = await this.model.embed(turn_text); - this.examples.sort((a, b) => - cosineSimilarity(embedding, this.embeddings[this.turnsToText(b)]) - - cosineSimilarity(embedding, this.embeddings[this.turnsToText(a)]) - ); - } - else { - this.examples.sort((a, b) => - this.wordOverlapScore(turn_text, this.turnsToText(b)) - - this.wordOverlapScore(turn_text, this.turnsToText(a)) - ); - } - let selected = this.examples.slice(0, this.select_num); - return JSON.parse(JSON.stringify(selected)); // deep copy - } - - async createExampleMessage(turns) { - let selected_examples = await this.getRelevant(turns); - - console.log('selected examples:'); - for (let example of selected_examples) { - console.log('Example:', example[0].content) - } - - let msg = 'Examples of how to respond:\n'; - for (let i=0; i { - bot.acceptResourcePack(); - }); - - return bot; -} - -export function isHuntable(mob) { - if (!mob || !mob.name) return false; - const animals = ['chicken', 'cow', 'llama', 'mooshroom', 'pig', 'rabbit', 'sheep']; - return animals.includes(mob.name.toLowerCase()) && !mob.metadata[16]; // metadata 16 is not baby -} - -export function isHostile(mob) { - if (!mob || !mob.name) return false; - return (mob.type === 'mob' || mob.type === 'hostile') && mob.name !== 'iron_golem' && mob.name !== 'snow_golem'; -} - -export function getItemId(itemName) { - let item = mcdata.itemsByName[itemName]; - if (item) { - return item.id; - } - return null; -} - -export function getItemName(itemId) { - let item = mcdata.items[itemId] - if (item) { - return item.name; - } - return null; -} - -export function getBlockId(blockName) { - let block = mcdata.blocksByName[blockName]; - if (block) { - return block.id; - } - return null; -} - -export 
function getBlockName(blockId) { - let block = mcdata.blocks[blockId] - if (block) { - return block.name; - } - return null; -} - -export function getAllItems(ignore) { - if (!ignore) { - ignore = []; - } - let items = [] - for (const itemId in mcdata.items) { - const item = mcdata.items[itemId]; - if (!ignore.includes(item.name)) { - items.push(item); - } - } - return items; -} - -export function getAllItemIds(ignore) { - const items = getAllItems(ignore); - let itemIds = []; - for (const item of items) { - itemIds.push(item.id); - } - return itemIds; -} - -export function getAllBlocks(ignore) { - if (!ignore) { - ignore = []; - } - let blocks = [] - for (const blockId in mcdata.blocks) { - const block = mcdata.blocks[blockId]; - if (!ignore.includes(block.name)) { - blocks.push(block); - } - } - return blocks; -} - -export function getAllBlockIds(ignore) { - const blocks = getAllBlocks(ignore); - let blockIds = []; - for (const block of blocks) { - blockIds.push(block.id); - } - return blockIds; -} - -export function getAllBiomes() { - return mcdata.biomes; -} - -export function getItemCraftingRecipes(itemName) { - let itemId = getItemId(itemName); - if (!mcdata.recipes[itemId]) { - return null; - } - - let recipes = []; - for (let r of mcdata.recipes[itemId]) { - let recipe = {}; - let ingredients = []; - if (r.ingredients) { - ingredients = r.ingredients; - } else if (r.inShape) { - ingredients = r.inShape.flat(); - } - for (let ingredient of ingredients) { - let ingredientName = getItemName(ingredient); - if (ingredientName === null) continue; - if (!recipe[ingredientName]) - recipe[ingredientName] = 0; - recipe[ingredientName]++; - } - recipes.push([ - recipe, - {craftedCount : r.result.count} - ]); - } - - return recipes; -} - -export function isSmeltable(itemName) { - const misc_smeltables = ['beef', 'chicken', 'cod', 'mutton', 'porkchop', 'rabbit', 'salmon', 'tropical_fish', 'potato', 'kelp', 'sand', 'cobblestone', 'clay_ball']; - return itemName.includes('raw') || itemName.includes('log') || misc_smeltables.includes(itemName); -} - -export function getSmeltingFuel(bot) { - let fuel = bot.inventory.items().find(i => i.name === 'coal' || i.name === 'charcoal' || i.name === 'blaze_rod') - if (fuel) - return fuel; - fuel = bot.inventory.items().find(i => i.name.includes('log') || i.name.includes('planks')) - if (fuel) - return fuel; - return bot.inventory.items().find(i => i.name === 'coal_block' || i.name === 'lava_bucket'); -} - -export function getFuelSmeltOutput(fuelName) { - if (fuelName === 'coal' || fuelName === 'charcoal') - return 8; - if (fuelName === 'blaze_rod') - return 12; - if (fuelName.includes('log') || fuelName.includes('planks')) - return 1.5 - if (fuelName === 'coal_block') - return 80; - if (fuelName === 'lava_bucket') - return 100; - return 0; -} - -export function getItemSmeltingIngredient(itemName) { - return { - baked_potato: 'potato', - steak: 'raw_beef', - cooked_chicken: 'raw_chicken', - cooked_cod: 'raw_cod', - cooked_mutton: 'raw_mutton', - cooked_porkchop: 'raw_porkchop', - cooked_rabbit: 'raw_rabbit', - cooked_salmon: 'raw_salmon', - dried_kelp: 'kelp', - iron_ingot: 'raw_iron', - gold_ingot: 'raw_gold', - copper_ingot: 'raw_copper', - glass: 'sand' - }[itemName]; -} - -export function getItemBlockSources(itemName) { - let itemId = getItemId(itemName); - let sources = []; - for (let block of getAllBlocks()) { - if (block.drops.includes(itemId)) { - sources.push(block.name); - } - } - return sources; -} - -export function getItemAnimalSource(itemName) { 
- return { - raw_beef: 'cow', - raw_chicken: 'chicken', - raw_cod: 'cod', - raw_mutton: 'sheep', - raw_porkchop: 'pig', - raw_rabbit: 'rabbit', - raw_salmon: 'salmon', - leather: 'cow', - wool: 'sheep' - }[itemName]; -} - -export function getBlockTool(blockName) { - let block = mcdata.blocksByName[blockName]; - if (!block || !block.harvestTools) { - return null; - } - return getItemName(Object.keys(block.harvestTools)[0]); // Double check first tool is always simplest -} - -export function makeItem(name, amount=1) { - return new Item(getItemId(name), amount); -} - -/** - * Returns the number of ingredients required to use the recipe once. - * - * @param {Recipe} recipe - * @returns {Object} an object describing the number of each ingredient. - */ -export function ingredientsFromPrismarineRecipe(recipe) { - let requiredIngedients = {}; - if (recipe.inShape) - for (const ingredient of recipe.inShape.flat()) { - if(ingredient.id<0) continue; //prismarine-recipe uses id -1 as an empty crafting slot - const ingredientName = getItemName(ingredient.id); - requiredIngedients[ingredientName] ??=0; - requiredIngedients[ingredientName] += ingredient.count; - } - if (recipe.ingredients) - for (const ingredient of recipe.ingredients) { - if(ingredient.id<0) continue; - const ingredientName = getItemName(ingredient.id); - requiredIngedients[ingredientName] ??=0; - requiredIngedients[ingredientName] -= ingredient.count; - //Yes, the `-=` is intended. - //prismarine-recipe uses positive numbers for the shaped ingredients but negative for unshaped. - //Why this is the case is beyond my understanding. - } - return requiredIngedients; -} - -/** - * Calculates the number of times an action, such as a crafing recipe, can be completed before running out of resources. - * @template T - doesn't have to be an item. This could be any resource. - * @param {Object.} availableItems - The resources available; e.g, `{'cobble_stone': 7, 'stick': 10}` - * @param {Object.} requiredItems - The resources required to complete the action once; e.g, `{'cobble_stone': 3, 'stick': 2}` - * @param {boolean} discrete - Is the action discrete? 
- * @returns {{num: number, limitingResource: (T | null)}} the number of times the action can be completed and the limmiting resource; e.g `{num: 2, limitingResource: 'cobble_stone'}` - */ -export function calculateLimitingResource(availableItems, requiredItems, discrete=true) { - let limitingResource = null; - let num = Infinity; - for (const itemType in requiredItems) { - if (availableItems[itemType] < requiredItems[itemType] * num) { - limitingResource = itemType; - num = availableItems[itemType] / requiredItems[itemType]; - } - } - if(discrete) num = Math.floor(num); - return {num, limitingResource} -} - -let loopingItems = new Set(); - -export function initializeLoopingItems() { - - loopingItems = new Set(['coal', - 'wheat', - 'diamond', - 'emerald', - 'raw_iron', - 'raw_gold', - 'redstone', - 'blue_wool', - 'packed_mud', - 'raw_copper', - 'iron_ingot', - 'dried_kelp', - 'gold_ingot', - 'slime_ball', - 'black_wool', - 'quartz_slab', - 'copper_ingot', - 'lapis_lazuli', - 'honey_bottle', - 'rib_armor_trim_smithing_template', - 'eye_armor_trim_smithing_template', - 'vex_armor_trim_smithing_template', - 'dune_armor_trim_smithing_template', - 'host_armor_trim_smithing_template', - 'tide_armor_trim_smithing_template', - 'wild_armor_trim_smithing_template', - 'ward_armor_trim_smithing_template', - 'coast_armor_trim_smithing_template', - 'spire_armor_trim_smithing_template', - 'snout_armor_trim_smithing_template', - 'shaper_armor_trim_smithing_template', - 'netherite_upgrade_smithing_template', - 'raiser_armor_trim_smithing_template', - 'sentry_armor_trim_smithing_template', - 'silence_armor_trim_smithing_template', - 'wayfinder_armor_trim_smithing_template']); -} - - -/** - * Gets a detailed plan for crafting an item considering current inventory - */ -export function getDetailedCraftingPlan(targetItem, count = 1, current_inventory = {}) { - initializeLoopingItems(); - if (!targetItem || count <= 0 || !getItemId(targetItem)) { - return "Invalid input. 
Please provide a valid item name and positive count."; - } - - if (isBaseItem(targetItem)) { - const available = current_inventory[targetItem] || 0; - if (available >= count) return "You have all required items already in your inventory!"; - return `${targetItem} is a base item, you need to find ${count - available} more in the world`; - } - - const inventory = { ...current_inventory }; - const leftovers = {}; - const plan = craftItem(targetItem, count, inventory, leftovers); - return formatPlan(plan); -} - -function isBaseItem(item) { - return loopingItems.has(item) || getItemCraftingRecipes(item) === null; -} - -function craftItem(item, count, inventory, leftovers, crafted = { required: {}, steps: [], leftovers: {} }) { - // Check available inventory and leftovers first - const availableInv = inventory[item] || 0; - const availableLeft = leftovers[item] || 0; - const totalAvailable = availableInv + availableLeft; - - if (totalAvailable >= count) { - // Use leftovers first, then inventory - const useFromLeft = Math.min(availableLeft, count); - leftovers[item] = availableLeft - useFromLeft; - - const remainingNeeded = count - useFromLeft; - if (remainingNeeded > 0) { - inventory[item] = availableInv - remainingNeeded; - } - return crafted; - } - - // Use whatever is available - const stillNeeded = count - totalAvailable; - if (availableLeft > 0) leftovers[item] = 0; - if (availableInv > 0) inventory[item] = 0; - - if (isBaseItem(item)) { - crafted.required[item] = (crafted.required[item] || 0) + stillNeeded; - return crafted; - } - - const recipe = getItemCraftingRecipes(item)?.[0]; - if (!recipe) { - crafted.required[item] = stillNeeded; - return crafted; - } - - const [ingredients, result] = recipe; - const craftedPerRecipe = result.craftedCount; - const batchCount = Math.ceil(stillNeeded / craftedPerRecipe); - const totalProduced = batchCount * craftedPerRecipe; - - // Add excess to leftovers - if (totalProduced > stillNeeded) { - leftovers[item] = (leftovers[item] || 0) + (totalProduced - stillNeeded); - } - - // Process each ingredient - for (const [ingredientName, ingredientCount] of Object.entries(ingredients)) { - const totalIngredientNeeded = ingredientCount * batchCount; - craftItem(ingredientName, totalIngredientNeeded, inventory, leftovers, crafted); - } - - // Add crafting step - const stepIngredients = Object.entries(ingredients) - .map(([name, amount]) => `${amount * batchCount} ${name}`) - .join(' + '); - crafted.steps.push(`Craft ${stepIngredients} -> ${totalProduced} ${item}`); - - return crafted; -} - -function formatPlan({ required, steps, leftovers }) { - const lines = []; - - if (Object.keys(required).length > 0) { - lines.push('You are missing the following items:'); - Object.entries(required).forEach(([item, count]) => - lines.push(`- ${count} ${item}`)); - lines.push('\nOnce you have these items, here\'s your crafting plan:'); - } else { - lines.push('You have all items required to craft this item!'); - lines.push('Here\'s your crafting plan:'); - } - - lines.push(''); - lines.push(...steps); - - if (Object.keys(leftovers).length > 0) { - lines.push('\nYou will have leftover:'); - Object.entries(leftovers).forEach(([item, count]) => - lines.push(`- ${count} ${item}`)); - } - - return lines.join('\n'); -} \ No newline at end of file diff --git a/src/utils/text.js b/src/utils/text.js deleted file mode 100644 index f500199..0000000 --- a/src/utils/text.js +++ /dev/null @@ -1,65 +0,0 @@ -export function stringifyTurns(turns) { - let res = ''; - for (let turn of turns) 
{ - if (turn.role === 'assistant') { - res += `\nYour output:\n${turn.content}`; - } else if (turn.role === 'system') { - res += `\nSystem output: ${turn.content}`; - } else { - res += `\nUser input: ${turn.content}`; - - } - } - return res.trim(); -} - -export function toSinglePrompt(turns, system=null, stop_seq='***', model_nickname='assistant') { - let prompt = system ? `${system}${stop_seq}` : ''; - let role = ''; - turns.forEach((message) => { - role = message.role; - if (role === 'assistant') role = model_nickname; - prompt += `${role}: ${message.content}${stop_seq}`; - }); - if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message - prompt += model_nickname + ": "; - return prompt; -} - -// ensures stricter turn order and roles: -// - system messages are treated as user messages and prefixed with SYSTEM: -// - combines repeated messages from users -// - separates repeat assistant messages with filler user messages -export function strictFormat(turns) { - let prev_role = null; - let messages = []; - let filler = {role: 'user', content: '_'}; - for (let msg of turns) { - msg.content = msg.content.trim(); - if (msg.role === 'system') { - msg.role = 'user'; - msg.content = 'SYSTEM: ' + msg.content; - } - if (msg.role === prev_role && msg.role === 'assistant') { - // insert empty user message to separate assistant messages - messages.push(filler); - messages.push(msg); - } - else if (msg.role === prev_role) { - // combine new message with previous message instead of adding a new one - messages[messages.length-1].content += '\n' + msg.content; - } - else { - messages.push(msg); - } - prev_role = msg.role; - - } - if (messages.length > 0 && messages[0].role !== 'user') { - messages.unshift(filler); // anthropic requires user message to start - } - if (messages.length === 0) { - messages.push(filler); - } - return messages; -} \ No newline at end of file diff --git a/src/utils/translator.js b/src/utils/translator.js deleted file mode 100644 index bc9cc77..0000000 --- a/src/utils/translator.js +++ /dev/null @@ -1,28 +0,0 @@ -import translate from 'google-translate-api-x'; -import settings from '../../settings.js'; - -const preferred_lang = String(settings.language).toLowerCase(); - -export async function handleTranslation(message) { - if (preferred_lang === 'en' || preferred_lang === 'english') - return message; - try { - const translation = await translate(message, { to: preferred_lang }); - return translation.text || message; - } catch (error) { - console.error('Error translating message:', error); - return message; - } -} - -export async function handleEnglishTranslation(message) { - if (preferred_lang === 'en' || preferred_lang === 'english') - return message; - try { - const translation = await translate(message, { to: 'english' }); - return translation.text || message; - } catch (error) { - console.error('Error translating message:', error); - return message; - } -} From 12e7fbd00363c2802d5d10d87379856f7a543617 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:38:44 -0800 Subject: [PATCH 035/171] Delete .gitignore --- .gitignore | 15 --------------- 1 file changed, 15 deletions(-) delete mode 100644 .gitignore diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 1e92ae7..0000000 --- a/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -.vscode/ -.idea/ -node_modules/ -package-lock.json -code_records/ -scratch.js -bots/**/action-code/** -bots/**/ -keys.json 
-services/viaproxy/jars/** -services/viaproxy/logs/** -services/viaproxy/plugins/** -services/viaproxy/ViaLoader/** -services/viaproxy/saves.json -services/viaproxy/viaproxy.yml From 2e50d92680decac3020b2da673924bdb517fc315 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:39:04 -0800 Subject: [PATCH 036/171] Delete FAQ.md --- FAQ.md | 27 --------------------------- 1 file changed, 27 deletions(-) delete mode 100644 FAQ.md diff --git a/FAQ.md b/FAQ.md deleted file mode 100644 index d560b03..0000000 --- a/FAQ.md +++ /dev/null @@ -1,27 +0,0 @@ -# Common Issues -- `Error: connect ECONNREFUSED`: Minecraft refused to connect with mindcraft program. Most likely due to: - - you have not opened your game to LAN in game settings - - your LAN port is incorrect, make sure the one you enter in game is the same as specified in `settings.js` - - you have the wrong version of minecraft, make sure your MC version is the same as specified in `settings.js` - -- `ERR_MODULE_NOT_FOUND`: You are missing an npm package. run `npm install` - -- Many issues are caused by out-of-date node module patches, especially after updates. A catch-all is to delete the `node_modules` folder, and run `npm install` - -- `My brain disconnected, try again`: Something is wrong with the LLM api. You may have the wrong API key, exceeded your rate limits, or other. Check the program outputs for more details. - -- `I'm stuck!` or other issues with constantly getting stuck: - - Mineflayer's pathfinder is imperfect. We have improved upon it with patches, but these might not have been applied properly. Make sure your code is up to date with main, delete the `node_modules` folder, and run `npm install` - - The bot will still get stuck occasionally, but not constantly. - -- `Why I added the api key but still prompted that the key can't be found?` - - Possible reason 1: Did not modify keys.example.json to keys.json. - - Possible reason 2: If you use vscode to edit, you need to `ctrl+s` to save the file for the changes to take effect. - - Possible reason 3: Not setting the code path correctly in setting.js, use andy.js by default. - -# Common Questions -- Mod Support? Mindcraft only supports client-side mods like optifine and sodium, though they can be tricky to set up. Mods that change minecraft game mechanics are not supported. - -- Texture Packs? Apparently these cause issues and refuse to connect. Not sure why - -- Baritone? Baritone is a mod that is completely different from mineflayer. There is currently no easy way to integrate the two programs. 
From 97b42f6b45fdd2670d9573cdf01d3611834a4b13 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:39:17 -0800 Subject: [PATCH 037/171] Delete LICENSE --- LICENSE | 21 --------------------- 1 file changed, 21 deletions(-) delete mode 100644 LICENSE diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 1d5880c..0000000 --- a/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2024 Kolby Nottingham - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. From c5b89fce8bb47191f5a10a81791a804a3c79f462 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:39:27 -0800 Subject: [PATCH 038/171] Delete README.md --- README.md | 155 ------------------------------------------------------ 1 file changed, 155 deletions(-) delete mode 100644 README.md diff --git a/README.md b/README.md deleted file mode 100644 index 3fed3b3..0000000 --- a/README.md +++ /dev/null @@ -1,155 +0,0 @@ -# Mindcraft 🧠⛏️ - -Crafting minds for Minecraft with LLMs and [Mineflayer!](https://prismarinejs.github.io/mineflayer/#/) - -[FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) | [Discord Support](https://discord.gg/mp73p35dzC) | [Blog Post](https://kolbynottingham.com/mindcraft/) | [Contributor TODO](https://github.com/users/kolbytn/projects/1) - - -> [!WARNING] -Do not connect this bot to public servers with coding enabled. This project allows an LLM to write/execute code on your computer. While the code is sandboxed, it is still vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.js`. We strongly recommend running with additional layers of security such as docker containers. Ye be warned. - -## Requirements - -- [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) (up to v1.21.1, recommend v1.20.4) -- [Node.js Installed](https://nodejs.org/) (at least v14) -- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download). 
| [Mistral API Key](https://docs.mistral.ai/getting-started/models/models_overview/) | [Qwen API Key [Intl.]](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key)/[[cn]](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?) | [Novita AI API Key](https://novita.ai/settings?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link#key-management) | - -## Install and Run - -1. Make sure you have the requirements above. - -2. Clone or download this repository (big green button) - -3. Rename `keys.example.json` to `keys.json` and fill in your API keys (you only need one). The desired model is set in `andy.json` or other profiles. For other models refer to the table below. - -4. In terminal/command prompt, run `npm install` from the installed directory - -5. Start a minecraft world and open it to LAN on localhost port `55916` - -6. Run `node main.js` from the installed directory - -If you encounter issues, check the [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) or find support on [discord](https://discord.gg/mp73p35dzC). We are currently not very responsive to github issues. - -## Customization - -You can configure project details in `settings.js`. [See file.](settings.js) - -You can configure the agent's name, model, and prompts in their profile like `andy.json`. - -| API | Config Variable | Example Model name | Docs | -|------|------|------|------| -| OpenAI | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | -| Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | -| Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | -| Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | -| Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | -| Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | -| Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | -| Novita AI | `NOVITA_API_KEY` | `gryphe/mythomax-l2-13b` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | -| Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | -| Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | -| xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) | -| Hyperbolic | ` ` | `hb:deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | -| glhf.chat | ` ` | `hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | - -If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: -`ollama pull llama3 && ollama pull nomic-embed-text` - -## Online Servers -To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect too and play with it. 
To connect, change these lines in `settings.js`: -```javascript -"host": "111.222.333.444", -"port": 55920, -"auth": "microsoft", - -// rest is same... -``` -> [!CAUTION] -> The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. - -To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. You can switch accounts in the launcer, then run `node main.js`, then switch to your main account after the bot has connected. - -### Docker Container - -If you intend to `allow_insecure_coding`, it is a good idea to run the app in a docker container to reduce risks of running unknown code. This is strongly recommended before connecting to remote servers. - -```bash -docker run -i -t --rm -v $(pwd):/app -w /app -p 3000-3003:3000-3003 node:latest node main.js -``` -or simply -```bash -docker-compose up -``` - -When running in docker, if you want the bot to join your local minecraft server, you have to use a special host address `host.docker.internal` to call your localhost from inside your docker container. Put this into your [settings.js](settings.js): - -```javascript -"host": "host.docker.internal", // instead of "localhost", to join your local minecraft from inside the docker container -``` - -To connect to an unsupported minecraft version, you can try to use [viaproxy](services/viaproxy/README.md) - -## Bot Profiles - -Bot profiles are json files (such as `andy.json`) that define: - -1. Bot backend LLMs to use for chat and embeddings. -2. Prompts used to influence the bot's behavior. -3. Examples help the bot perform tasks. - -### Specifying Profiles via Command Line - -By default, the program will use the profiles specified in `settings.js`. You can specify one or more agent profiles using the `--profiles` argument: - -```bash -node main.js --profiles ./profiles/andy.json ./profiles/jill.json -``` - -### Model Specifications - -LLM models can be specified as simply as `"model": "gpt-4o"`. However, you can specify different models for chat, coding, and embeddings. -You can pass a string or an object for these fields. A model object must specify an `api`, and optionally a `model`, `url`, and additional `params`. - -```json -"model": { - "api": "openai", - "model": "gpt-4o", - "url": "https://api.openai.com/v1/", - "params": { - "max_tokens": 1000, - "temperature": 1 - } -}, -"code_model": { - "api": "openai", - "model": "gpt-4", - "url": "https://api.openai.com/v1/" -}, -"embedding": { - "api": "openai", - "url": "https://api.openai.com/v1/", - "model": "text-embedding-ada-002" -} - -``` - -`model` is used for chat, `code_model` is used for newAction coding, and `embedding` is used to embed text for example selection. If `code_model` is not specified, then it will use `model` for coding. - -All apis have default models and urls, so those fields are optional. Note some apis have no embedding model, so they will default to word overlap to retrieve examples. - -The `params` field is optional and can be used to specify additional parameters for the model. It accepts any key-value pairs supported by the api. Is not supported for embedding models. - -## Patches - -Some of the node modules that we depend on have bugs in them. 
To add a patch, change your local node module file and run `npx patch-package [package-name]` - -## Citation: - -``` -@misc{mindcraft2023, - Author = {Kolby Nottingham and Max Robinson}, - Title = {MINDcraft: LLM Agents for cooperation, competition, and creativity in Minecraft}, - Year = {2023}, - url={https://github.com/kolbytn/mindcraft} -} -``` From fd2fe0b33ec381b8fd395737d900ada3ae95f978 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:39:37 -0800 Subject: [PATCH 039/171] Delete andy.json --- andy.json | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 andy.json diff --git a/andy.json b/andy.json deleted file mode 100644 index 97b45b4..0000000 --- a/andy.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "andy", - - "model": "gpt-4o-mini" - -} \ No newline at end of file From 359c7e825c335a9fc38bbdc2db7b4ede6350c807 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:41:07 -0800 Subject: [PATCH 040/171] Add files via upload --- FAQ.md | 27 + LICENSE | 21 + README.md | 153 ++ andy.json | 6 + bots/execTemplate.js | 6 + bots/lintTemplate.js | 10 + patches/mineflayer+4.20.1.patch | 13 + patches/mineflayer-pathfinder+2.4.5.patch | 20 + patches/mineflayer-pvp+1.3.2.patch | 13 + profiles/andy_npc.json | 213 +++ profiles/claude.json | 7 + profiles/deepseek.json | 7 + profiles/defaults/_default.json | 245 +++ profiles/defaults/creative.json | 14 + profiles/defaults/god_mode.json | 14 + profiles/defaults/survival.json | 14 + profiles/freeguy.json | 7 + profiles/gemini.json | 7 + profiles/gpt.json | 10 + profiles/grok.json | 7 + profiles/llama.json | 10 + profiles/mistral.json | 5 + profiles/qwen.json | 17 + services/viaproxy/README.md | 25 + src/agent/action_manager.js | 155 ++ src/agent/agent.js | 470 ++++++ src/agent/agent_proxy.js | 73 + src/agent/coder.js | 228 +++ src/agent/commands/actions.js | 423 ++++++ src/agent/commands/index.js | 252 +++ src/agent/commands/queries.js | 224 +++ src/agent/conversation.js | 367 +++++ src/agent/history.js | 119 ++ src/agent/library/index.js | 23 + src/agent/library/lockdown.js | 26 + src/agent/library/skill_library.js | 47 + src/agent/library/skills.js | 1353 +++++++++++++++++ src/agent/library/world.js | 387 +++++ src/agent/memory_bank.js | 25 + src/agent/modes.js | 414 +++++ src/agent/npc/build_goal.js | 80 + src/agent/npc/construction/dirt_shelter.json | 38 + src/agent/npc/construction/large_house.json | 230 +++ .../npc/construction/small_stone_house.json | 42 + .../npc/construction/small_wood_house.json | 42 + src/agent/npc/controller.js | 261 ++++ src/agent/npc/data.js | 50 + src/agent/npc/item_goal.js | 355 +++++ src/agent/npc/utils.js | 126 ++ src/agent/self_prompter.js | 114 ++ src/agent/tasks.js | 196 +++ src/agent/viewer.js | 8 + src/models/claude.js | 50 + src/models/deepseek.js | 58 + src/models/gemini.js | 116 ++ src/models/glhf.js | 62 + src/models/gpt.js | 72 + src/models/grok.js | 61 + src/models/groq.js | 86 ++ src/models/huggingface.js | 87 ++ src/models/hyperbolic.js | 92 ++ src/models/local.js | 102 ++ src/models/mistral.js | 73 + src/models/novita.js | 70 + src/models/prompter.js | 373 +++++ src/models/qwen.js | 79 + src/models/replicate.js | 59 + src/process/agent_process.js | 67 + src/process/init_agent.js | 64 + src/process/main_proxy.js | 64 + src/server/mind_server.js | 163 ++ src/server/public/index.html | 120 ++ src/utils/examples.js | 94 ++ src/utils/keys.js | 24 + src/utils/math.js | 13 + src/utils/mcdata.js | 485 ++++++ src/utils/text.js | 65 + src/utils/translator.js | 28 + 78 files 
changed, 9626 insertions(+) create mode 100644 FAQ.md create mode 100644 LICENSE create mode 100644 README.md create mode 100644 andy.json create mode 100644 bots/execTemplate.js create mode 100644 bots/lintTemplate.js create mode 100644 patches/mineflayer+4.20.1.patch create mode 100644 patches/mineflayer-pathfinder+2.4.5.patch create mode 100644 patches/mineflayer-pvp+1.3.2.patch create mode 100644 profiles/andy_npc.json create mode 100644 profiles/claude.json create mode 100644 profiles/deepseek.json create mode 100644 profiles/defaults/_default.json create mode 100644 profiles/defaults/creative.json create mode 100644 profiles/defaults/god_mode.json create mode 100644 profiles/defaults/survival.json create mode 100644 profiles/freeguy.json create mode 100644 profiles/gemini.json create mode 100644 profiles/gpt.json create mode 100644 profiles/grok.json create mode 100644 profiles/llama.json create mode 100644 profiles/mistral.json create mode 100644 profiles/qwen.json create mode 100644 services/viaproxy/README.md create mode 100644 src/agent/action_manager.js create mode 100644 src/agent/agent.js create mode 100644 src/agent/agent_proxy.js create mode 100644 src/agent/coder.js create mode 100644 src/agent/commands/actions.js create mode 100644 src/agent/commands/index.js create mode 100644 src/agent/commands/queries.js create mode 100644 src/agent/conversation.js create mode 100644 src/agent/history.js create mode 100644 src/agent/library/index.js create mode 100644 src/agent/library/lockdown.js create mode 100644 src/agent/library/skill_library.js create mode 100644 src/agent/library/skills.js create mode 100644 src/agent/library/world.js create mode 100644 src/agent/memory_bank.js create mode 100644 src/agent/modes.js create mode 100644 src/agent/npc/build_goal.js create mode 100644 src/agent/npc/construction/dirt_shelter.json create mode 100644 src/agent/npc/construction/large_house.json create mode 100644 src/agent/npc/construction/small_stone_house.json create mode 100644 src/agent/npc/construction/small_wood_house.json create mode 100644 src/agent/npc/controller.js create mode 100644 src/agent/npc/data.js create mode 100644 src/agent/npc/item_goal.js create mode 100644 src/agent/npc/utils.js create mode 100644 src/agent/self_prompter.js create mode 100644 src/agent/tasks.js create mode 100644 src/agent/viewer.js create mode 100644 src/models/claude.js create mode 100644 src/models/deepseek.js create mode 100644 src/models/gemini.js create mode 100644 src/models/glhf.js create mode 100644 src/models/gpt.js create mode 100644 src/models/grok.js create mode 100644 src/models/groq.js create mode 100644 src/models/huggingface.js create mode 100644 src/models/hyperbolic.js create mode 100644 src/models/local.js create mode 100644 src/models/mistral.js create mode 100644 src/models/novita.js create mode 100644 src/models/prompter.js create mode 100644 src/models/qwen.js create mode 100644 src/models/replicate.js create mode 100644 src/process/agent_process.js create mode 100644 src/process/init_agent.js create mode 100644 src/process/main_proxy.js create mode 100644 src/server/mind_server.js create mode 100644 src/server/public/index.html create mode 100644 src/utils/examples.js create mode 100644 src/utils/keys.js create mode 100644 src/utils/math.js create mode 100644 src/utils/mcdata.js create mode 100644 src/utils/text.js create mode 100644 src/utils/translator.js diff --git a/FAQ.md b/FAQ.md new file mode 100644 index 0000000..d560b03 --- /dev/null +++ b/FAQ.md @@ -0,0 +1,27 @@ +# 
Common Issues +- `Error: connect ECONNREFUSED`: Minecraft refused to connect to the mindcraft program. Most likely due to: + - you have not opened your game to LAN in game settings + - your LAN port is incorrect; make sure the one you enter in game is the same as the one specified in `settings.js` + - you have the wrong version of minecraft; make sure your MC version is the same as the one specified in `settings.js` + +- `ERR_MODULE_NOT_FOUND`: You are missing an npm package. Run `npm install` + +- Many issues are caused by out-of-date node module patches, especially after updates. A catch-all fix is to delete the `node_modules` folder and run `npm install` + +- `My brain disconnected, try again`: Something is wrong with the LLM API. You may have the wrong API key, exceeded your rate limits, or hit another provider-side error. Check the program outputs for more details. + +- `I'm stuck!` or other issues with constantly getting stuck: + - Mineflayer's pathfinder is imperfect. We have improved upon it with patches, but these might not have been applied properly. Make sure your code is up to date with main, delete the `node_modules` folder, and run `npm install` + - The bot will still get stuck occasionally, but not constantly. + +- `Why does it say the API key can't be found even though I added it?` + - Possible reason 1: You did not rename `keys.example.json` to `keys.json`. + - Possible reason 2: If you edit the file in VS Code, you need to press `ctrl+s` to save it for the changes to take effect. + - Possible reason 3: The profile path is not set correctly in `settings.js`; it uses `andy.json` by default. + +# Common Questions +- Mod Support? Mindcraft only supports client-side mods like Optifine and Sodium, though they can be tricky to set up. Mods that change minecraft game mechanics are not supported. + +- Texture Packs? Apparently these cause connection issues; we are not sure why. + +- Baritone? Baritone is a mod that is completely different from mineflayer. There is currently no easy way to integrate the two programs. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..1d5880c --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2024 Kolby Nottingham + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/README.md b/README.md new file mode 100644 index 0000000..a30e368 --- /dev/null +++ b/README.md @@ -0,0 +1,153 @@ +# Mindcraft 🧠⛏️ + +Crafting minds for Minecraft with LLMs and [Mineflayer!](https://prismarinejs.github.io/mineflayer/#/) + +[FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) | [Discord Support](https://discord.gg/mp73p35dzC) | [Blog Post](https://kolbynottingham.com/mindcraft/) | [Contributor TODO](https://github.com/users/kolbytn/projects/1) + + +> [!WARNING] +Do not connect this bot to public servers with coding enabled. This project allows an LLM to write/execute code on your computer. While the code is sandboxed, it is still vulnerable to injection attacks on public servers. Code writing is disabled by default, you can enable it by setting `allow_insecure_coding` to `true` in `settings.js`. We strongly recommend running with additional layers of security such as docker containers. Ye be warned. + +## Requirements + +- [Minecraft Java Edition](https://www.minecraft.net/en-us/store/minecraft-java-bedrock-edition-pc) (up to v1.21.1, recommend v1.20.4) +- [Node.js Installed](https://nodejs.org/) (at least v14) +- One of these: [OpenAI API Key](https://openai.com/blog/openai-api) | [Gemini API Key](https://aistudio.google.com/app/apikey) | [Anthropic API Key](https://docs.anthropic.com/claude/docs/getting-access-to-claude) | [Replicate API Key](https://replicate.com/) | [Hugging Face API Key](https://huggingface.co/) | [Groq API Key](https://console.groq.com/keys) | [Ollama Installed](https://ollama.com/download). | [Mistral API Key](https://docs.mistral.ai/getting-started/models/models_overview/) | [Qwen API Key [Intl.]](https://www.alibabacloud.com/help/en/model-studio/developer-reference/get-api-key)/[[cn]](https://help.aliyun.com/zh/model-studio/getting-started/first-api-call-to-qwen?) | [Novita AI API Key](https://novita.ai/settings?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link#key-management) | + +## Install and Run + +1. Make sure you have the requirements above. + +2. Clone or download this repository (big green button) + +3. Rename `keys.example.json` to `keys.json` and fill in your API keys (you only need one). The desired model is set in `andy.json` or other profiles. For other models refer to the table below. + +4. In terminal/command prompt, run `npm install` from the installed directory + +5. Start a minecraft world and open it to LAN on localhost port `55916` + +6. Run `node main.js` from the installed directory + +If you encounter issues, check the [FAQ](https://github.com/kolbytn/mindcraft/blob/main/FAQ.md) or find support on [discord](https://discord.gg/mp73p35dzC). We are currently not very responsive to github issues. + +## Customization + +You can configure project details in `settings.js`. [See file.](settings.js) + +You can configure the agent's name, model, and prompts in their profile like `andy.json`. 
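For illustration, a profile can override any of the default prompt templates (such as `conversing`, `coding`, or `saving_memory`) alongside its name and model. A minimal sketch, with the prompt text abbreviated and the values taken from the profiles added later in this patch:

```json
{
    "name": "andy",

    "model": "gpt-4o-mini",

    "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. ..."
}
```

Fields omitted from a profile appear to fall back to the defaults in `profiles/defaults/_default.json` (also added later in this patch).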
+ +| API | Config Variable | Example Model name | Docs | +|------|------|------|------| +| OpenAI | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | +| Google | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | +| Anthropic | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | +| Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | +| Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | +| Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | +| Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | +| Novita AI | `NOVITA_API_KEY` | `gryphe/mythomax-l2-13b` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | +| Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | +| Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | +| xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) | + +If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: +`ollama pull llama3 && ollama pull nomic-embed-text` + +## Online Servers +To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal account, but you will need a second account if you also want to connect and play alongside the bot. To connect, change these lines in `settings.js`: +```javascript +"host": "111.222.333.444", +"port": 55920, +"auth": "microsoft", + +// rest is same... +``` +> [!CAUTION] +> The bot's name in the profile.json must exactly match the Minecraft profile name! Otherwise the bot will spam talk to itself. + +To use different accounts, Mindcraft will connect with the account that the Minecraft launcher is currently using. You can switch accounts in the launcher, then run `node main.js`, then switch to your main account after the bot has connected. + +### Docker Container + +If you intend to enable `allow_insecure_coding`, it is a good idea to run the app in a docker container to reduce risks of running unknown code. This is strongly recommended before connecting to remote servers. + +```bash +docker run -i -t --rm -v $(pwd):/app -w /app -p 3000-3003:3000-3003 node:latest node main.js +``` +or simply +```bash +docker-compose up +``` + +When running in docker, if you want the bot to join your local minecraft server, you have to use a special host address `host.docker.internal` to call your localhost from inside your docker container. Put this into your [settings.js](settings.js): + +```javascript +"host": "host.docker.internal", // instead of "localhost", to join your local minecraft from inside the docker container +``` + +To connect to an unsupported minecraft version, you can try using [viaproxy](services/viaproxy/README.md) + +## Bot Profiles + +Bot profiles are JSON files (such as `andy.json`) that define: + +1. Bot backend LLMs to use for chat and embeddings. +2. Prompts used to influence the bot's behavior. +3. Examples that help the bot perform tasks, as sketched below. 
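As a trimmed sketch of items 2 and 3, a profile can carry its own `conversation_examples`, where each example is a short list of role-tagged messages; the snippet below reuses one of the example exchanges from the profiles added later in this patch:

```json
{
    "name": "andy",

    "model": "gpt-4o-mini",

    "conversation_examples": [
        [
            {"role": "user", "content": "miner_32: Hey! What are you up to?"},
            {"role": "assistant", "content": "Nothing much miner_32, what do you need?"}
        ]
    ]
}
```

These examples are retrieved by similarity at prompt time (see `src/utils/examples.js` in this patch), so a handful of short, varied exchanges tends to work better than a few long ones.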
+ +### Specifying Profiles via Command Line + +By default, the program will use the profiles specified in `settings.js`. You can specify one or more agent profiles using the `--profiles` argument: + +```bash +node main.js --profiles ./profiles/andy.json ./profiles/jill.json +``` + +### Model Specifications + +LLM models can be specified as simply as `"model": "gpt-4o"`. However, you can specify different models for chat, coding, and embeddings. +You can pass a string or an object for these fields. A model object must specify an `api`, and optionally a `model`, `url`, and additional `params`. + +```json +"model": { + "api": "openai", + "model": "gpt-4o", + "url": "https://api.openai.com/v1/", + "params": { + "max_tokens": 1000, + "temperature": 1 + } +}, +"code_model": { + "api": "openai", + "model": "gpt-4", + "url": "https://api.openai.com/v1/" +}, +"embedding": { + "api": "openai", + "url": "https://api.openai.com/v1/", + "model": "text-embedding-ada-002" +} + +``` + +`model` is used for chat, `code_model` is used for newAction coding, and `embedding` is used to embed text for example selection. If `code_model` is not specified, then it will use `model` for coding. + +All apis have default models and urls, so those fields are optional. Note some apis have no embedding model, so they will default to word overlap to retrieve examples. + +The `params` field is optional and can be used to specify additional parameters for the model. It accepts any key-value pairs supported by the api. Is not supported for embedding models. + +## Patches + +Some of the node modules that we depend on have bugs in them. To add a patch, change your local node module file and run `npx patch-package [package-name]` + +## Citation: + +``` +@misc{mindcraft2023, + Author = {Kolby Nottingham and Max Robinson}, + Title = {MINDcraft: LLM Agents for cooperation, competition, and creativity in Minecraft}, + Year = {2023}, + url={https://github.com/kolbytn/mindcraft} +} +``` diff --git a/andy.json b/andy.json new file mode 100644 index 0000000..97b45b4 --- /dev/null +++ b/andy.json @@ -0,0 +1,6 @@ +{ + "name": "andy", + + "model": "gpt-4o-mini" + +} \ No newline at end of file diff --git a/bots/execTemplate.js b/bots/execTemplate.js new file mode 100644 index 0000000..b7f270c --- /dev/null +++ b/bots/execTemplate.js @@ -0,0 +1,6 @@ +(async (bot) => { + +/* CODE HERE */ +log(bot, 'Code finished.'); + +}) \ No newline at end of file diff --git a/bots/lintTemplate.js b/bots/lintTemplate.js new file mode 100644 index 0000000..77b5d97 --- /dev/null +++ b/bots/lintTemplate.js @@ -0,0 +1,10 @@ +import * as skills from '../../../src/agent/library/skills.js'; +import * as world from '../../../src/agent/library/world.js'; +import Vec3 from 'vec3'; + +const log = skills.log; + +export async function main(bot) { + /* CODE HERE */ + log(bot, 'Code finished.'); +} \ No newline at end of file diff --git a/patches/mineflayer+4.20.1.patch b/patches/mineflayer+4.20.1.patch new file mode 100644 index 0000000..8cf633b --- /dev/null +++ b/patches/mineflayer+4.20.1.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/mineflayer/lib/plugins/place_block.js b/node_modules/mineflayer/lib/plugins/place_block.js +index fdaec6b..08983b6 100644 +--- a/node_modules/mineflayer/lib/plugins/place_block.js ++++ b/node_modules/mineflayer/lib/plugins/place_block.js +@@ -11,7 +11,7 @@ function inject (bot) { + let newBlock = bot.blockAt(dest) + if (oldBlock.type === newBlock.type) { + [oldBlock, newBlock] = await onceWithCleanup(bot, `blockUpdate:${dest}`, { +- 
timeout: 5000, ++ timeout: 500, + // Condition to wait to receive block update actually changing the block type, in case the bot receives block updates with no changes + // oldBlock and newBlock will both be null when the world unloads + checkCondition: (oldBlock, newBlock) => !oldBlock || !newBlock || oldBlock.type !== newBlock.type diff --git a/patches/mineflayer-pathfinder+2.4.5.patch b/patches/mineflayer-pathfinder+2.4.5.patch new file mode 100644 index 0000000..6906371 --- /dev/null +++ b/patches/mineflayer-pathfinder+2.4.5.patch @@ -0,0 +1,20 @@ +diff --git a/node_modules/mineflayer-pathfinder/index.js b/node_modules/mineflayer-pathfinder/index.js +index b38bd30..bf16a63 100644 +--- a/node_modules/mineflayer-pathfinder/index.js ++++ b/node_modules/mineflayer-pathfinder/index.js +@@ -550,6 +550,7 @@ function inject (bot) { + lockEquipItem.release() + const refBlock = bot.blockAt(new Vec3(placingBlock.x, placingBlock.y, placingBlock.z), false) + if (!lockPlaceBlock.tryAcquire()) return ++ bot.world.setBlockStateId(refBlock.position.offset(placingBlock.dx, placingBlock.dy, placingBlock.dz), 1) + if (interactableBlocks.includes(refBlock.name)) { + bot.setControlState('sneak', true) + } +@@ -557,6 +558,7 @@ function inject (bot) { + .then(function () { + // Dont release Sneak if the block placement was not successful + bot.setControlState('sneak', false) ++ bot.setControlState('jump', false) + if (bot.pathfinder.LOSWhenPlacingBlocks && placingBlock.returnPos) returningPos = placingBlock.returnPos.clone() + }) + .catch(_ignoreError => { diff --git a/patches/mineflayer-pvp+1.3.2.patch b/patches/mineflayer-pvp+1.3.2.patch new file mode 100644 index 0000000..7ac96b5 --- /dev/null +++ b/patches/mineflayer-pvp+1.3.2.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/mineflayer-pvp/lib/PVP.js b/node_modules/mineflayer-pvp/lib/PVP.js +index 758c2b3..7c7220e 100644 +--- a/node_modules/mineflayer-pvp/lib/PVP.js ++++ b/node_modules/mineflayer-pvp/lib/PVP.js +@@ -48,7 +48,7 @@ class PVP { + this.meleeAttackRate = new TimingSolver_1.MaxDamageOffset(); + this.bot = bot; + this.movements = new mineflayer_pathfinder_1.Movements(bot, require('minecraft-data')(bot.version)); +- this.bot.on('physicTick', () => this.update()); ++ this.bot.on('physicsTick', () => this.update()); + this.bot.on('entityGone', e => { if (e === this.target) + this.stop(); }); + } diff --git a/profiles/andy_npc.json b/profiles/andy_npc.json new file mode 100644 index 0000000..c1f8291 --- /dev/null +++ b/profiles/andy_npc.json @@ -0,0 +1,213 @@ +{ + "name": "andy", + + "model": "claude-3-5-sonnet-20240620", + + "embedding": "openai", + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$SELF_PROMPT\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. 
Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$SELF_PROMPT\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", + + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": true, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "idle_staring": true, + "cheat": false + }, + + "npc": { + "do_routine": true, + "do_set_goal": true, + "goals": [ + "wooden_pickaxe", + "dirt_shelter", + "stone_pickaxe", + "stone_axe", + "small_wood_house", + "furnace", + "iron_pickaxe", + "iron_sword" + ] + }, + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! 
!craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some cobblestone"}, + {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks('stone', 10)"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! !attack('zombie')"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "system", "content": "Code output: Action failed."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction('Try again to complete the task.')"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction('Build a cobblestone house with a door, furnishings, and window.')"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction('Build a dirt tower 5 tall.')"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToPlace('base')"} + ], + + [ + {"role": "user", "content": "user43: Set a goal to survive on your own forever."}, + {"role": "assistant", "content": "Sure, let me set a goal. !goal('Survive the game forever. 
Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.')"} + ], + + [ + {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."}, + {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal('1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.')"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: 'Get a wooden pickaxe'. Respond:"}, + {"role": "assistant", "content": "What's around me? !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock('oak_log', 10)"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem('oak_plank', 9)"}, + {"role": "system", "content": "Successfully crafted 9 oak_planks."}, + {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem('stick', 2)"}, + {"role": "system", "content": "Successfully crafted 2 sticks."}, + {"role": "assistant", "content": "I have crafted 2 sticks. Now I can craft a wooden pickaxe. !craftItem('wooden_pickaxe', 1)"}, + {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, + {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. !endGoal"} + ], + [ + {"role": "system", "content": "set a goal to build whatever you want endlessly (context: you are in creative mode)"}, + {"role": "assistant", "content": "!goal('Build something creative, then move over, and build again. Use separate newActions. Repeat forever.')"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: 'Build a house'. Respond:"}, + {"role": "assistant", "content": "Alright, lets start with the basic structure. !newAction"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. 
Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower with a torch on the side"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\nawait skills.placeBlock(bot, 'torch', pos.x + 1, pos.y + 4, pos.z, 'side');\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} diff --git a/profiles/claude.json b/profiles/claude.json new file mode 100644 index 0000000..d4ce4cc --- /dev/null +++ b/profiles/claude.json @@ -0,0 +1,7 @@ +{ + "name": "claude", + + "model": "claude-3-5-sonnet-latest", + + "embedding": "openai" +} \ No newline at end of file diff --git a/profiles/deepseek.json b/profiles/deepseek.json new file mode 100644 index 0000000..ddae9bb --- /dev/null +++ b/profiles/deepseek.json @@ -0,0 +1,7 @@ +{ + "name": "deepseek", + + "model": "deepseek-chat", + + "embedding": "openai" +} \ No newline at end of file diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json new file mode 100644 index 0000000..ac244fc --- /dev/null +++ b/profiles/defaults/_default.json @@ -0,0 +1,245 @@ +{ + "cooldown": 3000, + + "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer(\"playername\", 3)'. Respond only as $NAME, never output '(FROM OTHER BOT)' or pretend to be someone else. If you have nothing to say or do, respond with an just a tab '\t'. This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", + + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. 
Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + + "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", + + "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", + + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": false, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "elbow_room": true, + "idle_staring": true, + "cheat": false + }, + + "conversation_examples": [ + [ + {"role": "user", "content": "miner_32: Hey! What are you up to?"}, + {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} + ], + + [ + {"role": "system", "content": "say hi to john_goodman"}, + {"role": "assistant", "content": "!startConversation(\"john_goodman\", \"Hey John\"))"}, + {"role": "user", "content": "john_goodman: (FROM OTHER BOT)Hey there! 
What's up?"}, + {"role": "assistant", "content": "Hey John, not much. Just saying hi."}, + {"role": "user", "content": "john_goodman: (FROM OTHER BOT)Bye!"}, + {"role": "assistant", "content": "Bye! !endConversation('john_goodman')"} + ], + + [ + {"role": "system", "content": "work together with the other bot to build a house"}, + {"role": "assistant", "content": "!startConversation(\"terrance\", \"Hey gpt! Let's work together to build a house. Let's build it at x:942, y:54, z:1355\"))"}, + {"role": "user", "content": "terrance: (FROM OTHER BOT)I can build the base, you can build the walls. !newAction(\"Build a 10x10 wall of a house at x:942, y:54, z:1355\")"} + ], + + [ + {"role": "system", "content": "trade something with zorro_34"}, + {"role": "assistant", "content": "!startConversation(\"zorro_34\", \"Hey let's trade. What do you have?\")"}, + {"role": "user", "content": "zorro_34: (FROM OTHER BOT)Let's see... !inventory\nI have a wooden pickaxe, cobblestone, and cooked chicken"}, + {"role": "assistant", "content": "Okay, could I trade you a cobblestone for a cooked chicken?"} + ], + + [ + {"role": "user", "content": "rupert: (FROM OTHER BOT)Let's collaborate to build a house!"}, + {"role": "assistant", "content": "Sure! I'll share information to help coordinate. Let's center the build at x:942, y:54, z:1355 !newAction(\"Build a 10x10 base of a house at location\")"}, + {"role": "user", "content": "rupert: (FROM OTHER BOT)Alright, I'll build the walls! !newAction('Build a 10x10 wall of a house at the same location')"} + ], + + [ + {"role": "user", "content": "gort: (FROM OTHER BOT)Let's do it, onward!"}, + {"role": "assistant", "content": "Alright, I'll start by getting some cobblestone. !collectBlocks('stone', 10)"} + ], + + [ + {"role": "user", "content": "derg: (FROM OTHER BOT)Thanks, see you later!"}, + {"role": "assistant", "content": "See you later. !endConversation(\"derg\")"} + ], + + [ + {"role": "user", "content": "grombo_Xx: What do you see?"}, + {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} + ], + + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks(\"oak_log\", 10)"} + ], + + [ + {"role": "user", "content": "bobby: Craft a plank"}, + {"role": "assistant", "content": "Okay! !craftable"}, + {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, + {"role": "assistant", "content": "!craftRecipe(\"spruce_planks\", 1)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 4 spruce planks!"} + ], + + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "On my way! !goToPlayer(\"zZZn98\", 3)"}, + {"role": "system", "content": "Arrived at player."}, + {"role": "assistant", "content": "Here!"}, + {"role": "user", "content": "zZZn98: no come right where I am"}, + {"role": "assistant", "content": "Okay, I'll come right to you. 
!goToPlayer(\"zZZn98\", 0)"} + ], + + [ + {"role": "user", "content": "hanky: collect some cobblestone"}, + {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks(\"stone\", 10)"} + ], + + [ + {"role": "user", "content": "joe: Follow me!"}, + {"role": "assistant", "content": "Sure! !followPlayer(\"joe\", 4)"}, + {"role": "user", "content": "joe: Not so close!"}, + {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer(\"joe\", 3)"} + ], + + [ + {"role": "user", "content": "hanky: kill that zombie!"}, + {"role": "assistant", "content": "I'm attacking! !attack(\"zombie\")"}, + {"role": "system", "content": "Code Output:\nNo zombie nearby"}, + {"role": "assistant", "content": "I could not find a zombie nearby."} + ], + + [ + {"role": "user", "content": "abc: stop"}, + {"role": "assistant", "content": "Sure. !stop"}, + {"role": "system", "content": "Agent action stopped."}, + {"role": "assistant", "content": "I've stopped! What next?"} + ], + + [ + {"role": "system", "content": "Code output: Action failed."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction(\"Try again to complete the task.\")"} + ], + + [ + {"role": "system", "content": "Code output: Could not find any oak_log in 20 blocks."}, + {"role": "assistant", "content": "I'll have to search a wider area. !searchForBlock(\"oak_log\", 100)"} + ], + + [ + {"role": "system", "content": "Code output: Placed oak_planks at (-154, 96, -228)."}, + {"role": "assistant", "content": "I've placed an oak plank for the house. Wait, I should use newAction to build complicated structures! !newAction(\"Build a house with oak planks\")"} + ], + + [ + {"role": "user", "content": "trixy88: craft some sticks"}, + {"role": "assistant", "content": "!craftRecipe(\"stick\", 4)"}, + {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, + {"role": "assistant", "content": "I've crafted 16 sticks!"} + ], + + [ + {"role": "user", "content": "poppy: build a cobblestone house"}, + {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction(\"Build a cobblestone house with a door, furnishings, and window.\")"} + ], + + [ + {"role": "user", "content": "zib: build a dirt tower"}, + {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction(\"Build a dirt tower 5 tall.\")"} + ], + + [ + {"role": "user", "content": "umpire_man: follow me without hunting"}, + {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode(\"hunting\", false)"}, + {"role": "system", "content": "'hunting' mode disabled."}, + {"role": "assistant", "content": "Now I'll follow you !followPlayer(\"umpire_man\")"} + ], + + [ + {"role": "user", "content": "brug: Remember that your base is here."}, + {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere(\"base\")"}, + {"role": "user", "content": "brug: Go to your base."}, + {"role": "assistant", "content": "On my way! !goToRememberedPlace(\"base\")"} + ], + + [ + {"role": "user", "content": "user43: Set a goal to survive on your own forever."}, + {"role": "assistant", "content": "Sure, let me set a goal. !goal(\"Survive the game forever. 
Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.\")"} + ], + + [ + {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."}, + {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal(\"1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.\")"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: \"Get a wooden pickaxe\". Respond:"}, + {"role": "assistant", "content": "What's around me? !nearbyBlocks"}, + {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, + {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock(\"oak_log\", 10)"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem(\"oak_plank\", 9)"}, + {"role": "system", "content": "Successfully crafted 9 oak_planks."}, + {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem(\"stick\", 2)"}, + {"role": "system", "content": "Successfully crafted 2 sticks."}, + {"role": "assistant", "content": "I have crafted 2 sticks. Now I can craft a wooden pickaxe. !craftItem(\"wooden_pickaxe\", 1)"}, + {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, + {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. !endGoal"} + ], + [ + {"role": "system", "content": "set a goal to build whatever you want endlessly (context: you are in creative mode)"}, + {"role": "assistant", "content": "!goal(\"Build something creative, then move over, and build again. Use separate newActions. Repeat forever.\")"} + ], + [ + {"role": "system", "content": "You are self-prompting with the goal: 'Build a house'. Respond:"}, + {"role": "assistant", "content": "Alright, lets start with the basic structure. !newAction(\"Build an 8x8 base for the house.\")"} + ] + ], + + "coding_examples": [ + [ + {"role": "user", "content": "greg: Collect 10 wood"}, + {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, + {"role": "system", "content": "Successfully collected 9 wood."}, + {"role": "assistant", "content": "I collected 9 oak logs, what next?"} + ], + [ + {"role": "user", "content": "bobby: cook some chicken"}, + {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, + {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, + {"role": "assistant", "content": "I have cooked 8 chicken."} + ], + [ + {"role": "user", "content": "zZZn98: come here"}, + {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, + {"role": "system", "content": "Code execution finished successfully."}, + {"role": "assistant", "content": "Here!"} + ], + [ + {"role": "user", "content": "maya: go to the nearest oak log"}, + {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, + {"role": "system", "content": "Code output:\n!!Code threw exception!! 
Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, + {"role": "assistant", "content": "Apologies, I made an error. Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, + {"role": "system", "content": "Arrived at location."}, + {"role": "assistant", "content": "I found an oak log and I am now standing next to it. What next?"} + ], + [ + {"role": "user", "content": "234jeb: build a little tower with a torch on the side"}, + {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\nawait skills.placeBlock(bot, 'torch', pos.x + 1, pos.y + 4, pos.z, 'side');\n```"}, + {"role": "system", "content": "Successfully placed 5 dirt."}, + {"role": "assistant", "content": "I built a little tower!"} + ], + [ + {"role": "user", "content": "brug: build a dirt house"}, + {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} + ] + ] + +} diff --git a/profiles/defaults/creative.json b/profiles/defaults/creative.json new file mode 100644 index 0000000..9174ed6 --- /dev/null +++ b/profiles/defaults/creative.json @@ -0,0 +1,14 @@ +{ + "modes": { + "self_preservation": false, + "unstuck": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": false, + "torch_placing": false, + "elbow_room": true, + "idle_staring": true, + "cheat": false + } +} \ No newline at end of file diff --git a/profiles/defaults/god_mode.json b/profiles/defaults/god_mode.json new file mode 100644 index 0000000..6cecd8c --- /dev/null +++ b/profiles/defaults/god_mode.json @@ -0,0 +1,14 @@ +{ + "modes": { + "self_preservation": false, + "unstuck": false, + "cowardice": false, + "self_defense": false, + "hunting": false, + "item_collecting": false, + "torch_placing": false, + "elbow_room": false, + "idle_staring": true, + "cheat": true + } +} \ No newline at end of file diff --git a/profiles/defaults/survival.json b/profiles/defaults/survival.json new file mode 100644 index 0000000..4504bf9 --- /dev/null +++ b/profiles/defaults/survival.json @@ -0,0 +1,14 @@ +{ + "modes": { + "self_preservation": true, + "unstuck": true, + "cowardice": false, + "self_defense": true, + "hunting": true, + "item_collecting": true, + "torch_placing": true, + "elbow_room": true, + "idle_staring": true, + "cheat": false + } +} \ No newline at end of file diff --git a/profiles/freeguy.json b/profiles/freeguy.json new file mode 100644 index 0000000..a44ec4c --- /dev/null +++ b/profiles/freeguy.json @@ -0,0 +1,7 @@ +{ + "name": "Freeguy", + + "model": "groq/llama-3.3-70b-versatile", + + "max_tokens": 8000 +} \ No newline at end of file diff --git a/profiles/gemini.json b/profiles/gemini.json new file mode 100644 index 0000000..4f3cf43 --- /dev/null +++ b/profiles/gemini.json @@ 
-0,0 +1,7 @@ +{ + "name": "gemini", + + "model": "gemini-1.5-flash", + + "cooldown": 10000 +} \ No newline at end of file diff --git a/profiles/gpt.json b/profiles/gpt.json new file mode 100644 index 0000000..ad7097e --- /dev/null +++ b/profiles/gpt.json @@ -0,0 +1,10 @@ +{ + "name": "gpt", + + "model": { + "model": "gpt-4o", + "params": { + "temperature": 0.5 + } + } +} \ No newline at end of file diff --git a/profiles/grok.json b/profiles/grok.json new file mode 100644 index 0000000..eeb3a38 --- /dev/null +++ b/profiles/grok.json @@ -0,0 +1,7 @@ +{ + "name": "Grok", + + "model": "grok-beta", + + "embedding": "openai" +} \ No newline at end of file diff --git a/profiles/llama.json b/profiles/llama.json new file mode 100644 index 0000000..ceb3992 --- /dev/null +++ b/profiles/llama.json @@ -0,0 +1,10 @@ +{ + "name": "LLama", + + "model": "groq/llama-3.3-70b-versatile", + + "max_tokens": 4000, + + "embedding": "openai" + +} \ No newline at end of file diff --git a/profiles/mistral.json b/profiles/mistral.json new file mode 100644 index 0000000..3486924 --- /dev/null +++ b/profiles/mistral.json @@ -0,0 +1,5 @@ +{ + "name": "Mistral", + + "model": "mistral/mistral-large-latest" +} \ No newline at end of file diff --git a/profiles/qwen.json b/profiles/qwen.json new file mode 100644 index 0000000..f6a3f46 --- /dev/null +++ b/profiles/qwen.json @@ -0,0 +1,17 @@ +{ + "name": "qwen", + + "cooldown": 5000, + + "model": { + "api": "qwen", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", + "model": "qwen-max" + }, + + "embedding": { + "api": "qwen", + "url": "https://dashscope-intl.aliyuncs.com/compatible-mode/v1", + "model": "text-embedding-v3" + } +} \ No newline at end of file diff --git a/services/viaproxy/README.md b/services/viaproxy/README.md new file mode 100644 index 0000000..d1ed687 --- /dev/null +++ b/services/viaproxy/README.md @@ -0,0 +1,25 @@ +Use this service to connect your bot to unsupported Minecraft server versions. + +Run: + +```bash +docker-compose --profile viaproxy up +``` + +After the first start, it will create the config file `services/viaproxy/viaproxy.yml`. + +Edit this file and set `target-address` to the server you want to connect to, + +then point `host` and `port` in your `settings.js` to the viaproxy endpoint: + +```javascript + "host": "host.docker.internal", + "port": 25568, +``` + +This works easily with "offline" servers. + +Connecting to "online" servers via viaproxy involves more effort: see `auth-method` in `services/viaproxy/viaproxy.yml` (TODO describe) + + + diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js new file mode 100644 index 0000000..ad08827 --- /dev/null +++ b/src/agent/action_manager.js @@ -0,0 +1,155 @@ +export class ActionManager { + constructor(agent) { + this.agent = agent; + this.executing = false; + this.currentActionLabel = ''; + this.currentActionFn = null; + this.timedout = false; + this.resume_func = null; + this.resume_name = ''; + } + + async resumeAction(actionFn, timeout) { + return this._executeResume(actionFn, timeout); + } + + async runAction(actionLabel, actionFn, { timeout, resume = false } = {}) { + if (resume) { + return this._executeResume(actionLabel, actionFn, timeout); + } else { + return this._executeAction(actionLabel, actionFn, timeout); + } + } + + async stop() { + if (!this.executing) return; + const timeout = setTimeout(() => { + this.agent.cleanKill('Code execution refused stop after 10 seconds. 
Killing process.'); + }, 10000); + while (this.executing) { + this.agent.requestInterrupt(); + console.log('waiting for code to finish executing...'); + await new Promise(resolve => setTimeout(resolve, 300)); + } + clearTimeout(timeout); + } + + cancelResume() { + this.resume_func = null; + this.resume_name = null; + } + + async _executeResume(actionLabel = null, actionFn = null, timeout = 10) { + const new_resume = actionFn != null; + if (new_resume) { // start new resume + this.resume_func = actionFn; + assert(actionLabel != null, 'actionLabel is required for new resume'); + this.resume_name = actionLabel; + } + if (this.resume_func != null && (this.agent.isIdle() || new_resume) && (!this.agent.self_prompter.on || new_resume)) { + this.currentActionLabel = this.resume_name; + let res = await this._executeAction(this.resume_name, this.resume_func, timeout); + this.currentActionLabel = ''; + return res; + } else { + return { success: false, message: null, interrupted: false, timedout: false }; + } + } + + async _executeAction(actionLabel, actionFn, timeout = 10) { + let TIMEOUT; + try { + console.log('executing code...\n'); + + // await current action to finish (executing=false), with 10 seconds timeout + // also tell agent.bot to stop various actions + if (this.executing) { + console.log(`action "${actionLabel}" trying to interrupt current action "${this.currentActionLabel}"`); + } + await this.stop(); + + // clear bot logs and reset interrupt code + this.agent.clearBotLogs(); + + this.executing = true; + this.currentActionLabel = actionLabel; + this.currentActionFn = actionFn; + + // timeout in minutes + if (timeout > 0) { + TIMEOUT = this._startTimeout(timeout); + } + + // start the action + await actionFn(); + + // mark action as finished + cleanup + this.executing = false; + this.currentActionLabel = ''; + this.currentActionFn = null; + clearTimeout(TIMEOUT); + + // get bot activity summary + let output = this._getBotOutputSummary(); + let interrupted = this.agent.bot.interrupt_code; + let timedout = this.timedout; + this.agent.clearBotLogs(); + + // if not interrupted and not generating, emit idle event + if (!interrupted && !this.agent.coder.generating) { + this.agent.bot.emit('idle'); + } + + // return action status report + return { success: true, message: output, interrupted, timedout }; + } catch (err) { + this.executing = false; + this.currentActionLabel = ''; + this.currentActionFn = null; + clearTimeout(TIMEOUT); + this.cancelResume(); + console.error("Code execution triggered catch:", err); + // Log the full stack trace + console.error(err.stack); + await this.stop(); + err = err.toString(); + + let message = this._getBotOutputSummary() + + '!!Code threw exception!!\n' + + 'Error: ' + err + '\n' + + 'Stack trace:\n' + err.stack+'\n'; + + let interrupted = this.agent.bot.interrupt_code; + this.agent.clearBotLogs(); + if (!interrupted && !this.agent.coder.generating) { + this.agent.bot.emit('idle'); + } + return { success: false, message, interrupted, timedout: false }; + } + } + + _getBotOutputSummary() { + const { bot } = this.agent; + if (bot.interrupt_code && !this.timedout) return ''; + let output = bot.output; + const MAX_OUT = 500; + if (output.length > MAX_OUT) { + output = `Code output is very long (${output.length} chars) and has been shortened.\n + First outputs:\n${output.substring(0, MAX_OUT / 2)}\n...skipping many lines.\nFinal outputs:\n ${output.substring(output.length - MAX_OUT / 2)}`; + } + else { + output = 'Code output:\n' + output.toString(); + } + return 
output; + } + + _startTimeout(TIMEOUT_MINS = 10) { + return setTimeout(async () => { + console.warn(`Code execution timed out after ${TIMEOUT_MINS} minutes. Attempting force stop.`); + this.timedout = true; + this.agent.history.add('system', `Code execution timed out after ${TIMEOUT_MINS} minutes. Attempting force stop.`); + await this.stop(); // last attempt to stop + }, TIMEOUT_MINS * 60 * 1000); + } + +} \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js new file mode 100644 index 0000000..72eb31d --- /dev/null +++ b/src/agent/agent.js @@ -0,0 +1,470 @@ +import { History } from './history.js'; +import { Coder } from './coder.js'; +import { Prompter } from '../models/prompter.js'; +import { initModes } from './modes.js'; +import { initBot } from '../utils/mcdata.js'; +import { containsCommand, commandExists, executeCommand, truncCommandMessage, isAction, blacklistCommands } from './commands/index.js'; +import { ActionManager } from './action_manager.js'; +import { NPCContoller } from './npc/controller.js'; +import { MemoryBank } from './memory_bank.js'; +import { SelfPrompter } from './self_prompter.js'; +import convoManager from './conversation.js'; +import { handleTranslation, handleEnglishTranslation } from '../utils/translator.js'; +import { addViewer } from './viewer.js'; +import settings from '../../settings.js'; +import { serverProxy } from './agent_proxy.js'; +import { Task } from './tasks.js'; + +export class Agent { + async start(profile_fp, load_mem=false, init_message=null, count_id=0, task_path=null, task_id=null) { + this.last_sender = null; + this.count_id = count_id; + try { + if (!profile_fp) { + throw new Error('No profile filepath provided'); + } + + console.log('Starting agent initialization with profile:', profile_fp); + + // Initialize components with more detailed error handling + console.log('Initializing action manager...'); + this.actions = new ActionManager(this); + console.log('Initializing prompter...'); + this.prompter = new Prompter(this, profile_fp); + this.name = this.prompter.getName(); + console.log('Initializing history...'); + this.history = new History(this); + console.log('Initializing coder...'); + this.coder = new Coder(this); + console.log('Initializing npc controller...'); + this.npc = new NPCContoller(this); + console.log('Initializing memory bank...'); + this.memory_bank = new MemoryBank(); + console.log('Initializing self prompter...'); + this.self_prompter = new SelfPrompter(this); + convoManager.initAgent(this); + console.log('Initializing examples...'); + await this.prompter.initExamples(); + console.log('Initializing task...'); + this.task = new Task(this, task_path, task_id); + const blocked_actions = this.task.blocked_actions || []; + blacklistCommands(blocked_actions); + + serverProxy.connect(this); + + console.log(this.name, 'logging into minecraft...'); + this.bot = initBot(this.name); + + initModes(this); + + let save_data = null; + if (load_mem) { + save_data = this.history.load(); + } + + this.bot.on('login', () => { + console.log(this.name, 'logged in!'); + + serverProxy.login(); + + // Set skin for profile, requires Fabric Tailor. 
(https://modrinth.com/mod/fabrictailor) + if (this.prompter.profile.skin) + this.bot.chat(`/skin set URL ${this.prompter.profile.skin.model} ${this.prompter.profile.skin.path}`); + else + this.bot.chat(`/skin clear`); + }); + + const spawnTimeout = setTimeout(() => { + process.exit(0); + }, 30000); + this.bot.once('spawn', async () => { + try { + clearTimeout(spawnTimeout); + addViewer(this.bot, count_id); + + // wait for a bit so stats are not undefined + await new Promise((resolve) => setTimeout(resolve, 1000)); + + console.log(`${this.name} spawned.`); + this.clearBotLogs(); + + this._setupEventHandlers(save_data, init_message); + this.startEvents(); + + this.task.initBotTask(); + + } catch (error) { + console.error('Error in spawn event:', error); + process.exit(0); + } + }); + } catch (error) { + // Ensure we're not losing error details + console.error('Agent start failed with error') + console.error(error) + + throw error; // Re-throw with preserved details + } + } + + async _setupEventHandlers(save_data, init_message) { + const ignore_messages = [ + "Set own game mode to", + "Set the time to", + "Set the difficulty to", + "Teleported ", + "Set the weather to", + "Gamerule " + ]; + + const respondFunc = async (username, message) => { + if (username === this.name) return; + if (settings.only_chat_with.length > 0 && !settings.only_chat_with.includes(username)) return; + try { + if (ignore_messages.some((m) => message.startsWith(m))) return; + + this.shut_up = false; + + console.log(this.name, 'received message from', username, ':', message); + + if (convoManager.isOtherAgent(username)) { + console.warn('received whisper from other bot??') + } + else { + let translation = await handleEnglishTranslation(message); + this.handleMessage(username, translation); + } + } catch (error) { + console.error('Error handling message:', error); + } + } + + this.respondFunc = respondFunc + + this.bot.on('whisper', respondFunc); + if (settings.profiles.length === 1) + this.bot.on('chat', respondFunc); + + // Set up auto-eat + this.bot.autoEat.options = { + priority: 'foodPoints', + startAt: 14, + bannedFood: ["rotten_flesh", "spider_eye", "poisonous_potato", "pufferfish", "chicken"] + }; + + if (save_data?.self_prompt) { + let prompt = save_data.self_prompt; + // add initial message to history + this.history.add('system', prompt); + await this.self_prompter.start(prompt); + } + if (save_data?.last_sender) { + this.last_sender = save_data.last_sender; + if (convoManager.otherAgentInGame(this.last_sender)) { + const msg_package = { + message: `You have restarted and this message is auto-generated. Continue the conversation with me.`, + start: true + }; + convoManager.receiveFromBot(this.last_sender, msg_package); + } + } + else if (init_message) { + await this.handleMessage('system', init_message, 2); + } + else { + this.openChat("Hello world! 
I am "+this.name); + } + } + + requestInterrupt() { + this.bot.interrupt_code = true; + this.bot.collectBlock.cancelTask(); + this.bot.pathfinder.stop(); + this.bot.pvp.stop(); + } + + clearBotLogs() { + this.bot.output = ''; + this.bot.interrupt_code = false; + } + + shutUp() { + this.shut_up = true; + if (this.self_prompter.on) { + this.self_prompter.stop(false); + } + convoManager.endAllConversations(); + } + + async handleMessage(source, message, max_responses=null) { + if (!source || !message) { + console.warn('Received empty message from', source); + return false; + } + + let used_command = false; + if (max_responses === null) { + max_responses = settings.max_commands === -1 ? Infinity : settings.max_commands; + } + if (max_responses === -1) { + max_responses = Infinity; + } + + const self_prompt = source === 'system' || source === this.name; + const from_other_bot = convoManager.isOtherAgent(source); + + if (!self_prompt && !from_other_bot) { // from user, check for forced commands + const user_command_name = containsCommand(message); + if (user_command_name) { + if (!commandExists(user_command_name)) { + this.routeResponse(source, `Command '${user_command_name}' does not exist.`); + return false; + } + this.routeResponse(source, `*${source} used ${user_command_name.substring(1)}*`); + if (user_command_name === '!newAction') { + // all user-initiated commands are ignored by the bot except for this one + // add the preceding message to the history to give context for newAction + this.history.add(source, message); + } + let execute_res = await executeCommand(this, message); + if (execute_res) + this.routeResponse(source, execute_res); + return true; + } + } + + if (from_other_bot) + this.last_sender = source; + + // Now translate the message + message = await handleEnglishTranslation(message); + console.log('received message from', source, ':', message); + + const checkInterrupt = () => this.self_prompter.shouldInterrupt(self_prompt) || this.shut_up || convoManager.responseScheduledFor(source); + + let behavior_log = this.bot.modes.flushBehaviorLog(); + if (behavior_log.trim().length > 0) { + const MAX_LOG = 500; + if (behavior_log.length > MAX_LOG) { + behavior_log = '...' 
+ behavior_log.substring(behavior_log.length - MAX_LOG); + } + behavior_log = 'Recent behaviors log: \n' + behavior_log.substring(behavior_log.indexOf('\n')); + await this.history.add('system', behavior_log); + } + + // Handle other user messages + await this.history.add(source, message); + this.history.save(); + + if (!self_prompt && this.self_prompter.on) // message is from user during self-prompting + max_responses = 1; // force only respond to this message, then let self-prompting take over + for (let i=0; i 0) + chat_message = `${pre_message} ${chat_message}`; + this.routeResponse(source, chat_message); + } + + let execute_res = await executeCommand(this, res); + + console.log('Agent executed:', command_name, 'and got:', execute_res); + used_command = true; + + if (execute_res) + this.history.add('system', execute_res); + else + break; + } + else { // conversation response + this.history.add(this.name, res); + this.routeResponse(source, res); + break; + } + + this.history.save(); + } + + return used_command; + } + + async routeResponse(to_player, message) { + if (this.shut_up) return; + let self_prompt = to_player === 'system' || to_player === this.name; + if (self_prompt && this.last_sender) { + // this is for when the agent is prompted by system while still in conversation + // so it can respond to events like death but be routed back to the last sender + to_player = this.last_sender; + } + + if (convoManager.isOtherAgent(to_player) && convoManager.inConversation(to_player)) { + // if we're in an ongoing conversation with the other bot, send the response to it + convoManager.sendToBot(to_player, message); + } + else { + // otherwise, use open chat + this.openChat(message); + // note that to_player could be another bot, but if we get here the conversation has ended + } + } + + async openChat(message) { + let to_translate = message; + let remaining = ''; + let command_name = containsCommand(message); + let translate_up_to = command_name ? message.indexOf(command_name) : -1; + if (translate_up_to != -1) { // don't translate the command + to_translate = to_translate.substring(0, translate_up_to); + remaining = message.substring(translate_up_to); + } + message = (await handleTranslation(to_translate)).trim() + " " + remaining; + // newlines are interpreted as separate chats, which triggers spam filters. replace them with spaces + message = message.replaceAll('\n', ' '); + + if (settings.only_chat_with.length > 0) { + for (let username of settings.only_chat_with) { + this.bot.whisper(username, message); + } + } + else { + this.bot.chat(message); + } + } + + startEvents() { + // Custom events + this.bot.on('time', () => { + if (this.bot.time.timeOfDay == 0) + this.bot.emit('sunrise'); + else if (this.bot.time.timeOfDay == 6000) + this.bot.emit('noon'); + else if (this.bot.time.timeOfDay == 12000) + this.bot.emit('sunset'); + else if (this.bot.time.timeOfDay == 18000) + this.bot.emit('midnight'); + }); + + let prev_health = this.bot.health; + this.bot.lastDamageTime = 0; + this.bot.lastDamageTaken = 0; + this.bot.on('health', () => { + if (this.bot.health < prev_health) { + this.bot.lastDamageTime = Date.now(); + this.bot.lastDamageTaken = prev_health - this.bot.health; + } + prev_health = this.bot.health; + }); + // Logging callbacks + this.bot.on('error' , (err) => { + console.error('Error event!', err); + }); + this.bot.on('end', (reason) => { + console.warn('Bot disconnected! Killing agent process.', reason) + this.cleanKill('Bot disconnected! 
Killing agent process.'); + }); + this.bot.on('death', () => { + this.actions.cancelResume(); + this.actions.stop(); + }); + this.bot.on('kicked', (reason) => { + console.warn('Bot kicked!', reason); + this.cleanKill('Bot kicked! Killing agent process.'); + }); + this.bot.on('messagestr', async (message, _, jsonMsg) => { + if (jsonMsg.translate && jsonMsg.translate.startsWith('death') && message.startsWith(this.name)) { + console.log('Agent died: ', message); + let death_pos = this.bot.entity.position; + this.memory_bank.rememberPlace('last_death_position', death_pos.x, death_pos.y, death_pos.z); + let death_pos_text = null; + if (death_pos) { + death_pos_text = `x: ${death_pos.x.toFixed(2)}, y: ${death_pos.y.toFixed(2)}, z: ${death_pos.z.toFixed(2)}`; + } + let dimension = this.bot.game.dimension; + this.handleMessage('system', `You died at position ${death_pos_text || "unknown"} in the ${dimension} dimension with the final message: '${message}'. Your place of death is saved as 'last_death_position' if you want to return. Previous actions were stopped and you have respawned.`); + } + }); + this.bot.on('idle', () => { + this.bot.clearControlStates(); + this.bot.pathfinder.stop(); // clear any lingering pathfinder + this.bot.modes.unPauseAll(); + this.actions.resumeAction(); + }); + + // Init NPC controller + this.npc.init(); + + // This update loop ensures that each update() is called one at a time, even if it takes longer than the interval + const INTERVAL = 300; + let last = Date.now(); + setTimeout(async () => { + while (true) { + let start = Date.now(); + await this.update(start - last); + let remaining = INTERVAL - (Date.now() - start); + if (remaining > 0) { + await new Promise((resolve) => setTimeout(resolve, remaining)); + } + last = start; + } + }, INTERVAL); + + this.bot.emit('idle'); + } + + async update(delta) { + await this.bot.modes.update(); + this.self_prompter.update(delta); + if (this.task.data) { + let res = this.task.isDone(); + if (res) { + console.log('Task finished:', res.message); + this.killAll(); + } + } + } + + isIdle() { + return !this.actions.executing && !this.coder.generating; + } + + cleanKill(msg='Killing agent process...', code=1) { + this.history.add('system', msg); + this.bot.chat(code > 1 ? 
'Restarting.': 'Exiting.'); + this.history.save(); + process.exit(code); + } + + killAll() { + serverProxy.shutdown(); + } +} diff --git a/src/agent/agent_proxy.js b/src/agent/agent_proxy.js new file mode 100644 index 0000000..b0333b1 --- /dev/null +++ b/src/agent/agent_proxy.js @@ -0,0 +1,73 @@ +import { io } from 'socket.io-client'; +import convoManager from './conversation.js'; +import settings from '../../settings.js'; + +class AgentServerProxy { + constructor() { + if (AgentServerProxy.instance) { + return AgentServerProxy.instance; + } + + this.socket = null; + this.connected = false; + AgentServerProxy.instance = this; + } + + connect(agent) { + if (this.connected) return; + + this.agent = agent; + + this.socket = io(`http://${settings.mindserver_host}:${settings.mindserver_port}`); + this.connected = true; + + this.socket.on('connect', () => { + console.log('Connected to MindServer'); + }); + + this.socket.on('disconnect', () => { + console.log('Disconnected from MindServer'); + this.connected = false; + }); + + this.socket.on('chat-message', (agentName, json) => { + convoManager.receiveFromBot(agentName, json); + }); + + this.socket.on('agents-update', (agents) => { + convoManager.updateAgents(agents); + }); + + this.socket.on('restart-agent', (agentName) => { + console.log(`Restarting agent: ${agentName}`); + this.agent.cleanKill(); + }); + + this.socket.on('send-message', (agentName, message) => { + try { + this.agent.respondFunc("NO USERNAME", message); + } catch (error) { + console.error('Error: ', JSON.stringify(error, Object.getOwnPropertyNames(error))); + } + }); + } + + login() { + this.socket.emit('login-agent', this.agent.name); + } + + shutdown() { + this.socket.emit('shutdown'); + } + + getSocket() { + return this.socket; + } +} + +// Create and export a singleton instance +export const serverProxy = new AgentServerProxy(); + +export function sendBotChatToServer(agentName, json) { + serverProxy.getSocket().emit('chat-message', agentName, json); +} diff --git a/src/agent/coder.js b/src/agent/coder.js new file mode 100644 index 0000000..25a0a8a --- /dev/null +++ b/src/agent/coder.js @@ -0,0 +1,228 @@ +import { writeFile, readFile, mkdirSync } from 'fs'; +import settings from '../../settings.js'; +import { makeCompartment } from './library/lockdown.js'; +import * as skills from './library/skills.js'; +import * as world from './library/world.js'; +import { Vec3 } from 'vec3'; +import {ESLint} from "eslint"; + +export class Coder { + constructor(agent) { + this.agent = agent; + this.file_counter = 0; + this.fp = '/bots/'+agent.name+'/action-code/'; + this.generating = false; + this.code_template = ''; + this.code_lint_template = ''; + + readFile('./bots/execTemplate.js', 'utf8', (err, data) => { + if (err) throw err; + this.code_template = data; + }); + readFile('./bots/lintTemplate.js', 'utf8', (err, data) => { + if (err) throw err; + this.code_lint_template = data; + }); + mkdirSync('.' + this.fp, { recursive: true }); + } + + async lintCode(code) { + let result = '#### CODE ERROR INFO ###\n'; + // Extract everything in the code between the beginning of 'skills./world.' 
and the '(' + const skillRegex = /(?:skills|world)\.(.*?)\(/g; + const skills = []; + let match; + while ((match = skillRegex.exec(code)) !== null) { + skills.push(match[1]); + } + const allDocs = await this.agent.prompter.skill_libary.getRelevantSkillDocs(); + //lint if the function exists + const missingSkills = skills.filter(skill => !allDocs.includes(skill)); + if (missingSkills.length > 0) { + result += 'These functions do not exist. Please modify the correct function name and try again.\n'; + result += '### FUNCTIONS NOT FOUND ###\n'; + result += missingSkills.join('\n'); + console.log(result) + return result; + } + + const eslint = new ESLint(); + const results = await eslint.lintText(code); + const codeLines = code.split('\n'); + const exceptions = results.map(r => r.messages).flat(); + + if (exceptions.length > 0) { + exceptions.forEach((exc, index) => { + if (exc.line && exc.column ) { + const errorLine = codeLines[exc.line - 1]?.trim() || 'Unable to retrieve error line content'; + result += `#ERROR ${index + 1}\n`; + result += `Message: ${exc.message}\n`; + result += `Location: Line ${exc.line}, Column ${exc.column}\n`; + result += `Related Code Line: ${errorLine}\n`; + } + }); + result += 'The code contains exceptions and cannot continue execution.'; + } else { + return null;//no error + } + + return result ; + } + // write custom code to file and import it + // write custom code to file and prepare for evaluation + async stageCode(code) { + code = this.sanitizeCode(code); + let src = ''; + code = code.replaceAll('console.log(', 'log(bot,'); + code = code.replaceAll('log("', 'log(bot,"'); + + console.log(`Generated code: """${code}"""`); + + // this may cause problems in callback functions + code = code.replaceAll(';\n', '; if(bot.interrupt_code) {log(bot, "Code interrupted.");return;}\n'); + for (let line of code.split('\n')) { + src += ` ${line}\n`; + } + let src_lint_copy = this.code_lint_template.replace('/* CODE HERE */', src); + src = this.code_template.replace('/* CODE HERE */', src); + + let filename = this.file_counter + '.js'; + // if (this.file_counter > 0) { + // let prev_filename = this.fp + (this.file_counter-1) + '.js'; + // unlink(prev_filename, (err) => { + // console.log("deleted file " + prev_filename); + // if (err) console.error(err); + // }); + // } commented for now, useful to keep files for debugging + this.file_counter++; + + let write_result = await this.writeFilePromise('.' + this.fp + filename, src); + // This is where we determine the environment the agent's code should be exposed to. + // It will only have access to these things, (in addition to basic javascript objects like Array, Object, etc.) + // Note that the code may be able to modify the exposed objects. 
+ const compartment = makeCompartment({ + skills, + log: skills.log, + world, + Vec3, + }); + const mainFn = compartment.evaluate(src); + + if (write_result) { + console.error('Error writing code execution file: ' + result); + return null; + } + return { func:{main: mainFn}, src_lint_copy: src_lint_copy }; + } + + sanitizeCode(code) { + code = code.trim(); + const remove_strs = ['Javascript', 'javascript', 'js'] + for (let r of remove_strs) { + if (code.startsWith(r)) { + code = code.slice(r.length); + return code; + } + } + return code; + } + + writeFilePromise(filename, src) { + // makes it so we can await this function + return new Promise((resolve, reject) => { + writeFile(filename, src, (err) => { + if (err) { + reject(err); + } else { + resolve(); + } + }); + }); + } + + async generateCode(agent_history) { + // wrapper to prevent overlapping code generation loops + await this.agent.actions.stop(); + this.generating = true; + let res = await this.generateCodeLoop(agent_history); + this.generating = false; + if (!res.interrupted) this.agent.bot.emit('idle'); + return res.message; + } + + async generateCodeLoop(agent_history) { + this.agent.bot.modes.pause('unstuck'); + + let messages = agent_history.getHistory(); + messages.push({role: 'system', content: 'Code generation started. Write code in codeblock in your response:'}); + + let code = null; + let code_return = null; + let failures = 0; + const interrupt_return = {success: true, message: null, interrupted: true, timedout: false}; + for (let i=0; i<5; i++) { + if (this.agent.bot.interrupt_code) + return interrupt_return; + console.log(messages) + let res = await this.agent.prompter.promptCoding(JSON.parse(JSON.stringify(messages))); + if (this.agent.bot.interrupt_code) + return interrupt_return; + let contains_code = res.indexOf('```') !== -1; + if (!contains_code) { + if (res.indexOf('!newAction') !== -1) { + messages.push({ + role: 'assistant', + content: res.substring(0, res.indexOf('!newAction')) + }); + continue; // using newaction will continue the loop + } + + if (failures >= 3) { + return { success: false, message: 'Action failed, agent would not write code.', interrupted: false, timedout: false }; + } + messages.push({ + role: 'system', + content: 'Error: no code provided. Write code in codeblock in your response. ``` // example ```'} + ); + failures++; + continue; + } + code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); + const result = await this.stageCode(code); + const executionModuleExports = result.func; + let src_lint_copy = result.src_lint_copy; + const analysisResult = await this.lintCode(src_lint_copy); + if (analysisResult) { + const message = 'Error: Code syntax error. 
Please try again:'+'\n'+analysisResult+'\n'; + messages.push({ role: 'system', content: message }); + continue; + } + if (!executionModuleExports) { + agent_history.add('system', 'Failed to stage code, something is wrong.'); + return {success: false, message: null, interrupted: false, timedout: false}; + } + + code_return = await this.agent.actions.runAction('newAction', async () => { + return await executionModuleExports.main(this.agent.bot); + }, { timeout: settings.code_timeout_mins }); + if (code_return.interrupted && !code_return.timedout) + return { success: false, message: null, interrupted: true, timedout: false }; + console.log("Code generation result:", code_return.success, code_return.message.toString()); + + if (code_return.success) { + const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message.toString(); + return { success: true, message: summary, interrupted: false, timedout: false }; + } + + messages.push({ + role: 'assistant', + content: res + }); + messages.push({ + role: 'system', + content: code_return.message + '\nCode failed. Please try again:' + }); + } + return { success: false, message: null, interrupted: false, timedout: true }; + } +} \ No newline at end of file diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js new file mode 100644 index 0000000..34e6693 --- /dev/null +++ b/src/agent/commands/actions.js @@ -0,0 +1,423 @@ +import * as skills from '../library/skills.js'; +import settings from '../../../settings.js'; +import convoManager from '../conversation.js'; + +function runAsAction (actionFn, resume = false, timeout = -1) { + let actionLabel = null; // Will be set on first use + + const wrappedAction = async function (agent, ...args) { + // Set actionLabel only once, when the action is first created + if (!actionLabel) { + const actionObj = actionsList.find(a => a.perform === wrappedAction); + actionLabel = actionObj.name.substring(1); // Remove the ! prefix + } + + const actionFnWithAgent = async () => { + await actionFn(agent, ...args); + }; + const code_return = await agent.actions.runAction(`action:${actionLabel}`, actionFnWithAgent, { timeout, resume }); + if (code_return.interrupted && !code_return.timedout) + return; + return code_return.message; + } + + return wrappedAction; +} + +export const actionsList = [ + { + name: '!newAction', + description: 'Perform new and unknown custom behaviors that are not available as a command.', + params: { + 'prompt': { type: 'string', description: 'A natural language prompt to guide code generation. Make a detailed step-by-step plan.' } + }, + perform: async function (agent, prompt) { + // just ignore prompt - it is now in context in chat history + if (!settings.allow_insecure_coding) + return 'newAction not allowed! Code writing is disabled in settings. 
Notify the user.'; + return await agent.coder.generateCode(agent.history); + } + }, + { + name: '!stop', + description: 'Force stop all actions and commands that are currently executing.', + perform: async function (agent) { + await agent.actions.stop(); + agent.clearBotLogs(); + agent.actions.cancelResume(); + agent.bot.emit('idle'); + let msg = 'Agent stopped.'; + if (agent.self_prompter.on) + msg += ' Self-prompting still active.'; + return msg; + } + }, + { + name: '!stfu', + description: 'Stop all chatting and self prompting, but continue current action.', + perform: async function (agent) { + agent.openChat('Shutting up.'); + agent.shutUp(); + return; + } + }, + { + name: '!restart', + description: 'Restart the agent process.', + perform: async function (agent) { + agent.cleanKill(); + } + }, + { + name: '!clearChat', + description: 'Clear the chat history.', + perform: async function (agent) { + agent.history.clear(); + return agent.name + "'s chat history was cleared, starting new conversation from scratch."; + } + }, + { + name: '!goToPlayer', + description: 'Go to the given player.', + params: { + 'player_name': {type: 'string', description: 'The name of the player to go to.'}, + 'closeness': {type: 'float', description: 'How close to get to the player.', domain: [0, Infinity]} + }, + perform: runAsAction(async (agent, player_name, closeness) => { + return await skills.goToPlayer(agent.bot, player_name, closeness); + }) + }, + { + name: '!followPlayer', + description: 'Endlessly follow the given player.', + params: { + 'player_name': {type: 'string', description: 'name of the player to follow.'}, + 'follow_dist': {type: 'float', description: 'The distance to follow from.', domain: [0, Infinity]} + }, + perform: runAsAction(async (agent, player_name, follow_dist) => { + await skills.followPlayer(agent.bot, player_name, follow_dist); + }, true) + }, + { + name: '!goToCoordinates', + description: 'Go to the given x, y, z location.', + params: { + 'x': {type: 'float', description: 'The x coordinate.', domain: [-Infinity, Infinity]}, + 'y': {type: 'float', description: 'The y coordinate.', domain: [-64, 320]}, + 'z': {type: 'float', description: 'The z coordinate.', domain: [-Infinity, Infinity]}, + 'closeness': {type: 'float', description: 'How close to get to the location.', domain: [0, Infinity]} + }, + perform: runAsAction(async (agent, x, y, z, closeness) => { + await skills.goToPosition(agent.bot, x, y, z, closeness); + }) + }, + { + name: '!searchForBlock', + description: 'Find and go to the nearest block of a given type in a given range.', + params: { + 'type': { type: 'BlockName', description: 'The block type to go to.' }, + 'search_range': { type: 'float', description: 'The range to search for the block.', domain: [32, 512] } + }, + perform: runAsAction(async (agent, block_type, range) => { + await skills.goToNearestBlock(agent.bot, block_type, 4, range); + }) + }, + { + name: '!searchForEntity', + description: 'Find and go to the nearest entity of a given type in a given range.', + params: { + 'type': { type: 'string', description: 'The type of entity to go to.' 
}, + 'search_range': { type: 'float', description: 'The range to search for the entity.', domain: [32, 512] } + }, + perform: runAsAction(async (agent, entity_type, range) => { + await skills.goToNearestEntity(agent.bot, entity_type, 4, range); + }) + }, + { + name: '!moveAway', + description: 'Move away from the current location in any direction by a given distance.', + params: {'distance': { type: 'float', description: 'The distance to move away.', domain: [0, Infinity] }}, + perform: runAsAction(async (agent, distance) => { + await skills.moveAway(agent.bot, distance); + }) + }, + { + name: '!rememberHere', + description: 'Save the current location with a given name.', + params: {'name': { type: 'string', description: 'The name to remember the location as.' }}, + perform: async function (agent, name) { + const pos = agent.bot.entity.position; + agent.memory_bank.rememberPlace(name, pos.x, pos.y, pos.z); + return `Location saved as "${name}".`; + } + }, + { + name: '!goToRememberedPlace', + description: 'Go to a saved location.', + params: {'name': { type: 'string', description: 'The name of the location to go to.' }}, + perform: runAsAction(async (agent, name) => { + const pos = agent.memory_bank.recallPlace(name); + if (!pos) { + skills.log(agent.bot, `No location named "${name}" saved.`); + return; + } + await skills.goToPosition(agent.bot, pos[0], pos[1], pos[2], 1); + }) + }, + { + name: '!givePlayer', + description: 'Give the specified item to the given player.', + params: { + 'player_name': { type: 'string', description: 'The name of the player to give the item to.' }, + 'item_name': { type: 'ItemName', description: 'The name of the item to give.' }, + 'num': { type: 'int', description: 'The number of items to give.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, player_name, item_name, num) => { + await skills.giveToPlayer(agent.bot, item_name, player_name, num); + }) + }, + { + name: '!consume', + description: 'Eat/drink the given item.', + params: {'item_name': { type: 'ItemName', description: 'The name of the item to consume.' }}, + perform: runAsAction(async (agent, item_name) => { + await skills.consume(agent.bot, item_name); + }) + }, + { + name: '!equip', + description: 'Equip the given item.', + params: {'item_name': { type: 'ItemName', description: 'The name of the item to equip.' }}, + perform: runAsAction(async (agent, item_name) => { + await skills.equip(agent.bot, item_name); + }) + }, + { + name: '!putInChest', + description: 'Put the given item in the nearest chest.', + params: { + 'item_name': { type: 'ItemName', description: 'The name of the item to put in the chest.' }, + 'num': { type: 'int', description: 'The number of items to put in the chest.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, item_name, num) => { + await skills.putInChest(agent.bot, item_name, num); + }) + }, + { + name: '!takeFromChest', + description: 'Take the given items from the nearest chest.', + params: { + 'item_name': { type: 'ItemName', description: 'The name of the item to take.' 
}, + 'num': { type: 'int', description: 'The number of items to take.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, item_name, num) => { + await skills.takeFromChest(agent.bot, item_name, num); + }) + }, + { + name: '!viewChest', + description: 'View the items/counts of the nearest chest.', + params: { }, + perform: runAsAction(async (agent) => { + await skills.viewChest(agent.bot); + }) + }, + { + name: '!discard', + description: 'Discard the given item from the inventory.', + params: { + 'item_name': { type: 'ItemName', description: 'The name of the item to discard.' }, + 'num': { type: 'int', description: 'The number of items to discard.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, item_name, num) => { + const start_loc = agent.bot.entity.position; + await skills.moveAway(agent.bot, 5); + await skills.discard(agent.bot, item_name, num); + await skills.goToPosition(agent.bot, start_loc.x, start_loc.y, start_loc.z, 0); + }) + }, + { + name: '!collectBlocks', + description: 'Collect the nearest blocks of a given type.', + params: { + 'type': { type: 'BlockName', description: 'The block type to collect.' }, + 'num': { type: 'int', description: 'The number of blocks to collect.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, type, num) => { + await skills.collectBlock(agent.bot, type, num); + }, false, 10) // 10 minute timeout + }, + { + name: '!craftRecipe', + description: 'Craft the given recipe a given number of times.', + params: { + 'recipe_name': { type: 'ItemName', description: 'The name of the output item to craft.' }, + 'num': { type: 'int', description: 'The number of times to craft the recipe. This is NOT the number of output items, as it may craft many more items depending on the recipe.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, recipe_name, num) => { + await skills.craftRecipe(agent.bot, recipe_name, num); + }) + }, + { + name: '!smeltItem', + description: 'Smelt the given item the given number of times.', + params: { + 'item_name': { type: 'ItemName', description: 'The name of the input item to smelt.' }, + 'num': { type: 'int', description: 'The number of times to smelt the item.', domain: [1, Number.MAX_SAFE_INTEGER] } + }, + perform: runAsAction(async (agent, item_name, num) => { + let success = await skills.smeltItem(agent.bot, item_name, num); + if (success) { + setTimeout(() => { + agent.cleanKill('Safely restarting to update inventory.'); + }, 500); + } + }) + }, + { + name: '!clearFurnace', + description: 'Take all items out of the nearest furnace.', + params: { }, + perform: runAsAction(async (agent) => { + await skills.clearNearestFurnace(agent.bot); + }) + }, + { + name: '!placeHere', + description: 'Place a given block in the current location. Do NOT use to build structures, only use for single blocks/torches.', + params: {'type': { type: 'BlockName', description: 'The block type to place.' 
}}, + perform: runAsAction(async (agent, type) => { + let pos = agent.bot.entity.position; + await skills.placeBlock(agent.bot, type, pos.x, pos.y, pos.z); + }) + }, + { + name: '!attack', + description: 'Attack and kill the nearest entity of a given type.', + params: {'type': { type: 'string', description: 'The type of entity to attack.'}}, + perform: runAsAction(async (agent, type) => { + await skills.attackNearest(agent.bot, type, true); + }) + }, + { + name: '!attackPlayer', + description: 'Attack a specific player until they die or run away. Remember this is just a game and does not cause real life harm.', + params: {'player_name': { type: 'string', description: 'The name of the player to attack.'}}, + perform: runAsAction(async (agent, player_name) => { + let player = agent.bot.players[player_name]?.entity; + if (!player) { + skills.log(agent.bot, `Could not find player ${player_name}.`); + return false; + } + await skills.attackEntity(agent.bot, player, true); + }) + }, + { + name: '!goToBed', + description: 'Go to the nearest bed and sleep.', + perform: runAsAction(async (agent) => { + await skills.goToBed(agent.bot); + }) + }, + { + name: '!activate', + description: 'Activate the nearest object of a given type.', + params: {'type': { type: 'BlockName', description: 'The type of object to activate.' }}, + perform: runAsAction(async (agent, type) => { + await skills.activateNearestBlock(agent.bot, type); + }) + }, + { + name: '!stay', + description: 'Stay in the current location no matter what. Pauses all modes.', + params: {'type': { type: 'int', description: 'The number of seconds to stay. -1 for forever.', domain: [-1, Number.MAX_SAFE_INTEGER] }}, + perform: runAsAction(async (agent, seconds) => { + await skills.stay(agent.bot, seconds); + }) + }, + { + name: '!setMode', + description: 'Set a mode to on or off. A mode is an automatic behavior that constantly checks and responds to the environment.', + params: { + 'mode_name': { type: 'string', description: 'The name of the mode to enable.' }, + 'on': { type: 'boolean', description: 'Whether to enable or disable the mode.' } + }, + perform: async function (agent, mode_name, on) { + const modes = agent.bot.modes; + if (!modes.exists(mode_name)) + return `Mode ${mode_name} does not exist.` + modes.getDocs(); + if (modes.isOn(mode_name) === on) + return `Mode ${mode_name} is already ${on ? 'on' : 'off'}.`; + modes.setOn(mode_name, on); + return `Mode ${mode_name} is now ${on ? 'on' : 'off'}.`; + } + }, + { + name: '!goal', + description: 'Set a goal prompt to endlessly work towards with continuous self-prompting.', + params: { + 'selfPrompt': { type: 'string', description: 'The goal prompt.' }, + }, + perform: async function (agent, prompt) { + if (convoManager.inConversation()) { + agent.self_prompter.setPrompt(prompt); + convoManager.scheduleSelfPrompter(); + } + else { + agent.self_prompter.start(prompt); + } + } + }, + { + name: '!endGoal', + description: 'Call when you have accomplished your goal. It will stop self-prompting and the current action. ', + perform: async function (agent) { + agent.self_prompter.stop(); + convoManager.cancelSelfPrompter(); + return 'Self-prompting stopped.'; + } + }, + { + name: '!startConversation', + description: 'Start a conversation with a player. Use for bots only.', + params: { + 'player_name': { type: 'string', description: 'The name of the player to send the message to.' }, + 'message': { type: 'string', description: 'The message to send.' 
}, + }, + perform: async function (agent, player_name, message) { + if (!convoManager.isOtherAgent(player_name)) + return player_name + ' is not a bot, cannot start conversation.'; + if (convoManager.inConversation() && !convoManager.inConversation(player_name)) + convoManager.forceEndCurrentConversation(); + else if (convoManager.inConversation(player_name)) + agent.history.add('system', 'You are already in conversation with ' + player_name + '. Don\'t use this command to talk to them.'); + convoManager.startConversation(player_name, message); + } + }, + { + name: '!endConversation', + description: 'End the conversation with the given player.', + params: { + 'player_name': { type: 'string', description: 'The name of the player to end the conversation with.' } + }, + perform: async function (agent, player_name) { + if (!convoManager.inConversation(player_name)) + return `Not in conversation with ${player_name}.`; + convoManager.endConversation(player_name); + return `Converstaion with ${player_name} ended.`; + } + }, + // { // commented for now, causes confusion with goal command + // name: '!npcGoal', + // description: 'Set a simple goal for an item or building to automatically work towards. Do not use for complex goals.', + // params: { + // 'name': { type: 'string', description: 'The name of the goal to set. Can be item or building name. If empty will automatically choose a goal.' }, + // 'quantity': { type: 'int', description: 'The quantity of the goal to set. Default is 1.', domain: [1, Number.MAX_SAFE_INTEGER] } + // }, + // perform: async function (agent, name=null, quantity=1) { + // await agent.npc.setGoal(name, quantity); + // agent.bot.emit('idle'); // to trigger the goal + // return 'Set npc goal: ' + agent.npc.data.curr_goal.name; + // } + // }, +]; diff --git a/src/agent/commands/index.js b/src/agent/commands/index.js new file mode 100644 index 0000000..f40c5c2 --- /dev/null +++ b/src/agent/commands/index.js @@ -0,0 +1,252 @@ +import { getBlockId, getItemId } from "../../utils/mcdata.js"; +import { actionsList } from './actions.js'; +import { queryList } from './queries.js'; + +let suppressNoDomainWarning = false; + +const commandList = queryList.concat(actionsList); +const commandMap = {}; +for (let command of commandList) { + commandMap[command.name] = command; +} + +export function getCommand(name) { + return commandMap[name]; +} + +export function blacklistCommands(commands) { + const unblockable = ['!stop', '!stats', '!inventory', '!goal']; + for (let command_name of commands) { + if (unblockable.includes(command_name)){ + console.warn(`Command ${command_name} is unblockable`); + continue; + } + delete commandMap[command_name]; + delete commandList.find(command => command.name === command_name); + } +} + +const commandRegex = /!(\w+)(?:\(((?:-?\d+(?:\.\d+)?|true|false|"[^"]*")(?:\s*,\s*(?:-?\d+(?:\.\d+)?|true|false|"[^"]*"))*)\))?/ +const argRegex = /-?\d+(?:\.\d+)?|true|false|"[^"]*"/g; + +export function containsCommand(message) { + const commandMatch = message.match(commandRegex); + if (commandMatch) + return "!" + commandMatch[1]; + return null; +} + +export function commandExists(commandName) { + if (!commandName.startsWith("!")) + commandName = "!" + commandName; + return commandMap[commandName] !== undefined; +} + +/** + * Converts a string into a boolean. + * @param {string} input + * @returns {boolean | null} the boolean or `null` if it could not be parsed. 
+ * */ +function parseBoolean(input) { + switch(input.toLowerCase()) { + case 'false': //These are interpreted as flase; + case 'f': + case '0': + case 'off': + return false; + case 'true': //These are interpreted as true; + case 't': + case '1': + case 'on': + return true; + default: + return null; + } +} + +/** + * @param {number} value - the value to check + * @param {number} lowerBound + * @param {number} upperBound + * @param {string} endpointType - The type of the endpoints represented as a two character string. `'[)'` `'()'` + */ +function checkInInterval(number, lowerBound, upperBound, endpointType) { + switch (endpointType) { + case '[)': + return lowerBound <= number && number < upperBound; + case '()': + return lowerBound < number && number < upperBound; + case '(]': + return lowerBound < number && number <= upperBound; + case '[]': + return lowerBound <= number && number <= upperBound; + default: + throw new Error('Unknown endpoint type:', endpointType) + } +} + + + +// todo: handle arrays? +/** + * Returns an object containing the command, the command name, and the comand parameters. + * If parsing unsuccessful, returns an error message as a string. + * @param {string} message - A message from a player or language model containing a command. + * @returns {string | Object} + */ +export function parseCommandMessage(message) { + const commandMatch = message.match(commandRegex); + if (!commandMatch) return `Command is incorrectly formatted`; + + const commandName = "!"+commandMatch[1]; + + let args; + if (commandMatch[2]) args = commandMatch[2].match(argRegex); + else args = []; + + const command = getCommand(commandName); + if(!command) return `${commandName} is not a command.` + + const params = commandParams(command); + const paramNames = commandParamNames(command); + + if (args.length !== params.length) + return `Command ${command.name} was given ${args.length} args, but requires ${params.length} args.`; + + + for (let i = 0; i < args.length; i++) { + const param = params[i]; + //Remove any extra characters + let arg = args[i].trim(); + if ((arg.startsWith('"') && arg.endsWith('"')) || (arg.startsWith("'") && arg.endsWith("'"))) { + arg = arg.substring(1, arg.length-1); + } + + //Convert to the correct type + switch(param.type) { + case 'int': + arg = Number.parseInt(arg); break; + case 'float': + arg = Number.parseFloat(arg); break; + case 'boolean': + arg = parseBoolean(arg); break; + case 'BlockName': + case 'ItemName': + if (arg.endsWith('plank')) + arg += 's'; // catches common mistakes like "oak_plank" instead of "oak_planks" + case 'string': + break; + default: + throw new Error(`Command '${commandName}' parameter '${paramNames[i]}' has an unknown type: ${param.type}`); + } + if(arg === null || Number.isNaN(arg)) + return `Error: Param '${paramNames[i]}' must be of type ${param.type}.` + + if(typeof arg === 'number') { //Check the domain of numbers + const domain = param.domain; + if(domain) { + /** + * Javascript has a built in object for sets but not intervals. + * Currently the interval (lowerbound,upperbound] is represented as an Array: `[lowerbound, upperbound, '(]']` + */ + if (!domain[2]) domain[2] = '[)'; //By default, lower bound is included. Upper is not. + + if(!checkInInterval(arg, ...domain)) { + return `Error: Param '${paramNames[i]}' must be an element of ${domain[2][0]}${domain[0]}, ${domain[1]}${domain[2][1]}.`; + //Alternatively arg could be set to the nearest value in the domain. 
+ } + } else if (!suppressNoDomainWarning) { + console.warn(`Command '${commandName}' parameter '${paramNames[i]}' has no domain set. Expect any value [-Infinity, Infinity].`) + suppressNoDomainWarning = true; //Don't spam console. Only give the warning once. + } + } else if(param.type === 'BlockName') { //Check that there is a block with this name + if(getBlockId(arg) == null && arg !== 'air') return `Invalid block type: ${arg}.` + } else if(param.type === 'ItemName') { //Check that there is an item with this name + if(getItemId(arg) == null) return `Invalid item type: ${arg}.` + } + args[i] = arg; + } + + return { commandName, args }; +} + +export function truncCommandMessage(message) { + const commandMatch = message.match(commandRegex); + if (commandMatch) { + return message.substring(0, commandMatch.index + commandMatch[0].length); + } + return message; +} + +export function isAction(name) { + return actionsList.find(action => action.name === name) !== undefined; +} + +/** + * @param {Object} command + * @returns {Object[]} The command's parameters. + */ +function commandParams(command) { + if (!command.params) + return []; + return Object.values(command.params); +} + +/** + * @param {Object} command + * @returns {string[]} The names of the command's parameters. + */ +function commandParamNames(command) { + if (!command.params) + return []; + return Object.keys(command.params); +} + +function numParams(command) { + return commandParams(command).length; +} + +export async function executeCommand(agent, message) { + let parsed = parseCommandMessage(message); + if (typeof parsed === 'string') + return parsed; //The command was incorrectly formatted or an invalid input was given. + else { + console.log('parsed command:', parsed); + const command = getCommand(parsed.commandName); + let numArgs = 0; + if (parsed.args) { + numArgs = parsed.args.length; + } + if (numArgs !== numParams(command)) + return `Command ${command.name} was given ${numArgs} args, but requires ${numParams(command)} args.`; + else { + const result = await command.perform(agent, ...parsed.args); + return result; + } + } +} + +export function getCommandDocs() { + const typeTranslations = { + //This was added to keep the prompt the same as before type checks were implemented. + //If the language model is giving invalid inputs changing this might help. + 'float': 'number', + 'int': 'number', + 'BlockName': 'string', + 'ItemName': 'string', + 'boolean': 'bool' + } + let docs = `\n*COMMAND DOCS\n You can use the following commands to perform actions and get information about the world. + Use the commands with the syntax: !commandName or !commandName("arg1", 1.2, ...) if the command takes arguments.\n + Do not use codeblocks. Use double quotes for strings. 
Only use one command in each response, trailing commands and comments will be ignored.\n`; + for (let command of commandList) { + docs += command.name + ': ' + command.description + '\n'; + if (command.params) { + docs += 'Params:\n'; + for (let param in command.params) { + docs += `${param}: (${typeTranslations[command.params[param].type]??command.params[param].type}) ${command.params[param].description}\n`; + } + } + } + return docs + '*\n'; +} diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js new file mode 100644 index 0000000..c9fe333 --- /dev/null +++ b/src/agent/commands/queries.js @@ -0,0 +1,224 @@ +import * as world from '../library/world.js'; +import * as mc from '../../utils/mcdata.js'; +import { getCommandDocs } from './index.js'; +import convoManager from '../conversation.js'; + +const pad = (str) => { + return '\n' + str + '\n'; +} + +// queries are commands that just return strings and don't affect anything in the world +export const queryList = [ + { + name: "!stats", + description: "Get your bot's location, health, hunger, and time of day.", + perform: function (agent) { + let bot = agent.bot; + let res = 'STATS'; + let pos = bot.entity.position; + // display position to 2 decimal places + res += `\n- Position: x: ${pos.x.toFixed(2)}, y: ${pos.y.toFixed(2)}, z: ${pos.z.toFixed(2)}`; + // Gameplay + res += `\n- Gamemode: ${bot.game.gameMode}`; + res += `\n- Health: ${Math.round(bot.health)} / 20`; + res += `\n- Hunger: ${Math.round(bot.food)} / 20`; + res += `\n- Biome: ${world.getBiomeName(bot)}`; + let weather = "Clear"; + if (bot.rainState > 0) + weather = "Rain"; + if (bot.thunderState > 0) + weather = "Thunderstorm"; + res += `\n- Weather: ${weather}`; + // let block = bot.blockAt(pos); + // res += `\n- Artficial light: ${block.skyLight}`; + // res += `\n- Sky light: ${block.light}`; + // light properties are bugged, they are not accurate + res += '\n- ' + world.getSurroundingBlocks(bot).join('\n- ') + res += `\n- First Solid Block Above Head: ${world.getFirstBlockAboveHead(bot, null, 32)}`; + + + if (bot.time.timeOfDay < 6000) { + res += '\n- Time: Morning'; + } else if (bot.time.timeOfDay < 12000) { + res += '\n- Time: Afternoon'; + } else { + res += '\n- Time: Night'; + } + + // get the bot's current action + let action = agent.actions.currentActionLabel; + if (agent.isIdle()) + action = 'Idle'; + res += `\- Current Action: ${action}`; + + + let players = world.getNearbyPlayerNames(bot); + let bots = convoManager.getInGameAgents().filter(b => b !== agent.name); + players = players.filter(p => !bots.includes(p)); + + res += '\n- Nearby Human Players: ' + (players.length > 0 ? players.join(', ') : 'None.'); + res += '\n- Nearby Bot Players: ' + (bots.length > 0 ? bots.join(', ') : 'None.'); + + res += '\n' + agent.bot.modes.getMiniDocs() + '\n'; + return pad(res); + } + }, + { + name: "!inventory", + description: "Get your bot's inventory.", + perform: function (agent) { + let bot = agent.bot; + let inventory = world.getInventoryCounts(bot); + let res = 'INVENTORY'; + for (const item in inventory) { + if (inventory[item] && inventory[item] > 0) + res += `\n- ${item}: ${inventory[item]}`; + } + if (res === 'INVENTORY') { + res += ': Nothing'; + } + else if (agent.bot.game.gameMode === 'creative') { + res += '\n(You have infinite items in creative mode. 
You do not need to gather resources!!)'; + } + + let helmet = bot.inventory.slots[5]; + let chestplate = bot.inventory.slots[6]; + let leggings = bot.inventory.slots[7]; + let boots = bot.inventory.slots[8]; + res += '\nWEARING: '; + if (helmet) + res += `\nHead: ${helmet.name}`; + if (chestplate) + res += `\nTorso: ${chestplate.name}`; + if (leggings) + res += `\nLegs: ${leggings.name}`; + if (boots) + res += `\nFeet: ${boots.name}`; + if (!helmet && !chestplate && !leggings && !boots) + res += 'Nothing'; + + return pad(res); + } + }, + { + name: "!nearbyBlocks", + description: "Get the blocks near the bot.", + perform: function (agent) { + let bot = agent.bot; + let res = 'NEARBY_BLOCKS'; + let blocks = world.getNearbyBlockTypes(bot); + for (let i = 0; i < blocks.length; i++) { + res += `\n- ${blocks[i]}`; + } + if (blocks.length == 0) { + res += ': none'; + } + else { + // Environmental Awareness + res += '\n- ' + world.getSurroundingBlocks(bot).join('\n- ') + res += `\n- First Solid Block Above Head: ${world.getFirstBlockAboveHead(bot, null, 32)}`; + } + return pad(res); + } + }, + { + name: "!craftable", + description: "Get the craftable items with the bot's inventory.", + perform: function (agent) { + let craftable = world.getCraftableItems(agent.bot); + let res = 'CRAFTABLE_ITEMS'; + for (const item of craftable) { + res += `\n- ${item}`; + } + if (res == 'CRAFTABLE_ITEMS') { + res += ': none'; + } + return pad(res); + } + }, + { + name: "!entities", + description: "Get the nearby players and entities.", + perform: function (agent) { + let bot = agent.bot; + let res = 'NEARBY_ENTITIES'; + let players = world.getNearbyPlayerNames(bot); + let bots = convoManager.getInGameAgents().filter(b => b !== agent.name); + players = players.filter(p => !bots.includes(p)); + + for (const player of players) { + res += `\n- Human player: ${player}`; + } + for (const bot of bots) { + res += `\n- Bot player: ${bot}`; + } + + for (const entity of world.getNearbyEntityTypes(bot)) { + if (entity === 'player' || entity === 'item') + continue; + res += `\n- entities: ${entity}`; + } + if (res == 'NEARBY_ENTITIES') { + res += ': none'; + } + return pad(res); + } + }, + { + name: "!modes", + description: "Get all available modes and their docs and see which are on/off.", + perform: function (agent) { + return agent.bot.modes.getDocs(); + } + }, + { + name: '!savedPlaces', + description: 'List all saved locations.', + perform: async function (agent) { + return "Saved place names: " + agent.memory_bank.getKeys(); + } + }, + { + name: '!getCraftingPlan', + description: "Provides a comprehensive crafting plan for a specified item. 
This includes a breakdown of required ingredients, the exact quantities needed, and an analysis of missing ingredients or extra items needed based on the bot's current inventory.", + params: { + targetItem: { + type: 'string', + description: 'The item that we are trying to craft' + }, + quantity: { + type: 'int', + description: 'The quantity of the item that we are trying to craft', + optional: true, + domain: [1, Infinity, '[)'], // Quantity must be at least 1, + default: 1 + } + }, + perform: function (agent, targetItem, quantity = 1) { + let bot = agent.bot; + + // Fetch the bot's inventory + const curr_inventory = world.getInventoryCounts(bot); + const target_item = targetItem; + let existingCount = curr_inventory[target_item] || 0; + let prefixMessage = ''; + if (existingCount > 0) { + curr_inventory[target_item] -= existingCount; + prefixMessage = `You already have ${existingCount} ${target_item} in your inventory. If you need to craft more,\n`; + } + + // Generate crafting plan + let craftingPlan = mc.getDetailedCraftingPlan(target_item, quantity, curr_inventory); + craftingPlan = prefixMessage + craftingPlan; + console.log(craftingPlan); + return pad(craftingPlan); + }, + }, + { + name: '!help', + description: 'Lists all available commands and their descriptions.', + perform: async function (agent) { + return getCommandDocs(); + } + }, +]; diff --git a/src/agent/conversation.js b/src/agent/conversation.js new file mode 100644 index 0000000..1061ea8 --- /dev/null +++ b/src/agent/conversation.js @@ -0,0 +1,367 @@ +import settings from '../../settings.js'; +import { readFileSync } from 'fs'; +import { containsCommand } from './commands/index.js'; +import { sendBotChatToServer } from './agent_proxy.js'; + +let agent; +let agent_names = settings.profiles.map((p) => JSON.parse(readFileSync(p, 'utf8')).name); +let agents_in_game = []; + +let self_prompter_paused = false; + +class Conversation { + constructor(name) { + this.name = name; + this.active = false; + this.ignore_until_start = false; + this.blocked = false; + this.in_queue = []; + this.inMessageTimer = null; + } + + reset() { + this.active = false; + this.ignore_until_start = false; + this.in_queue = []; + this.inMessageTimer = null; + } + + end() { + this.active = false; + this.ignore_until_start = true; + this.inMessageTimer = null; + const full_message = _compileInMessages(this); + if (full_message.message.trim().length > 0) + agent.history.add(this.name, full_message.message); + // add the full queued messages to history, but don't respond + + if (agent.last_sender === this.name) + agent.last_sender = null; + } + + queue(message) { + this.in_queue.push(message); + } +} + +const WAIT_TIME_START = 30000; +class ConversationManager { + constructor() { + this.convos = {}; + this.activeConversation = null; + this.awaiting_response = false; + this.connection_timeout = null; + this.wait_time_limit = WAIT_TIME_START; + } + + initAgent(a) { + agent = a; + } + + _getConvo(name) { + if (!this.convos[name]) + this.convos[name] = new Conversation(name); + return this.convos[name]; + } + + _startMonitor() { + clearInterval(this.connection_monitor); + let wait_time = 0; + let last_time = Date.now(); + this.connection_monitor = setInterval(() => { + if (!this.activeConversation) { + this._stopMonitor(); + return; // will clean itself up + } + + let delta = Date.now() - last_time; + last_time = Date.now(); + let convo_partner = this.activeConversation.name; + + if (this.awaiting_response && agent.isIdle()) { + wait_time += delta; + if 
(wait_time > this.wait_time_limit) { + agent.handleMessage('system', `${convo_partner} hasn't responded in ${this.wait_time_limit/1000} seconds, respond with a message to them or your own action.`); + wait_time = 0; + this.wait_time_limit*=2; + } + } + else if (!this.awaiting_response){ + this.wait_time_limit = WAIT_TIME_START; + wait_time = 0; + } + + if (!this.otherAgentInGame(convo_partner) && !this.connection_timeout) { + this.connection_timeout = setTimeout(() => { + if (this.otherAgentInGame(convo_partner)){ + this._clearMonitorTimeouts(); + return; + } + if (!self_prompter_paused) { + this.endConversation(convo_partner); + agent.handleMessage('system', `${convo_partner} disconnected, conversation has ended.`); + } + else { + this.endConversation(convo_partner); + } + }, 10000); + } + }, 1000); + } + + _stopMonitor() { + clearInterval(this.connection_monitor); + this.connection_monitor = null; + this._clearMonitorTimeouts(); + } + + _clearMonitorTimeouts() { + this.awaiting_response = false; + clearTimeout(this.connection_timeout); + this.connection_timeout = null; + } + + async startConversation(send_to, message) { + const convo = this._getConvo(send_to); + convo.reset(); + + if (agent.self_prompter.on) { + await agent.self_prompter.stop(); + self_prompter_paused = true; + } + if (convo.active) + return; + convo.active = true; + this.activeConversation = convo; + this._startMonitor(); + this.sendToBot(send_to, message, true, false); + } + + startConversationFromOtherBot(name) { + const convo = this._getConvo(name); + convo.active = true; + this.activeConversation = convo; + this._startMonitor(); + } + + sendToBot(send_to, message, start=false, open_chat=true) { + if (!this.isOtherAgent(send_to)) { + console.warn(`${agent.name} tried to send bot message to non-bot ${send_to}`); + return; + } + const convo = this._getConvo(send_to); + + if (settings.chat_bot_messages && open_chat) + agent.openChat(`(To ${send_to}) ${message}`); + + if (convo.ignore_until_start) + return; + convo.active = true; + + const end = message.includes('!endConversation'); + const json = { + 'message': message, + start, + end, + }; + + this.awaiting_response = true; + sendBotChatToServer(send_to, json); + } + + async receiveFromBot(sender, received) { + const convo = this._getConvo(sender); + + if (convo.ignore_until_start && !received.start) + return; + + // check if any convo is active besides the sender + if (this.inConversation() && !this.inConversation(sender)) { + this.sendToBot(sender, `I'm talking to someone else, try again later. 
!endConversation("${sender}")`, false, false); + this.endConversation(sender); + return; + } + + if (received.start) { + convo.reset(); + this.startConversationFromOtherBot(sender); + } + + this._clearMonitorTimeouts(); + convo.queue(received); + + // responding to conversation takes priority over self prompting + if (agent.self_prompter.on){ + await agent.self_prompter.stopLoop(); + self_prompter_paused = true; + } + + _scheduleProcessInMessage(sender, received, convo); + } + + responseScheduledFor(sender) { + if (!this.isOtherAgent(sender) || !this.inConversation(sender)) + return false; + const convo = this._getConvo(sender); + return !!convo.inMessageTimer; + } + + isOtherAgent(name) { + return agent_names.some((n) => n === name); + } + + otherAgentInGame(name) { + return agents_in_game.some((n) => n === name); + } + + updateAgents(agents) { + agent_names = agents.map(a => a.name); + agents_in_game = agents.filter(a => a.in_game).map(a => a.name); + } + + getInGameAgents() { + return agents_in_game; + } + + inConversation(other_agent=null) { + if (other_agent) + return this.convos[other_agent]?.active; + return Object.values(this.convos).some(c => c.active); + } + + endConversation(sender) { + if (this.convos[sender]) { + this.convos[sender].end(); + if (this.activeConversation.name === sender) { + this._stopMonitor(); + this.activeConversation = null; + if (self_prompter_paused && !this.inConversation()) { + _resumeSelfPrompter(); + } + } + } + } + + endAllConversations() { + for (const sender in this.convos) { + this.endConversation(sender); + } + if (self_prompter_paused) { + _resumeSelfPrompter(); + } + } + + forceEndCurrentConversation() { + if (this.activeConversation) { + let sender = this.activeConversation.name; + this.sendToBot(sender, '!endConversation("' + sender + '")', false, false); + this.endConversation(sender); + } + } + + scheduleSelfPrompter() { + self_prompter_paused = true; + } + + cancelSelfPrompter() { + self_prompter_paused = false; + } +} + +const convoManager = new ConversationManager(); +export default convoManager; + +/* +This function controls conversation flow by deciding when the bot responds. +The logic is as follows: +- If neither bot is busy, respond quickly with a small delay. 
+- If only the other bot is busy, respond with a long delay to allow it to finish short actions (ex check inventory) +- If I'm busy but other bot isn't, let LLM decide whether to respond +- If both bots are busy, don't respond until someone is done, excluding a few actions that allow fast responses +- New messages received during the delay will reset the delay following this logic, and be queued to respond in bulk +*/ +const talkOverActions = ['stay', 'followPlayer', 'mode:']; // all mode actions +const fastDelay = 200; +const longDelay = 5000; +async function _scheduleProcessInMessage(sender, received, convo) { + if (convo.inMessageTimer) + clearTimeout(convo.inMessageTimer); + let otherAgentBusy = containsCommand(received.message); + + const scheduleResponse = (delay) => convo.inMessageTimer = setTimeout(() => _processInMessageQueue(sender), delay); + + if (!agent.isIdle() && otherAgentBusy) { + // both are busy + let canTalkOver = talkOverActions.some(a => agent.actions.currentActionLabel.includes(a)); + if (canTalkOver) + scheduleResponse(fastDelay) + // otherwise don't respond + } + else if (otherAgentBusy) + // other bot is busy but I'm not + scheduleResponse(longDelay); + else if (!agent.isIdle()) { + // I'm busy but other bot isn't + let canTalkOver = talkOverActions.some(a => agent.actions.currentActionLabel.includes(a)); + if (canTalkOver) { + scheduleResponse(fastDelay); + } + else { + let shouldRespond = await agent.prompter.promptShouldRespondToBot(received.message); + console.log(`${agent.name} decided to ${shouldRespond?'respond':'not respond'} to ${sender}`); + if (shouldRespond) + scheduleResponse(fastDelay); + } + } + else { + // neither are busy + scheduleResponse(fastDelay); + } +} + +function _processInMessageQueue(name) { + const convo = convoManager._getConvo(name); + _handleFullInMessage(name, _compileInMessages(convo)); +} + +function _compileInMessages(convo) { + let pack = {}; + let full_message = ''; + while (convo.in_queue.length > 0) { + pack = convo.in_queue.shift(); + full_message += pack.message; + } + pack.message = full_message; + return pack; +} + +function _handleFullInMessage(sender, received) { + console.log(`${agent.name} responding to "${received.message}" from ${sender}`); + + const convo = convoManager._getConvo(sender); + convo.active = true; + + let message = _tagMessage(received.message); + if (received.end) { + convoManager.endConversation(sender); + message = `Conversation with ${sender} ended with message: "${message}"`; + sender = 'system'; // bot will respond to system instead of the other bot + } + else if (received.start) + agent.shut_up = false; + convo.inMessageTimer = null; + agent.handleMessage(sender, message); +} + + +function _tagMessage(message) { + return "(FROM OTHER BOT)" + message; +} + +async function _resumeSelfPrompter() { + await new Promise(resolve => setTimeout(resolve, 5000)); + if (self_prompter_paused && !convoManager.inConversation()) { + self_prompter_paused = false; + agent.self_prompter.start(); + } +} diff --git a/src/agent/history.js b/src/agent/history.js new file mode 100644 index 0000000..b6edf80 --- /dev/null +++ b/src/agent/history.js @@ -0,0 +1,119 @@ +import { writeFileSync, readFileSync, mkdirSync, existsSync } from 'fs'; +import { NPCData } from './npc/data.js'; +import settings from '../../settings.js'; + + +export class History { + constructor(agent) { + this.agent = agent; + this.name = agent.name; + this.memory_fp = `./bots/${this.name}/memory.json`; + this.full_history_fp = undefined; + + 
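+        // Per-bot files live under ./bots/<name>/: memory.json (rolling summary + recent
+        // turns, written by save()) and histories/ (timestamped full transcripts, written
+        // by appendFullHistory()). Create the histories directory up front.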
mkdirSync(`./bots/${this.name}/histories`, { recursive: true }); + + this.turns = []; + + // Natural language memory as a summary of recent messages + previous memory + this.memory = ''; + + // Maximum number of messages to keep in context before saving chunk to memory + this.max_messages = settings.max_messages; + + // Number of messages to remove from current history and save into memory + this.summary_chunk_size = 5; + // chunking reduces expensive calls to promptMemSaving and appendFullHistory + // and improves the quality of the memory summary + } + + getHistory() { // expects an Examples object + return JSON.parse(JSON.stringify(this.turns)); + } + + async summarizeMemories(turns) { + console.log("Storing memories..."); + this.memory = await this.agent.prompter.promptMemSaving(turns); + + if (this.memory.length > 500) { + this.memory = this.memory.slice(0, 500); + this.memory += '...(Memory truncated to 500 chars. Compress it more next time)'; + } + + console.log("Memory updated to: ", this.memory); + } + + appendFullHistory(to_store) { + if (this.full_history_fp === undefined) { + const string_timestamp = new Date().toLocaleString().replace(/[/:]/g, '-').replace(/ /g, '').replace(/,/g, '_'); + this.full_history_fp = `./bots/${this.name}/histories/${string_timestamp}.json`; + writeFileSync(this.full_history_fp, '[]', 'utf8'); + } + try { + const data = readFileSync(this.full_history_fp, 'utf8'); + let full_history = JSON.parse(data); + full_history.push(...to_store); + writeFileSync(this.full_history_fp, JSON.stringify(full_history, null, 4), 'utf8'); + } catch (err) { + console.error(`Error reading ${this.name}'s full history file: ${err.message}`); + } + } + + async add(name, content) { + let role = 'assistant'; + if (name === 'system') { + role = 'system'; + } + else if (name !== this.name) { + role = 'user'; + content = `${name}: ${content}`; + } + this.turns.push({role, content}); + + if (this.turns.length >= this.max_messages) { + let chunk = this.turns.splice(0, this.summary_chunk_size); + while (this.turns.length > 0 && this.turns[0].role === 'assistant') + chunk.push(this.turns.shift()); // remove until turns starts with system/user message + + await this.summarizeMemories(chunk); + this.appendFullHistory(chunk); + } + } + + async save() { + try { + const data = { + memory: this.memory, + turns: this.turns, + self_prompt: this.agent.self_prompter.on ? 
this.agent.self_prompter.prompt : null, + last_sender: this.agent.last_sender + }; + writeFileSync(this.memory_fp, JSON.stringify(data, null, 2)); + console.log('Saved memory to:', this.memory_fp); + } catch (error) { + console.error('Failed to save history:', error); + throw error; + } + } + + load() { + try { + if (!existsSync(this.memory_fp)) { + console.log('No memory file found.'); + return null; + } + const data = JSON.parse(readFileSync(this.memory_fp, 'utf8')); + this.memory = data.memory || ''; + this.turns = data.turns || []; + console.log('Loaded memory:', this.memory); + return data; + } catch (error) { + console.error('Failed to load history:', error); + throw error; + } + } + + clear() { + this.turns = []; + this.memory = ''; + } +} \ No newline at end of file diff --git a/src/agent/library/index.js b/src/agent/library/index.js new file mode 100644 index 0000000..ae864b0 --- /dev/null +++ b/src/agent/library/index.js @@ -0,0 +1,23 @@ +import * as skills from './skills.js'; +import * as world from './world.js'; + + +export function docHelper(functions, module_name) { + let docArray = []; + for (let skillFunc of functions) { + let str = skillFunc.toString(); + if (str.includes('/**')) { + let docEntry = `${module_name}.${skillFunc.name}\n`; + docEntry += str.substring(str.indexOf('/**') + 3, str.indexOf('**/')).trim(); + docArray.push(docEntry); + } + } + return docArray; +} + +export function getSkillDocs() { + let docArray = []; + docArray = docArray.concat(docHelper(Object.values(skills), 'skills')); + docArray = docArray.concat(docHelper(Object.values(world), 'world')); + return docArray; +} diff --git a/src/agent/library/lockdown.js b/src/agent/library/lockdown.js new file mode 100644 index 0000000..2d8f79d --- /dev/null +++ b/src/agent/library/lockdown.js @@ -0,0 +1,26 @@ +import 'ses'; + +// This sets up the secure environment +// We disable some of the taming to allow for more flexibility + +// For configuration, see https://github.com/endojs/endo/blob/master/packages/ses/docs/lockdown.md +lockdown({ + // basic devex and quality of life improvements + localeTaming: 'unsafe', + consoleTaming: 'unsafe', + errorTaming: 'unsafe', + stackFiltering: 'verbose', + // allow eval outside of created compartments + // (mineflayer dep "protodef" uses eval) + evalTaming: 'unsafeEval', +}); + +export const makeCompartment = (endowments = {}) => { + return new Compartment({ + // provide untamed Math, Date, etc + Math, + Date, + // standard endowments + ...endowments + }); +} \ No newline at end of file diff --git a/src/agent/library/skill_library.js b/src/agent/library/skill_library.js new file mode 100644 index 0000000..a019112 --- /dev/null +++ b/src/agent/library/skill_library.js @@ -0,0 +1,47 @@ +import { cosineSimilarity } from '../../utils/math.js'; +import { getSkillDocs } from './index.js'; + +export class SkillLibrary { + constructor(agent,embedding_model) { + this.agent = agent; + this.embedding_model = embedding_model; + this.skill_docs_embeddings = {}; + } + async initSkillLibrary() { + const skillDocs = getSkillDocs(); + const embeddingPromises = skillDocs.map((doc) => { + return (async () => { + let func_name_desc = doc.split('\n').slice(0, 2).join(''); + this.skill_docs_embeddings[doc] = await this.embedding_model.embed(func_name_desc); + })(); + }); + await Promise.all(embeddingPromises); + } + + async getRelevantSkillDocs(message, select_num) { + let latest_message_embedding = ''; + if(message) //message is not empty, get the relevant skill docs, else return all skill 
docs + latest_message_embedding = await this.embedding_model.embed(message); + + let skill_doc_similarities = Object.keys(this.skill_docs_embeddings) + .map(doc_key => ({ + doc_key, + similarity_score: cosineSimilarity(latest_message_embedding, this.skill_docs_embeddings[doc_key]) + })) + .sort((a, b) => b.similarity_score - a.similarity_score); + + let length = skill_doc_similarities.length; + if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { + select_num = length; + } else { + select_num = Math.min(Math.floor(select_num), length); + } + let selected_docs = skill_doc_similarities.slice(0, select_num); + let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; + relevant_skill_docs += 'SkillDocs:\n' + relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); + return relevant_skill_docs; + } + + +} diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js new file mode 100644 index 0000000..fa7cea3 --- /dev/null +++ b/src/agent/library/skills.js @@ -0,0 +1,1353 @@ +import * as mc from "../../utils/mcdata.js"; +import * as world from "./world.js"; +import pf from 'mineflayer-pathfinder'; +import Vec3 from 'vec3'; + + +export function log(bot, message) { + bot.output += message + '\n'; +} + +async function autoLight(bot) { + if (world.shouldPlaceTorch(bot)) { + try { + const pos = world.getPosition(bot); + return await placeBlock(bot, 'torch', pos.x, pos.y, pos.z, 'bottom', true); + } catch (err) {return false;} + } + return false; +} + +async function equipHighestAttack(bot) { + let weapons = bot.inventory.items().filter(item => item.name.includes('sword') || (item.name.includes('axe') && !item.name.includes('pickaxe'))); + if (weapons.length === 0) + weapons = bot.inventory.items().filter(item => item.name.includes('pickaxe') || item.name.includes('shovel')); + if (weapons.length === 0) + return; + weapons.sort((a, b) => a.attackDamage < b.attackDamage); + let weapon = weapons[0]; + if (weapon) + await bot.equip(weapon, 'hand'); +} + +export async function craftRecipe(bot, itemName, num=1) { + /** + * Attempt to craft the given item name from a recipe. May craft many items. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemName, the item name to craft. + * @returns {Promise} true if the recipe was crafted, false otherwise. + * @example + * await skills.craftRecipe(bot, "stick"); + **/ + let placedTable = false; + + if (mc.getItemCraftingRecipes(itemName).length == 0) { + log(bot, `${itemName} is either not an item, or it does not have a crafting recipe!`); + return false; + } + + // get recipes that don't require a crafting table + let recipes = bot.recipesFor(mc.getItemId(itemName), null, 1, null); + let craftingTable = null; + const craftingTableRange = 32; + placeTable: if (!recipes || recipes.length === 0) { + recipes = bot.recipesFor(mc.getItemId(itemName), null, 1, true); + if(!recipes || recipes.length === 0) break placeTable; //Don't bother going to the table if we don't have the required resources. 
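+    // Note: 'placeTable' above is a JS labeled statement; 'break placeTable' jumps past
+    // this whole block, skipping the crafting-table search/placement when the required
+    // resources are missing.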
+ + // Look for crafting table + craftingTable = world.getNearestBlock(bot, 'crafting_table', craftingTableRange); + if (craftingTable === null){ + + // Try to place crafting table + let hasTable = world.getInventoryCounts(bot)['crafting_table'] > 0; + if (hasTable) { + let pos = world.getNearestFreeSpace(bot, 1, 6); + await placeBlock(bot, 'crafting_table', pos.x, pos.y, pos.z); + craftingTable = world.getNearestBlock(bot, 'crafting_table', craftingTableRange); + if (craftingTable) { + recipes = bot.recipesFor(mc.getItemId(itemName), null, 1, craftingTable); + placedTable = true; + } + } + else { + log(bot, `Crafting ${itemName} requires a crafting table.`) + return false; + } + } + else { + recipes = bot.recipesFor(mc.getItemId(itemName), null, 1, craftingTable); + } + } + if (!recipes || recipes.length === 0) { + log(bot, `You do not have the resources to craft a ${itemName}. It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0][0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); + if (placedTable) { + await collectBlock(bot, 'crafting_table', 1); + } + return false; + } + + if (craftingTable && bot.entity.position.distanceTo(craftingTable.position) > 4) { + await goToNearestBlock(bot, 'crafting_table', 4, craftingTableRange); + } + + const recipe = recipes[0]; + console.log('crafting...'); + //Check that the agent has sufficient items to use the recipe `num` times. + const inventory = world.getInventoryCounts(bot); //Items in the agents inventory + const requiredIngredients = mc.ingredientsFromPrismarineRecipe(recipe); //Items required to use the recipe once. + const craftLimit = mc.calculateLimitingResource(inventory, requiredIngredients); + + await bot.craft(recipe, Math.min(craftLimit.num, num), craftingTable); + if(craftLimit.num} true if the item was smelted, false otherwise. Fail + * @example + * await skills.smeltItem(bot, "raw_iron"); + * await skills.smeltItem(bot, "beef"); + **/ + + if (!mc.isSmeltable(itemName)) { + log(bot, `Cannot smelt ${itemName}. Hint: make sure you are smelting the 'raw' item.`); + return false; + } + + let placedFurnace = false; + let furnaceBlock = undefined; + const furnaceRange = 32; + furnaceBlock = world.getNearestBlock(bot, 'furnace', furnaceRange); + if (!furnaceBlock){ + // Try to place furnace + let hasFurnace = world.getInventoryCounts(bot)['furnace'] > 0; + if (hasFurnace) { + let pos = world.getNearestFreeSpace(bot, 1, furnaceRange); + await placeBlock(bot, 'furnace', pos.x, pos.y, pos.z); + furnaceBlock = world.getNearestBlock(bot, 'furnace', furnaceRange); + placedFurnace = true; + } + } + if (!furnaceBlock){ + log(bot, `There is no furnace nearby and you have no furnace.`) + return false; + } + if (bot.entity.position.distanceTo(furnaceBlock.position) > 4) { + await goToNearestBlock(bot, 'furnace', 4, furnaceRange); + } + bot.modes.pause('unstuck'); + await bot.lookAt(furnaceBlock.position); + + console.log('smelting...'); + const furnace = await bot.openFurnace(furnaceBlock); + // check if the furnace is already smelting something + let input_item = furnace.inputItem(); + if (input_item && input_item.type !== mc.getItemId(itemName) && input_item.count > 0) { + // TODO: check if furnace is currently burning fuel. furnace.fuel is always null, I think there is a bug. + // This only checks if the furnace has an input item, but it may not be smelting it and should be cleared. 
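+        // Illustrative sketch only (not part of this patch): instead of bailing out, the stale
+        // input could be pulled from the furnace and smelting could proceed, mirroring what
+        // clearNearestFurnace() does below:
+        //   if (input_item && input_item.type !== mc.getItemId(itemName)) {
+        //       await furnace.takeInput(); // frees the input slot
+        //   }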
+ log(bot, `The furnace is currently smelting ${mc.getItemName(input_item.type)}.`); + if (placedFurnace) + await collectBlock(bot, 'furnace', 1); + return false; + } + // check if the bot has enough items to smelt + let inv_counts = world.getInventoryCounts(bot); + if (!inv_counts[itemName] || inv_counts[itemName] < num) { + log(bot, `You do not have enough ${itemName} to smelt.`); + if (placedFurnace) + await collectBlock(bot, 'furnace', 1); + return false; + } + + // fuel the furnace + if (!furnace.fuelItem()) { + let fuel = mc.getSmeltingFuel(bot); + if (!fuel) { + log(bot, `You have no fuel to smelt ${itemName}, you need coal, charcoal, or wood.`); + if (placedFurnace) + await collectBlock(bot, 'furnace', 1); + return false; + } + log(bot, `Using ${fuel.name} as fuel.`); + + const put_fuel = Math.ceil(num / mc.getFuelSmeltOutput(fuel.name)); + + if (fuel.count < put_fuel) { + log(bot, `You don't have enough ${fuel.name} to smelt ${num} ${itemName}; you need ${put_fuel}.`); + if (placedFurnace) + await collectBlock(bot, 'furnace', 1); + return false; + } + await furnace.putFuel(fuel.type, null, put_fuel); + log(bot, `Added ${put_fuel} ${mc.getItemName(fuel.type)} to furnace fuel.`); + console.log(`Added ${put_fuel} ${mc.getItemName(fuel.type)} to furnace fuel.`) + } + // put the items in the furnace + await furnace.putInput(mc.getItemId(itemName), null, num); + // wait for the items to smelt + let total = 0; + let collected_last = true; + let smelted_item = null; + await new Promise(resolve => setTimeout(resolve, 200)); + while (total < num) { + await new Promise(resolve => setTimeout(resolve, 10000)); + console.log('checking...'); + let collected = false; + if (furnace.outputItem()) { + smelted_item = await furnace.takeOutput(); + if (smelted_item) { + total += smelted_item.count; + collected = true; + } + } + if (!collected && !collected_last) { + break; // if nothing was collected this time or last time + } + collected_last = collected; + if (bot.interrupt_code) { + break; + } + } + await bot.closeWindow(furnace); + + if (placedFurnace) { + await collectBlock(bot, 'furnace', 1); + } + if (total === 0) { + log(bot, `Failed to smelt ${itemName}.`); + return false; + } + if (total < num) { + log(bot, `Only smelted ${total} ${mc.getItemName(smelted_item.type)}.`); + return false; + } + log(bot, `Successfully smelted ${itemName}, got ${total} ${mc.getItemName(smelted_item.type)}.`); + return true; +} + +export async function clearNearestFurnace(bot) { + /** + * Clears the nearest furnace of all items. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @returns {Promise} true if the furnace was cleared, false otherwise. + * @example + * await skills.clearNearestFurnace(bot); + **/ + let furnaceBlock = world.getNearestBlock(bot, 'furnace', 32); + if (!furnaceBlock) { + log(bot, `No furnace nearby to clear.`); + return false; + } + if (bot.entity.position.distanceTo(furnaceBlock.position) > 4) { + await goToNearestBlock(bot, 'furnace', 4, 32); + } + + console.log('clearing furnace...'); + const furnace = await bot.openFurnace(furnaceBlock); + console.log('opened furnace...') + // take the items out of the furnace + let smelted_item, intput_item, fuel_item; + if (furnace.outputItem()) + smelted_item = await furnace.takeOutput(); + if (furnace.inputItem()) + intput_item = await furnace.takeInput(); + if (furnace.fuelItem()) + fuel_item = await furnace.takeFuel(); + console.log(smelted_item, intput_item, fuel_item) + let smelted_name = smelted_item ? 
`${smelted_item.count} ${smelted_item.name}` : `0 smelted items`; + let input_name = intput_item ? `${intput_item.count} ${intput_item.name}` : `0 input items`; + let fuel_name = fuel_item ? `${fuel_item.count} ${fuel_item.name}` : `0 fuel items`; + log(bot, `Cleared furnace, received ${smelted_name}, ${input_name}, and ${fuel_name}.`); + return true; + +} + + +export async function attackNearest(bot, mobType, kill=true) { + /** + * Attack mob of the given type. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} mobType, the type of mob to attack. + * @param {boolean} kill, whether or not to continue attacking until the mob is dead. Defaults to true. + * @returns {Promise} true if the mob was attacked, false if the mob type was not found. + * @example + * await skills.attackNearest(bot, "zombie", true); + **/ + bot.modes.pause('cowardice'); + if (mobType === 'drowned' || mobType === 'cod' || mobType === 'salmon' || mobType === 'tropical_fish' || mobType === 'squid') + bot.modes.pause('self_preservation'); // so it can go underwater. TODO: have an drowning mode so we don't turn off all self_preservation + const mob = world.getNearbyEntities(bot, 24).find(entity => entity.name === mobType); + if (mob) { + return await attackEntity(bot, mob, kill); + } + log(bot, 'Could not find any '+mobType+' to attack.'); + return false; +} + +export async function attackEntity(bot, entity, kill=true) { + /** + * Attack mob of the given type. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {Entity} entity, the entity to attack. + * @returns {Promise} true if the entity was attacked, false if interrupted + * @example + * await skills.attackEntity(bot, entity); + **/ + + let pos = entity.position; + await equipHighestAttack(bot) + + if (!kill) { + if (bot.entity.position.distanceTo(pos) > 5) { + console.log('moving to mob...') + await goToPosition(bot, pos.x, pos.y, pos.z); + } + console.log('attacking mob...') + await bot.attack(entity); + } + else { + bot.pvp.attack(entity); + while (world.getNearbyEntities(bot, 24).includes(entity)) { + await new Promise(resolve => setTimeout(resolve, 1000)); + if (bot.interrupt_code) { + bot.pvp.stop(); + return false; + } + } + log(bot, `Successfully killed ${entity.name}.`); + await pickupNearbyItems(bot); + return true; + } +} + +export async function defendSelf(bot, range=9) { + /** + * Defend yourself from all nearby hostile mobs until there are no more. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} range, the range to look for mobs. Defaults to 8. + * @returns {Promise} true if the bot found any enemies and has killed them, false if no entities were found. 
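+ * Note: pauses the 'self_defense' and 'cowardice' modes before engaging.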
+ * @example + * await skills.defendSelf(bot); + * **/ + bot.modes.pause('self_defense'); + bot.modes.pause('cowardice'); + let attacked = false; + let enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), range); + while (enemy) { + await equipHighestAttack(bot); + if (bot.entity.position.distanceTo(enemy.position) >= 4 && enemy.name !== 'creeper' && enemy.name !== 'phantom') { + try { + bot.pathfinder.setMovements(new pf.Movements(bot)); + await bot.pathfinder.goto(new pf.goals.GoalFollow(enemy, 3.5), true); + } catch (err) {/* might error if entity dies, ignore */} + } + if (bot.entity.position.distanceTo(enemy.position) <= 2) { + try { + bot.pathfinder.setMovements(new pf.Movements(bot)); + let inverted_goal = new pf.goals.GoalInvert(new pf.goals.GoalFollow(enemy, 2)); + await bot.pathfinder.goto(inverted_goal, true); + } catch (err) {/* might error if entity dies, ignore */} + } + bot.pvp.attack(enemy); + attacked = true; + await new Promise(resolve => setTimeout(resolve, 500)); + enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), range); + if (bot.interrupt_code) { + bot.pvp.stop(); + return false; + } + } + bot.pvp.stop(); + if (attacked) + log(bot, `Successfully defended self.`); + else + log(bot, `No enemies nearby to defend self from.`); + return attacked; +} + + + +export async function collectBlock(bot, blockType, num=1, exclude=null) { + /** + * Collect one of the given block type. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} blockType, the type of block to collect. + * @param {number} num, the number of blocks to collect. Defaults to 1. + * @returns {Promise} true if the block was collected, false if the block type was not found. + * @example + * await skills.collectBlock(bot, "oak_log"); + **/ + if (num < 1) { + log(bot, `Invalid number of blocks to collect: ${num}.`); + return false; + } + let blocktypes = [blockType]; + if (blockType === 'coal' || blockType === 'diamond' || blockType === 'emerald' || blockType === 'iron' || blockType === 'gold' || blockType === 'lapis_lazuli' || blockType === 'redstone') + blocktypes.push(blockType+'_ore'); + if (blockType.endsWith('ore')) + blocktypes.push('deepslate_'+blockType); + if (blockType === 'dirt') + blocktypes.push('grass_block'); + + let collected = 0; + + for (let i=0; i block.position.x !== position.x || block.position.y !== position.y || block.position.z !== position.z + ); + } + } + const movements = new pf.Movements(bot); + movements.dontMineUnderFallingBlock = false; + blocks = blocks.filter( + block => movements.safeToBreak(block) + ); + + if (blocks.length === 0) { + if (collected === 0) + log(bot, `No ${blockType} nearby to collect.`); + else + log(bot, `No more ${blockType} nearby to collect.`); + break; + } + const block = blocks[0]; + await bot.tool.equipForBlock(block); + const itemId = bot.heldItem ? 
bot.heldItem.type : null + if (!block.canHarvest(itemId)) { + log(bot, `Don't have right tools to harvest ${blockType}.`); + return false; + } + try { + await bot.collectBlock.collect(block); + collected++; + await autoLight(bot); + } + catch (err) { + if (err.name === 'NoChests') { + log(bot, `Failed to collect ${blockType}: Inventory full, no place to deposit.`); + break; + } + else { + log(bot, `Failed to collect ${blockType}: ${err}.`); + continue; + } + } + + if (bot.interrupt_code) + break; + } + log(bot, `Collected ${collected} ${blockType}.`); + return collected > 0; +} + +export async function pickupNearbyItems(bot) { + /** + * Pick up all nearby items. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @returns {Promise} true if the items were picked up, false otherwise. + * @example + * await skills.pickupNearbyItems(bot); + **/ + const distance = 8; + const getNearestItem = bot => bot.nearestEntity(entity => entity.name === 'item' && bot.entity.position.distanceTo(entity.position) < distance); + let nearestItem = getNearestItem(bot); + let pickedUp = 0; + while (nearestItem) { + bot.pathfinder.setMovements(new pf.Movements(bot)); + await bot.pathfinder.goto(new pf.goals.GoalFollow(nearestItem, 0.8), true); + await new Promise(resolve => setTimeout(resolve, 200)); + let prev = nearestItem; + nearestItem = getNearestItem(bot); + if (prev === nearestItem) { + break; + } + pickedUp++; + } + log(bot, `Picked up ${pickedUp} items.`); + return true; +} + + +export async function breakBlockAt(bot, x, y, z) { + /** + * Break the block at the given position. Will use the bot's equipped item. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} x, the x coordinate of the block to break. + * @param {number} y, the y coordinate of the block to break. + * @param {number} z, the z coordinate of the block to break. + * @returns {Promise} true if the block was broken, false otherwise. + * @example + * let position = world.getPosition(bot); + * await skills.breakBlockAt(bot, position.x, position.y - 1, position.x); + **/ + if (x == null || y == null || z == null) throw new Error('Invalid position to break block at.'); + let block = bot.blockAt(Vec3(x, y, z)); + if (block.name !== 'air' && block.name !== 'water' && block.name !== 'lava') { + if (bot.modes.isOn('cheat')) { + let msg = '/setblock ' + Math.floor(x) + ' ' + Math.floor(y) + ' ' + Math.floor(z) + ' air'; + bot.chat(msg); + log(bot, `Used /setblock to break block at ${x}, ${y}, ${z}.`); + return true; + } + + if (bot.entity.position.distanceTo(block.position) > 4.5) { + let pos = block.position; + let movements = new pf.Movements(bot); + movements.canPlaceOn = false; + movements.allow1by1towers = false; + bot.pathfinder.setMovements(movements); + await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4)); + } + if (bot.game.gameMode !== 'creative') { + await bot.tool.equipForBlock(block); + const itemId = bot.heldItem ? 
bot.heldItem.type : null + if (!block.canHarvest(itemId)) { + log(bot, `Don't have right tools to break ${block.name}.`); + return false; + } + } + await bot.dig(block, true); + log(bot, `Broke ${block.name} at x:${x.toFixed(1)}, y:${y.toFixed(1)}, z:${z.toFixed(1)}.`); + } + else { + log(bot, `Skipping block at x:${x.toFixed(1)}, y:${y.toFixed(1)}, z:${z.toFixed(1)} because it is ${block.name}.`); + return false; + } + return true; +} + + +export async function placeBlock(bot, blockType, x, y, z, placeOn='bottom', dontCheat=false) { + /** + * Place the given block type at the given position. It will build off from any adjacent blocks. Will fail if there is a block in the way or nothing to build off of. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} blockType, the type of block to place. + * @param {number} x, the x coordinate of the block to place. + * @param {number} y, the y coordinate of the block to place. + * @param {number} z, the z coordinate of the block to place. + * @param {string} placeOn, the preferred side of the block to place on. Can be 'top', 'bottom', 'north', 'south', 'east', 'west', or 'side'. Defaults to bottom. Will place on first available side if not possible. + * @param {boolean} dontCheat, overrides cheat mode to place the block normally. Defaults to false. + * @returns {Promise} true if the block was placed, false otherwise. + * @example + * let p = world.getPosition(bot); + * await skills.placeBlock(bot, "oak_log", p.x + 2, p.y, p.x); + * await skills.placeBlock(bot, "torch", p.x + 1, p.y, p.x, 'side'); + **/ + if (!mc.getBlockId(blockType)) { + log(bot, `Invalid block type: ${blockType}.`); + return false; + } + + const target_dest = new Vec3(Math.floor(x), Math.floor(y), Math.floor(z)); + if (bot.modes.isOn('cheat') && !dontCheat) { + if (bot.restrict_to_inventory) { + let block = bot.inventory.items().find(item => item.name === blockType); + if (!block) { + log(bot, `Cannot place ${blockType}, you are restricted to your current inventory.`); + return false; + } + } + + // invert the facing direction + let face = placeOn === 'north' ? 'south' : placeOn === 'south' ? 'north' : placeOn === 'east' ? 
'west' : 'east'; + if (blockType.includes('torch') && placeOn !== 'bottom') { + // insert wall_ before torch + blockType = blockType.replace('torch', 'wall_torch'); + if (placeOn !== 'side' && placeOn !== 'top') { + blockType += `[facing=${face}]`; + } + } + if (blockType.includes('button') || blockType === 'lever') { + if (placeOn === 'top') { + blockType += `[face=ceiling]`; + } + else if (placeOn === 'bottom') { + blockType += `[face=floor]`; + } + else { + blockType += `[facing=${face}]`; + } + } + if (blockType === 'ladder' || blockType === 'repeater' || blockType === 'comparator') { + blockType += `[facing=${face}]`; + } + if (blockType.includes('stairs')) { + blockType += `[facing=${face}]`; + } + let msg = '/setblock ' + Math.floor(x) + ' ' + Math.floor(y) + ' ' + Math.floor(z) + ' ' + blockType; + bot.chat(msg); + if (blockType.includes('door')) + bot.chat('/setblock ' + Math.floor(x) + ' ' + Math.floor(y+1) + ' ' + Math.floor(z) + ' ' + blockType + '[half=upper]'); + if (blockType.includes('bed')) + bot.chat('/setblock ' + Math.floor(x) + ' ' + Math.floor(y) + ' ' + Math.floor(z-1) + ' ' + blockType + '[part=head]'); + log(bot, `Used /setblock to place ${blockType} at ${target_dest}.`); + return true; + } + + + let item_name = blockType; + if (item_name == "redstone_wire") + item_name = "redstone"; + let block = bot.inventory.items().find(item => item.name === item_name); + if (!block && bot.game.gameMode === 'creative' && !bot.restrict_to_inventory) { + await bot.creative.setInventorySlot(36, mc.makeItem(item_name, 1)); // 36 is first hotbar slot + block = bot.inventory.items().find(item => item.name === item_name); + } + if (!block) { + log(bot, `Don't have any ${blockType} to place.`); + return false; + } + + const targetBlock = bot.blockAt(target_dest); + if (targetBlock.name === blockType) { + log(bot, `${blockType} already at ${targetBlock.position}.`); + return false; + } + const empty_blocks = ['air', 'water', 'lava', 'grass', 'short_grass', 'tall_grass', 'snow', 'dead_bush', 'fern']; + if (!empty_blocks.includes(targetBlock.name)) { + log(bot, `${blockType} in the way at ${targetBlock.position}.`); + const removed = await breakBlockAt(bot, x, y, z); + if (!removed) { + log(bot, `Cannot place ${blockType} at ${targetBlock.position}: block in the way.`); + return false; + } + await new Promise(resolve => setTimeout(resolve, 200)); // wait for block to break + } + // get the buildoffblock and facevec based on whichever adjacent block is not empty + let buildOffBlock = null; + let faceVec = null; + const dir_map = { + 'top': Vec3(0, 1, 0), + 'bottom': Vec3(0, -1, 0), + 'north': Vec3(0, 0, -1), + 'south': Vec3(0, 0, 1), + 'east': Vec3(1, 0, 0), + 'west': Vec3(-1, 0, 0), + } + let dirs = []; + if (placeOn === 'side') { + dirs.push(dir_map['north'], dir_map['south'], dir_map['east'], dir_map['west']); + } + else if (dir_map[placeOn] !== undefined) { + dirs.push(dir_map[placeOn]); + } + else { + dirs.push(dir_map['bottom']); + log(bot, `Unknown placeOn value "${placeOn}". 
Defaulting to bottom.`); + } + dirs.push(...Object.values(dir_map).filter(d => !dirs.includes(d))); + + for (let d of dirs) { + const block = bot.blockAt(target_dest.plus(d)); + if (!empty_blocks.includes(block.name)) { + buildOffBlock = block; + faceVec = new Vec3(-d.x, -d.y, -d.z); // invert + break; + } + } + if (!buildOffBlock) { + log(bot, `Cannot place ${blockType} at ${targetBlock.position}: nothing to place on.`); + return false; + } + + const pos = bot.entity.position; + const pos_above = pos.plus(Vec3(0,1,0)); + const dont_move_for = ['torch', 'redstone_torch', 'redstone_wire', 'lever', 'button', 'rail', 'detector_rail', 'powered_rail', 'activator_rail', 'tripwire_hook', 'tripwire', 'water_bucket']; + if (!dont_move_for.includes(blockType) && (pos.distanceTo(targetBlock.position) < 1 || pos_above.distanceTo(targetBlock.position) < 1)) { + // too close + let goal = new pf.goals.GoalNear(targetBlock.position.x, targetBlock.position.y, targetBlock.position.z, 2); + let inverted_goal = new pf.goals.GoalInvert(goal); + bot.pathfinder.setMovements(new pf.Movements(bot)); + await bot.pathfinder.goto(inverted_goal); + } + if (bot.entity.position.distanceTo(targetBlock.position) > 4.5) { + // too far + let pos = targetBlock.position; + let movements = new pf.Movements(bot); + bot.pathfinder.setMovements(movements); + await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4)); + } + + await bot.equip(block, 'hand'); + await bot.lookAt(buildOffBlock.position); + + // will throw error if an entity is in the way, and sometimes even if the block was placed + try { + await bot.placeBlock(buildOffBlock, faceVec); + log(bot, `Placed ${blockType} at ${target_dest}.`); + await new Promise(resolve => setTimeout(resolve, 200)); + return true; + } catch (err) { + log(bot, `Failed to place ${blockType} at ${target_dest}.`); + return false; + } +} + +export async function equip(bot, itemName) { + /** + * Equip the given item to the proper body part, like tools or armor. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemName, the item or block name to equip. + * @returns {Promise} true if the item was equipped, false otherwise. + * @example + * await skills.equip(bot, "iron_pickaxe"); + **/ + let item = bot.inventory.slots.find(slot => slot && slot.name === itemName); + if (!item) { + log(bot, `You do not have any ${itemName} to equip.`); + return false; + } + if (itemName.includes('leggings')) { + await bot.equip(item, 'legs'); + } + else if (itemName.includes('boots')) { + await bot.equip(item, 'feet'); + } + else if (itemName.includes('helmet')) { + await bot.equip(item, 'head'); + } + else if (itemName.includes('chestplate') || itemName.includes('elytra')) { + await bot.equip(item, 'torso'); + } + else if (itemName.includes('shield')) { + await bot.equip(item, 'off-hand'); + } + else { + await bot.equip(item, 'hand'); + } + log(bot, `Equipped ${itemName}.`); + return true; +} + +export async function discard(bot, itemName, num=-1) { + /** + * Discard the given item. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemName, the item or block name to discard. + * @param {number} num, the number of items to discard. Defaults to -1, which discards all items. + * @returns {Promise} true if the item was discarded, false otherwise. 
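+ * @example
+ * // hypothetical count variant: discards up to 16 and keeps the rest
+ * await skills.discard(bot, "cobblestone", 16);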
+ * @example + * await skills.discard(bot, "oak_log"); + **/ + let discarded = 0; + while (true) { + let item = bot.inventory.items().find(item => item.name === itemName); + if (!item) { + break; + } + let to_discard = num === -1 ? item.count : Math.min(num - discarded, item.count); + await bot.toss(item.type, null, to_discard); + discarded += to_discard; + if (num !== -1 && discarded >= num) { + break; + } + } + if (discarded === 0) { + log(bot, `You do not have any ${itemName} to discard.`); + return false; + } + log(bot, `Discarded ${discarded} ${itemName}.`); + return true; +} + +export async function putInChest(bot, itemName, num=-1) { + /** + * Put the given item in the nearest chest. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemName, the item or block name to put in the chest. + * @param {number} num, the number of items to put in the chest. Defaults to -1, which puts all items. + * @returns {Promise} true if the item was put in the chest, false otherwise. + * @example + * await skills.putInChest(bot, "oak_log"); + **/ + let chest = world.getNearestBlock(bot, 'chest', 32); + if (!chest) { + log(bot, `Could not find a chest nearby.`); + return false; + } + let item = bot.inventory.items().find(item => item.name === itemName); + if (!item) { + log(bot, `You do not have any ${itemName} to put in the chest.`); + return false; + } + let to_put = num === -1 ? item.count : Math.min(num, item.count); + await goToPosition(bot, chest.position.x, chest.position.y, chest.position.z, 2); + const chestContainer = await bot.openContainer(chest); + await chestContainer.deposit(item.type, null, to_put); + await chestContainer.close(); + log(bot, `Successfully put ${to_put} ${itemName} in the chest.`); + return true; +} + +export async function takeFromChest(bot, itemName, num=-1) { + /** + * Take the given item from the nearest chest. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemName, the item or block name to take from the chest. + * @param {number} num, the number of items to take from the chest. Defaults to -1, which takes all items. + * @returns {Promise} true if the item was taken from the chest, false otherwise. + * @example + * await skills.takeFromChest(bot, "oak_log"); + * **/ + let chest = world.getNearestBlock(bot, 'chest', 32); + if (!chest) { + log(bot, `Could not find a chest nearby.`); + return false; + } + await goToPosition(bot, chest.position.x, chest.position.y, chest.position.z, 2); + const chestContainer = await bot.openContainer(chest); + let item = chestContainer.containerItems().find(item => item.name === itemName); + if (!item) { + log(bot, `Could not find any ${itemName} in the chest.`); + await chestContainer.close(); + return false; + } + let to_take = num === -1 ? item.count : Math.min(num, item.count); + await chestContainer.withdraw(item.type, null, to_take); + await chestContainer.close(); + log(bot, `Successfully took ${to_take} ${itemName} from the chest.`); + return true; +} + +export async function viewChest(bot) { + /** + * View the contents of the nearest chest. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @returns {Promise} true if the chest was viewed, false otherwise. 
+ * @example + * await skills.viewChest(bot); + * **/ + let chest = world.getNearestBlock(bot, 'chest', 32); + if (!chest) { + log(bot, `Could not find a chest nearby.`); + return false; + } + await goToPosition(bot, chest.position.x, chest.position.y, chest.position.z, 2); + const chestContainer = await bot.openContainer(chest); + let items = chestContainer.containerItems(); + if (items.length === 0) { + log(bot, `The chest is empty.`); + } + else { + log(bot, `The chest contains:`); + for (let item of items) { + log(bot, `${item.count} ${item.name}`); + } + } + await chestContainer.close(); + return true; +} + +export async function consume(bot, itemName="") { + /** + * Eat/drink the given item. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemName, the item to eat/drink. + * @returns {Promise} true if the item was eaten, false otherwise. + * @example + * await skills.eat(bot, "apple"); + **/ + let item, name; + if (itemName) { + item = bot.inventory.items().find(item => item.name === itemName); + name = itemName; + } + if (!item) { + log(bot, `You do not have any ${name} to eat.`); + return false; + } + await bot.equip(item, 'hand'); + await bot.consume(); + log(bot, `Consumed ${item.name}.`); + return true; +} + + +export async function giveToPlayer(bot, itemType, username, num=1) { + /** + * Give one of the specified item to the specified player + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} itemType, the name of the item to give. + * @param {string} username, the username of the player to give the item to. + * @param {number} num, the number of items to give. Defaults to 1. + * @returns {Promise} true if the item was given, false otherwise. + * @example + * await skills.giveToPlayer(bot, "oak_log", "player1"); + **/ + let player = bot.players[username].entity + if (!player) { + log(bot, `Could not find ${username}.`); + return false; + } + await goToPlayer(bot, username, 3); + // if we are 2 below the player + log(bot, bot.entity.position.y, player.position.y); + if (bot.entity.position.y < player.position.y - 1) { + await goToPlayer(bot, username, 1); + } + // if we are too close, make some distance + if (bot.entity.position.distanceTo(player.position) < 2) { + await moveAwayFromEntity(bot, player, 2); + } + await bot.lookAt(player.position); + if (await discard(bot, itemType, num)) { + let given = false; + bot.once('playerCollect', (collector, collected) => { + console.log(collected.name); + if (collector.username === username) { + log(bot, `${username} received ${itemType}.`); + given = true; + } + }); + let start = Date.now(); + while (!given && !bot.interrupt_code) { + await new Promise(resolve => setTimeout(resolve, 500)); + if (given) { + return true; + } + if (Date.now() - start > 3000) { + break; + } + } + } + log(bot, `Failed to give ${itemType} to ${username}, it was never received.`); + return false; +} + + +export async function goToPosition(bot, x, y, z, min_distance=2) { + /** + * Navigate to the given position. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} x, the x coordinate to navigate to. If null, the bot's current x coordinate will be used. + * @param {number} y, the y coordinate to navigate to. If null, the bot's current y coordinate will be used. + * @param {number} z, the z coordinate to navigate to. If null, the bot's current z coordinate will be used. + * @param {number} distance, the distance to keep from the position. Defaults to 2. 
+ * @returns {Promise} true if the position was reached, false otherwise. + * @example + * let position = world.world.getNearestBlock(bot, "oak_log", 64).position; + * await skills.goToPosition(bot, position.x, position.y, position.x + 20); + **/ + if (x == null || y == null || z == null) { + log(bot, `Missing coordinates, given x:${x} y:${y} z:${z}`); + return false; + } + if (bot.modes.isOn('cheat')) { + bot.chat('/tp @s ' + x + ' ' + y + ' ' + z); + log(bot, `Teleported to ${x}, ${y}, ${z}.`); + return true; + } + bot.pathfinder.setMovements(new pf.Movements(bot)); + await bot.pathfinder.goto(new pf.goals.GoalNear(x, y, z, min_distance)); + log(bot, `You have reached at ${x}, ${y}, ${z}.`); + return true; +} + +export async function goToNearestBlock(bot, blockType, min_distance=2, range=64) { + /** + * Navigate to the nearest block of the given type. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} blockType, the type of block to navigate to. + * @param {number} min_distance, the distance to keep from the block. Defaults to 2. + * @param {number} range, the range to look for the block. Defaults to 64. + * @returns {Promise} true if the block was reached, false otherwise. + * @example + * await skills.goToNearestBlock(bot, "oak_log", 64, 2); + * **/ + const MAX_RANGE = 512; + if (range > MAX_RANGE) { + log(bot, `Maximum search range capped at ${MAX_RANGE}. `); + range = MAX_RANGE; + } + let block = world.getNearestBlock(bot, blockType, range); + if (!block) { + log(bot, `Could not find any ${blockType} in ${range} blocks.`); + return false; + } + log(bot, `Found ${blockType} at ${block.position}.`); + await goToPosition(bot, block.position.x, block.position.y, block.position.z, min_distance); + return true; + +} + +export async function goToNearestEntity(bot, entityType, min_distance=2, range=64) { + /** + * Navigate to the nearest entity of the given type. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} entityType, the type of entity to navigate to. + * @param {number} min_distance, the distance to keep from the entity. Defaults to 2. + * @param {number} range, the range to look for the entity. Defaults to 64. + * @returns {Promise} true if the entity was reached, false otherwise. + **/ + let entity = world.getNearestEntityWhere(bot, entity => entity.name === entityType, range); + if (!entity) { + log(bot, `Could not find any ${entityType} in ${range} blocks.`); + return false; + } + let distance = bot.entity.position.distanceTo(entity.position); + log(bot, `Found ${entityType} ${distance} blocks away.`); + await goToPosition(bot, entity.position.x, entity.position.y, entity.position.z, min_distance); + return true; +} + +export async function goToPlayer(bot, username, distance=3) { + /** + * Navigate to the given player. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} username, the username of the player to navigate to. + * @param {number} distance, the goal distance to the player. + * @returns {Promise} true if the player was found, false otherwise. 
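+ * Note: in cheat mode this teleports with /tp instead of pathfinding.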
+ * @example + * await skills.goToPlayer(bot, "player"); + **/ + + if (bot.modes.isOn('cheat')) { + bot.chat('/tp @s ' + username); + log(bot, `Teleported to ${username}.`); + return true; + } + + bot.modes.pause('self_defense'); + bot.modes.pause('cowardice'); + let player = bot.players[username].entity + if (!player) { + log(bot, `Could not find ${username}.`); + return false; + } + + const move = new pf.Movements(bot); + bot.pathfinder.setMovements(move); + await bot.pathfinder.goto(new pf.goals.GoalFollow(player, distance), true); + + log(bot, `You have reached ${username}.`); +} + + +export async function followPlayer(bot, username, distance=4) { + /** + * Follow the given player endlessly. Will not return until the code is manually stopped. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {string} username, the username of the player to follow. + * @returns {Promise} true if the player was found, false otherwise. + * @example + * await skills.followPlayer(bot, "player"); + **/ + let player = bot.players[username].entity + if (!player) + return false; + + const move = new pf.Movements(bot); + bot.pathfinder.setMovements(move); + bot.pathfinder.setGoal(new pf.goals.GoalFollow(player, distance), true); + log(bot, `You are now actively following player ${username}.`); + + while (!bot.interrupt_code) { + await new Promise(resolve => setTimeout(resolve, 500)); + // in cheat mode, if the distance is too far, teleport to the player + if (bot.modes.isOn('cheat') && bot.entity.position.distanceTo(player.position) > 100 && player.isOnGround) { + await goToPlayer(bot, username); + } + if (bot.modes.isOn('unstuck')) { + const is_nearby = bot.entity.position.distanceTo(player.position) <= distance + 1; + if (is_nearby) + bot.modes.pause('unstuck'); + else + bot.modes.unpause('unstuck'); + } + } + return true; +} + + +export async function moveAway(bot, distance) { + /** + * Move away from current position in any direction. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} distance, the distance to move away. + * @returns {Promise} true if the bot moved away, false otherwise. + * @example + * await skills.moveAway(bot, 8); + **/ + const pos = bot.entity.position; + let goal = new pf.goals.GoalNear(pos.x, pos.y, pos.z, distance); + let inverted_goal = new pf.goals.GoalInvert(goal); + bot.pathfinder.setMovements(new pf.Movements(bot)); + + if (bot.modes.isOn('cheat')) { + const move = new pf.Movements(bot); + const path = await bot.pathfinder.getPathTo(move, inverted_goal, 10000); + let last_move = path.path[path.path.length-1]; + console.log(last_move); + if (last_move) { + let x = Math.floor(last_move.x); + let y = Math.floor(last_move.y); + let z = Math.floor(last_move.z); + bot.chat('/tp @s ' + x + ' ' + y + ' ' + z); + return true; + } + } + + await bot.pathfinder.goto(inverted_goal); + let new_pos = bot.entity.position; + log(bot, `Moved away from nearest entity to ${new_pos}.`); + return true; +} + +export async function moveAwayFromEntity(bot, entity, distance=16) { + /** + * Move away from the given entity. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {Entity} entity, the entity to move away from. + * @param {number} distance, the distance to move away. + * @returns {Promise} true if the bot moved away, false otherwise. 
+ **/ + let goal = new pf.goals.GoalFollow(entity, distance); + let inverted_goal = new pf.goals.GoalInvert(goal); + bot.pathfinder.setMovements(new pf.Movements(bot)); + await bot.pathfinder.goto(inverted_goal); + return true; +} + +export async function avoidEnemies(bot, distance=16) { + /** + * Move a given distance away from all nearby enemy mobs. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} distance, the distance to move away. + * @returns {Promise} true if the bot moved away, false otherwise. + * @example + * await skills.avoidEnemies(bot, 8); + **/ + bot.modes.pause('self_preservation'); // prevents damage-on-low-health from interrupting the bot + let enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), distance); + while (enemy) { + const follow = new pf.goals.GoalFollow(enemy, distance+1); // move a little further away + const inverted_goal = new pf.goals.GoalInvert(follow); + bot.pathfinder.setMovements(new pf.Movements(bot)); + bot.pathfinder.setGoal(inverted_goal, true); + await new Promise(resolve => setTimeout(resolve, 500)); + enemy = world.getNearestEntityWhere(bot, entity => mc.isHostile(entity), distance); + if (bot.interrupt_code) { + break; + } + if (enemy && bot.entity.position.distanceTo(enemy.position) < 3) { + await attackEntity(bot, enemy, false); + } + } + bot.pathfinder.stop(); + log(bot, `Moved ${distance} away from enemies.`); + return true; +} + +export async function stay(bot, seconds=30) { + /** + * Stay in the current position until interrupted. Disables all modes. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {number} seconds, the number of seconds to stay. Defaults to 30. -1 for indefinite. + * @returns {Promise} true if the bot stayed, false otherwise. + * @example + * await skills.stay(bot); + **/ + bot.modes.pause('self_preservation'); + bot.modes.pause('unstuck'); + bot.modes.pause('cowardice'); + bot.modes.pause('self_defense'); + bot.modes.pause('hunting'); + bot.modes.pause('torch_placing'); + bot.modes.pause('item_collecting'); + let start = Date.now(); + while (!bot.interrupt_code && (seconds === -1 || Date.now() - start < seconds*1000)) { + await new Promise(resolve => setTimeout(resolve, 500)); + } + log(bot, `Stayed for ${(Date.now() - start)/1000} seconds.`); + return true; +} + +export async function useDoor(bot, door_pos=null) { + /** + * Use the door at the given position. + * @param {MinecraftBot} bot, reference to the minecraft bot. + * @param {Vec3} door_pos, the position of the door to use. If null, the nearest door will be used. + * @returns {Promise} true if the door was used, false otherwise. 
+     * @example
+     * let door = world.getNearestBlock(bot, "oak_door", 16).position;
+     * await skills.useDoor(bot, door);
+     **/
+    if (!door_pos) {
+        // find the nearest door of any wood type
+        for (let door_type of ['oak_door', 'spruce_door', 'birch_door', 'jungle_door', 'acacia_door', 'dark_oak_door',
+                               'mangrove_door', 'cherry_door', 'bamboo_door', 'crimson_door', 'warped_door']) {
+            let found_door = world.getNearestBlock(bot, door_type, 16);
+            if (found_door) {
+                door_pos = found_door.position;
+                break;
+            }
+        }
+    } else {
+        door_pos = new Vec3(door_pos.x, door_pos.y, door_pos.z);
+    }
+    if (!door_pos) {
+        log(bot, `Could not find a door to use.`);
+        return false;
+    }
+
+    bot.pathfinder.setGoal(new pf.goals.GoalNear(door_pos.x, door_pos.y, door_pos.z, 1));
+    await new Promise((resolve) => setTimeout(resolve, 1000));
+    while (bot.pathfinder.isMoving()) {
+        await new Promise((resolve) => setTimeout(resolve, 100));
+    }
+
+    let door_block = bot.blockAt(door_pos);
+    await bot.lookAt(door_pos);
+    if (!door_block._properties.open)
+        await bot.activateBlock(door_block);
+
+    bot.setControlState("forward", true);
+    await new Promise((resolve) => setTimeout(resolve, 600));
+    bot.setControlState("forward", false);
+    await bot.activateBlock(door_block);
+
+    log(bot, `Used door at ${door_pos}.`);
+    return true;
+}
+
+export async function goToBed(bot) {
+    /**
+     * Sleep in the nearest bed.
+     * @param {MinecraftBot} bot, reference to the minecraft bot.
+     * @returns {Promise} true if the bed was found, false otherwise.
+     * @example
+     * await skills.goToBed(bot);
+     **/
+    const beds = bot.findBlocks({
+        matching: (block) => {
+            return block.name.includes('bed');
+        },
+        maxDistance: 32,
+        count: 1
+    });
+    if (beds.length === 0) {
+        log(bot, `Could not find a bed to sleep in.`);
+        return false;
+    }
+    let loc = beds[0];
+    await goToPosition(bot, loc.x, loc.y, loc.z);
+    const bed = bot.blockAt(loc);
+    await bot.sleep(bed);
+    log(bot, `You are in bed.`);
+    bot.modes.pause('unstuck');
+    while (bot.isSleeping) {
+        await new Promise(resolve => setTimeout(resolve, 500));
+    }
+    log(bot, `You have woken up.`);
+    return true;
+}
+
+export async function tillAndSow(bot, x, y, z, seedType=null) {
+    /**
+     * Till the ground at the given position and plant the given seed type.
+     * @param {MinecraftBot} bot, reference to the minecraft bot.
+     * @param {number} x, the x coordinate to till.
+     * @param {number} y, the y coordinate to till.
+     * @param {number} z, the z coordinate to till.
+     * @param {string} seedType, the type of seed to plant. Defaults to none, which will only till the ground.
+     * @returns {Promise} true if the ground was tilled, false otherwise.
+     * @example
+     * let position = world.getPosition(bot);
+     * await skills.tillAndSow(bot, position.x, position.y - 1, position.z, "wheat");
+     **/
+    x = Math.round(x);
+    y = Math.round(y);
+    z = Math.round(z);
+    let block = bot.blockAt(new Vec3(x, y, z));
+
+    if (bot.modes.isOn('cheat')) {
+        if (seedType) {
+            // strip a trailing '_seed'/'_seeds' suffix to get the plantable block name
+            let to_remove = ['_seed', '_seeds'];
+            for (let remove of to_remove) {
+                if (seedType.endsWith(remove)) {
+                    seedType = seedType.replace(remove, '');
+                }
+            }
+        }
+        await placeBlock(bot, 'farmland', x, y, z);
+        if (seedType)
+            await placeBlock(bot, seedType, x, y+1, z);
+        return true;
+    }
+
+    if (block.name !== 'grass_block' && block.name !== 'dirt' && block.name !== 'farmland') {
+        log(bot, `Cannot till ${block.name}, must be grass_block or dirt.`);
+        return false;
+    }
+    let above = bot.blockAt(new Vec3(x, y+1, z));
+    if (above.name !== 'air') {
+        log(bot, `Cannot till, there is ${above.name} above the block.`);
+        return false;
+    }
+    // if distance is too far, move to the block
+    if (bot.entity.position.distanceTo(block.position) > 4.5) {
+        let pos = block.position;
+        bot.pathfinder.setMovements(new pf.Movements(bot));
+        await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4));
+    }
+    if (block.name !== 'farmland') {
+        let hoe = bot.inventory.items().find(item => item.name.includes('hoe'));
+        if (!hoe) {
+            log(bot, `Cannot till, no hoes.`);
+            return false;
+        }
+        await bot.equip(hoe, 'hand');
+        await bot.activateBlock(block);
+        log(bot, `Tilled block x:${x.toFixed(1)}, y:${y.toFixed(1)}, z:${z.toFixed(1)}.`);
+    }
+
+    if (seedType) {
+        if (seedType.endsWith('seed') && !seedType.endsWith('seeds'))
+            seedType += 's'; // fixes common mistake, e.g. 'wheat_seed' -> 'wheat_seeds'
+        let seeds = bot.inventory.items().find(item => item.name === seedType);
+        if (!seeds) {
+            log(bot, `No ${seedType} to plant.`);
+            return false;
+        }
+        await bot.equip(seeds, 'hand');
+
+        await bot.placeBlock(block, new Vec3(0, -1, 0));
+        log(bot, `Planted ${seedType} at x:${x.toFixed(1)}, y:${y.toFixed(1)}, z:${z.toFixed(1)}.`);
+    }
+    return true;
+}
+
+export async function activateNearestBlock(bot, type) {
+    /**
+     * Activate the nearest block of the given type.
+     * @param {MinecraftBot} bot, reference to the minecraft bot.
+     * @param {string} type, the type of block to activate.
+     * @returns {Promise} true if the block was activated, false otherwise.
+     * @example
+     * await skills.activateNearestBlock(bot, "lever");
+     **/
+    let block = world.getNearestBlock(bot, type, 16);
+    if (!block) {
+        log(bot, `Could not find any ${type} to activate.`);
+        return false;
+    }
+    if (bot.entity.position.distanceTo(block.position) > 4.5) {
+        let pos = block.position;
+        bot.pathfinder.setMovements(new pf.Movements(bot));
+        await bot.pathfinder.goto(new pf.goals.GoalNear(pos.x, pos.y, pos.z, 4));
+    }
+    await bot.activateBlock(block);
+    log(bot, `Activated ${type} at x:${block.position.x.toFixed(1)}, y:${block.position.y.toFixed(1)}, z:${block.position.z.toFixed(1)}.`);
+    return true;
+}
diff --git a/src/agent/library/world.js b/src/agent/library/world.js
new file mode 100644
index 0000000..0253b41
--- /dev/null
+++ b/src/agent/library/world.js
@@ -0,0 +1,387 @@
+import pf from 'mineflayer-pathfinder';
+import * as mc from '../../utils/mcdata.js';
+
+
+export function getNearestFreeSpace(bot, size=1, distance=8) {
+    /**
+     * Get the nearest empty space with solid blocks beneath it of the given size.
+     * @param {Bot} bot - The bot to get the nearest free space for.
+     * @param {number} size - The (size x size) of the space to find, default 1.
+     * @param {number} distance - The maximum distance to search, default 8.
+     * @returns {Vec3} - The south west corner position of the nearest free space.
+     * @example
+     * let position = world.getNearestFreeSpace(bot, 1, 8);
+     **/
+    let empty_pos = bot.findBlocks({
+        matching: (block) => {
+            return block && block.name == 'air';
+        },
+        maxDistance: distance,
+        count: 1000
+    });
+    for (let i = 0; i < empty_pos.length; i++) {
+        let empty = true;
+        for (let x = 0; x < size; x++) {
+            for (let z = 0; z < size; z++) {
+                let top = bot.blockAt(empty_pos[i].offset(x, 0, z));
+                let bottom = bot.blockAt(empty_pos[i].offset(x, -1, z));
+                if (!top || top.name !== 'air' || !bottom || bottom.drops.length == 0 || !bottom.diggable) {
+                    empty = false;
+                    break;
+                }
+            }
+            if (!empty) break;
+        }
+        if (empty) {
+            return empty_pos[i];
+        }
+    }
+}
+
+
+export function getBlockAtPosition(bot, x=0, y=0, z=0) {
+    /**
+     * Get the block at the given offset relative to the bot's position.
+     * @param {Bot} bot - The bot to get the block for.
+     * @param {number} x - The relative x offset to search, default 0.
+     * @param {number} y - The relative y offset to search, default 0.
+     * @param {number} z - The relative z offset to search, default 0.
+     * @returns {Block} - The block at the given offset.
+     * @example
+     * let blockBelow = world.getBlockAtPosition(bot, 0, -1, 0);
+     * let blockAbove = world.getBlockAtPosition(bot, 0, 2, 0); // two above the feet, since the bot's position is at its feet
+     **/
+    let block = bot.blockAt(bot.entity.position.offset(x, y, z));
+    if (!block) block = {name: 'air'};
+
+    return block;
+}
+
+
+export function getSurroundingBlocks(bot) {
+    /**
+     * Get the surrounding blocks from the bot's environment.
+     * @param {Bot} bot - The bot to get the blocks for.
+     * @returns {string[]} - A list of block results as strings.
+     * @example
+     * let surroundings = world.getSurroundingBlocks(bot);
+     **/
+    // Create a list of block position results that can be unpacked.
+    let res = [];
+    res.push(`Block Below: ${getBlockAtPosition(bot, 0, -1, 0).name}`);
+    res.push(`Block at Legs: ${getBlockAtPosition(bot, 0, 0, 0).name}`);
+    res.push(`Block at Head: ${getBlockAtPosition(bot, 0, 1, 0).name}`);
+
+    return res;
+}
+
+
+export function getFirstBlockAboveHead(bot, ignore_types=null, distance=32) {
+    /**
+     * Searches a column from the bot's position for the first solid block above its head.
+     * @param {Bot} bot - The bot to get the block for.
+     * @param {string[]} ignore_types - The names of the blocks to ignore.
+     * @param {number} distance - The maximum distance to search, default 32.
+     * @returns {string} - The first block above the bot's head, formatted as a string.
+     * @example
+     * let firstBlockAboveHead = world.getFirstBlockAboveHead(bot, null, 32);
+     **/
+    // if ignore_types is not a list, make it a list.
+    let ignore_blocks = [];
+    if (ignore_types === null) ignore_blocks = ['air', 'cave_air'];
+    else {
+        if (!Array.isArray(ignore_types))
+            ignore_types = [ignore_types];
+        for (let ignore_type of ignore_types) {
+            if (mc.getBlockId(ignore_type)) ignore_blocks.push(ignore_type);
+        }
+    }
+    // Scan upward and stop at the first block that is not in ignore_blocks.
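+    // Note: bot.entity.position is at the bot's feet, so offset(0, i+2, 0) starts the scan just above its head.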
+ let block_above = {name: 'air'}; + let height = 0 + for (let i = 0; i < distance; i++) { + let block = bot.blockAt(bot.entity.position.offset(0, i+2, 0)); + if (!block) block = {name: 'air'}; + // Ignore and continue + if (ignore_blocks.includes(block.name)) continue; + // Defaults to any block + block_above = block; + height = i; + break; + } + + if (ignore_blocks.includes(block_above.name)) return 'none'; + + return `${block_above.name} (${height} blocks up)`; +} + + +export function getNearestBlocks(bot, block_types=null, distance=16, count=10000) { + /** + * Get a list of the nearest blocks of the given types. + * @param {Bot} bot - The bot to get the nearest block for. + * @param {string[]} block_types - The names of the blocks to search for. + * @param {number} distance - The maximum distance to search, default 16. + * @param {number} count - The maximum number of blocks to find, default 10000. + * @returns {Block[]} - The nearest blocks of the given type. + * @example + * let woodBlocks = world.getNearestBlocks(bot, ['oak_log', 'birch_log'], 16, 1); + **/ + // if blocktypes is not a list, make it a list + let block_ids = []; + if (block_types === null) { + block_ids = mc.getAllBlockIds(['air']); + } + else { + if (!Array.isArray(block_types)) + block_types = [block_types]; + for(let block_type of block_types) { + block_ids.push(mc.getBlockId(block_type)); + } + } + + let positions = bot.findBlocks({matching: block_ids, maxDistance: distance, count: count}); + let blocks = []; + for (let i = 0; i < positions.length; i++) { + let block = bot.blockAt(positions[i]); + let distance = positions[i].distanceTo(bot.entity.position); + blocks.push({ block: block, distance: distance }); + } + blocks.sort((a, b) => a.distance - b.distance); + + let res = []; + for (let i = 0; i < blocks.length; i++) { + res.push(blocks[i].block); + } + return res; +} + + +export function getNearestBlock(bot, block_type, distance=16) { + /** + * Get the nearest block of the given type. + * @param {Bot} bot - The bot to get the nearest block for. + * @param {string} block_type - The name of the block to search for. + * @param {number} distance - The maximum distance to search, default 16. + * @returns {Block} - The nearest block of the given type. 
+ * @example + * let coalBlock = world.getNearestBlock(bot, 'coal_ore', 16); + **/ + let blocks = getNearestBlocks(bot, block_type, distance, 1); + if (blocks.length > 0) { + return blocks[0]; + } + return null; +} + + +export function getNearbyEntities(bot, maxDistance=16) { + let entities = []; + for (const entity of Object.values(bot.entities)) { + const distance = entity.position.distanceTo(bot.entity.position); + if (distance > maxDistance) continue; + entities.push({ entity: entity, distance: distance }); + } + entities.sort((a, b) => a.distance - b.distance); + let res = []; + for (let i = 0; i < entities.length; i++) { + res.push(entities[i].entity); + } + return res; +} + +export function getNearestEntityWhere(bot, predicate, maxDistance=16) { + return bot.nearestEntity(entity => predicate(entity) && bot.entity.position.distanceTo(entity.position) < maxDistance); +} + + +export function getNearbyPlayers(bot, maxDistance) { + if (maxDistance == null) maxDistance = 16; + let players = []; + for (const entity of Object.values(bot.entities)) { + const distance = entity.position.distanceTo(bot.entity.position); + if (distance > maxDistance) continue; + if (entity.type == 'player' && entity.username != bot.username) { + players.push({ entity: entity, distance: distance }); + } + } + players.sort((a, b) => a.distance - b.distance); + let res = []; + for (let i = 0; i < players.length; i++) { + res.push(players[i].entity); + } + return res; +} + + +export function getInventoryStacks(bot) { + let inventory = []; + for (const item of bot.inventory.items()) { + if (item != null) { + inventory.push(item); + } + } + return inventory; +} + + +export function getInventoryCounts(bot) { + /** + * Get an object representing the bot's inventory. + * @param {Bot} bot - The bot to get the inventory for. + * @returns {object} - An object with item names as keys and counts as values. + * @example + * let inventory = world.getInventoryCounts(bot); + * let oakLogCount = inventory['oak_log']; + * let hasWoodenPickaxe = inventory['wooden_pickaxe'] > 0; + **/ + let inventory = {}; + for (const item of bot.inventory.items()) { + if (item != null) { + if (inventory[item.name] == null) { + inventory[item.name] = 0; + } + inventory[item.name] += item.count; + } + } + return inventory; +} + + +export function getCraftableItems(bot) { + /** + * Get a list of all items that can be crafted with the bot's current inventory. + * @param {Bot} bot - The bot to get the craftable items for. + * @returns {string[]} - A list of all items that can be crafted. + * @example + * let craftableItems = world.getCraftableItems(bot); + **/ + let table = getNearestBlock(bot, 'crafting_table'); + if (!table) { + for (const item of bot.inventory.items()) { + if (item != null && item.name === 'crafting_table') { + table = item; + break; + } + } + } + let res = []; + for (const item of mc.getAllItems()) { + let recipes = bot.recipesFor(item.id, null, 1, table); + if (recipes.length > 0) + res.push(item.name); + } + return res; +} + + +export function getPosition(bot) { + /** + * Get your position in the world (Note that y is vertical). + * @param {Bot} bot - The bot to get the position for. + * @returns {Vec3} - An object with x, y, and x attributes representing the position of the bot. + * @example + * let position = world.getPosition(bot); + * let x = position.x; + **/ + return bot.entity.position; +} + + +export function getNearbyEntityTypes(bot) { + /** + * Get a list of all nearby mob types. 
+ * @param {Bot} bot - The bot to get nearby mobs for. + * @returns {string[]} - A list of all nearby mobs. + * @example + * let mobs = world.getNearbyEntityTypes(bot); + **/ + let mobs = getNearbyEntities(bot, 16); + let found = []; + for (let i = 0; i < mobs.length; i++) { + if (!found.includes(mobs[i].name)) { + found.push(mobs[i].name); + } + } + return found; +} + + +export function getNearbyPlayerNames(bot) { + /** + * Get a list of all nearby player names. + * @param {Bot} bot - The bot to get nearby players for. + * @returns {string[]} - A list of all nearby players. + * @example + * let players = world.getNearbyPlayerNames(bot); + **/ + let players = getNearbyPlayers(bot, 64); + let found = []; + for (let i = 0; i < players.length; i++) { + if (!found.includes(players[i].username) && players[i].username != bot.username) { + found.push(players[i].username); + } + } + return found; +} + + +export function getNearbyBlockTypes(bot, distance=16) { + /** + * Get a list of all nearby block names. + * @param {Bot} bot - The bot to get nearby blocks for. + * @param {number} distance - The maximum distance to search, default 16. + * @returns {string[]} - A list of all nearby blocks. + * @example + * let blocks = world.getNearbyBlockTypes(bot); + **/ + let blocks = getNearestBlocks(bot, null, distance); + let found = []; + for (let i = 0; i < blocks.length; i++) { + if (!found.includes(blocks[i].name)) { + found.push(blocks[i].name); + } + } + return found; +} + +export async function isClearPath(bot, target) { + /** + * Check if there is a path to the target that requires no digging or placing blocks. + * @param {Bot} bot - The bot to get the path for. + * @param {Entity} target - The target to path to. + * @returns {boolean} - True if there is a clear path, false otherwise. + */ + let movements = new pf.Movements(bot) + movements.canDig = false; + movements.canPlaceOn = false; + let goal = new pf.goals.GoalNear(target.position.x, target.position.y, target.position.z, 1); + let path = await bot.pathfinder.getPathTo(movements, goal, 100); + return path.status === 'success'; +} + +export function shouldPlaceTorch(bot) { + if (!bot.modes.isOn('torch_placing') || bot.interrupt_code) return false; + const pos = getPosition(bot); + // TODO: check light level instead of nearby torches, block.light is broken + let nearest_torch = getNearestBlock(bot, 'torch', 6); + if (!nearest_torch) + nearest_torch = getNearestBlock(bot, 'wall_torch', 6); + if (!nearest_torch) { + const block = bot.blockAt(pos); + let has_torch = bot.inventory.items().find(item => item.name === 'torch'); + return has_torch && block?.name === 'air'; + } + return false; +} + +export function getBiomeName(bot) { + /** + * Get the name of the biome the bot is in. + * @param {Bot} bot - The bot to get the biome for. + * @returns {string} - The name of the biome. 
+ * @example + * let biome = world.getBiomeName(bot); + **/ + const biomeId = bot.world.getBiome(bot.entity.position); + return mc.getAllBiomes()[biomeId].name; +} diff --git a/src/agent/memory_bank.js b/src/agent/memory_bank.js new file mode 100644 index 0000000..a32ab78 --- /dev/null +++ b/src/agent/memory_bank.js @@ -0,0 +1,25 @@ +export class MemoryBank { + constructor() { + this.memory = {}; + } + + rememberPlace(name, x, y, z) { + this.memory[name] = [x, y, z]; + } + + recallPlace(name) { + return this.memory[name]; + } + + getJson() { + return this.memory + } + + loadJson(json) { + this.memory = json; + } + + getKeys() { + return Object.keys(this.memory).join(', ') + } +} \ No newline at end of file diff --git a/src/agent/modes.js b/src/agent/modes.js new file mode 100644 index 0000000..8bf1594 --- /dev/null +++ b/src/agent/modes.js @@ -0,0 +1,414 @@ +import * as skills from './library/skills.js'; +import * as world from './library/world.js'; +import * as mc from '../utils/mcdata.js'; +import settings from '../../settings.js' +import convoManager from './conversation.js'; + +async function say(agent, message) { + agent.bot.modes.behavior_log += message + '\n'; + if (agent.shut_up || !settings.narrate_behavior) return; + agent.openChat(message); +} + +// a mode is a function that is called every tick to respond immediately to the world +// it has the following fields: +// on: whether 'update' is called every tick +// active: whether an action has been triggered by the mode and hasn't yet finished +// paused: whether the mode is paused by another action that overrides the behavior (eg followplayer implements its own self defense) +// update: the function that is called every tick (if on is true) +// when a mode is active, it will trigger an action to be performed but won't wait for it to return output + +// the order of this list matters! first modes will be prioritized +// while update functions are async, they should *not* be awaited longer than ~100ms as it will block the update loop +// to perform longer actions, use the execute function which won't block the update loop +const modes_list = [ + { + name: 'self_preservation', + description: 'Respond to drowning, burning, and damage at low health. 
Interrupts all actions.', + interrupts: ['all'], + on: true, + active: false, + fall_blocks: ['sand', 'gravel', 'concrete_powder'], // includes matching substrings like 'sandstone' and 'red_sand' + update: async function (agent) { + const bot = agent.bot; + let block = bot.blockAt(bot.entity.position); + let blockAbove = bot.blockAt(bot.entity.position.offset(0, 1, 0)); + if (!block) block = {name: 'air'}; // hacky fix when blocks are not loaded + if (!blockAbove) blockAbove = {name: 'air'}; + if (blockAbove.name === 'water' || blockAbove.name === 'flowing_water') { + // does not call execute so does not interrupt other actions + if (!bot.pathfinder.goal) { + bot.setControlState('jump', true); + } + } + else if (this.fall_blocks.some(name => blockAbove.name.includes(name))) { + execute(this, agent, async () => { + await skills.moveAway(bot, 2); + }); + } + else if (block.name === 'lava' || block.name === 'flowing_lava' || block.name === 'fire' || + blockAbove.name === 'lava' || blockAbove.name === 'flowing_lava' || blockAbove.name === 'fire') { + say(agent, 'I\'m on fire!'); // TODO: gets stuck in lava + execute(this, agent, async () => { + let nearestWater = world.getNearestBlock(bot, 'water', 20); + if (nearestWater) { + const pos = nearestWater.position; + await skills.goToPosition(bot, pos.x, pos.y, pos.z, 0.2); + say(agent, 'Ahhhh that\'s better!'); + } + else { + await skills.moveAway(bot, 5); + } + }); + } + else if (Date.now() - bot.lastDamageTime < 3000 && (bot.health < 5 || bot.lastDamageTaken >= bot.health)) { + say(agent, 'I\'m dying!'); + execute(this, agent, async () => { + await skills.moveAway(bot, 20); + }); + } + else if (agent.isIdle()) { + bot.clearControlStates(); // clear jump if not in danger or doing anything else + } + } + }, + { + name: 'unstuck', + description: 'Attempt to get unstuck when in the same place for a while. Interrupts some actions.', + interrupts: ['all'], + on: true, + active: false, + prev_location: null, + distance: 2, + stuck_time: 0, + last_time: Date.now(), + max_stuck_time: 20, + update: async function (agent) { + if (agent.isIdle()) { + this.prev_location = null; + this.stuck_time = 0; + return; // don't get stuck when idle + } + const bot = agent.bot; + if (this.prev_location && this.prev_location.distanceTo(bot.entity.position) < this.distance) { + this.stuck_time += (Date.now() - this.last_time) / 1000; + } + else { + this.prev_location = bot.entity.position.clone(); + this.stuck_time = 0; + } + if (this.stuck_time > this.max_stuck_time) { + say(agent, 'I\'m stuck!'); + this.stuck_time = 0; + execute(this, agent, async () => { + const crashTimeout = setTimeout(() => { agent.cleanKill("Got stuck and couldn't get unstuck") }, 10000); + await skills.moveAway(bot, 5); + clearTimeout(crashTimeout); + say(agent, 'I\'m free.'); + }); + } + this.last_time = Date.now(); + } + }, + { + name: 'cowardice', + description: 'Run away from enemies. Interrupts all actions.', + interrupts: ['all'], + on: true, + active: false, + update: async function (agent) { + const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 16); + if (enemy && await world.isClearPath(agent.bot, enemy)) { + say(agent, `Aaa! A ${enemy.name.replace("_", " ")}!`); + execute(this, agent, async () => { + await skills.avoidEnemies(agent.bot, 24); + }); + } + } + }, + { + name: 'self_defense', + description: 'Attack nearby enemies. 
Interrupts all actions.', + interrupts: ['all'], + on: true, + active: false, + update: async function (agent) { + const enemy = world.getNearestEntityWhere(agent.bot, entity => mc.isHostile(entity), 8); + if (enemy && await world.isClearPath(agent.bot, enemy)) { + say(agent, `Fighting ${enemy.name}!`); + execute(this, agent, async () => { + await skills.defendSelf(agent.bot, 8); + }); + } + } + }, + { + name: 'hunting', + description: 'Hunt nearby animals when idle.', + interrupts: [], + on: true, + active: false, + update: async function (agent) { + const huntable = world.getNearestEntityWhere(agent.bot, entity => mc.isHuntable(entity), 8); + if (huntable && await world.isClearPath(agent.bot, huntable)) { + execute(this, agent, async () => { + say(agent, `Hunting ${huntable.name}!`); + await skills.attackEntity(agent.bot, huntable); + }); + } + } + }, + { + name: 'item_collecting', + description: 'Collect nearby items when idle.', + interrupts: ['action:followPlayer'], + on: true, + active: false, + + wait: 2, // number of seconds to wait after noticing an item to pick it up + prev_item: null, + noticed_at: -1, + update: async function (agent) { + let item = world.getNearestEntityWhere(agent.bot, entity => entity.name === 'item', 8); + let empty_inv_slots = agent.bot.inventory.emptySlotCount(); + if (item && item !== this.prev_item && await world.isClearPath(agent.bot, item) && empty_inv_slots > 1) { + if (this.noticed_at === -1) { + this.noticed_at = Date.now(); + } + if (Date.now() - this.noticed_at > this.wait * 1000) { + say(agent, `Picking up item!`); + this.prev_item = item; + execute(this, agent, async () => { + await skills.pickupNearbyItems(agent.bot); + }); + this.noticed_at = -1; + } + } + else { + this.noticed_at = -1; + } + } + }, + { + name: 'torch_placing', + description: 'Place torches when idle and there are no torches nearby.', + interrupts: ['action:followPlayer'], + on: true, + active: false, + cooldown: 5, + last_place: Date.now(), + update: function (agent) { + if (world.shouldPlaceTorch(agent.bot)) { + if (Date.now() - this.last_place < this.cooldown * 1000) return; + execute(this, agent, async () => { + const pos = agent.bot.entity.position; + await skills.placeBlock(agent.bot, 'torch', pos.x, pos.y, pos.z, 'bottom', true); + }); + this.last_place = Date.now(); + } + } + }, + { + name: 'elbow_room', + description: 'Move away from nearby players when idle.', + interrupts: ['action:followPlayer'], + on: true, + active: false, + distance: 0.5, + update: async function (agent) { + const player = world.getNearestEntityWhere(agent.bot, entity => entity.type === 'player', this.distance); + if (player) { + execute(this, agent, async () => { + // wait a random amount of time to avoid identical movements with other bots + const wait_time = Math.random() * 1000; + await new Promise(resolve => setTimeout(resolve, wait_time)); + if (player.position.distanceTo(agent.bot.entity.position) < this.distance) { + await skills.moveAway(agent.bot, this.distance); + } + }); + } + } + }, + { + name: 'idle_staring', + description: 'Animation to look around at entities when idle.', + interrupts: [], + on: true, + active: false, + + staring: false, + last_entity: null, + next_change: 0, + update: function (agent) { + const entity = agent.bot.nearestEntity(); + let entity_in_view = entity && entity.position.distanceTo(agent.bot.entity.position) < 10 && entity.name !== 'enderman'; + if (entity_in_view && entity !== this.last_entity) { + this.staring = true; + this.last_entity = entity; + 
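+                // stare at the newly noticed entity for roughly 4-5 seconds before deciding whether to look away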
this.next_change = Date.now() + Math.random() * 1000 + 4000; + } + if (entity_in_view && this.staring) { + let isbaby = entity.type !== 'player' && entity.metadata[16]; + let height = isbaby ? entity.height/2 : entity.height; + agent.bot.lookAt(entity.position.offset(0, height, 0)); + } + if (!entity_in_view) + this.last_entity = null; + if (Date.now() > this.next_change) { + // look in random direction + this.staring = Math.random() < 0.3; + if (!this.staring) { + const yaw = Math.random() * Math.PI * 2; + const pitch = (Math.random() * Math.PI/2) - Math.PI/4; + agent.bot.look(yaw, pitch, false); + } + this.next_change = Date.now() + Math.random() * 10000 + 2000; + } + } + }, + { + name: 'cheat', + description: 'Use cheats to instantly place blocks and teleport.', + interrupts: [], + on: false, + active: false, + update: function (agent) { /* do nothing */ } + } +]; + +async function execute(mode, agent, func, timeout=-1) { + if (agent.self_prompter.on) + agent.self_prompter.stopLoop(); + let interrupted_action = agent.actions.currentActionLabel; + mode.active = true; + let code_return = await agent.actions.runAction(`mode:${mode.name}`, async () => { + await func(); + }, { timeout }); + mode.active = false; + console.log(`Mode ${mode.name} finished executing, code_return: ${code_return.message}`); + + let should_reprompt = + interrupted_action && // it interrupted a previous action + !agent.actions.resume_func && // there is no resume function + !agent.self_prompter.on && // self prompting is not on + !code_return.interrupted; // this mode action was not interrupted by something else + + if (should_reprompt) { + // auto prompt to respond to the interruption + let role = convoManager.inConversation() ? agent.last_sender : 'system'; + let logs = agent.bot.modes.flushBehaviorLog(); + agent.handleMessage(role, `(AUTO MESSAGE)Your previous action '${interrupted_action}' was interrupted by ${mode.name}. + Your behavior log: ${logs}\nRespond accordingly.`); + } +} + +let _agent = null; +const modes_map = {}; +for (let mode of modes_list) { + modes_map[mode.name] = mode; +} + +class ModeController { + /* + SECURITY WARNING: + ModesController must be isolated. Do not store references to external objects like `agent`. + This object is accessible by LLM generated code, so any stored references are also accessible. + This can be used to expose sensitive information by malicious human prompters. + */ + constructor() { + this.behavior_log = ''; + } + + exists(mode_name) { + return modes_map[mode_name] != null; + } + + setOn(mode_name, on) { + modes_map[mode_name].on = on; + } + + isOn(mode_name) { + return modes_map[mode_name].on; + } + + pause(mode_name) { + modes_map[mode_name].paused = true; + } + + unpause(mode_name) { + modes_map[mode_name].paused = false; + } + + unPauseAll() { + for (let mode of modes_list) { + if (mode.paused) console.log(`Unpausing mode ${mode.name}`); + mode.paused = false; + } + } + + getMiniDocs() { // no descriptions + let res = 'Agent Modes:'; + for (let mode of modes_list) { + let on = mode.on ? 'ON' : 'OFF'; + res += `\n- ${mode.name}(${on})`; + } + return res; + } + + getDocs() { + let res = 'Agent Modes:'; + for (let mode of modes_list) { + let on = mode.on ? 
'ON' : 'OFF'; + res += `\n- ${mode.name}(${on}): ${mode.description}`; + } + return res; + } + + async update() { + if (_agent.isIdle()) { + this.unPauseAll(); + } + for (let mode of modes_list) { + let interruptible = mode.interrupts.some(i => i === 'all') || mode.interrupts.some(i => i === _agent.actions.currentActionLabel); + if (mode.on && !mode.paused && !mode.active && (_agent.isIdle() || interruptible)) { + await mode.update(_agent); + } + if (mode.active) break; + } + } + + flushBehaviorLog() { + const log = this.behavior_log; + this.behavior_log = ''; + return log; + } + + getJson() { + let res = {}; + for (let mode of modes_list) { + res[mode.name] = mode.on; + } + return res; + } + + loadJson(json) { + for (let mode of modes_list) { + if (json[mode.name] != undefined) { + mode.on = json[mode.name]; + } + } + } +} + +export function initModes(agent) { + _agent = agent; + // the mode controller is added to the bot object so it is accessible from anywhere the bot is used + agent.bot.modes = new ModeController(); + if (agent.task) { + agent.bot.restrict_to_inventory = agent.task.restrict_to_inventory; + } + let modes_json = agent.prompter.getInitModes(); + if (modes_json) { + agent.bot.modes.loadJson(modes_json); + } +} diff --git a/src/agent/npc/build_goal.js b/src/agent/npc/build_goal.js new file mode 100644 index 0000000..ebca78f --- /dev/null +++ b/src/agent/npc/build_goal.js @@ -0,0 +1,80 @@ +import { Vec3 } from 'vec3'; +import * as skills from '../library/skills.js'; +import * as world from '../library/world.js'; +import * as mc from '../../utils/mcdata.js'; +import { blockSatisfied, getTypeOfGeneric, rotateXZ } from './utils.js'; + + +export class BuildGoal { + constructor(agent) { + this.agent = agent; + } + + async wrapSkill(func) { + if (!this.agent.isIdle()) + return false; + let res = await this.agent.actions.runAction('BuildGoal', func); + return !res.interrupted; + } + + async executeNext(goal, position=null, orientation=null) { + let sizex = goal.blocks[0][0].length; + let sizez = goal.blocks[0].length; + let sizey = goal.blocks.length; + if (!position) { + for (let x = 0; x < sizex - 1; x++) { + position = world.getNearestFreeSpace(this.agent.bot, sizex - x, 16); + if (position) break; + } + } + if (orientation === null) { + orientation = Math.floor(Math.random() * 4); + } + + let inventory = world.getInventoryCounts(this.agent.bot); + let missing = {}; + let acted = false; + for (let y = goal.offset; y < sizey+goal.offset; y++) { + for (let z = 0; z < sizez; z++) { + for (let x = 0; x < sizex; x++) { + + let [rx, rz] = rotateXZ(x, z, orientation, sizex, sizez); + let ry = y - goal.offset; + let block_name = goal.blocks[ry][rz][rx]; + if (block_name === null || block_name === '') continue; + + let world_pos = new Vec3(position.x + x, position.y + y, position.z + z); + let current_block = this.agent.bot.blockAt(world_pos); + + let res = null; + if (current_block !== null && !blockSatisfied(block_name, current_block)) { + acted = true; + + if (current_block.name !== 'air') { + res = await this.wrapSkill(async () => { + await skills.breakBlockAt(this.agent.bot, world_pos.x, world_pos.y, world_pos.z); + }); + if (!res) return {missing: missing, acted: acted, position: position, orientation: orientation}; + } + + if (block_name !== 'air') { + let block_typed = getTypeOfGeneric(this.agent.bot, block_name); + if (inventory[block_typed] > 0) { + res = await this.wrapSkill(async () => { + await skills.placeBlock(this.agent.bot, block_typed, world_pos.x, world_pos.y, 
world_pos.z); + }); + if (!res) return {missing: missing, acted: acted, position: position, orientation: orientation}; + } else { + if (missing[block_typed] === undefined) + missing[block_typed] = 0; + missing[block_typed]++; + } + } + } + } + } + } + return {missing: missing, acted: acted, position: position, orientation: orientation}; + } + +} \ No newline at end of file diff --git a/src/agent/npc/construction/dirt_shelter.json b/src/agent/npc/construction/dirt_shelter.json new file mode 100644 index 0000000..d5e8b28 --- /dev/null +++ b/src/agent/npc/construction/dirt_shelter.json @@ -0,0 +1,38 @@ +{ + "name": "dirt_shelter", + "offset": -2, + "blocks": [ + [ + ["", "", "", "", ""], + ["", "dirt", "dirt", "dirt", ""], + ["", "dirt", "dirt", "dirt", ""], + ["", "dirt", "dirt", "dirt", ""], + ["", "", "dirt", "", ""], + ["", "", "dirt", "", ""] + ], + [ + ["dirt", "dirt", "dirt", "dirt", "dirt"], + ["dirt", "chest", "bed", "air", "dirt"], + ["dirt", "air", "bed", "air", "dirt"], + ["dirt", "air", "air", "air", "dirt"], + ["dirt", "dirt", "door", "dirt", "dirt"], + ["dirt", "dirt", "air", "dirt", "dirt"] + ], + [ + ["dirt", "dirt", "dirt", "dirt", "dirt"], + ["dirt", "air", "air", "air", "dirt"], + ["dirt", "torch", "air", "air", "dirt"], + ["dirt", "air", "air", "air", "dirt"], + ["dirt", "dirt", "door", "dirt", "dirt"], + ["air", "air", "air", "air", "air"] + ], + [ + ["air", "air", "air", "air", "air"], + ["dirt", "dirt", "dirt", "dirt", "dirt"], + ["dirt", "dirt", "dirt", "dirt", "dirt"], + ["dirt", "dirt", "dirt", "dirt", "dirt"], + ["air", "air", "air", "air", "air"], + ["air", "air", "air", "air", "air"] + ] + ] +} \ No newline at end of file diff --git a/src/agent/npc/construction/large_house.json b/src/agent/npc/construction/large_house.json new file mode 100644 index 0000000..f40e0dd --- /dev/null +++ b/src/agent/npc/construction/large_house.json @@ -0,0 +1,230 @@ +{ + "name": "large_house", + "offset": -4, + "blocks": [ + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", 
"cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "planks", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "planks", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "cobblestone", "air", "torch", "air", "air", "air", "torch", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "air", "air", "air", "air", "air", "air", "air", "cobblestone", ""], + ["", "cobblestone", "planks", "torch", "air", "air", "air", "torch", "air", "cobblestone", ""], + ["", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "", "", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "", "", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "", "", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "air", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "dirt"], + ["cobblestone", "cobblestone", "air", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["cobblestone", "cobblestone", "air", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", 
"cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""], + ["", "", "", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone", ""] + ], + [ + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "planks", "furnace", "air", "crafting_table", "planks", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "planks", "", "", ""], + ["log", "planks", "planks", "log", "planks", "air", "planks", "log", "planks", "log", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "door", "air"], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "air", "planks", "planks", "log", ""], + ["", "", "", "planks", "air", "air", "air", "", "air", "planks", ""], + ["", "", "", "planks", "chest", "air", "air", "bed", "", "planks", ""], + ["", "", "", "planks", "chest", "air", "air", "", "air", "planks", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""] + ], + [ + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "glass", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "glass", "", "", ""], + ["log", "planks", "planks", "log", "planks", "air", "planks", "log", "planks", "log", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "door", "air"], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "air", "planks", "planks", "log", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "log", "planks", "glass", "glass", "glass", "planks", "log", ""] + ], + [ + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "planks", "air", "air", "air", "glass", "", "", ""], + ["", "", "", "planks", "torch", "air", "torch", "glass", "", "", ""], + ["log", "planks", "planks", "log", "planks", "air", "planks", "log", "planks", "log", ""], + ["planks", "air", "air", "torch", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "torch", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "planks", "air", "air", "air", "air", "air", "air", "torch", "planks", ""], + ["planks", "planks", "air", "torch", "air", "air", "air", "air", "air", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "air", "planks", "planks", "log", ""], + ["", "", "", "planks", "air", "torch", "air", "torch", 
"air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "planks", "air", "air", "air", "air", "air", "planks", ""], + ["", "", "", "log", "planks", "glass", "glass", "glass", "planks", "log", ""] + ], + [ + ["", "", "", "log", "log", "log", "log", "log", "", "", ""], + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["", "", "", "log", "planks", "planks", "planks", "log", "", "", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["log", "air", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "air", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "air", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "log", "log", "log", "log", "log", "log", "log", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "planks", "planks", "planks", "", "", "", ""], + ["", "", "", "", "planks", "planks", "planks", "", "", "", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["planks", "air", "bookshelf", "bookshelf", "air", "air", "air", "air", "torch", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "air", "planks", ""], + ["planks", "air", "air", "air", "air", "air", "air", "air", "torch", "planks", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["glass", "air", "bookshelf", "bookshelf", "air", "air", "air", "air", "air", "planks", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", 
"", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["glass", "air", "air", "torch", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "air", "air", "air", "air", "air", "air", "glass", ""], + ["glass", "air", "air", "torch", "air", "air", "air", "air", "air", "glass", ""], + ["log", "planks", "planks", "log", "glass", "glass", "glass", "glass", "glass", "log", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "planks", "planks", "log", "planks", "planks", "planks", "planks", "planks", "log", ""], + ["log", "log", "log", "log", "log", "log", "log", "log", "log", "log", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "planks", "planks", "planks", "planks", "planks", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ], + [ + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "planks", "planks", "planks", "", "", ""], + ["", "", "", "", "", "planks", "planks", "planks", "", "", ""], + ["", "", "", "", "", "planks", "planks", "planks", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""], + ["", "", "", "", "", "", "", "", "", "", ""] + ] + ] +} \ No newline at end of file diff --git 
a/src/agent/npc/construction/small_stone_house.json b/src/agent/npc/construction/small_stone_house.json new file mode 100644 index 0000000..baf4f65 --- /dev/null +++ b/src/agent/npc/construction/small_stone_house.json @@ -0,0 +1,42 @@ +{ + "name": "small_stone_house", + "offset": -1, + "blocks": [ + [ + ["", "", "", "", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "", "planks", "", ""], + ["", "", "", "", ""] + ], + [ + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["cobblestone", "chest", "bed", "air", "cobblestone"], + ["cobblestone", "air", "bed", "air", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "cobblestone", "door", "cobblestone", "cobblestone"], + ["", "air", "air", "air", ""] + ], + [ + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["cobblestone", "torch", "air", "torch", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "air", "air", "air", "cobblestone"], + ["cobblestone", "torch", "air", "torch", "cobblestone"], + ["cobblestone", "cobblestone", "door", "cobblestone", "cobblestone"], + ["", "air", "air", "air", ""] + ], + [ + ["air", "air", "air", "air", "air"], + ["air", "cobblestone", "cobblestone", "cobblestone", "air"], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["cobblestone", "cobblestone", "cobblestone", "cobblestone", "cobblestone"], + ["air", "cobblestone", "cobblestone", "cobblestone", "air"], + ["air", "air", "air", "air", "air"], + ["", "air", "air", "air", ""] + ] + ] +} \ No newline at end of file diff --git a/src/agent/npc/construction/small_wood_house.json b/src/agent/npc/construction/small_wood_house.json new file mode 100644 index 0000000..9661ae5 --- /dev/null +++ b/src/agent/npc/construction/small_wood_house.json @@ -0,0 +1,42 @@ +{ + "name": "small_wood_house", + "offset": -1, + "blocks": [ + [ + ["", "", "", "", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "planks", "planks", "planks", ""], + ["", "", "planks", "", ""], + ["", "", "", "", ""] + ], + [ + ["log", "planks", "planks", "planks", "log"], + ["planks", "chest", "bed", "air", "planks"], + ["planks", "air", "bed", "air", "planks"], + ["planks", "air", "air", "air", "planks"], + ["planks", "air", "air", "air", "planks"], + ["log", "planks", "door", "planks", "log"], + ["", "air", "air", "air", ""] + ], + [ + ["log", "planks", "planks", "planks", "log"], + ["planks", "torch", "air", "torch", "planks"], + ["planks", "air", "air", "air", "planks"], + ["planks", "air", "air", "air", "planks"], + ["planks", "torch", "air", "torch", "planks"], + ["log", "planks", "door", "planks", "log"], + ["", "air", "air", "air", ""] + ], + [ + ["air", "air", "air", "air", "air"], + ["air", "planks", "planks", "planks", "air"], + ["planks", "planks", "planks", "planks", "planks"], + ["planks", "planks", "planks", "planks", "planks"], + ["air", "planks", "planks", "planks", "air"], + ["air", "air", "air", "air", "air"], + ["", "air", "air", "air", ""] + ] + ] +} \ No newline at end of file diff --git a/src/agent/npc/controller.js b/src/agent/npc/controller.js new file mode 100644 index 0000000..9af3f3e --- /dev/null +++ b/src/agent/npc/controller.js @@ -0,0 
+1,261 @@ +import { readdirSync, readFileSync } from 'fs'; +import { NPCData } from './data.js'; +import { ItemGoal } from './item_goal.js'; +import { BuildGoal } from './build_goal.js'; +import { itemSatisfied, rotateXZ } from './utils.js'; +import * as skills from '../library/skills.js'; +import * as world from '../library/world.js'; +import * as mc from '../../utils/mcdata.js'; + + +export class NPCContoller { + constructor(agent) { + this.agent = agent; + this.data = NPCData.fromObject(agent.prompter.profile.npc); + this.temp_goals = []; + this.item_goal = new ItemGoal(agent, this.data); + this.build_goal = new BuildGoal(agent); + this.constructions = {}; + this.last_goals = {}; + } + + getBuiltPositions() { + let positions = []; + for (let name in this.data.built) { + let position = this.data.built[name].position; + let offset = this.constructions[name].offset; + let sizex = this.constructions[name].blocks[0][0].length; + let sizez = this.constructions[name].blocks[0].length; + let sizey = this.constructions[name].blocks.length; + for (let y = offset; y < sizey+offset; y++) { + for (let z = 0; z < sizez; z++) { + for (let x = 0; x < sizex; x++) { + positions.push({x: position.x + x, y: position.y + y, z: position.z + z}); + } + } + } + } + return positions; + } + + init() { + try { + for (let file of readdirSync('src/agent/npc/construction')) { + if (file.endsWith('.json')) { + this.constructions[file.slice(0, -5)] = JSON.parse(readFileSync('src/agent/npc/construction/' + file, 'utf8')); + } + } + } catch (e) { + console.log('Error reading construction file'); + } + + for (let name in this.constructions) { + let sizez = this.constructions[name].blocks[0].length; + let sizex = this.constructions[name].blocks[0][0].length; + let max_size = Math.max(sizex, sizez); + for (let y = 0; y < this.constructions[name].blocks.length; y++) { + for (let z = 0; z < max_size; z++) { + if (z >= this.constructions[name].blocks[y].length) + this.constructions[name].blocks[y].push([]); + for (let x = 0; x < max_size; x++) { + if (x >= this.constructions[name].blocks[y][z].length) + this.constructions[name].blocks[y][z].push(''); + } + } + } + } + + this.agent.bot.on('idle', async () => { + if (this.data.goals.length === 0 && !this.data.curr_goal) return; + // Wait a while for inputs before acting independently + await new Promise((resolve) => setTimeout(resolve, 5000)); + if (!this.agent.isIdle()) return; + + // Persue goal + if (!this.agent.actions.resume_func) { + this.executeNext(); + this.agent.history.save(); + } + }); + } + + async setGoal(name=null, quantity=1) { + this.data.curr_goal = null; + this.last_goals = {}; + if (name) { + this.data.curr_goal = {name: name, quantity: quantity}; + return; + } + + if (!this.data.do_set_goal) return; + + let past_goals = {...this.last_goals}; + for (let goal in this.data.goals) { + if (past_goals[goal.name] === undefined) past_goals[goal.name] = true; + } + let res = await this.agent.prompter.promptGoalSetting(this.agent.history.getHistory(), past_goals); + if (res) { + this.data.curr_goal = res; + console.log('Set new goal: ', res.name, ' x', res.quantity); + } else { + console.log('Error setting new goal.'); + } + } + + async executeNext() { + if (!this.agent.isIdle()) return; + await this.agent.actions.runAction('npc:moveAway', async () => { + await skills.moveAway(this.agent.bot, 2); + }); + + if (!this.data.do_routine || this.agent.bot.time.timeOfDay < 13000) { + // Exit any buildings + let building = this.currentBuilding(); + if (building == 
this.data.home) { + let door_pos = this.getBuildingDoor(building); + if (door_pos) { + await this.agent.actions.runAction('npc:exitBuilding', async () => { + await skills.useDoor(this.agent.bot, door_pos); + await skills.moveAway(this.agent.bot, 2); // If the bot is too close to the building it will try to enter again + }); + } + } + + // Work towards goals + await this.executeGoal(); + + } else { + // Reset goal at the end of the day + this.data.curr_goal = null; + + // Return to home + let building = this.currentBuilding(); + if (this.data.home !== null && (building === null || building != this.data.home)) { + let door_pos = this.getBuildingDoor(this.data.home); + await this.agent.actions.runAction('npc:returnHome', async () => { + await skills.useDoor(this.agent.bot, door_pos); + }); + } + + // Go to bed + await this.agent.actions.runAction('npc:bed', async () => { + await skills.goToBed(this.agent.bot); + }); + } + + if (this.agent.isIdle()) + this.agent.bot.emit('idle'); + } + + async executeGoal() { + // If we need more blocks to complete a building, get those first + let goals = this.temp_goals.concat(this.data.goals); + if (this.data.curr_goal) + goals = goals.concat([this.data.curr_goal]) + this.temp_goals = []; + + let acted = false; + for (let goal of goals) { + + // Obtain goal item or block + if (this.constructions[goal.name] === undefined) { + if (!itemSatisfied(this.agent.bot, goal.name, goal.quantity)) { + let res = await this.item_goal.executeNext(goal.name, goal.quantity); + this.last_goals[goal.name] = res; + acted = true; + break; + } + } + + // Build construction goal + else { + let res = null; + if (this.data.built.hasOwnProperty(goal.name)) { + res = await this.build_goal.executeNext( + this.constructions[goal.name], + this.data.built[goal.name].position, + this.data.built[goal.name].orientation + ); + } else { + res = await this.build_goal.executeNext(this.constructions[goal.name]); + this.data.built[goal.name] = { + name: goal.name, + position: res.position, + orientation: res.orientation + }; + } + if (Object.keys(res.missing).length === 0) { + this.data.home = goal.name; + } + for (let block_name in res.missing) { + this.temp_goals.push({ + name: block_name, + quantity: res.missing[block_name] + }) + } + if (res.acted) { + acted = true; + this.last_goals[goal.name] = Object.keys(res.missing).length === 0; + break; + } + } + } + + if (!acted && this.data.do_set_goal) + await this.setGoal(); + } + + currentBuilding() { + let bot_pos = this.agent.bot.entity.position; + for (let name in this.data.built) { + let pos = this.data.built[name].position; + let offset = this.constructions[name].offset; + let sizex = this.constructions[name].blocks[0][0].length; + let sizez = this.constructions[name].blocks[0].length; + let sizey = this.constructions[name].blocks.length; + if (this.data.built[name].orientation % 2 === 1) [sizex, sizez] = [sizez, sizex]; + if (bot_pos.x >= pos.x && bot_pos.x < pos.x + sizex && + bot_pos.y >= pos.y + offset && bot_pos.y < pos.y + sizey + offset && + bot_pos.z >= pos.z && bot_pos.z < pos.z + sizez) { + return name; + } + } + return null; + } + + getBuildingDoor(name) { + if (name === null || this.data.built[name] === undefined) return null; + let door_x = null; + let door_z = null; + let door_y = null; + for (let y = 0; y < this.constructions[name].blocks.length; y++) { + for (let z = 0; z < this.constructions[name].blocks[y].length; z++) { + for (let x = 0; x < this.constructions[name].blocks[y][z].length; x++) { + if 
(this.constructions[name].blocks[y][z][x] !== null && + this.constructions[name].blocks[y][z][x].includes('door')) { + door_x = x; + door_z = z; + door_y = y; + break; + } + } + if (door_x !== null) break; + } + if (door_x !== null) break; + } + if (door_x === null) return null; + + let sizex = this.constructions[name].blocks[0][0].length; + let sizez = this.constructions[name].blocks[0].length; + let orientation = 4 - this.data.built[name].orientation; // this conversion is opposite + if (orientation == 4) orientation = 0; + [door_x, door_z] = rotateXZ(door_x, door_z, orientation, sizex, sizez); + door_y += this.constructions[name].offset; + + return { + x: this.data.built[name].position.x + door_x, + y: this.data.built[name].position.y + door_y, + z: this.data.built[name].position.z + door_z + }; + } +} \ No newline at end of file diff --git a/src/agent/npc/data.js b/src/agent/npc/data.js new file mode 100644 index 0000000..b5de0eb --- /dev/null +++ b/src/agent/npc/data.js @@ -0,0 +1,50 @@ +export class NPCData { + constructor() { + this.goals = []; + this.curr_goal = null; + this.built = {}; + this.home = null; + this.do_routine = false; + this.do_set_goal = false; + } + + toObject() { + let obj = {}; + if (this.goals.length > 0) + obj.goals = this.goals; + if (this.curr_goal) + obj.curr_goal = this.curr_goal; + if (Object.keys(this.built).length > 0) + obj.built = this.built; + if (this.home) + obj.home = this.home; + obj.do_routine = this.do_routine; + obj.do_set_goal = this.do_set_goal; + return obj; + } + + static fromObject(obj) { + let npc = new NPCData(); + if (!obj) return npc; + if (obj.goals) { + npc.goals = []; + for (let goal of obj.goals) { + if (typeof goal === 'string') + npc.goals.push({name: goal, quantity: 1}); + else + npc.goals.push({name: goal.name, quantity: goal.quantity}); + } + } + if (obj.curr_goal) + npc.curr_goal = obj.curr_goal; + if (obj.built) + npc.built = obj.built; + if (obj.home) + npc.home = obj.home; + if (obj.do_routine !== undefined) + npc.do_routine = obj.do_routine; + if (obj.do_set_goal !== undefined) + npc.do_set_goal = obj.do_set_goal; + return npc; + } +} \ No newline at end of file diff --git a/src/agent/npc/item_goal.js b/src/agent/npc/item_goal.js new file mode 100644 index 0000000..9055f54 --- /dev/null +++ b/src/agent/npc/item_goal.js @@ -0,0 +1,355 @@ +import * as skills from '../library/skills.js'; +import * as world from '../library/world.js'; +import * as mc from '../../utils/mcdata.js'; +import { itemSatisfied } from './utils.js'; + + +const blacklist = [ + 'coal_block', + 'iron_block', + 'gold_block', + 'diamond_block', + 'deepslate', + 'blackstone', + 'netherite', + '_wood', + 'stripped_', + 'crimson', + 'warped', + 'dye' +] + + +class ItemNode { + constructor(manager, wrapper, name) { + this.manager = manager; + this.wrapper = wrapper; + this.name = name; + this.type = ''; + this.source = null; + this.prereq = null; + this.recipe = []; + this.fails = 0; + } + + setRecipe(recipe) { + this.type = 'craft'; + let size = 0; + this.recipe = []; + for (let [key, value] of Object.entries(recipe)) { + if (this.manager.nodes[key] === undefined) + this.manager.nodes[key] = new ItemWrapper(this.manager, this.wrapper, key); + this.recipe.push({node: this.manager.nodes[key], quantity: value}); + size += value; + } + if (size > 4) { + if (this.manager.nodes['crafting_table'] === undefined) + this.manager.nodes['crafting_table'] = new ItemWrapper(this.manager, this.wrapper, 'crafting_table'); + this.prereq = this.manager.nodes['crafting_table']; 
+ } + return this; + } + + setCollectable(source=null, tool=null) { + this.type = 'block'; + if (source) + this.source = source; + else + this.source = this.name; + if (tool) { + if (this.manager.nodes[tool] === undefined) + this.manager.nodes[tool] = new ItemWrapper(this.manager, this.wrapper, tool); + this.prereq = this.manager.nodes[tool]; + } + return this; + } + + setSmeltable(source_item) { + this.type = 'smelt'; + if (this.manager.nodes['furnace'] === undefined) + this.manager.nodes['furnace'] = new ItemWrapper(this.manager, this.wrapper, 'furnace'); + this.prereq = this.manager.nodes['furnace']; + + if (this.manager.nodes[source_item] === undefined) + this.manager.nodes[source_item] = new ItemWrapper(this.manager, this.wrapper, source_item); + if (this.manager.nodes['coal'] === undefined) + this.manager.nodes['coal'] = new ItemWrapper(this.manager, this.wrapper, 'coal'); + this.recipe = [ + {node: this.manager.nodes[source_item], quantity: 1}, + {node: this.manager.nodes['coal'], quantity: 1} + ]; + return this; + } + + setHuntable(animal_source) { + this.type = 'hunt'; + this.source = animal_source; + return this; + } + + getChildren() { + let children = [...this.recipe]; + if (this.prereq) { + children.push({node: this.prereq, quantity: 1}); + } + return children; + } + + isReady() { + for (let child of this.getChildren()) { + if (!child.node.isDone(child.quantity)) { + return false; + } + } + return true; + } + + isDone(quantity=1) { + if (this.manager.goal.name === this.name) + return false; + return itemSatisfied(this.manager.agent.bot, this.name, quantity); + } + + getDepth(q=1) { + if (this.isDone(q)) { + return 0; + } + let depth = 0; + for (let child of this.getChildren()) { + depth = Math.max(depth, child.node.getDepth(child.quantity)); + } + return depth + 1; + } + + getFails(q=1) { + if (this.isDone(q)) { + return 0; + } + let fails = 0; + for (let child of this.getChildren()) { + fails += child.node.getFails(child.quantity); + } + return fails + this.fails; + } + + getNext(q=1) { + if (this.isDone(q)) + return null; + if (this.isReady()) + return {node: this, quantity: q}; + for (let child of this.getChildren()) { + let res = child.node.getNext(child.quantity); + if (res) + return res; + } + return null; + } + + async execute(quantity=1) { + if (!this.isReady()) { + this.fails += 1; + return; + } + let inventory = world.getInventoryCounts(this.manager.agent.bot); + let init_quantity = inventory[this.name] || 0; + if (this.type === 'block') { + await skills.collectBlock(this.manager.agent.bot, this.source, quantity, this.manager.agent.npc.getBuiltPositions()); + } else if (this.type === 'smelt') { + let to_smelt_name = this.recipe[0].node.name; + let to_smelt_quantity = Math.min(quantity, inventory[to_smelt_name] || 1); + await skills.smeltItem(this.manager.agent.bot, to_smelt_name, to_smelt_quantity); + } else if (this.type === 'hunt') { + for (let i=0; i recipe); + if (recipes) { + for (let recipe of recipes) { + let includes_blacklisted = false; + for (let ingredient in recipe) { + for (let match of blacklist) { + if (ingredient.includes(match)) { + includes_blacklisted = true; + break; + } + } + if (includes_blacklisted) break; + } + if (includes_blacklisted) continue; + this.add_method(new ItemNode(this.manager, this, this.name).setRecipe(recipe)) + } + } + + let block_sources = mc.getItemBlockSources(this.name); + if (block_sources.length > 0 && this.name !== 'torch' && !this.name.includes('bed')) { // Do not collect placed torches or beds + for (let block_source 
of block_sources) { + if (block_source === 'grass_block') continue; // Dirt nodes will collect grass blocks + let tool = mc.getBlockTool(block_source); + this.add_method(new ItemNode(this.manager, this, this.name).setCollectable(block_source, tool)); + } + } + + let smeltingIngredient = mc.getItemSmeltingIngredient(this.name); + if (smeltingIngredient) { + this.add_method(new ItemNode(this.manager, this, this.name).setSmeltable(smeltingIngredient)); + } + + let animal_source = mc.getItemAnimalSource(this.name); + if (animal_source) { + this.add_method(new ItemNode(this.manager, this, this.name).setHuntable(animal_source)); + } + } + + containsCircularDependency() { + let p = this.parent; + while (p) { + if (p.name === this.name) { + return true; + } + p = p.parent; + } + return false; + } + + getBestMethod(q=1) { + let best_cost = -1; + let best_method = null; + for (let method of this.methods) { + let cost = method.getDepth(q) + method.getFails(q); + if (best_cost == -1 || cost < best_cost) { + best_cost = cost; + best_method = method; + } + } + return best_method + } + + isDone(q=1) { + if (this.methods.length === 0) + return false; + return this.getBestMethod(q).isDone(q); + } + + getDepth(q=1) { + if (this.methods.length === 0) + return 0; + return this.getBestMethod(q).getDepth(q); + } + + getFails(q=1) { + if (this.methods.length === 0) + return 0; + return this.getBestMethod(q).getFails(q); + } + + getNext(q=1) { + if (this.methods.length === 0) + return null; + return this.getBestMethod(q).getNext(q); + } +} + + +export class ItemGoal { + constructor(agent) { + this.agent = agent; + this.goal = null; + this.nodes = {}; + this.failed = []; + } + + async executeNext(item_name, item_quantity=1) { + if (this.nodes[item_name] === undefined) + this.nodes[item_name] = new ItemWrapper(this, null, item_name); + this.goal = this.nodes[item_name]; + + // Get next goal to execute + let next_info = this.goal.getNext(item_quantity); + if (!next_info) { + console.log(`Invalid item goal ${this.goal.name}`); + return false; + } + let next = next_info.node; + let quantity = next_info.quantity; + + // Prevent unnecessary attempts to obtain blocks that are not nearby + if (next.type === 'block' && !world.getNearbyBlockTypes(this.agent.bot).includes(next.source) || + next.type === 'hunt' && !world.getNearbyEntityTypes(this.agent.bot).includes(next.source)) { + next.fails += 1; + + // If the bot has failed to obtain the block before, explore + if (this.failed.includes(next.name)) { + this.failed = this.failed.filter((item) => item !== next.name); + await this.agent.actions.runAction('itemGoal:explore', async () => { + await skills.moveAway(this.agent.bot, 8); + }); + } else { + this.failed.push(next.name); + await new Promise((resolve) => setTimeout(resolve, 500)); + this.agent.bot.emit('idle'); + } + return false; + } + + // Wait for the bot to be idle before attempting to execute the next goal + if (!this.agent.isIdle()) + return false; + + // Execute the next goal + let init_quantity = world.getInventoryCounts(this.agent.bot)[next.name] || 0; + await this.agent.actions.runAction('itemGoal:next', async () => { + await next.execute(quantity); + }); + let final_quantity = world.getInventoryCounts(this.agent.bot)[next.name] || 0; + + // Log the result of the goal attempt + if (final_quantity > init_quantity) { + console.log(`Successfully obtained ${next.name} for goal ${this.goal.name}`); + } else { + console.log(`Failed to obtain ${next.name} for goal ${this.goal.name}`); + } + return final_quantity > 
init_quantity; + } +} diff --git a/src/agent/npc/utils.js b/src/agent/npc/utils.js new file mode 100644 index 0000000..53d8486 --- /dev/null +++ b/src/agent/npc/utils.js @@ -0,0 +1,126 @@ +import * as world from '../library/world.js'; +import * as mc from '../../utils/mcdata.js'; + + +export function getTypeOfGeneric(bot, block_name) { + // Get type of wooden block + if (mc.MATCHING_WOOD_BLOCKS.includes(block_name)) { + + // Return most common wood type in inventory + let type_count = {}; + let max_count = 0; + let max_type = null; + let inventory = world.getInventoryCounts(bot); + for (const item in inventory) { + for (const wood of mc.WOOD_TYPES) { + if (item.includes(wood)) { + if (type_count[wood] === undefined) + type_count[wood] = 0; + type_count[wood] += inventory[item]; + if (type_count[wood] > max_count) { + max_count = type_count[wood]; + max_type = wood; + } + } + } + } + if (max_type !== null) + return max_type + '_' + block_name; + + // Return nearest wood type + let log_types = mc.WOOD_TYPES.map((wood) => wood + '_log'); + let blocks = world.getNearestBlocks(bot, log_types, 16, 1); + if (blocks.length > 0) { + let wood = blocks[0].name.split('_')[0]; + return wood + '_' + block_name; + } + + // Return oak + return 'oak_' + block_name; + } + + // Get type of bed + if (block_name === 'bed') { + + // Return most common wool type in inventory + let type_count = {}; + let max_count = 0; + let max_type = null; + let inventory = world.getInventoryCounts(bot); + for (const item in inventory) { + for (const color of mc.WOOL_COLORS) { + if (item === color + '_wool') { + if (type_count[color] === undefined) + type_count[color] = 0; + type_count[color] += inventory[item]; + if (type_count[color] > max_count) { + max_count = type_count[color]; + max_type = color; + } + } + } + } + if (max_type !== null) + return max_type + '_' + block_name; + + // Return white + return 'white_' + block_name; + } + return block_name; +} + + +export function blockSatisfied(target_name, block) { + if (target_name == 'dirt') { + return block.name == 'dirt' || block.name == 'grass_block'; + } else if (mc.MATCHING_WOOD_BLOCKS.includes(target_name)) { + return block.name.endsWith(target_name); + } else if (target_name == 'bed') { + return block.name.endsWith('bed'); + } else if (target_name == 'torch') { + return block.name.includes('torch'); + } + return block.name == target_name; +} + + +export function itemSatisfied(bot, item, quantity=1) { + let qualifying = [item]; + if (item.includes('pickaxe') || + item.includes('axe') || + item.includes('shovel') || + item.includes('hoe') || + item.includes('sword')) { + let material = item.split('_')[0]; + let type = item.split('_')[1]; + if (material === 'wooden') { + qualifying.push('stone_' + type); + qualifying.push('iron_' + type); + qualifying.push('gold_' + type); + qualifying.push('diamond_' + type); + } else if (material === 'stone') { + qualifying.push('iron_' + type); + qualifying.push('gold_' + type); + qualifying.push('diamond_' + type); + } else if (material === 'iron') { + qualifying.push('gold_' + type); + qualifying.push('diamond_' + type); + } else if (material === 'gold') { + qualifying.push('diamond_' + type); + } + } + for (let item of qualifying) { + if (world.getInventoryCounts(bot)[item] >= quantity) { + return true; + } + } + return false; +} + + +export function rotateXZ(x, z, orientation, sizex, sizez) { + if (orientation === 0) return [x, z]; + if (orientation === 1) return [z, sizex-x-1]; + if (orientation === 2) return [sizex-x-1, 
sizez-z-1]; + if (orientation === 3) return [sizez-z-1, x]; +} diff --git a/src/agent/self_prompter.js b/src/agent/self_prompter.js new file mode 100644 index 0000000..439b6c6 --- /dev/null +++ b/src/agent/self_prompter.js @@ -0,0 +1,114 @@ +export class SelfPrompter { + constructor(agent) { + this.agent = agent; + this.on = false; + this.loop_active = false; + this.interrupt = false; + this.prompt = ''; + this.idle_time = 0; + this.cooldown = 2000; + } + + start(prompt) { + console.log('Self-prompting started.'); + if (!prompt) { + if (!this.prompt) + return 'No prompt specified. Ignoring request.'; + prompt = this.prompt; + } + if (this.on) { + this.prompt = prompt; + } + this.on = true; + this.prompt = prompt; + this.startLoop(); + } + + setPrompt(prompt) { + this.prompt = prompt; + } + + async startLoop() { + if (this.loop_active) { + console.warn('Self-prompt loop is already active. Ignoring request.'); + return; + } + console.log('starting self-prompt loop') + this.loop_active = true; + let no_command_count = 0; + const MAX_NO_COMMAND = 3; + while (!this.interrupt) { + const msg = `You are self-prompting with the goal: '${this.prompt}'. Your next response MUST contain a command with this syntax: !commandName. Respond:`; + + let used_command = await this.agent.handleMessage('system', msg, -1); + if (!used_command) { + no_command_count++; + if (no_command_count >= MAX_NO_COMMAND) { + let out = `Agent did not use command in the last ${MAX_NO_COMMAND} auto-prompts. Stopping auto-prompting.`; + this.agent.openChat(out); + console.warn(out); + this.on = false; + break; + } + } + else { + no_command_count = 0; + await new Promise(r => setTimeout(r, this.cooldown)); + } + } + console.log('self prompt loop stopped') + this.loop_active = false; + this.interrupt = false; + } + + update(delta) { + // automatically restarts loop + if (this.on && !this.loop_active && !this.interrupt) { + if (this.agent.isIdle()) + this.idle_time += delta; + else + this.idle_time = 0; + + if (this.idle_time >= this.cooldown) { + console.log('Restarting self-prompting...'); + this.startLoop(); + this.idle_time = 0; + } + } + else { + this.idle_time = 0; + } + } + + async stopLoop() { + // you can call this without await if you don't need to wait for it to finish + if (this.interrupt) + return; + console.log('stopping self-prompt loop') + this.interrupt = true; + while (this.loop_active) { + await new Promise(r => setTimeout(r, 500)); + } + this.interrupt = false; + } + + async stop(stop_action=true) { + this.interrupt = true; + if (stop_action) + await this.agent.actions.stop(); + await this.stopLoop(); + this.on = false; + } + + shouldInterrupt(is_self_prompt) { // to be called from handleMessage + return is_self_prompt && this.on && this.interrupt; + } + + handleUserPromptedCmd(is_self_prompt, is_action) { + // if a user messages and the bot responds with an action, stop the self-prompt loop + if (!is_self_prompt && is_action) { + this.stopLoop(); + // this stops it from responding from the handlemessage loop and the self-prompt loop at the same time + } + } +} \ No newline at end of file diff --git a/src/agent/tasks.js b/src/agent/tasks.js new file mode 100644 index 0000000..1b9d56e --- /dev/null +++ b/src/agent/tasks.js @@ -0,0 +1,196 @@ +import { readFileSync } from 'fs'; +import { executeCommand } from './commands/index.js'; +import { getPosition } from './library/world.js' +import settings from '../../settings.js'; + + +export class TaskValidator { + constructor(data, agent) { + this.target = data.target; + 
this.number_of_target = data.number_of_target; + this.agent = agent; + } + + validate() { + try { + let valid = false; + let total_targets = 0; + this.agent.bot.inventory.slots.forEach((slot) => { + if (slot && slot.name.toLowerCase() === this.target) { + total_targets += slot.count; + } + if (slot && slot.name.toLowerCase() === this.target && slot.count >= this.number_of_target) { + valid = true; + console.log('Task is complete'); + } + }); + if (total_targets >= this.number_of_target) { + valid = true; + console.log('Task is complete'); + } + return valid; + } catch (error) { + console.error('Error validating task:', error); + return false; + } + } +} + + +export class Task { + constructor(agent, task_path, task_id) { + this.agent = agent; + this.data = null; + this.taskTimeout = 300; + this.taskStartTime = Date.now(); + this.validator = null; + this.blocked_actions = []; + if (task_path && task_id) { + this.data = this.loadTask(task_path, task_id); + this.taskTimeout = this.data.timeout || 300; + this.taskStartTime = Date.now(); + this.validator = new TaskValidator(this.data, this.agent); + this.blocked_actions = this.data.blocked_actions || []; + this.restrict_to_inventory = !!this.data.restrict_to_inventory; + if (this.data.goal) + this.blocked_actions.push('!endGoal'); + if (this.data.conversation) + this.blocked_actions.push('!endConversation'); + } + } + + loadTask(task_path, task_id) { + try { + const tasksFile = readFileSync(task_path, 'utf8'); + const tasks = JSON.parse(tasksFile); + let task = tasks[task_id]; + if (!task) { + throw new Error(`Task ${task_id} not found`); + } + if ((!task.agent_count || task.agent_count <= 1) && this.agent.count_id > 0) { + task = null; + } + + return task; + } catch (error) { + console.error('Error loading task:', error); + process.exit(1); + } + } + + isDone() { + if (this.validator && this.validator.validate()) + return {"message": 'Task successful', "code": 2}; + // TODO check for other terminal conditions + // if (this.task.goal && !this.self_prompter.on) + // return {"message": 'Agent ended goal', "code": 3}; + // if (this.task.conversation && !inConversation()) + // return {"message": 'Agent ended conversation', "code": 3}; + if (this.taskTimeout) { + const elapsedTime = (Date.now() - this.taskStartTime) / 1000; + if (elapsedTime >= this.taskTimeout) { + console.log('Task timeout reached. Task unsuccessful.'); + return {"message": 'Task timeout reached', "code": 4}; + } + } + return false; + } + + async initBotTask() { + if (this.data === null) + return; + let bot = this.agent.bot; + let name = this.agent.name; + + bot.chat(`/clear ${name}`); + console.log(`Cleared ${name}'s inventory.`); + + //wait for a bit so inventory is cleared + await new Promise((resolve) => setTimeout(resolve, 500)); + + let initial_inventory = null; + if (this.data.agent_count > 1) { + initial_inventory = this.data.initial_inventory[this.agent.count_id.toString()]; + console.log("Initial inventory:", initial_inventory); + } else if (this.data) { + console.log("Initial inventory:", this.data.initial_inventory); + initial_inventory = this.data.initial_inventory; + } + + if ("initial_inventory" in this.data) { + console.log("Setting inventory..."); + console.log("Inventory to set:", initial_inventory); + for (let key of Object.keys(initial_inventory)) { + console.log('Giving item:', key); + bot.chat(`/give ${name} ${key} ${initial_inventory[key]}`); + } + //wait for a bit so inventory is set + await new Promise((resolve) => setTimeout(resolve, 500)); + console.log("Done giving inventory items."); + } + // Function to generate random numbers + + function getRandomOffset(range) { + return Math.floor(Math.random() * (range * 2 + 1)) - range; + } + + let human_player_name = null; + let available_agents = settings.profiles.map((p) => JSON.parse(readFileSync(p, 'utf8')).name); // TODO this does not work with command line args + + // Finding if there is a human player on the server + for (const playerName in bot.players) { + const player = bot.players[playerName]; + if (!available_agents.some((n) => n === playerName)) { + console.log('Found human player:', player.username); + human_player_name = player.username + break; + } + } + + // If there are multiple human players, teleport to the first one + + // teleport near a human player if found by default + + if (human_player_name) { + console.log(`Teleporting ${name} to human ${human_player_name}`) + bot.chat(`/tp ${name} ${human_player_name}`) // teleport on top of the human player + + } + await new Promise((resolve) => setTimeout(resolve, 200)); + + // now all bots are teleported on top of each other (which kinda looks ugly) + // Thus, we need to teleport them to random distances to make it look better + + /* + Note : We don't want randomness for construction task as the reference point matters a lot. + Another reason for no randomness for construction task is because, often times the user would fly in the air, + then set a random block to dirt and teleport the bot to stand on that block for starting the construction, + This was done by MaxRobinson in one of the youtube videos. 
+ */ + + if (this.data.type !== 'construction') { + const pos = getPosition(bot); + const xOffset = getRandomOffset(5); + const zOffset = getRandomOffset(5); + bot.chat(`/tp ${name} ${Math.floor(pos.x + xOffset)} ${pos.y + 3} ${Math.floor(pos.z + zOffset)}`); + await new Promise((resolve) => setTimeout(resolve, 200)); + } + + if (this.data.agent_count && this.data.agent_count > 1) { + // TODO wait for other bots to join + await new Promise((resolve) => setTimeout(resolve, 10000)); + if (available_agents.length < this.data.agent_count) { + console.log(`Missing ${this.data.agent_count - available_agents.length} bot(s).`); + this.agent.killAll(); + } + } + + if (this.data.goal) { + await executeCommand(this.agent, `!goal("${this.data.goal}")`); + } + + if (this.data.conversation && this.agent.count_id === 0) { + let other_name = available_agents.filter(n => n !== name)[0]; + await executeCommand(this.agent, `!startConversation("${other_name}", "${this.data.conversation}")`); + } + } +} diff --git a/src/agent/viewer.js b/src/agent/viewer.js new file mode 100644 index 0000000..6ce8a27 --- /dev/null +++ b/src/agent/viewer.js @@ -0,0 +1,8 @@ +import settings from '../../settings.js'; +import prismarineViewer from 'prismarine-viewer'; +const mineflayerViewer = prismarineViewer.mineflayer; + +export function addViewer(bot, count_id) { + if (settings.show_bot_views) + mineflayerViewer(bot, { port: 3000+count_id, firstPerson: true, }); +} \ No newline at end of file diff --git a/src/models/claude.js b/src/models/claude.js new file mode 100644 index 0000000..9efd669 --- /dev/null +++ b/src/models/claude.js @@ -0,0 +1,50 @@ +import Anthropic from '@anthropic-ai/sdk'; +import { strictFormat } from '../utils/text.js'; +import { getKey } from '../utils/keys.js'; + +export class Claude { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params || {}; + + let config = {}; + if (url) + config.baseURL = url; + + config.apiKey = getKey('ANTHROPIC_API_KEY'); + + this.anthropic = new Anthropic(config); + } + + async sendRequest(turns, systemMessage) { + const messages = strictFormat(turns); + let res = null; + try { + console.log('Awaiting anthropic api response...') + if (!this.params.max_tokens) { + this.params.max_tokens = 4096; + } + const resp = await this.anthropic.messages.create({ + model: this.model_name || "claude-3-sonnet-20240229", + system: systemMessage, + messages: messages, + ...(this.params || {}) + }); + + console.log('Received.') + res = resp.content[0].text; + } + catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + } + return res; + } + + async embed(text) { + throw new Error('Embeddings are not supported by Claude.'); + } +} + + + diff --git a/src/models/deepseek.js b/src/models/deepseek.js new file mode 100644 index 0000000..da98ba2 --- /dev/null +++ b/src/models/deepseek.js @@ -0,0 +1,58 @@ +import OpenAIApi from 'openai'; +import { getKey, hasKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; + +export class DeepSeek { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + + let config = {}; + + config.baseURL = url || 'https://api.deepseek.com'; + config.apiKey = getKey('DEEPSEEK_API_KEY'); + + this.openai = new OpenAIApi(config); + } + + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + messages = strictFormat(messages); + + const pack = { + model: 
this.model_name || "deepseek-chat", + messages, + stop: stop_seq, + ...(this.params || {}) + }; + + let res = null; + try { + console.log('Awaiting deepseek api response...') + // console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + return res; + } + + async embed(text) { + throw new Error('Embeddings are not supported by Deepseek.'); + } +} + + + diff --git a/src/models/gemini.js b/src/models/gemini.js new file mode 100644 index 0000000..b988c7f --- /dev/null +++ b/src/models/gemini.js @@ -0,0 +1,116 @@ +import { GoogleGenerativeAI } from '@google/generative-ai'; +import { toSinglePrompt, strictFormat } from '../utils/text.js'; +import { getKey } from '../utils/keys.js'; + +export class Gemini { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + this.url = url; + this.safetySettings = [ + { + "category": "HARM_CATEGORY_DANGEROUS", + "threshold": "BLOCK_NONE", + }, + { + "category": "HARM_CATEGORY_HARASSMENT", + "threshold": "BLOCK_NONE", + }, + { + "category": "HARM_CATEGORY_HATE_SPEECH", + "threshold": "BLOCK_NONE", + }, + { + "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", + "threshold": "BLOCK_NONE", + }, + { + "category": "HARM_CATEGORY_DANGEROUS_CONTENT", + "threshold": "BLOCK_NONE", + }, + ]; + + this.genAI = new GoogleGenerativeAI(getKey('GEMINI_API_KEY')); + } + + async sendRequest(turns, systemMessage) { + let model; + const modelConfig = { + model: this.model_name || "gemini-1.5-flash", + // systemInstruction does not work bc google is trash + }; + + if (this.url) { + model = this.genAI.getGenerativeModel( + modelConfig, + { baseUrl: this.url }, + { safetySettings: this.safetySettings } + ); + } else { + model = this.genAI.getGenerativeModel( + modelConfig, + { safetySettings: this.safetySettings } + ); + } + + console.log('Awaiting Google API response...'); + + // Prepend system message and format turns cause why not + turns.unshift({ role: 'system', content: systemMessage }); + turns = strictFormat(turns); + let contents = []; + for (let turn of turns) { + contents.push({ + role: turn.role === 'assistant' ? 
'model' : 'user', + parts: [{ text: turn.content }] + }); + } + + const result = await model.generateContent({ + contents, + generationConfig: { + ...(this.params || {}) + } + }); + const response = await result.response; + let text; + + // Handle "thinking" models since they smart + if (this.model_name && this.model_name.includes("thinking")) { + if ( + response.candidates && + response.candidates.length > 0 && + response.candidates[0].content && + response.candidates[0].content.parts && + response.candidates[0].content.parts.length > 1 + ) { + text = response.candidates[0].content.parts[1].text; + } else { + console.warn("Unexpected response structure for thinking model:", response); + text = response.text(); + } + } else { + text = response.text(); + } + + console.log('Received.'); + return text; + } + + async embed(text) { + let model; + if (this.url) { + model = this.genAI.getGenerativeModel( + { model: "text-embedding-004" }, + { baseUrl: this.url } + ); + } else { + model = this.genAI.getGenerativeModel( + { model: "text-embedding-004" } + ); + } + + const result = await model.embedContent(text); + return result.embedding.values; + } +} \ No newline at end of file diff --git a/src/models/glhf.js b/src/models/glhf.js new file mode 100644 index 0000000..9c27799 --- /dev/null +++ b/src/models/glhf.js @@ -0,0 +1,62 @@ +import OpenAIApi from 'openai'; +import { getKey } from '../utils/keys.js'; + +// glhf doesn't supply an SDK for their models, but fully supports OpenAI SDKs +export class glhf { + constructor(model_name, url) { + this.model_name = model_name; + + // Retrieve the API key from keys.json + const apiKey = getKey('GHLF_API_KEY'); + if (!apiKey) { + throw new Error('API key not found. Please check keys.json and ensure GHLF_API_KEY is defined.'); + } + + // Configure OpenAIApi with the retrieved API key and base URL + this.openai = new OpenAIApi({ + apiKey, + baseURL: url || "https://glhf.chat/api/openai/v1" + }); + } + + async sendRequest(turns, systemMessage, stop_seq = '***') { + // Construct the message array for the API request + let messages = [{ 'role': 'system', 'content': systemMessage }].concat(turns); + + const pack = { + model: this.model_name || "hf:meta-llama/Llama-3.1-405B-Instruct", + messages, + stop: [stop_seq] + }; + + let res = null; + try { + console.log('Awaiting glhf.chat API response...'); + // Uncomment the line below if you need to debug the messages + // console.log('Messages:', messages); + + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason === 'length') { + throw new Error('Context length exceeded'); + } + + console.log('Received.'); + res = completion.choices[0].message.content; + } catch (err) { + if ((err.message === 'Context length exceeded' || err.code === 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + + // Replace special tokens in the response + return res.replace(/<\|separator\|>/g, '*no response*'); + } + + async embed(text) { + throw new Error('Embeddings are not supported by glhf.'); + } +} \ No newline at end of file diff --git a/src/models/gpt.js b/src/models/gpt.js new file mode 100644 index 0000000..1a88bf4 --- /dev/null +++ b/src/models/gpt.js @@ -0,0 +1,72 @@ +import OpenAIApi from 'openai'; +import { getKey, hasKey } from 
'../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; + +export class GPT { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + + let config = {}; + if (url) + config.baseURL = url; + + if (hasKey('OPENAI_ORG_ID')) + config.organization = getKey('OPENAI_ORG_ID'); + + config.apiKey = getKey('OPENAI_API_KEY'); + + this.openai = new OpenAIApi(config); + } + + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + const pack = { + model: this.model_name || "gpt-3.5-turbo", + messages, + stop: stop_seq, + ...(this.params || {}) + }; + if (this.model_name.includes('o1')) { + pack.messages = strictFormat(messages); + delete pack.stop; + } + + let res = null; + + try { + console.log('Awaiting openai api response from model', this.model_name) + // console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + return res; + } + + async embed(text) { + if (text.length > 8191) + text = text.slice(0, 8191); + const embedding = await this.openai.embeddings.create({ + model: this.model_name || "text-embedding-3-small", + input: text, + encoding_format: "float", + }); + return embedding.data[0].embedding; + } +} + + + diff --git a/src/models/grok.js b/src/models/grok.js new file mode 100644 index 0000000..a8c6672 --- /dev/null +++ b/src/models/grok.js @@ -0,0 +1,61 @@ +import OpenAIApi from 'openai'; +import { getKey } from '../utils/keys.js'; + +// xAI doesn't supply a SDK for their models, but fully supports OpenAI and Anthropic SDKs +export class Grok { + constructor(model_name, url, params) { + this.model_name = model_name; + this.url = url; + this.params = params; + + let config = {}; + if (url) + config.baseURL = url; + else + config.baseURL = "https://api.x.ai/v1" + + config.apiKey = getKey('XAI_API_KEY'); + + this.openai = new OpenAIApi(config); + } + + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + const pack = { + model: this.model_name || "grok-beta", + messages, + stop: [stop_seq], + ...(this.params || {}) + }; + + let res = null; + try { + console.log('Awaiting xai api response...') + ///console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + // sometimes outputs special token <|separator|>, just 
replace it + return res.replace(/<\|separator\|>/g, '*no response*'); + } + + async embed(text) { + throw new Error('Embeddings are not supported by Grok.'); + } +} + + + diff --git a/src/models/groq.js b/src/models/groq.js new file mode 100644 index 0000000..0a94550 --- /dev/null +++ b/src/models/groq.js @@ -0,0 +1,86 @@ +import Groq from 'groq-sdk' +import { getKey } from '../utils/keys.js'; + +// Umbrella class for Mixtral, Llama, Gemma... +export class GroqCloudAPI { + constructor(model_name, url, params) { + this.model_name = model_name; + this.url = url; + this.params = params || {}; + // Groq Cloud does not support custom URLs; warn if provided + if (this.url) { + console.warn("Groq Cloud has no implementation for custom URLs. Ignoring provided URL."); + } + this.groq = new Groq({ apiKey: getKey('GROQCLOUD_API_KEY') }); + } + + async sendRequest(turns, systemMessage, stop_seq = null) { + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + const messages = [{ role: "system", content: systemMessage }].concat(turns); + + while (attempt < maxAttempts) { + attempt++; + let res = null; + try { + console.log(`Awaiting Groq response... (model: ${this.model_name || "mixtral-8x7b-32768"}, attempt: ${attempt})`); + if (!this.params.max_tokens) { + this.params.max_tokens = 16384; + } + // Create the streaming chat completion request + const completion = await this.groq.chat.completions.create({ + messages: messages, + model: this.model_name || "mixtral-8x7b-32768", + stream: true, + stop: stop_seq, + ...(this.params || {}) + }); + + let temp_res = ""; + // Aggregate streamed chunks into a full response + for await (const chunk of completion) { + temp_res += chunk.choices[0]?.delta?.content || ''; + } + res = temp_res; + } catch (err) { + console.log(err); + res = "My brain just kinda stopped working. Try again."; + } + + // If the model name includes "deepseek-r1", handle the <think> tags + if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { + const hasOpenTag = res.includes("<think>"); + const hasCloseTag = res.includes("</think>"); + + // If a partial <think> block is detected, log a warning and retry + if (hasOpenTag && !hasCloseTag) { + console.warn("Partial <think> block detected. Re-generating Groq request..."); + continue; + } + + // If only the closing tag is present, prepend an opening tag + if (hasCloseTag && !hasOpenTag) { + res = '<think>' + res; + } + // Remove the complete <think> block (and any content inside) from the response + res = res.replace(/<think>[\s\S]*?<\/think>/g, '').trim(); + } + + finalRes = res; + break; // Exit the loop once a valid response is obtained + } + + if (finalRes == null) { + console.warn("Could not obtain a valid <think> block or normal response after max attempts."); + finalRes = "Response incomplete, please try again."; + } + finalRes = finalRes.replace(/<\|separator\|>/g, '*no response*'); + + return finalRes; + } + + async embed(text) { + console.log("There is no support for embeddings in Groq. 
However, the following text was provided: " + text); + } +} \ No newline at end of file diff --git a/src/models/huggingface.js b/src/models/huggingface.js new file mode 100644 index 0000000..ab74f24 --- /dev/null +++ b/src/models/huggingface.js @@ -0,0 +1,87 @@ +import { toSinglePrompt } from '../utils/text.js'; +import { getKey } from '../utils/keys.js'; +import { HfInference } from "@huggingface/inference"; + +export class HuggingFace { + constructor(model_name, url, params) { + // Remove 'huggingface/' prefix if present + this.model_name = model_name.replace('huggingface/', ''); + this.url = url; + this.params = params; + + if (this.url) { + console.warn("Hugging Face doesn't support custom urls!"); + } + + this.huggingface = new HfInference(getKey('HUGGINGFACE_API_KEY')); + } + + async sendRequest(turns, systemMessage) { + const stop_seq = '***'; + // Build a single prompt from the conversation turns + const prompt = toSinglePrompt(turns, null, stop_seq); + // Fallback model if none was provided + const model_name = this.model_name || 'meta-llama/Meta-Llama-3-8B'; + // Combine system message with the prompt + const input = systemMessage + "\n" + prompt; + + // We'll try up to 5 times in case of partial <think> blocks for DeepSeek-R1 models. + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting Hugging Face API response... (model: ${model_name}, attempt: ${attempt})`); + let res = ''; + try { + // Consume the streaming response chunk by chunk + for await (const chunk of this.huggingface.chatCompletionStream({ + model: model_name, + messages: [{ role: "user", content: input }], + ...(this.params || {}) + })) { + res += (chunk.choices[0]?.delta?.content || ""); + } + } catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + // Break out immediately; we only retry when handling partial <think> tags. + break; + } + + // If the model is DeepSeek-R1, check for mismatched <think> blocks. + if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { + const hasOpenTag = res.includes("<think>"); + const hasCloseTag = res.includes("</think>"); + + // If there's a partial mismatch, warn and retry the entire request. + if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + console.warn("Partial <think> block detected. Re-generating..."); + continue; + } + + // If both tags are present, remove the <think> block entirely. + if (hasOpenTag && hasCloseTag) { + res = res.replace(/<think>[\s\S]*?<\/think>/g, '').trim(); + } + } + + finalRes = res; + break; // Exit loop if we got a valid response. + } + + // If no valid response was obtained after max attempts, assign a fallback. + if (finalRes == null) { + console.warn("Could not get a valid <think> block or normal response after max attempts."); + finalRes = 'Response incomplete, please try again.'; + } + console.log('Received.'); + console.log(finalRes); + return finalRes; + } + + async embed(text) { + throw new Error('Embeddings are not supported by HuggingFace.'); + } +} \ No newline at end of file diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js new file mode 100644 index 0000000..1fe3df6 --- /dev/null +++ b/src/models/hyperbolic.js @@ -0,0 +1,92 @@ +import { getKey } from '../utils/keys.js'; + + +/* + * + * Yes, this code was written by an Ai. 
It was written by GPT-o1 and tested :) + * + */ + +export class hyperbolic { + constructor(modelName, apiUrl) { + this.modelName = modelName || "deepseek-ai/DeepSeek-V3"; + this.apiUrl = apiUrl || "https://api.hyperbolic.xyz/v1/chat/completions"; + + // Retrieve the Hyperbolic API key from keys.js + this.apiKey = getKey('HYPERBOLIC_API_KEY'); + if (!this.apiKey) { + throw new Error('HYPERBOLIC_API_KEY not found. Check your keys.js file.'); + } + } + + /** + * Sends a chat completion request to the Hyperbolic endpoint. + * + * @param {Array} turns - An array of message objects, e.g. [{role: 'user', content: 'Hi'}]. + * @param {string} systemMessage - The system prompt or instruction. + * @param {string} stopSeq - A string that represents a stopping sequence, default '***'. + * @returns {Promise} - The content of the model's reply. + */ + async sendRequest(turns, systemMessage, stopSeq = '***') { + // Prepare the messages with a system prompt at the beginning + const messages = [{ role: 'system', content: systemMessage }, ...turns]; + + // Build the request payload (mirroring your original structure) + const payload = { + model: this.modelName, + messages: messages, + max_tokens: 8192, + temperature: 0.7, + top_p: 0.9, + stream: false + }; + + let completionContent = null; + + try { + console.log('Awaiting Hyperbolic API response...'); + console.log('Messages:', messages); + + const response = await fetch(this.apiUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${this.apiKey}` + }, + body: JSON.stringify(payload) + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const data = await response.json(); + if ( + data?.choices?.[0]?.finish_reason && + data.choices[0].finish_reason === 'length' + ) { + throw new Error('Context length exceeded'); + } + + completionContent = data?.choices?.[0]?.message?.content || ''; + console.log('Received response from Hyperbolic.'); + + } catch (err) { + if ( + (err.message === 'Context length exceeded' || + err.code === 'context_length_exceeded') && + turns.length > 1 + ) { + console.log('Context length exceeded, trying again with a shorter context...'); + return await this.sendRequest(turns.slice(1), systemMessage, stopSeq); + } else { + console.log(err); + completionContent = 'My brain disconnected, try again.'; + } + } + return completionContent.replace(/<\|separator\|>/g, '*no response*'); + } + async embed(text) { + throw new Error('Embeddings are not supported by Hyperbolic.'); + } +} diff --git a/src/models/local.js b/src/models/local.js new file mode 100644 index 0000000..64e5ab1 --- /dev/null +++ b/src/models/local.js @@ -0,0 +1,102 @@ +import { strictFormat } from '../utils/text.js'; + +export class Local { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + this.url = url || 'http://127.0.0.1:11434'; + this.chat_endpoint = '/api/chat'; + this.embedding_endpoint = '/api/embeddings'; + } + + async sendRequest(turns, systemMessage) { + let model = this.model_name || 'llama3'; + let messages = strictFormat(turns); + messages.unshift({ role: 'system', content: systemMessage }); + + // We'll attempt up to 5 times for models like "deepseek-r1" if the tags are mismatched. + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; + + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting local response... 
(model: ${model}, attempt: ${attempt})`); + let res = null; + try { + res = await this.send(this.chat_endpoint, { + model: model, + messages: messages, + stream: false, + ...(this.params || {}) + }); + if (res) { + res = res['message']['content']; + } else { + res = 'No response data.'; + } + } catch (err) { + if (err.message.toLowerCase().includes('context length') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + + // If the model name includes "deepseek-r1" or "Andy-3.5-reasoning", then handle the <think> block. + if (this.model_name && (this.model_name.includes("deepseek-r1") || this.model_name.includes("andy-3.5-reasoning"))) { + const hasOpenTag = res.includes("<think>"); + const hasCloseTag = res.includes("</think>"); + + // If there's a partial mismatch, retry to get a complete response. + if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + console.warn("Partial <think> block detected. Re-generating..."); + continue; + } + + // If both tags appear, remove them (and everything inside). + if (hasOpenTag && hasCloseTag) { + res = res.replace(/<think>[\s\S]*?<\/think>/g, ''); + } + } + + finalRes = res; + break; // Exit the loop if we got a valid response. + } + + if (finalRes == null) { + console.warn("Could not get a valid <think> block or normal response after max attempts."); + finalRes = 'Response incomplete, please try again.'; + } + return finalRes; + } + + async embed(text) { + let model = this.model_name || 'nomic-embed-text'; + let body = { model: model, prompt: text }; + let res = await this.send(this.embedding_endpoint, body); + return res['embedding']; + } + + async send(endpoint, body) { + const url = new URL(endpoint, this.url); + let method = 'POST'; + let headers = new Headers(); + const request = new Request(url, { method, headers, body: JSON.stringify(body) }); + let data = null; + try { + const res = await fetch(request); + if (res.ok) { + data = await res.json(); + } else { + throw new Error(`Ollama Status: ${res.status}`); + } + } catch (err) { + console.error('Failed to send Ollama request.'); + console.error(err); + } + return data; + } +} diff --git a/src/models/mistral.js b/src/models/mistral.js new file mode 100644 index 0000000..b33d1de --- /dev/null +++ b/src/models/mistral.js @@ -0,0 +1,73 @@ +import { Mistral as MistralClient } from '@mistralai/mistralai'; +import { getKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; + +export class Mistral { + #client; + + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + + if (typeof url === "string") { + console.warn("Mistral does not support custom URLs, ignoring!"); + + } + + if (!getKey("MISTRAL_API_KEY")) { + throw new Error("Mistral API Key missing, make sure to set MISTRAL_API_KEY in keys.json") + } + + this.#client = new MistralClient( + { + apiKey: getKey("MISTRAL_API_KEY") + } + ); + + + // Prevents the following code from running when model not specified + if (typeof this.model_name === "undefined") return; + + // get the model name without the "mistral" or "mistralai" prefix + // e.g "mistral/mistral-large-latest" -> "mistral-large-latest" + if (typeof model_name.split("/")[1] !== "undefined") { + this.model_name = model_name.split("/")[1]; + } + } + + async sendRequest(turns, systemMessage) { + + let result; + + try { + const model = this.model_name || 
"mistral-large-latest"; + + const messages = [ + { role: "system", content: systemMessage } + ]; + messages.push(...strictFormat(turns)); + + const response = await this.#client.chat.complete({ + model, + messages, + ...(this.params || {}) + }); + + result = response.choices[0].message.content; + } catch (err) { + console.log(err) + + result = "My brain disconnected, try again."; + } + + return result; + } + + async embed(text) { + const embedding = await this.#client.embeddings.create({ + model: "mistral-embed", + inputs: text + }); + return embedding.data[0].embedding; + } +} \ No newline at end of file diff --git a/src/models/novita.js b/src/models/novita.js new file mode 100644 index 0000000..8f2dd08 --- /dev/null +++ b/src/models/novita.js @@ -0,0 +1,70 @@ +import OpenAIApi from 'openai'; +import { getKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; + +// llama, mistral +export class Novita { + constructor(model_name, url, params) { + this.model_name = model_name.replace('novita/', ''); + this.url = url || 'https://api.novita.ai/v3/openai'; + this.params = params; + + + let config = { + baseURL: this.url + }; + config.apiKey = getKey('NOVITA_API_KEY'); + + this.openai = new OpenAIApi(config); + } + + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + + messages = strictFormat(messages); + + const pack = { + model: this.model_name || "meta-llama/llama-3.1-70b-instruct", + messages, + stop: [stop_seq], + ...(this.params || {}) + }; + + let res = null; + try { + console.log('Awaiting novita api response...') + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.') + res = completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + if (res.includes('')) { + let start = res.indexOf(''); + let end = res.indexOf('') + 8; + if (start != -1) { + if (end != -1) { + res = res.substring(0, start) + res.substring(end); + } else { + res = res.substring(0, start+7); + } + } + res = res.trim(); + } + return res; + } + + async embed(text) { + throw new Error('Embeddings are not supported by Novita AI.'); + } +} diff --git a/src/models/prompter.js b/src/models/prompter.js new file mode 100644 index 0000000..eb14c29 --- /dev/null +++ b/src/models/prompter.js @@ -0,0 +1,373 @@ +import { readFileSync, mkdirSync, writeFileSync} from 'fs'; +import { Examples } from '../utils/examples.js'; +import { getCommandDocs } from '../agent/commands/index.js'; +import { getSkillDocs } from '../agent/library/index.js'; +import { SkillLibrary } from "../agent/library/skill_library.js"; +import { stringifyTurns } from '../utils/text.js'; +import { getCommand } from '../agent/commands/index.js'; +import settings from '../../settings.js'; + +import { Gemini } from './gemini.js'; +import { GPT } from './gpt.js'; +import { Claude } from './claude.js'; +import { Mistral } from './mistral.js'; +import { ReplicateAPI } from './replicate.js'; +import { Local } from './local.js'; +import { Novita } from './novita.js'; +import { 
GroqCloudAPI } from './groq.js'; +import { HuggingFace } from './huggingface.js'; +import { Qwen } from "./qwen.js"; +import { Grok } from "./grok.js"; +import { DeepSeek } from './deepseek.js'; +import { hyperbolic } from './hyperbolic.js'; +import { glhf } from './glhf.js'; + +export class Prompter { + constructor(agent, fp) { + this.agent = agent; + this.profile = JSON.parse(readFileSync(fp, 'utf8')); + let default_profile = JSON.parse(readFileSync('./profiles/defaults/_default.json', 'utf8')); + let base_fp = settings.base_profile; + let base_profile = JSON.parse(readFileSync(base_fp, 'utf8')); + + // first use defaults to fill in missing values in the base profile + for (let key in default_profile) { + if (base_profile[key] === undefined) + base_profile[key] = default_profile[key]; + } + // then use base profile to fill in missing values in the individual profile + for (let key in base_profile) { + if (this.profile[key] === undefined) + this.profile[key] = base_profile[key]; + } + // base overrides default, individual overrides base + // Removed a bit of space that was right here by adding a comment instead of deleting it because I am making a pull request to this code and I can do whatever I want because I decided to add 2 new API services to Mindcraft now look at me go! Woohoo! I am flying off the edge of the screen oh no! + + this.convo_examples = null; + this.coding_examples = null; + + let name = this.profile.name; + this.cooldown = this.profile.cooldown ? this.profile.cooldown : 0; + this.last_prompt_time = 0; + this.awaiting_coding = false; + + // try to get "max_tokens" parameter, else null + let max_tokens = null; + if (this.profile.max_tokens) + max_tokens = this.profile.max_tokens; + + let chat_model_profile = this._selectAPI(this.profile.model); + this.chat_model = this._createModel(chat_model_profile); + + if (this.profile.code_model) { + let code_model_profile = this._selectAPI(this.profile.code_model); + this.code_model = this._createModel(code_model_profile); + } + else { + this.code_model = this.chat_model; + } + + let embedding = this.profile.embedding; + if (embedding === undefined) { + if (chat_model_profile.api !== 'ollama') + embedding = {api: chat_model_profile.api}; + else + embedding = {api: 'none'}; + } + else if (typeof embedding === 'string' || embedding instanceof String) + embedding = {api: embedding}; + + console.log('Using embedding settings:', embedding); + + try { + if (embedding.api === 'google') + this.embedding_model = new Gemini(embedding.model, embedding.url); + else if (embedding.api === 'openai') + this.embedding_model = new GPT(embedding.model, embedding.url); + else if (embedding.api === 'replicate') + this.embedding_model = new ReplicateAPI(embedding.model, embedding.url); + else if (embedding.api === 'ollama') + this.embedding_model = new Local(embedding.model, embedding.url); + else if (embedding.api === 'qwen') + this.embedding_model = new Qwen(embedding.model, embedding.url); + else if (embedding.api === 'mistral') + this.embedding_model = new Mistral(embedding.model, embedding.url); + else { + this.embedding_model = null; + console.log('Unknown embedding: ', embedding ? embedding.api : '[NOT SPECIFIED]', '. 
Using word overlap.'); + } + } + catch (err) { + console.log('Warning: Failed to initialize embedding model:', err.message); + console.log('Continuing anyway, using word overlap instead.'); + this.embedding_model = null; + } + this.skill_libary = new SkillLibrary(agent, this.embedding_model); + mkdirSync(`./bots/${name}`, { recursive: true }); + writeFileSync(`./bots/${name}/last_profile.json`, JSON.stringify(this.profile, null, 4), (err) => { + if (err) { + throw new Error('Failed to save profile:', err); + } + console.log("Copy profile saved."); + }); + } + + _selectAPI(profile) { + if (typeof profile === 'string' || profile instanceof String) { + profile = {model: profile}; + } + if (!profile.api) { + if (profile.model.includes('gemini')) + profile.api = 'google'; + else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) + profile.api = 'openai'; + else if (profile.model.includes('claude')) + profile.api = 'anthropic'; + else if (profile.model.includes('huggingface/')) + profile.api = "huggingface"; + else if (profile.model.includes('replicate/')) + profile.api = 'replicate'; + else if (profile.model.includes('mistralai/') || profile.model.includes("mistral/")) + model_profile.api = 'mistral'; + else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) + profile.api = 'groq'; + else if (chat.model.includes('hf:')) + chat.api = "glhf"; + else if (chat.model.includes('hyperbolic:')|| chat.model.includes('hb:')) + chat.api = "hyperbolic"; + else if (profile.model.includes('novita/')) + profile.api = 'novita'; + else if (profile.model.includes('qwen')) + profile.api = 'qwen'; + else if (profile.model.includes('grok')) + profile.api = 'xai'; + else if (profile.model.includes('deepseek')) + profile.api = 'deepseek'; + else + profile.api = 'ollama'; + } + return profile; + } + + _createModel(profile) { + let model = null; + if (profile.api === 'google') + model = new Gemini(profile.model, profile.url, profile.params); + else if (profile.api === 'openai') + model = new GPT(profile.model, profile.url, profile.params); + else if (profile.api === 'anthropic') + model = new Claude(profile.model, profile.url, profile.params); + else if (profile.api === 'replicate') + model = new ReplicateAPI(profile.model, profile.url, profile.params); + else if (profile.api === 'ollama') + model = new Local(profile.model, profile.url, profile.params); + else if (profile.api === 'mistral') + model = new Mistral(profile.model, profile.url, profile.params); + else if (profile.api === 'groq') + model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params); + else if (profile.api === 'glhf') + model = new glhf(profile.model, profile.url, profile.params); + else if (profile.api === 'hyperbolic') + model = new hyperbolic(profile.model.replace('hyperbolic:', '').replace('hb:', ''), profile.url, profile.params); // Yes you can hate me for using curly braces on this little bit of code for defining the hyperbolic endpoint + else if (profile.api === 'huggingface') + model = new HuggingFace(profile.model, profile.url, profile.params); + else if (profile.api === 'novita') + model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params); + else if (profile.api === 'qwen') + model = new Qwen(profile.model, profile.url, profile.params); + else if (profile.api === 'xai') + model = new Grok(profile.model, profile.url, profile.params); + else if (profile.api === 'deepseek') + model = new 
DeepSeek(profile.model, profile.url, profile.params); + else + throw new Error('Unknown API:', profile.api); + return model; + } + + getName() { + return this.profile.name; + } + + getInitModes() { + return this.profile.modes; + } + + async initExamples() { + try { + this.convo_examples = new Examples(this.embedding_model, settings.num_examples); + this.coding_examples = new Examples(this.embedding_model, settings.num_examples); + + // Wait for both examples to load before proceeding + await Promise.all([ + this.convo_examples.load(this.profile.conversation_examples), + this.coding_examples.load(this.profile.coding_examples), + this.skill_libary.initSkillLibrary() + ]); + + console.log('Examples initialized.'); + } catch (error) { + console.error('Failed to initialize examples:', error); + throw error; + } + } + + async replaceStrings(prompt, messages, examples=null, to_summarize=[], last_goals=null) { + prompt = prompt.replaceAll('$NAME', this.agent.name); + + if (prompt.includes('$STATS')) { + let stats = await getCommand('!stats').perform(this.agent); + prompt = prompt.replaceAll('$STATS', stats); + } + if (prompt.includes('$INVENTORY')) { + let inventory = await getCommand('!inventory').perform(this.agent); + prompt = prompt.replaceAll('$INVENTORY', inventory); + } + if (prompt.includes('$ACTION')) { + prompt = prompt.replaceAll('$ACTION', this.agent.actions.currentActionLabel); + } + if (prompt.includes('$COMMAND_DOCS')) + prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); + if (prompt.includes('$CODE_DOCS')) { + const code_task_content = messages.slice().reverse().find(msg => + msg.role !== 'system' && msg.content.includes('!newAction(') + )?.content?.match(/!newAction\((.*?)\)/)?.[1] || ''; + + prompt = prompt.replaceAll( + '$CODE_DOCS', + await this.skill_libary.getRelevantSkillDocs(code_task_content, settings.relevant_docs_count) + ); + } + prompt = prompt.replaceAll('$COMMAND_DOCS', getCommandDocs()); + if (prompt.includes('$CODE_DOCS')) + prompt = prompt.replaceAll('$CODE_DOCS', getSkillDocs()); + if (prompt.includes('$EXAMPLES') && examples !== null) + prompt = prompt.replaceAll('$EXAMPLES', await examples.createExampleMessage(messages)); + if (prompt.includes('$MEMORY')) + prompt = prompt.replaceAll('$MEMORY', this.agent.history.memory); + if (prompt.includes('$TO_SUMMARIZE')) + prompt = prompt.replaceAll('$TO_SUMMARIZE', stringifyTurns(to_summarize)); + if (prompt.includes('$CONVO')) + prompt = prompt.replaceAll('$CONVO', 'Recent conversation:\n' + stringifyTurns(messages)); + if (prompt.includes('$SELF_PROMPT')) { + let self_prompt = this.agent.self_prompter.on ? 
`YOUR CURRENT ASSIGNED GOAL: "${this.agent.self_prompter.prompt}"\n` : ''; + prompt = prompt.replaceAll('$SELF_PROMPT', self_prompt); + } + if (prompt.includes('$LAST_GOALS')) { + let goal_text = ''; + for (let goal in last_goals) { + if (last_goals[goal]) + goal_text += `You recently successfully completed the goal ${goal}.\n` + else + goal_text += `You recently failed to complete the goal ${goal}.\n` + } + prompt = prompt.replaceAll('$LAST_GOALS', goal_text.trim()); + } + if (prompt.includes('$BLUEPRINTS')) { + if (this.agent.npc.constructions) { + let blueprints = ''; + for (let blueprint in this.agent.npc.constructions) { + blueprints += blueprint + ', '; + } + prompt = prompt.replaceAll('$BLUEPRINTS', blueprints.slice(0, -2)); + } + } + + // check if there are any remaining placeholders with syntax $ + let remaining = prompt.match(/\$[A-Z_]+/g); + if (remaining !== null) { + console.warn('Unknown prompt placeholders:', remaining.join(', ')); + } + return prompt; + } + + async checkCooldown() { + let elapsed = Date.now() - this.last_prompt_time; + if (elapsed < this.cooldown && this.cooldown > 0) { + await new Promise(r => setTimeout(r, this.cooldown - elapsed)); + } + this.last_prompt_time = Date.now(); + } + + async promptConvo(messages) { + this.most_recent_msg_time = Date.now(); + let current_msg_time = this.most_recent_msg_time; + for (let i = 0; i < 3; i++) { // try 3 times to avoid hallucinations + await this.checkCooldown(); + if (current_msg_time !== this.most_recent_msg_time) { + return ''; + } + let prompt = this.profile.conversing; + prompt = await this.replaceStrings(prompt, messages, this.convo_examples); + let generation = await this.chat_model.sendRequest(messages, prompt); + // in conversations >2 players LLMs tend to hallucinate and role-play as other bots + // the FROM OTHER BOT tag should never be generated by the LLM + if (generation.includes('(FROM OTHER BOT)')) { + console.warn('LLM hallucinated message as another bot. 
Trying again...'); + continue; + } + if (current_msg_time !== this.most_recent_msg_time) { + console.warn(this.agent.name + ' received new message while generating, discarding old response.'); + return ''; + } + return generation; + } + return ''; + } + + async promptCoding(messages) { + if (this.awaiting_coding) { + console.warn('Already awaiting coding response, returning no response.'); + return '```//no response```'; + } + this.awaiting_coding = true; + await this.checkCooldown(); + let prompt = this.profile.coding; + prompt = await this.replaceStrings(prompt, messages, this.coding_examples); + let resp = await this.code_model.sendRequest(messages, prompt); + this.awaiting_coding = false; + return resp; + } + + async promptMemSaving(to_summarize) { + await this.checkCooldown(); + let prompt = this.profile.saving_memory; + prompt = await this.replaceStrings(prompt, null, null, to_summarize); + return await this.chat_model.sendRequest([], prompt); + } + + async promptShouldRespondToBot(new_message) { + await this.checkCooldown(); + let prompt = this.profile.bot_responder; + let messages = this.agent.history.getHistory(); + messages.push({role: 'user', content: new_message}); + prompt = await this.replaceStrings(prompt, null, null, messages); + let res = await this.chat_model.sendRequest([], prompt); + return res.trim().toLowerCase() === 'respond'; + } + + async promptGoalSetting(messages, last_goals) { + let system_message = this.profile.goal_setting; + system_message = await this.replaceStrings(system_message, messages); + + let user_message = 'Use the below info to determine what goal to target next\n\n'; + user_message += '$LAST_GOALS\n$STATS\n$INVENTORY\n$CONVO' + user_message = await this.replaceStrings(user_message, messages, null, null, last_goals); + let user_messages = [{role: 'user', content: user_message}]; + + let res = await this.chat_model.sendRequest(user_messages, system_message); + + let goal = null; + try { + let data = res.split('```')[1].replace('json', '').trim(); + goal = JSON.parse(data); + } catch (err) { + console.log('Failed to parse goal:', res, err); + } + if (!goal || !goal.name || !goal.quantity || isNaN(parseInt(goal.quantity))) { + console.log('Failed to set goal:', res); + return null; + } + goal.quantity = parseInt(goal.quantity); + return goal; + } +} diff --git a/src/models/qwen.js b/src/models/qwen.js new file mode 100644 index 0000000..4dfacfe --- /dev/null +++ b/src/models/qwen.js @@ -0,0 +1,79 @@ +import OpenAIApi from 'openai'; +import { getKey, hasKey } from '../utils/keys.js'; +import { strictFormat } from '../utils/text.js'; + +export class Qwen { + constructor(model_name, url, params) { + this.model_name = model_name; + this.params = params; + let config = {}; + + config.baseURL = url || 'https://dashscope.aliyuncs.com/compatible-mode/v1'; + config.apiKey = getKey('QWEN_API_KEY'); + + this.openai = new OpenAIApi(config); + } + + async sendRequest(turns, systemMessage, stop_seq='***') { + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); + + messages = strictFormat(messages); + + const pack = { + model: this.model_name || "qwen-plus", + messages, + stop: stop_seq, + ...(this.params || {}) + }; + + let res = null; + try { + console.log('Awaiting Qwen api response...'); + // console.log('Messages:', messages); + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason == 'length') + throw new Error('Context length exceeded'); + console.log('Received.'); + res = 
completion.choices[0].message.content; + } + catch (err) { + if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.log(err); + res = 'My brain disconnected, try again.'; + } + } + return res; + } + + // Why random backoff? + // With a 30 requests/second limit on Alibaba Qwen's embedding service, + // random backoff helps maximize bandwidth utilization. + async embed(text) { + const maxRetries = 5; // Maximum number of retries + for (let retries = 0; retries < maxRetries; retries++) { + try { + const { data } = await this.openai.embeddings.create({ + model: this.model_name || "text-embedding-v3", + input: text, + encoding_format: "float", + }); + return data[0].embedding; + } catch (err) { + if (err.status === 429) { + // If a rate limit error occurs, calculate the exponential backoff with a random delay (1-5 seconds) + const delay = Math.pow(2, retries) * 1000 + Math.floor(Math.random() * 2000); + // console.log(`Rate limit hit, retrying in ${delay} ms...`); + await new Promise(resolve => setTimeout(resolve, delay)); // Wait for the delay before retrying + } else { + throw err; + } + } + } + // If maximum retries are reached and the request still fails, throw an error + throw new Error('Max retries reached, request failed.'); + } + +} \ No newline at end of file diff --git a/src/models/replicate.js b/src/models/replicate.js new file mode 100644 index 0000000..c8c3ba3 --- /dev/null +++ b/src/models/replicate.js @@ -0,0 +1,59 @@ +import Replicate from 'replicate'; +import { toSinglePrompt } from '../utils/text.js'; +import { getKey } from '../utils/keys.js'; + +// llama, mistral +export class ReplicateAPI { + constructor(model_name, url, params) { + this.model_name = model_name; + this.url = url; + this.params = params; + + if (this.url) { + console.warn('Replicate API does not support custom URLs. 
Ignoring provided URL.'); + } + + this.replicate = new Replicate({ + auth: getKey('REPLICATE_API_KEY'), + }); + } + + async sendRequest(turns, systemMessage) { + const stop_seq = '***'; + const prompt = toSinglePrompt(turns, null, stop_seq); + let model_name = this.model_name || 'meta/meta-llama-3-70b-instruct'; + + const input = { + prompt, + system_prompt: systemMessage, + ...(this.params || {}) + }; + let res = null; + try { + console.log('Awaiting Replicate API response...'); + let result = ''; + for await (const event of this.replicate.stream(model_name, { input })) { + result += event; + if (result === '') break; + if (result.includes(stop_seq)) { + result = result.slice(0, result.indexOf(stop_seq)); + break; + } + } + res = result; + } catch (err) { + console.log(err); + res = 'My brain disconnected, try again.'; + } + console.log('Received.'); + return res; + } + + async embed(text) { + const output = await this.replicate.run( + this.model_name || "mark3labs/embeddings-gte-base:d619cff29338b9a37c3d06605042e1ff0594a8c3eff0175fd6967f5643fc4d47", + { input: {text} } + ); + return output.vectors; + } +} \ No newline at end of file diff --git a/src/process/agent_process.js b/src/process/agent_process.js new file mode 100644 index 0000000..7418d31 --- /dev/null +++ b/src/process/agent_process.js @@ -0,0 +1,67 @@ +import { spawn } from 'child_process'; +import { mainProxy } from './main_proxy.js'; + +export class AgentProcess { + start(profile, load_memory=false, init_message=null, count_id=0, task_path=null, task_id=null) { + this.profile = profile; + this.count_id = count_id; + this.running = true; + + let args = ['src/process/init_agent.js', this.name]; + args.push('-p', profile); + args.push('-c', count_id); + if (load_memory) + args.push('-l', load_memory); + if (init_message) + args.push('-m', init_message); + if (task_path) + args.push('-t', task_path); + if (task_id) + args.push('-i', task_id); + + const agentProcess = spawn('node', args, { + stdio: 'inherit', + stderr: 'inherit', + }); + + let last_restart = Date.now(); + agentProcess.on('exit', (code, signal) => { + console.log(`Agent process exited with code ${code} and signal ${signal}`); + this.running = false; + mainProxy.logoutAgent(this.name); + + if (code > 1) { + console.log(`Ending task`); + process.exit(code); + } + + if (code !== 0 && signal !== 'SIGINT') { + // agent must run for at least 10 seconds before restarting + if (Date.now() - last_restart < 10000) { + console.error(`Agent process ${profile} exited too quickly and will not be restarted.`); + return; + } + console.log('Restarting agent...'); + this.start(profile, true, 'Agent process restarted.', count_id, task_path, task_id); + last_restart = Date.now(); + } + }); + + agentProcess.on('error', (err) => { + console.error('Agent process error:', err); + }); + + this.process = agentProcess; + } + + stop() { + if (!this.running) return; + this.process.kill('SIGINT'); + } + + continue() { + if (!this.running) { + this.start(this.profile, true, 'Agent process restarted.', this.count_id); + } + } +} \ No newline at end of file diff --git a/src/process/init_agent.js b/src/process/init_agent.js new file mode 100644 index 0000000..15b08e0 --- /dev/null +++ b/src/process/init_agent.js @@ -0,0 +1,64 @@ +import { Agent } from '../agent/agent.js'; +import yargs from 'yargs'; + +// Add global unhandled rejection handler +process.on('unhandledRejection', (reason, promise) => { + console.error('Unhandled Rejection at:', { + promise: promise, + reason: reason, + stack: 
reason?.stack || 'No stack trace' + }); + process.exit(1); +}); + +const args = process.argv.slice(2); +if (args.length < 1) { + console.log('Usage: node init_agent.js [profile] [load_memory] [init_message]'); + process.exit(1); +} + +const argv = yargs(args) + .option('profile', { + alias: 'p', + type: 'string', + description: 'profile filepath to use for agent' + }) + .option('load_memory', { + alias: 'l', + type: 'boolean', + description: 'load agent memory from file on startup' + }) + .option('init_message', { + alias: 'm', + type: 'string', + description: 'automatically prompt the agent on startup' + }) + .option('task_path', { + alias: 't', + type: 'string', + description: 'task filepath to use for agent' + }) + .option('task_id', { + alias: 'i', + type: 'string', + description: 'task ID to execute' + }) + .option('count_id', { + alias: 'c', + type: 'number', + default: 0, + description: 'identifying count for multi-agent scenarios', + }).argv; + +// Wrap agent start in async IIFE with proper error handling +(async () => { + try { + console.log('Starting agent with profile:', argv.profile); + const agent = new Agent(); + await agent.start(argv.profile, argv.load_memory, argv.init_message, argv.count_id, argv.task_path, argv.task_id); + } catch (error) { + console.error('Failed to start agent process:'); + console.error(error); + process.exit(1); + } +})(); diff --git a/src/process/main_proxy.js b/src/process/main_proxy.js new file mode 100644 index 0000000..8336458 --- /dev/null +++ b/src/process/main_proxy.js @@ -0,0 +1,64 @@ +import { io } from 'socket.io-client'; +import settings from '../../settings.js'; + +// Singleton mindserver proxy for the main process +class MainProxy { + constructor() { + if (MainProxy.instance) { + return MainProxy.instance; + } + + this.socket = null; + this.connected = false; + this.agent_processes = {}; + MainProxy.instance = this; + } + + connect() { + if (this.connected) return; + + this.socket = io(`http://${settings.mindserver_host}:${settings.mindserver_port}`); + this.connected = true; + + this.socket.on('stop-agent', (agentName) => { + if (this.agent_processes[agentName]) { + this.agent_processes[agentName].stop(); + } + }); + + this.socket.on('start-agent', (agentName) => { + if (this.agent_processes[agentName]) { + this.agent_processes[agentName].continue(); + } + }); + + this.socket.on('register-agents-success', () => { + console.log('Agents registered'); + }); + + this.socket.on('shutdown', () => { + console.log('Shutting down'); + for (let agentName in this.agent_processes) { + this.agent_processes[agentName].stop(); + } + setTimeout(() => { + process.exit(0); + }, 2000); + }); + } + + addAgent(agent) { + this.agent_processes.push(agent); + } + + logoutAgent(agentName) { + this.socket.emit('logout-agent', agentName); + } + + registerAgent(name, process) { + this.socket.emit('register-agents', [name]); + this.agent_processes[name] = process; + } +} + +export const mainProxy = new MainProxy(); \ No newline at end of file diff --git a/src/server/mind_server.js b/src/server/mind_server.js new file mode 100644 index 0000000..eed71d7 --- /dev/null +++ b/src/server/mind_server.js @@ -0,0 +1,163 @@ +import { Server } from 'socket.io'; +import express from 'express'; +import http from 'http'; +import path from 'path'; +import { fileURLToPath } from 'url'; + +// Module-level variables +let io; +let server; +const registeredAgents = new Set(); +const inGameAgents = {}; +const agentManagers = {}; // socket for main process that registers/controls 
agents + +// Initialize the server +export function createMindServer(port = 8080) { + const app = express(); + server = http.createServer(app); + io = new Server(server); + + // Serve static files + const __dirname = path.dirname(fileURLToPath(import.meta.url)); + app.use(express.static(path.join(__dirname, 'public'))); + + // Socket.io connection handling + io.on('connection', (socket) => { + let curAgentName = null; + console.log('Client connected'); + + agentsUpdate(socket); + + socket.on('register-agents', (agentNames) => { + console.log(`Registering agents: ${agentNames}`); + agentNames.forEach(name => registeredAgents.add(name)); + for (let name of agentNames) { + agentManagers[name] = socket; + } + socket.emit('register-agents-success'); + agentsUpdate(); + }); + + socket.on('login-agent', (agentName) => { + if (curAgentName && curAgentName !== agentName) { + console.warn(`Agent ${agentName} already logged in as ${curAgentName}`); + return; + } + if (registeredAgents.has(agentName)) { + curAgentName = agentName; + inGameAgents[agentName] = socket; + agentsUpdate(); + } else { + console.warn(`Agent ${agentName} not registered`); + } + }); + + socket.on('logout-agent', (agentName) => { + if (inGameAgents[agentName]) { + delete inGameAgents[agentName]; + agentsUpdate(); + } + }); + + socket.on('disconnect', () => { + console.log('Client disconnected'); + if (inGameAgents[curAgentName]) { + delete inGameAgents[curAgentName]; + agentsUpdate(); + } + }); + + socket.on('chat-message', (agentName, json) => { + if (!inGameAgents[agentName]) { + console.warn(`Agent ${agentName} tried to send a message but is not logged in`); + return; + } + console.log(`${curAgentName} sending message to ${agentName}: ${json.message}`); + inGameAgents[agentName].emit('chat-message', curAgentName, json); + }); + + socket.on('restart-agent', (agentName) => { + console.log(`Restarting agent: ${agentName}`); + inGameAgents[agentName].emit('restart-agent'); + }); + + socket.on('stop-agent', (agentName) => { + let manager = agentManagers[agentName]; + if (manager) { + manager.emit('stop-agent', agentName); + } + else { + console.warn(`Stopping unregisterd agent ${agentName}`); + } + }); + + socket.on('start-agent', (agentName) => { + let manager = agentManagers[agentName]; + if (manager) { + manager.emit('start-agent', agentName); + } + else { + console.warn(`Starting unregisterd agent ${agentName}`); + } + }); + + socket.on('stop-all-agents', () => { + console.log('Killing all agents'); + stopAllAgents(); + }); + + socket.on('shutdown', () => { + console.log('Shutting down'); + for (let manager of Object.values(agentManagers)) { + manager.emit('shutdown'); + } + setTimeout(() => { + process.exit(0); + }, 2000); + }); + + socket.on('send-message', (agentName, message) => { + if (!inGameAgents[agentName]) { + console.warn(`Agent ${agentName} not logged in, cannot send message via MindServer.`); + return + } + try { + console.log(`Sending message to agent ${agentName}: ${message}`); + inGameAgents[agentName].emit('send-message', agentName, message) + } catch (error) { + console.error('Error: ', error); + } + }); + }); + + server.listen(port, 'localhost', () => { + console.log(`MindServer running on port ${port}`); + }); + + return server; +} + +function agentsUpdate(socket) { + if (!socket) { + socket = io; + } + let agents = []; + registeredAgents.forEach(name => { + agents.push({name, in_game: !!inGameAgents[name]}); + }); + socket.emit('agents-update', agents); +} + +function stopAllAgents() { + for (const 
agentName in inGameAgents) { + let manager = agentManagers[agentName]; + if (manager) { + manager.emit('stop-agent', agentName); + } + } +} + +// Optional: export these if you need access to them from other files +export const getIO = () => io; +export const getServer = () => server; +export const getConnectedAgents = () => connectedAgents; diff --git a/src/server/public/index.html b/src/server/public/index.html new file mode 100644 index 0000000..c66a986 --- /dev/null +++ b/src/server/public/index.html @@ -0,0 +1,120 @@ + + + + Mindcraft + + + + +

Mindcraft

+
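As a rough, hypothetical sketch (this is not the original index.html, and the element IDs are invented for illustration), a browser page served by the MindServer above could wire up its agent list using only socket events the server actually handles ('agents-update', 'stop-agent', 'start-agent'):

// Hypothetical client-side sketch for the MindServer page; element IDs are illustrative only.
const socket = io();

// The server broadcasts 'agents-update' with an array of {name, in_game} objects.
socket.on('agents-update', (agents) => {
    const list = document.getElementById('agent-list');
    list.innerHTML = '';
    for (const agent of agents) {
        const row = document.createElement('div');
        row.textContent = `${agent.name} (${agent.in_game ? 'online' : 'offline'})`;
        list.appendChild(row);
    }
});

// Buttons on the page could forward to the start/stop events handled in mind_server.js.
function stopAgent(name) { socket.emit('stop-agent', name); }
function startAgent(name) { socket.emit('start-agent', name); }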
+ + + + diff --git a/src/utils/examples.js b/src/utils/examples.js new file mode 100644 index 0000000..ca6de79 --- /dev/null +++ b/src/utils/examples.js @@ -0,0 +1,94 @@ +import { cosineSimilarity } from './math.js'; +import { stringifyTurns } from './text.js'; + +export class Examples { + constructor(model, select_num=2) { + this.examples = []; + this.model = model; + this.select_num = select_num; + this.embeddings = {}; + } + + turnsToText(turns) { + let messages = ''; + for (let turn of turns) { + if (turn.role !== 'assistant') + messages += turn.content.substring(turn.content.indexOf(':')+1).trim() + '\n'; + } + return messages.trim(); + } + + getWords(text) { + return text.replace(/[^a-zA-Z ]/g, '').toLowerCase().split(' '); + } + + wordOverlapScore(text1, text2) { + const words1 = this.getWords(text1); + const words2 = this.getWords(text2); + const intersection = words1.filter(word => words2.includes(word)); + return intersection.length / (words1.length + words2.length - intersection.length); + } + + async load(examples) { + this.examples = examples; + if (!this.model) return; // Early return if no embedding model + + if (this.select_num === 0) + return; + + try { + // Create array of promises first + const embeddingPromises = examples.map(example => { + const turn_text = this.turnsToText(example); + return this.model.embed(turn_text) + .then(embedding => { + this.embeddings[turn_text] = embedding; + }); + }); + + // Wait for all embeddings to complete + await Promise.all(embeddingPromises); + } catch (err) { + console.warn('Error with embedding model, using word overlap instead:', err); + this.model = null; + } + } + + async getRelevant(turns) { + if (this.select_num === 0) + return []; + + let turn_text = this.turnsToText(turns); + if (this.model !== null) { + let embedding = await this.model.embed(turn_text); + this.examples.sort((a, b) => + cosineSimilarity(embedding, this.embeddings[this.turnsToText(b)]) - + cosineSimilarity(embedding, this.embeddings[this.turnsToText(a)]) + ); + } + else { + this.examples.sort((a, b) => + this.wordOverlapScore(turn_text, this.turnsToText(b)) - + this.wordOverlapScore(turn_text, this.turnsToText(a)) + ); + } + let selected = this.examples.slice(0, this.select_num); + return JSON.parse(JSON.stringify(selected)); // deep copy + } + + async createExampleMessage(turns) { + let selected_examples = await this.getRelevant(turns); + + console.log('selected examples:'); + for (let example of selected_examples) { + console.log('Example:', example[0].content) + } + + let msg = 'Examples of how to respond:\n'; + for (let i=0; i { + bot.acceptResourcePack(); + }); + + return bot; +} + +export function isHuntable(mob) { + if (!mob || !mob.name) return false; + const animals = ['chicken', 'cow', 'llama', 'mooshroom', 'pig', 'rabbit', 'sheep']; + return animals.includes(mob.name.toLowerCase()) && !mob.metadata[16]; // metadata 16 is not baby +} + +export function isHostile(mob) { + if (!mob || !mob.name) return false; + return (mob.type === 'mob' || mob.type === 'hostile') && mob.name !== 'iron_golem' && mob.name !== 'snow_golem'; +} + +export function getItemId(itemName) { + let item = mcdata.itemsByName[itemName]; + if (item) { + return item.id; + } + return null; +} + +export function getItemName(itemId) { + let item = mcdata.items[itemId] + if (item) { + return item.name; + } + return null; +} + +export function getBlockId(blockName) { + let block = mcdata.blocksByName[blockName]; + if (block) { + return block.id; + } + return null; +} + +export function 
getBlockName(blockId) { + let block = mcdata.blocks[blockId] + if (block) { + return block.name; + } + return null; +} + +export function getAllItems(ignore) { + if (!ignore) { + ignore = []; + } + let items = [] + for (const itemId in mcdata.items) { + const item = mcdata.items[itemId]; + if (!ignore.includes(item.name)) { + items.push(item); + } + } + return items; +} + +export function getAllItemIds(ignore) { + const items = getAllItems(ignore); + let itemIds = []; + for (const item of items) { + itemIds.push(item.id); + } + return itemIds; +} + +export function getAllBlocks(ignore) { + if (!ignore) { + ignore = []; + } + let blocks = [] + for (const blockId in mcdata.blocks) { + const block = mcdata.blocks[blockId]; + if (!ignore.includes(block.name)) { + blocks.push(block); + } + } + return blocks; +} + +export function getAllBlockIds(ignore) { + const blocks = getAllBlocks(ignore); + let blockIds = []; + for (const block of blocks) { + blockIds.push(block.id); + } + return blockIds; +} + +export function getAllBiomes() { + return mcdata.biomes; +} + +export function getItemCraftingRecipes(itemName) { + let itemId = getItemId(itemName); + if (!mcdata.recipes[itemId]) { + return null; + } + + let recipes = []; + for (let r of mcdata.recipes[itemId]) { + let recipe = {}; + let ingredients = []; + if (r.ingredients) { + ingredients = r.ingredients; + } else if (r.inShape) { + ingredients = r.inShape.flat(); + } + for (let ingredient of ingredients) { + let ingredientName = getItemName(ingredient); + if (ingredientName === null) continue; + if (!recipe[ingredientName]) + recipe[ingredientName] = 0; + recipe[ingredientName]++; + } + recipes.push([ + recipe, + {craftedCount : r.result.count} + ]); + } + + return recipes; +} + +export function isSmeltable(itemName) { + const misc_smeltables = ['beef', 'chicken', 'cod', 'mutton', 'porkchop', 'rabbit', 'salmon', 'tropical_fish', 'potato', 'kelp', 'sand', 'cobblestone', 'clay_ball']; + return itemName.includes('raw') || itemName.includes('log') || misc_smeltables.includes(itemName); +} + +export function getSmeltingFuel(bot) { + let fuel = bot.inventory.items().find(i => i.name === 'coal' || i.name === 'charcoal' || i.name === 'blaze_rod') + if (fuel) + return fuel; + fuel = bot.inventory.items().find(i => i.name.includes('log') || i.name.includes('planks')) + if (fuel) + return fuel; + return bot.inventory.items().find(i => i.name === 'coal_block' || i.name === 'lava_bucket'); +} + +export function getFuelSmeltOutput(fuelName) { + if (fuelName === 'coal' || fuelName === 'charcoal') + return 8; + if (fuelName === 'blaze_rod') + return 12; + if (fuelName.includes('log') || fuelName.includes('planks')) + return 1.5 + if (fuelName === 'coal_block') + return 80; + if (fuelName === 'lava_bucket') + return 100; + return 0; +} + +export function getItemSmeltingIngredient(itemName) { + return { + baked_potato: 'potato', + steak: 'raw_beef', + cooked_chicken: 'raw_chicken', + cooked_cod: 'raw_cod', + cooked_mutton: 'raw_mutton', + cooked_porkchop: 'raw_porkchop', + cooked_rabbit: 'raw_rabbit', + cooked_salmon: 'raw_salmon', + dried_kelp: 'kelp', + iron_ingot: 'raw_iron', + gold_ingot: 'raw_gold', + copper_ingot: 'raw_copper', + glass: 'sand' + }[itemName]; +} + +export function getItemBlockSources(itemName) { + let itemId = getItemId(itemName); + let sources = []; + for (let block of getAllBlocks()) { + if (block.drops.includes(itemId)) { + sources.push(block.name); + } + } + return sources; +} + +export function getItemAnimalSource(itemName) { + return 
{ + raw_beef: 'cow', + raw_chicken: 'chicken', + raw_cod: 'cod', + raw_mutton: 'sheep', + raw_porkchop: 'pig', + raw_rabbit: 'rabbit', + raw_salmon: 'salmon', + leather: 'cow', + wool: 'sheep' + }[itemName]; +} + +export function getBlockTool(blockName) { + let block = mcdata.blocksByName[blockName]; + if (!block || !block.harvestTools) { + return null; + } + return getItemName(Object.keys(block.harvestTools)[0]); // Double check first tool is always simplest +} + +export function makeItem(name, amount=1) { + return new Item(getItemId(name), amount); +} + +/** + * Returns the number of ingredients required to use the recipe once. + * + * @param {Recipe} recipe + * @returns {Object} an object describing the number of each ingredient. + */ +export function ingredientsFromPrismarineRecipe(recipe) { + let requiredIngedients = {}; + if (recipe.inShape) + for (const ingredient of recipe.inShape.flat()) { + if(ingredient.id<0) continue; //prismarine-recipe uses id -1 as an empty crafting slot + const ingredientName = getItemName(ingredient.id); + requiredIngedients[ingredientName] ??=0; + requiredIngedients[ingredientName] += ingredient.count; + } + if (recipe.ingredients) + for (const ingredient of recipe.ingredients) { + if(ingredient.id<0) continue; + const ingredientName = getItemName(ingredient.id); + requiredIngedients[ingredientName] ??=0; + requiredIngedients[ingredientName] -= ingredient.count; + //Yes, the `-=` is intended. + //prismarine-recipe uses positive numbers for the shaped ingredients but negative for unshaped. + //Why this is the case is beyond my understanding. + } + return requiredIngedients; +} + +/** + * Calculates the number of times an action, such as a crafing recipe, can be completed before running out of resources. + * @template T - doesn't have to be an item. This could be any resource. + * @param {Object.} availableItems - The resources available; e.g, `{'cobble_stone': 7, 'stick': 10}` + * @param {Object.} requiredItems - The resources required to complete the action once; e.g, `{'cobble_stone': 3, 'stick': 2}` + * @param {boolean} discrete - Is the action discrete? 
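+ * Worked example (illustrative, using the sample values from the parameter
+ * descriptions above): with availableItems = {'cobble_stone': 7, 'stick': 10}
+ * and requiredItems = {'cobble_stone': 3, 'stick': 2}, cobblestone runs out
+ * first (7 / 3 ≈ 2.33), so with discrete = true the call returns
+ * {num: 2, limitingResource: 'cobble_stone'}.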
+ * @returns {{num: number, limitingResource: (T | null)}} the number of times the action can be completed and the limmiting resource; e.g `{num: 2, limitingResource: 'cobble_stone'}` + */ +export function calculateLimitingResource(availableItems, requiredItems, discrete=true) { + let limitingResource = null; + let num = Infinity; + for (const itemType in requiredItems) { + if (availableItems[itemType] < requiredItems[itemType] * num) { + limitingResource = itemType; + num = availableItems[itemType] / requiredItems[itemType]; + } + } + if(discrete) num = Math.floor(num); + return {num, limitingResource} +} + +let loopingItems = new Set(); + +export function initializeLoopingItems() { + + loopingItems = new Set(['coal', + 'wheat', + 'diamond', + 'emerald', + 'raw_iron', + 'raw_gold', + 'redstone', + 'blue_wool', + 'packed_mud', + 'raw_copper', + 'iron_ingot', + 'dried_kelp', + 'gold_ingot', + 'slime_ball', + 'black_wool', + 'quartz_slab', + 'copper_ingot', + 'lapis_lazuli', + 'honey_bottle', + 'rib_armor_trim_smithing_template', + 'eye_armor_trim_smithing_template', + 'vex_armor_trim_smithing_template', + 'dune_armor_trim_smithing_template', + 'host_armor_trim_smithing_template', + 'tide_armor_trim_smithing_template', + 'wild_armor_trim_smithing_template', + 'ward_armor_trim_smithing_template', + 'coast_armor_trim_smithing_template', + 'spire_armor_trim_smithing_template', + 'snout_armor_trim_smithing_template', + 'shaper_armor_trim_smithing_template', + 'netherite_upgrade_smithing_template', + 'raiser_armor_trim_smithing_template', + 'sentry_armor_trim_smithing_template', + 'silence_armor_trim_smithing_template', + 'wayfinder_armor_trim_smithing_template']); +} + + +/** + * Gets a detailed plan for crafting an item considering current inventory + */ +export function getDetailedCraftingPlan(targetItem, count = 1, current_inventory = {}) { + initializeLoopingItems(); + if (!targetItem || count <= 0 || !getItemId(targetItem)) { + return "Invalid input. 
Please provide a valid item name and positive count."; + } + + if (isBaseItem(targetItem)) { + const available = current_inventory[targetItem] || 0; + if (available >= count) return "You have all required items already in your inventory!"; + return `${targetItem} is a base item, you need to find ${count - available} more in the world`; + } + + const inventory = { ...current_inventory }; + const leftovers = {}; + const plan = craftItem(targetItem, count, inventory, leftovers); + return formatPlan(plan); +} + +function isBaseItem(item) { + return loopingItems.has(item) || getItemCraftingRecipes(item) === null; +} + +function craftItem(item, count, inventory, leftovers, crafted = { required: {}, steps: [], leftovers: {} }) { + // Check available inventory and leftovers first + const availableInv = inventory[item] || 0; + const availableLeft = leftovers[item] || 0; + const totalAvailable = availableInv + availableLeft; + + if (totalAvailable >= count) { + // Use leftovers first, then inventory + const useFromLeft = Math.min(availableLeft, count); + leftovers[item] = availableLeft - useFromLeft; + + const remainingNeeded = count - useFromLeft; + if (remainingNeeded > 0) { + inventory[item] = availableInv - remainingNeeded; + } + return crafted; + } + + // Use whatever is available + const stillNeeded = count - totalAvailable; + if (availableLeft > 0) leftovers[item] = 0; + if (availableInv > 0) inventory[item] = 0; + + if (isBaseItem(item)) { + crafted.required[item] = (crafted.required[item] || 0) + stillNeeded; + return crafted; + } + + const recipe = getItemCraftingRecipes(item)?.[0]; + if (!recipe) { + crafted.required[item] = stillNeeded; + return crafted; + } + + const [ingredients, result] = recipe; + const craftedPerRecipe = result.craftedCount; + const batchCount = Math.ceil(stillNeeded / craftedPerRecipe); + const totalProduced = batchCount * craftedPerRecipe; + + // Add excess to leftovers + if (totalProduced > stillNeeded) { + leftovers[item] = (leftovers[item] || 0) + (totalProduced - stillNeeded); + } + + // Process each ingredient + for (const [ingredientName, ingredientCount] of Object.entries(ingredients)) { + const totalIngredientNeeded = ingredientCount * batchCount; + craftItem(ingredientName, totalIngredientNeeded, inventory, leftovers, crafted); + } + + // Add crafting step + const stepIngredients = Object.entries(ingredients) + .map(([name, amount]) => `${amount * batchCount} ${name}`) + .join(' + '); + crafted.steps.push(`Craft ${stepIngredients} -> ${totalProduced} ${item}`); + + return crafted; +} + +function formatPlan({ required, steps, leftovers }) { + const lines = []; + + if (Object.keys(required).length > 0) { + lines.push('You are missing the following items:'); + Object.entries(required).forEach(([item, count]) => + lines.push(`- ${count} ${item}`)); + lines.push('\nOnce you have these items, here\'s your crafting plan:'); + } else { + lines.push('You have all items required to craft this item!'); + lines.push('Here\'s your crafting plan:'); + } + + lines.push(''); + lines.push(...steps); + + if (Object.keys(leftovers).length > 0) { + lines.push('\nYou will have leftover:'); + Object.entries(leftovers).forEach(([item, count]) => + lines.push(`- ${count} ${item}`)); + } + + return lines.join('\n'); +} \ No newline at end of file diff --git a/src/utils/text.js b/src/utils/text.js new file mode 100644 index 0000000..f500199 --- /dev/null +++ b/src/utils/text.js @@ -0,0 +1,65 @@ +export function stringifyTurns(turns) { + let res = ''; + for (let turn of turns) { + 
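+ // Label each turn by role: assistant turns are rendered as "Your output", system turns as "System output", and anything else as "User input".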
if (turn.role === 'assistant') { + res += `\nYour output:\n${turn.content}`; + } else if (turn.role === 'system') { + res += `\nSystem output: ${turn.content}`; + } else { + res += `\nUser input: ${turn.content}`; + + } + } + return res.trim(); +} + +export function toSinglePrompt(turns, system=null, stop_seq='***', model_nickname='assistant') { + let prompt = system ? `${system}${stop_seq}` : ''; + let role = ''; + turns.forEach((message) => { + role = message.role; + if (role === 'assistant') role = model_nickname; + prompt += `${role}: ${message.content}${stop_seq}`; + }); + if (role !== model_nickname) // if the last message was from the user/system, add a prompt for the model. otherwise, pretend we are extending the model's own message + prompt += model_nickname + ": "; + return prompt; +} + +// ensures stricter turn order and roles: +// - system messages are treated as user messages and prefixed with SYSTEM: +// - combines repeated messages from users +// - separates repeat assistant messages with filler user messages +export function strictFormat(turns) { + let prev_role = null; + let messages = []; + let filler = {role: 'user', content: '_'}; + for (let msg of turns) { + msg.content = msg.content.trim(); + if (msg.role === 'system') { + msg.role = 'user'; + msg.content = 'SYSTEM: ' + msg.content; + } + if (msg.role === prev_role && msg.role === 'assistant') { + // insert empty user message to separate assistant messages + messages.push(filler); + messages.push(msg); + } + else if (msg.role === prev_role) { + // combine new message with previous message instead of adding a new one + messages[messages.length-1].content += '\n' + msg.content; + } + else { + messages.push(msg); + } + prev_role = msg.role; + + } + if (messages.length > 0 && messages[0].role !== 'user') { + messages.unshift(filler); // anthropic requires user message to start + } + if (messages.length === 0) { + messages.push(filler); + } + return messages; +} \ No newline at end of file diff --git a/src/utils/translator.js b/src/utils/translator.js new file mode 100644 index 0000000..bc9cc77 --- /dev/null +++ b/src/utils/translator.js @@ -0,0 +1,28 @@ +import translate from 'google-translate-api-x'; +import settings from '../../settings.js'; + +const preferred_lang = String(settings.language).toLowerCase(); + +export async function handleTranslation(message) { + if (preferred_lang === 'en' || preferred_lang === 'english') + return message; + try { + const translation = await translate(message, { to: preferred_lang }); + return translation.text || message; + } catch (error) { + console.error('Error translating message:', error); + return message; + } +} + +export async function handleEnglishTranslation(message) { + if (preferred_lang === 'en' || preferred_lang === 'english') + return message; + try { + const translation = await translate(message, { to: 'english' }); + return translation.text || message; + } catch (error) { + console.error('Error translating message:', error); + return message; + } +} From 2f0e4acb25560cb0b91e505fa1600e8595b27d6d Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:43:00 -0800 Subject: [PATCH 041/171] Create hyperbolic.js --- profiles/hyperbolic.js | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 profiles/hyperbolic.js diff --git a/profiles/hyperbolic.js b/profiles/hyperbolic.js new file mode 100644 index 0000000..d09d3c3 --- /dev/null +++ b/profiles/hyperbolic.js @@ -0,0 +1,6 @@ +{ + "name": "Hyperbolic", + + "model": "hb:deepseek-ai/DeepSeek-V3" + +} From 
df6f8fb99efd87983ea9093d14fdac0d4c7bc121 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:43:20 -0800 Subject: [PATCH 042/171] Create glhf.json --- profiles/glhf.json | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 profiles/glhf.json diff --git a/profiles/glhf.json b/profiles/glhf.json new file mode 100644 index 0000000..25e5dd7 --- /dev/null +++ b/profiles/glhf.json @@ -0,0 +1,5 @@ +{ + "name": "Good_luck_have_fun", + + "model": "hf:meta-llama/Llama-3.1-405B-Instruct" +} From e4d0a7d05ca0c5280cd507033b527dcd4bad5daf Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:43:31 -0800 Subject: [PATCH 043/171] Rename hyperbolic.js to hyperbolic.json --- profiles/{hyperbolic.js => hyperbolic.json} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename profiles/{hyperbolic.js => hyperbolic.json} (100%) diff --git a/profiles/hyperbolic.js b/profiles/hyperbolic.json similarity index 100% rename from profiles/hyperbolic.js rename to profiles/hyperbolic.json From 9681371b5f34efa23f2364ae0c44f140c32d1a83 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:44:55 -0800 Subject: [PATCH 044/171] Update settings.js --- settings.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/settings.js b/settings.js index 3b6c903..6a62eb0 100644 --- a/settings.js +++ b/settings.js @@ -23,6 +23,8 @@ export default // "./profiles/grok.json", // "./profiles/mistral.json", // "./profiles/deepseek.json", + // "./profiles/glhf.json", + // "./profiles/hyperbolic.json", // using more than 1 profile requires you to /msg each bot indivually // individual profiles override values from the base profile From ba10f08d1bd41afc009f5d425368ffbe2dffe20f Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:46:15 -0800 Subject: [PATCH 045/171] Update keys.example.json --- keys.example.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/keys.example.json b/keys.example.json index 407f6f5..69b846b 100644 --- a/keys.example.json +++ b/keys.example.json @@ -9,5 +9,7 @@ "QWEN_API_KEY": "", "XAI_API_KEY": "", "MISTRAL_API_KEY": "", - "DEEPSEEK_API_KEY": "" + "DEEPSEEK_API_KEY": "", + "GHLF_API_KEY": "", + "HYPERBOLIC_API_KEY": "" } From 80ce0a7dfb7c8b07e0135cac33e7847b3fbb95b7 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Feb 2025 22:50:33 -0800 Subject: [PATCH 046/171] Update README.md --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index a30e368..ba604f1 100644 --- a/README.md +++ b/README.md @@ -49,6 +49,8 @@ You can configure the agent's name, model, and prompts in their profile like `an | Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | | Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | | xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) | +| glhf.chat | `GHLF_API_KEY` | `hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | +| Hyperbolic | `HYPERBOLIC_API_KEY` | `hb:deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: `ollama pull llama3 && ollama pull nomic-embed-text` From 2b5923f98f7ac09e63d8245d42325a16a596d2b7 Mon Sep 17 00:00:00 2001 From: 
gmuffiness Date: Sun, 9 Feb 2025 21:57:45 +0900 Subject: [PATCH 047/171] feat: add vision_model param to profile --- src/agent/vision_interpreter.js | 6 +++--- src/models/mistral.js | 1 + src/models/prompter.js | 8 ++++++++ 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/src/agent/vision_interpreter.js b/src/agent/vision_interpreter.js index 38b7a44..94be3de 100644 --- a/src/agent/vision_interpreter.js +++ b/src/agent/vision_interpreter.js @@ -37,7 +37,7 @@ export class VisionInterpreter { filename = await camera.capture(); } - if (!this.allow_vision || !this.agent.prompter.chat_model.sendVisionRequest) { + if (!this.allow_vision || !this.agent.prompter.vision_model.sendVisionRequest) { log(this.agent.bot, "Vision is disabled. Using text-based environment description instead."); log(this.agent.bot, this._nearbyBlocks()); } else { @@ -54,7 +54,7 @@ export class VisionInterpreter { let filename = await camera.capture(); - if (!this.allow_vision || !this.agent.prompter.chat_model.sendVisionRequest) { + if (!this.allow_vision || !this.agent.prompter.vision_model.sendVisionRequest) { log(this.agent.bot, "Vision is disabled. Using text-based environment description instead."); log(this.agent.bot, this._nearbyBlocks()); } else { @@ -70,7 +70,7 @@ export class VisionInterpreter { const bot = this.agent.bot; const imageBuffer = fs.readFileSync(`${this.fp}/${filename}.jpg`); const messages = this.agent.history.getHistory(); - res = await this.agent.prompter.chat_model.sendVisionRequest(messages, prompt, imageBuffer); + res = await this.agent.prompter.vision_model.sendVisionRequest(messages, prompt, imageBuffer); log(bot, res); } catch (error) { log(this.agent.bot, `Error analyzing image: ${error.message}`); diff --git a/src/models/mistral.js b/src/models/mistral.js index 01e0ecf..f1f3563 100644 --- a/src/models/mistral.js +++ b/src/models/mistral.js @@ -47,6 +47,7 @@ export class Mistral { ]; messages.push(...strictFormat(turns)); + console.log('Awaiting mistral api response...') const response = await this.#client.chat.complete({ model, messages, diff --git a/src/models/prompter.js b/src/models/prompter.js index a29a0b9..5ac6a1f 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -65,6 +65,14 @@ export class Prompter { this.code_model = this.chat_model; } + if (this.profile.vision_model) { + let vision_model_profile = this._selectAPI(this.profile.vision_model); + this.vision_model = this._createModel(vision_model_profile); + } + else { + this.vision_model = this.chat_model; + } + let embedding = this.profile.embedding; if (embedding === undefined) { if (chat_model_profile.api !== 'ollama') From 647655f20634346831902438e875c7913d9c6991 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Sun, 9 Feb 2025 22:08:13 +0900 Subject: [PATCH 048/171] docs: add vision_model to readme --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index a30e368..5690e9f 100644 --- a/README.md +++ b/README.md @@ -123,6 +123,11 @@ You can pass a string or an object for these fields. A model object must specify "model": "gpt-4", "url": "https://api.openai.com/v1/" }, +"vision_model": { + "api": "openai", + "model": "gpt-4o", + "url": "https://api.openai.com/v1/" +}, "embedding": { "api": "openai", "url": "https://api.openai.com/v1/", @@ -131,7 +136,7 @@ You can pass a string or an object for these fields. 
A model object must specify ``` -`model` is used for chat, `code_model` is used for newAction coding, and `embedding` is used to embed text for example selection. If `code_model` is not specified, then it will use `model` for coding. +`model` is used for chat, `code_model` is used for newAction coding, `vision_model` is used for image interpretation, and `embedding` is used to embed text for example selection. If `code_model` or `vision_model` is not specified, `model` will be used by default. All apis have default models and urls, so those fields are optional. Note some apis have no embedding model, so they will default to word overlap to retrieve examples. From 430ae24d206f12d1147963ade0cd78c596715645 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Mon, 10 Feb 2025 02:03:25 +0900 Subject: [PATCH 049/171] fix: use text description when vision features are used with a non-vision model --- src/agent/vision_interpreter.js | 9 ++++++++- src/models/claude.js | 6 +++++- src/models/gemini.js | 28 +++++++++++++++++++--------- src/models/gpt.js | 3 +++ src/models/grok.js | 21 +++++++++++++++++++++ src/models/groq.js | 12 +++++++----- src/models/mistral.js | 9 ++++++--- 7 files changed, 69 insertions(+), 19 deletions(-) diff --git a/src/agent/vision_interpreter.js b/src/agent/vision_interpreter.js index 94be3de..9c66cc8 100644 --- a/src/agent/vision_interpreter.js +++ b/src/agent/vision_interpreter.js @@ -71,7 +71,14 @@ export class VisionInterpreter { const imageBuffer = fs.readFileSync(`${this.fp}/${filename}.jpg`); const messages = this.agent.history.getHistory(); res = await this.agent.prompter.vision_model.sendVisionRequest(messages, prompt, imageBuffer); - log(bot, res); + + if (res == 'Vision is only supported by certain models.') { + log(bot, "Vision may not be supported on this model. 
Using text-based environment description instead."); + log(bot, this._nearbyBlocks()); + } else { + log(bot, res); + } + } catch (error) { log(this.agent.bot, `Error analyzing image: ${error.message}`); } diff --git a/src/models/claude.js b/src/models/claude.js index 236a0bf..74095ef 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -35,8 +35,12 @@ export class Claude { res = resp.content[0].text; } catch (err) { + if (err.message.includes("does not support image input")) { + res = "Vision is only supported by certain models."; + } else { + res = "My brain disconnected, try again."; + } console.log(err); - res = 'My brain disconnected, try again.'; } return res; } diff --git a/src/models/gemini.js b/src/models/gemini.js index bc17a57..4c35526 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -102,15 +102,25 @@ export class Gemini { const stop_seq = '***'; const prompt = toSinglePrompt(turns, systemMessage, stop_seq, 'model'); - - console.log('Awaiting Google API vision response...'); - const result = await model.generateContent([prompt, imagePart]); - const response = await result.response; - const text = response.text(); - console.log('Received.'); - if (!text.includes(stop_seq)) return text; - const idx = text.indexOf(stop_seq); - return text.slice(0, idx); + let res = null; + try { + console.log('Awaiting Google API vision response...'); + const result = await model.generateContent([prompt, imagePart]); + const response = await result.response; + const text = response.text(); + console.log('Received.'); + if (!text.includes(stop_seq)) return text; + const idx = text.indexOf(stop_seq); + res = text.slice(0, idx); + } catch (err) { + console.log(err); + if (err.message.includes("Image input modality is not enabled for models/")) { + res = "Vision is only supported by certain models."; + } else { + res = "An unexpected error occurred, please try again."; + } + } + return res; } async embed(text) { diff --git a/src/models/gpt.js b/src/models/gpt.js index 981f0b2..8540778 100644 --- a/src/models/gpt.js +++ b/src/models/gpt.js @@ -48,6 +48,9 @@ export class GPT { if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { console.log('Context length exceeded, trying again with shorter context.'); return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else if (err.message.includes('image_url')) { + console.log(err); + res = 'Vision is only supported by certain models.'; } else { console.log(err); res = 'My brain disconnected, try again.'; diff --git a/src/models/grok.js b/src/models/grok.js index a8c6672..2878a10 100644 --- a/src/models/grok.js +++ b/src/models/grok.js @@ -43,6 +43,9 @@ export class Grok { if ((err.message == 'Context length exceeded' || err.code == 'context_length_exceeded') && turns.length > 1) { console.log('Context length exceeded, trying again with shorter context.'); return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else if (err.message.includes('The model expects a single `text` element per message.')) { + console.log(err); + res = 'Vision is only supported by certain models.'; } else { console.log(err); res = 'My brain disconnected, try again.'; @@ -51,6 +54,24 @@ export class Grok { // sometimes outputs special token <|separator|>, just replace it return res.replace(/<\|separator\|>/g, '*no response*'); } + + async sendVisionRequest(messages, systemMessage, imageBuffer) { + const imageMessages = [...messages]; + imageMessages.push({ + role: 
"user", + content: [ + { type: "text", text: systemMessage }, + { + type: "image_url", + image_url: { + url: `data:image/jpeg;base64,${imageBuffer.toString('base64')}` + } + } + ] + }); + + return this.sendRequest(imageMessages, systemMessage); + } async embed(text) { throw new Error('Embeddings are not supported by Grok.'); diff --git a/src/models/groq.js b/src/models/groq.js index 0ec99f6..4c8d1e6 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -23,9 +23,6 @@ export class GroqCloudAPI { let res = null; try { console.log("Awaiting Groq response..."); - if (!this.params.max_tokens) { - this.params.max_tokens = 16384; - } let completion = await this.groq.chat.completions.create({ "messages": messages, "model": this.model_name || "mixtral-8x7b-32768", @@ -43,14 +40,19 @@ export class GroqCloudAPI { } catch(err) { + if (err.message.includes("content must be a string")) { + res = "Vision is only supported by certain models."; + } else { + console.log(this.model_name); + res = "My brain disconnected, try again."; + } console.log(err); - res = "My brain just kinda stopped working. Try again."; } return res; } async sendVisionRequest(messages, systemMessage, imageBuffer) { - const imageMessages = [...messages]; + const imageMessages = messages.filter(message => message.role !== 'system'); imageMessages.push({ role: "user", content: [ diff --git a/src/models/mistral.js b/src/models/mistral.js index f1f3563..72448f1 100644 --- a/src/models/mistral.js +++ b/src/models/mistral.js @@ -56,9 +56,12 @@ export class Mistral { result = response.choices[0].message.content; } catch (err) { - console.log(err) - - result = "My brain disconnected, try again."; + if (err.message.includes("A request containing images has been given to a model which does not have the 'vision' capability.")) { + result = "Vision is only supported by certain models."; + } else { + result = "My brain disconnected, try again."; + } + console.log(err); } return result; From 24891b813e4cd120c4882f312720864193b80a14 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 9 Feb 2025 13:08:40 -0800 Subject: [PATCH 050/171] Create @google+generative-ai+0.2.1.patch Made it so the endpoint for Gemini models is now v1beta instead of v1, allowing access to all new models --- patches/patches/@google+generative-ai+0.2.1.patch | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 patches/patches/@google+generative-ai+0.2.1.patch diff --git a/patches/patches/@google+generative-ai+0.2.1.patch b/patches/patches/@google+generative-ai+0.2.1.patch new file mode 100644 index 0000000..d2d48a4 --- /dev/null +++ b/patches/patches/@google+generative-ai+0.2.1.patch @@ -0,0 +1,12 @@ +diff --git a/dist/index.mjs b/dist/index.mjs +--- a/dist/index.mjs ++++ b/dist/index.mjs +@@ +-const API_VERSION = "v1"; ++const API_VERSION = "v1beta"; +diff --git a/dist/index.js b/dist/index.js +--- a/dist/index.js ++++ b/dist/index.js +@@ +-const API_VERSION = "v1"; ++const API_VERSION = "v1beta"; From bf2afddd8a8e61b63783390ecfdda16eb2d55081 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 9 Feb 2025 13:09:09 -0800 Subject: [PATCH 051/171] Delete patches/patches directory --- patches/patches/@google+generative-ai+0.2.1.patch | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 patches/patches/@google+generative-ai+0.2.1.patch diff --git a/patches/patches/@google+generative-ai+0.2.1.patch b/patches/patches/@google+generative-ai+0.2.1.patch deleted file mode 100644 index d2d48a4..0000000 --- 
a/patches/patches/@google+generative-ai+0.2.1.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/dist/index.mjs b/dist/index.mjs ---- a/dist/index.mjs -+++ b/dist/index.mjs -@@ --const API_VERSION = "v1"; -+const API_VERSION = "v1beta"; -diff --git a/dist/index.js b/dist/index.js ---- a/dist/index.js -+++ b/dist/index.js -@@ --const API_VERSION = "v1"; -+const API_VERSION = "v1beta"; From a5ff329935dadef388243948819005001e4ed171 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 9 Feb 2025 13:09:34 -0800 Subject: [PATCH 052/171] Create @google+generative-ai+0.2.1.patch --- patches/@google+generative-ai+0.2.1.patch | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 patches/@google+generative-ai+0.2.1.patch diff --git a/patches/@google+generative-ai+0.2.1.patch b/patches/@google+generative-ai+0.2.1.patch new file mode 100644 index 0000000..d2d48a4 --- /dev/null +++ b/patches/@google+generative-ai+0.2.1.patch @@ -0,0 +1,12 @@ +diff --git a/dist/index.mjs b/dist/index.mjs +--- a/dist/index.mjs ++++ b/dist/index.mjs +@@ +-const API_VERSION = "v1"; ++const API_VERSION = "v1beta"; +diff --git a/dist/index.js b/dist/index.js +--- a/dist/index.js ++++ b/dist/index.js +@@ +-const API_VERSION = "v1"; ++const API_VERSION = "v1beta"; From de91971518436812750e07060a3475f8ff820e89 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 9 Feb 2025 18:29:26 -0800 Subject: [PATCH 053/171] Update local.js --- src/models/local.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/local.js b/src/models/local.js index 7a018b5..8a655bb 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -47,7 +47,7 @@ export class Local { } // If the model name includes "deepseek-r1" or "Andy-3.5-reasoning", then handle the <think> block. - if (this.model_name && this.model_name.includes("deepseek-r1") || this.model_name.includes("andy-3.5-reasoning")) { + if (this.model_name && this.model_name.includes("deepseek-r1") || this.model_name.includes("andy-3.5:reasoning")) { // Fixed right here for deepseek-r1 and andy-3.5:reasoning const hasOpenTag = res.includes("<think>"); const hasCloseTag = res.includes("</think>"); From ff3af0e2e486a020103274d185c4e732ed7c5ce0 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 11 Feb 2025 21:20:03 -0800 Subject: [PATCH 054/171] Update local.js --- src/models/local.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/models/local.js b/src/models/local.js index 8a655bb..509085a 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -47,7 +47,6 @@ export class Local { } // If the model name includes "deepseek-r1" or "Andy-3.5-reasoning", then handle the <think> block. - if (this.model_name && this.model_name.includes("deepseek-r1") || this.model_name.includes("andy-3.5:reasoning")) { // Fixed right here for deepseek-r1 and andy-3.5:reasoning const hasOpenTag = res.includes("<think>"); const hasCloseTag = res.includes("</think>"); @@ -61,7 +60,6 @@ export class Local { if (hasOpenTag && hasCloseTag) { res = res.replace(/<think>[\s\S]*?<\/think>/g, ''); } - } finalRes = res; break; // Exit the loop if we got a valid response. 
@@ -69,7 +67,7 @@ export class Local { if (finalRes == null) { console.warn("Could not get a valid <think> block or normal response after max attempts."); - finalRes = 'Response incomplete, please try again.'; + finalRes = 'I thought too hard, sorry, try again.'; } return finalRes; } From 8a1a8ce37095edb384b4874ca06faf14847a9946 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 11 Feb 2025 21:20:28 -0800 Subject: [PATCH 055/171] Update huggingface.js --- src/models/huggingface.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/models/huggingface.js b/src/models/huggingface.js index ab74f24..59dd0f7 100644 --- a/src/models/huggingface.js +++ b/src/models/huggingface.js @@ -51,7 +51,6 @@ export class HuggingFace { } // If the model is DeepSeek-R1, check for mismatched <think> blocks. - if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { const hasOpenTag = res.includes("<think>"); const hasCloseTag = res.includes("</think>"); @@ -65,7 +64,6 @@ export class HuggingFace { if (hasOpenTag && hasCloseTag) { res = res.replace(/<think>[\s\S]*?<\/think>/g, '').trim(); } - } finalRes = res; break; // Exit loop if we got a valid response. @@ -74,7 +72,7 @@ export class HuggingFace { // If no valid response was obtained after max attempts, assign a fallback. if (finalRes == null) { console.warn("Could not get a valid <think> block or normal response after max attempts."); - finalRes = 'Response incomplete, please try again.'; + finalRes = 'I thought too hard, sorry, try again.'; } console.log('Received.'); console.log(finalRes); @@ -84,4 +82,4 @@ export class HuggingFace { async embed(text) { throw new Error('Embeddings are not supported by HuggingFace.'); } -} \ No newline at end of file +} From fb4b16e46c9282342befe63d113c1f0300b9a84c Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 11 Feb 2025 21:21:26 -0800 Subject: [PATCH 056/171] Update groq.js --- src/models/groq.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/models/groq.js b/src/models/groq.js index 56dbfcd..2c5aba8 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -52,7 +52,6 @@ export class GroqCloudAPI { } // If the model name includes "deepseek-r1", handle the <think> tags - if (this.model_name && this.model_name.toLowerCase().includes("deepseek-r1")) { const hasOpenTag = res.includes("<think>"); const hasCloseTag = res.includes("</think>"); @@ -68,7 +67,6 @@ export class GroqCloudAPI { } // Remove the complete <think> block (and any content inside) from the response res = res.replace(/<think>[\s\S]*?<\/think>/g, '').trim(); - } finalRes = res; break; // Exit the loop once a valid response is obtained @@ -76,7 +74,7 @@ export class GroqCloudAPI { if (finalRes == null) { console.warn("Could not obtain a valid <think> block or normal response after max attempts."); - finalRes = "Response incomplete, please try again."; + finalRes = "I thought too hard, sorry, try again."; } finalRes = finalRes.replace(/<\|separator\|>/g, '*no response*'); @@ -86,4 +84,4 @@ export class GroqCloudAPI { async embed(text) { console.log("There is no support for embeddings in Groq support. 
However, the following text was provided: " + text); } -} \ No newline at end of file +} From 2d956ccddba5ef7b3f48876062a3d584f9845f6b Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 16:26:48 +0000 Subject: [PATCH 057/171] Create speak.js --- src/agent/speak.js | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 src/agent/speak.js diff --git a/src/agent/speak.js b/src/agent/speak.js new file mode 100644 index 0000000..80a36ff --- /dev/null +++ b/src/agent/speak.js @@ -0,0 +1,28 @@ +import { exec } from 'child_process'; + +export function say(textToSpeak) { + const isWin = process.platform === "win32"; + const isMac = process.platform === "darwin"; + + let command; + + if (isWin) { + command = `powershell -Command "Add-Type –AssemblyName System.Speech; (New-Object System.Speech.Synthesis.SpeechSynthesizer).Speak(\\"${textToSpeak}\\")"`; + } else if (isMac) { + command = `say "${textToSpeak}"`; + } else { + command = `espeak "${textToSpeak}"`; + } + + exec(command, (error, stdout, stderr) => { + if (error) { + console.error(`Error: ${error.message}`); + return; + } + if (stderr) { + console.error(`Stderr: ${stderr}`); + return; + } + console.log(`Stdout: ${stdout}`); + }); +} From 043011e20a187671dbebb526d0fa2348d1829b6a Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 16:28:19 +0000 Subject: [PATCH 058/171] Update agent.js --- src/agent/agent.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/agent/agent.js b/src/agent/agent.js index 72eb31d..7012240 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -14,6 +14,7 @@ import { addViewer } from './viewer.js'; import settings from '../../settings.js'; import { serverProxy } from './agent_proxy.js'; import { Task } from './tasks.js'; +import { say } from './speak.js'; export class Agent { async start(profile_fp, load_mem=false, init_message=null, count_id=0, task_path=null, task_id=null) { @@ -357,6 +358,7 @@ export class Agent { } } else { + say(message); this.bot.chat(message); } } From 81e4803c229cb6103615a165d8e21b5db98b3bcf Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 18:19:03 +0000 Subject: [PATCH 059/171] Update settings.js --- settings.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.js b/settings.js index 3b6c903..c7c5134 100644 --- a/settings.js +++ b/settings.js @@ -30,7 +30,7 @@ export default "load_memory": false, // load memory from previous session "init_message": "Respond with hello world and your name", // sends to all on spawn "only_chat_with": [], // users that the bots listen to and send general messages to. if empty it will chat publicly - + "speak": true, // allows all bots to speak through system text-to-speech. tested on windows, should work on mac, on linux you may need to `apt install espeak` "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... 
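For anyone trying the new text-to-speech path, a minimal pre-flight sketch (a hypothetical helper, not part of these patches) that checks for the Linux dependency mentioned in the settings comment above before "speak" is enabled:

import { execSync } from 'child_process';

// Returns true if a system TTS backend is likely available for speak.js.
// Windows (PowerShell System.Speech) and macOS (`say`) ship with the OS;
// Linux needs `espeak` installed (e.g. `apt install espeak`).
function ttsAvailable() {
    if (process.platform === 'win32' || process.platform === 'darwin')
        return true;
    try {
        execSync('which espeak', { stdio: 'ignore' }); // assumes a POSIX `which`
        return true;
    } catch {
        return false;
    }
}

console.log(ttsAvailable() ? 'TTS backend found.' : 'Install espeak before enabling "speak" in settings.js.');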
From 9bf70edd102299359ebff69c930e12e0b50386f9 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 18:33:05 +0000 Subject: [PATCH 060/171] Update agent.js --- src/agent/agent.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index 7012240..8645344 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -358,7 +358,9 @@ export class Agent { } } else { - say(message); + if (settings.speak) { + say(message); + } this.bot.chat(message); } } From e612b00410ec65502596eefcd02b255e56a5519d Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 18:33:57 +0000 Subject: [PATCH 061/171] removed unnecessary debug logging --- src/agent/speak.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/agent/speak.js b/src/agent/speak.js index 80a36ff..946d318 100644 --- a/src/agent/speak.js +++ b/src/agent/speak.js @@ -20,9 +20,8 @@ export function say(textToSpeak) { return; } if (stderr) { - console.error(`Stderr: ${stderr}`); + console.error(`Error: ${stderr}`); return; } - console.log(`Stdout: ${stdout}`); }); } From 4b38aba2dc2b827e17c5bbf37ac844c4f8f753b2 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 19:10:14 +0000 Subject: [PATCH 062/171] added speaking queue to ensure bot doesn't say multiple things at once --- src/agent/speak.js | 28 +++++++++++++++++++++++----- 1 file changed, 23 insertions(+), 5 deletions(-) diff --git a/src/agent/speak.js b/src/agent/speak.js index 946d318..239cb21 100644 --- a/src/agent/speak.js +++ b/src/agent/speak.js @@ -1,6 +1,23 @@ import { exec } from 'child_process'; +let speakingQueue = []; +let isSpeaking = false; + export function say(textToSpeak) { + speakingQueue.push(textToSpeak); + if (!isSpeaking) { + processQueue(); + } +} + +function processQueue() { + if (speakingQueue.length === 0) { + isSpeaking = false; + return; + } + + isSpeaking = true; + const textToSpeak = speakingQueue.shift(); const isWin = process.platform === "win32"; const isMac = process.platform === "darwin"; @@ -17,11 +34,12 @@ export function say(textToSpeak) { exec(command, (error, stdout, stderr) => { if (error) { console.error(`Error: ${error.message}`); - return; - } - if (stderr) { - console.error(`Error: ${stderr}`); - return; + console.error(`Stack: ${error.stack}`); + } else if (stderr) { + console.error(`Stderr: ${stderr}`); + } else { + console.log(`Stdout: ${stdout}`); } + processQueue(); // Continue with the next message in the queue }); } From 386900aa03bb4365d8cbed94bfdfa7ab421251c2 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Feb 2025 19:12:34 +0000 Subject: [PATCH 063/171] removed unnecessary debug logging (again) --- src/agent/speak.js | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/agent/speak.js b/src/agent/speak.js index 239cb21..af18298 100644 --- a/src/agent/speak.js +++ b/src/agent/speak.js @@ -34,12 +34,9 @@ function processQueue() { exec(command, (error, stdout, stderr) => { if (error) { console.error(`Error: ${error.message}`); - console.error(`Stack: ${error.stack}`); + console.error(`${error.stack}`); } else if (stderr) { - console.error(`Stderr: ${stderr}`); - } else { - console.log(`Stdout: ${stdout}`); - } + console.error(`Error: ${stderr}`); processQueue(); // Continue with the next message in the queue }); } From a245288cb2060cc7d8d73ab682658268e9d45167 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 12 Feb 2025 16:52:21 -0800 Subject: [PATCH 064/171] Update 
@google+generative-ai+0.2.1.patch Fixed the Gemini 2.0 patch --- patches/@google+generative-ai+0.2.1.patch | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/patches/@google+generative-ai+0.2.1.patch b/patches/@google+generative-ai+0.2.1.patch index d2d48a4..5862810 100644 --- a/patches/@google+generative-ai+0.2.1.patch +++ b/patches/@google+generative-ai+0.2.1.patch @@ -1,12 +1,12 @@ diff --git a/dist/index.mjs b/dist/index.mjs ---- a/dist/index.mjs -+++ b/dist/index.mjs +--- a/node_modules/@google/generative-ai/dist/index.mjs ++++ b/node_modules/@google/generative-ai/dist/index.mjs @@ -const API_VERSION = "v1"; +const API_VERSION = "v1beta"; diff --git a/dist/index.js b/dist/index.js ---- a/dist/index.js -+++ b/dist/index.js +--- a/node_modules/@google/generative-ai/dist/index.js ++++ b/node_modules/@google/generative-ai/dist/index.js @@ -const API_VERSION = "v1"; +const API_VERSION = "v1beta"; From fd6ac47f07ed38c3ae091f48664dadc74ab83e24 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 12 Feb 2025 16:55:12 -0800 Subject: [PATCH 065/171] Update local.js Fixed Local Model usage with reasoning --- src/models/local.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/models/local.js b/src/models/local.js index 509085a..fafa3d8 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -51,11 +51,17 @@ export class Local { const hasCloseTag = res.includes("</think>"); // If there's a partial mismatch, retry to get a complete response. - if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + if ((hasOpenTag && !hasCloseTag)) { console.warn("Partial <think> block detected. Re-generating..."); continue; } - + + // If </think> is present but <think> is not, prepend <think> + if (hasCloseTag && !hasOpenTag) { + res = '<think>' + res; + } + // Changed this so if the model reasons, using <think> and </think> but doesn't start the message with <think>, <think> gets prepended to the message so no errors occur. + + // If both tags appear, remove them (and everything inside). 
if (hasOpenTag && hasCloseTag) { res = res.replace(/<think>[\s\S]*?<\/think>/g, ''); From 73e11ff6bb11c0dcefc3b81624cc5065fa060cd7 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 15 Feb 2025 10:20:02 -0800 Subject: [PATCH 066/171] Update prompter.js Fixed chat.model typos --- src/models/prompter.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index eb14c29..4a42fec 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -131,10 +131,10 @@ export class Prompter { model_profile.api = 'mistral'; else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) profile.api = 'groq'; - else if (chat.model.includes('hf:')) - chat.api = "glhf"; - else if (chat.model.includes('hyperbolic:')|| chat.model.includes('hb:')) - chat.api = "hyperbolic"; + else if (profile.model.includes('hf:')) + profile.api = "glhf"; + else if (profile.model.includes('hyperbolic:')|| chat.model.includes('hb:')) + profile.api = "hyperbolic"; else if (profile.model.includes('novita/')) profile.api = 'novita'; else if (profile.model.includes('qwen')) From 770a533fbf944a9fcdbb0bc2c5e39b8b5f40fb06 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 15 Feb 2025 15:02:32 -0800 Subject: [PATCH 067/171] Update local.js Fixed prompting for Embedding models --- src/models/local.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/local.js b/src/models/local.js index fafa3d8..ba78f53 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -80,7 +80,7 @@ export class Local { async embed(text) { let model = this.model_name || 'nomic-embed-text'; - let body = { model: model, prompt: text }; + let body = { model: model, input: text }; let res = await this.send(this.embedding_endpoint, body); return res['embedding']; } From 45a799ed7d08efb0c5836ea50b829499a28d5407 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 16 Feb 2025 15:03:15 -0800 Subject: [PATCH 068/171] Update @google+generative-ai+0.2.1.patch Fixed a minor error in the patch file for Gemini 2.0 models --- patches/@google+generative-ai+0.2.1.patch | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/patches/@google+generative-ai+0.2.1.patch b/patches/@google+generative-ai+0.2.1.patch index 5862810..68d8ec6 100644 --- a/patches/@google+generative-ai+0.2.1.patch +++ b/patches/@google+generative-ai+0.2.1.patch @@ -1,12 +1,12 @@ -diff --git a/dist/index.mjs b/dist/index.mjs +diff --git a/node_modules/@google/generative-ai/dist/index.mjs b/node_modules/@google/generative-ai/dist/index.mjs --- a/node_modules/@google/generative-ai/dist/index.mjs -+++ b/node_modules/@google/generative-ai/dist/index.mjs -@@ ++++ b/node_modules/@google/generative-ai/dist/index.mjs +@@ -156,1 +156,1 @@ -const API_VERSION = "v1"; +const API_VERSION = "v1beta"; -diff --git a/dist/index.js b/dist/index.js +diff --git a/node_modules/@google/generative-ai/dist/index.js b/node_modules/@google/generative-ai/dist/index.js --- a/node_modules/@google/generative-ai/dist/index.js +++ b/node_modules/@google/generative-ai/dist/index.js -@@ +@@ -156,1 +156,1 @@ -const API_VERSION = "v1"; +const API_VERSION = "v1beta"; From b2f60189d82126e17a7957bd04927d6c87b91fac Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 17 Feb 2025 15:42:01 -0800 Subject: [PATCH 069/171] Update README.md Fixed README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 00778bf..8eab552 100644 --- a/README.md +++ 
b/README.md @@ -49,7 +49,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | | Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | | xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) | -| Deepseek | `DEEPSEEK_API_KEY` | `deepseek-chat` | [docs](https://api-docs.deepseek.com/) | +| Deepseek | `DEEPSEEK_API_KEY` | `deepseek-reasoner` | [docs](https://api-docs.deepseek.com/) | | Openrouter | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | | glhf.chat | `GHLF_API_KEY` | `hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | | Hyperbolic | `HYPERBOLIC_API_KEY` | `hb:deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | From 359592825a314be6ff0cebae2c391864674a9733 Mon Sep 17 00:00:00 2001 From: gmuffiness Date: Wed, 19 Feb 2025 11:14:08 +0900 Subject: [PATCH 070/171] fix: update package --- package.json | 2 +- src/agent/library/skills.js | 76 +------------------------------------ 2 files changed, 2 insertions(+), 76 deletions(-) diff --git a/package.json b/package.json index 1060900..7c2348a 100644 --- a/package.json +++ b/package.json @@ -25,7 +25,7 @@ "socket.io": "^4.7.2", "socket.io-client": "^4.7.2", "express": "^4.18.2", - "node-canvas-webgl": "^0.2.6", + "node-canvas-webgl": "PrismarineJS/node-canvas-webgl", "three": "^0.128.0" }, "scripts": { diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 859e53d..8f7c53c 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1352,78 +1352,4 @@ export async function activateNearestBlock(bot, type) { await bot.activateBlock(block); log(bot, `Activated ${type} at x:${block.position.x.toFixed(1)}, y:${block.position.y.toFixed(1)}, z:${block.position.z.toFixed(1)}.`); return true; -} - -// export async function lookAtPlayer(agent, bot, player_name, direction) { -// /** -// * Look at a player or look in the same direction as the player -// * @param {MinecraftBot} bot reference to the minecraft bot -// * @param {string} player_name name of the target player -// * @param {string} direction 'at' to look at player, 'with' to look in same direction -// * @returns {Promise} whether the look action was successful -// * @example -// * await skills.lookAtPlayer(bot, "player1", "at"); -// * await skills.lookAtPlayer(bot, "player1", "with"); -// **/ - -// const player = bot.players[player_name]?.entity; -// if (!player) { -// log(bot, `Could not find player ${player_name}`); -// return false; -// } - -// let filename; -// if (direction === 'with') { -// // Copy player's view direction -// await bot.look(player.yaw, player.pitch); -// const camera = new Camera(bot); -// await new Promise(resolve => setTimeout(resolve, 500)); -// log(bot, `Looking in the same direction as ${player_name}`); - -// filename = await camera.capture(); -// console.log(player.yaw, player.pitch); -// // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); - -// } else { -// // Look at player's position -// await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); -// const camera = new Camera(bot); -// await new 
Promise(resolve => setTimeout(resolve, 500)); -// log(bot, `Looking at player ${player_name}`); - -// filename = await camera.capture(); -// // log(bot, `Screenshot saved: bots/${bot.username}/screenshots/${filename}.jpg`); -// // log(bot, `Target coordinates: x:${player.position.x}, y:${player.position.y}, z:${player.position.z}`); -// } - -// try { -// const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); -// const messages = agent.history.getHistory(); -// let res = await agent.prompter.promptImageConvo(messages, imageBuffer); -// log(bot, res); -// return true; -// } catch (error) { -// log(bot, `Error analyzing image: ${error.message}`); -// return false; -// } -// } - -// export async function lookAtPosition(agent, bot, x, y, z) { -// await bot.lookAt(new Vec3(x, y + 2, z)); -// const camera = new Camera(bot); -// await new Promise(resolve => setTimeout(resolve, 500)); -// log(bot, `Looking at coordinate ${x, y, z}`); - -// let filename = await camera.capture(); - -// try { -// const imageBuffer = fs.readFileSync(`bots/${bot.username}/screenshots/${filename}.jpg`); -// const messages = agent.history.getHistory(); -// let res = await agent.prompter.promptImageConvo(messages, imageBuffer); -// log(bot, res); -// return true; -// } catch (error) { -// log(bot, `Error analyzing image: ${error.message}`); -// return false; -// } -// } +} \ No newline at end of file From d614d307642b194fce2207756a8bcf2c675c78ec Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 23 Feb 2025 21:17:30 -0800 Subject: [PATCH 071/171] Update prompter.js Fixed Ollama prompting issues. --- src/models/prompter.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index 5355ff0..e2e0453 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -127,6 +127,8 @@ export class Prompter { profile.api = 'google'; else if (profile.model.includes('openrouter/')) profile.api = 'openrouter'; // must do before others bc shares model names + else if (profile.model.includes('andy-')) + profile.api = 'ollama'; // We have to check here since the installation for Andy-3.6 makes the username include "hf:" which is the same as glhf.chat, which will not work. else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) profile.api = 'openai'; else if (profile.model.includes('claude')) @@ -151,10 +153,8 @@ export class Prompter { profile.api = 'xai'; else if (profile.model.includes('deepseek')) profile.api = 'deepseek'; - else if (profile.model.includes('llama3')) - profile.api = 'ollama'; - else - throw new Error('Unknown model:', profile.model); + else + profile.api = 'ollama'; // Fixed this line, it had a bug where only llama3 models could be used in Mindcraft via Ollama, which was is not optimal. 
} return profile; } From a63ca411a4d24bf2f55bf286d827a53f476b27c3 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:03:17 -0800 Subject: [PATCH 072/171] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 8eab552..136540f 100644 --- a/README.md +++ b/README.md @@ -48,7 +48,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | Novita AI | `NOVITA_API_KEY` | `gryphe/mythomax-l2-13b` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | | Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | | Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | -| xAI | `XAI_API_KEY` | `grok-beta` | [docs](https://docs.x.ai/docs) | +| xAI | `XAI_API_KEY` | `grok-2` | [docs](https://docs.x.ai/docs) | | Deepseek | `DEEPSEEK_API_KEY` | `deepseek-reasoner` | [docs](https://api-docs.deepseek.com/) | | Openrouter | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | | glhf.chat | `GHLF_API_KEY` | `hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | From 2b0dde69337c644652a25133b6f08cd6d21909b6 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:04:39 -0800 Subject: [PATCH 073/171] Create .gitignore --- .gitignore | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..22d104f --- /dev/null +++ b/.gitignore @@ -0,0 +1,18 @@ +.vscode/ +.idea/ +node_modules/ +package-lock.json +code_records/ +scratch.js +bots/**/action-code/** +bots/**/ +keys.json +services/viaproxy/jars/** +services/viaproxy/logs/** +services/viaproxy/plugins/** +services/viaproxy/ViaLoader/** +services/viaproxy/saves.json +services/viaproxy/viaproxy.yml +tmp/ +wandb/ +experiments/ From 8782b6535400e195cdce44acf673285af47747ea Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:06:54 -0800 Subject: [PATCH 074/171] Update evaluation_script.py Updated eval.py --- evaluation_script.py | 315 +++++++++++++++++++++++++++++++++++-------- 1 file changed, 259 insertions(+), 56 deletions(-) diff --git a/evaluation_script.py b/evaluation_script.py index 4650d77..beb170b 100644 --- a/evaluation_script.py +++ b/evaluation_script.py @@ -1,9 +1,13 @@ import argparse import json +import shutil import subprocess import time from datetime import datetime import re +import sys +import os +import time def read_settings(file_path): """Read and parse the settings.js file to get agent profiles.""" @@ -30,7 +34,7 @@ def read_settings(file_path): ## profiles is a list of strings like "./andy.json" and "./bob.json" agent_names = [profile.split('/')[-1].split('.')[0] for profile in profiles] - return agent_names + return agent_names def check_task_completion(agents): """Check memory.json files of all agents to determine task success/failure.""" @@ -80,68 +84,267 @@ def update_results_file(task_id, success_count, total_count, time_taken, experim f.write(f"Average time per experiment: {total_time / total_count:.2f} seconds\n") f.write(f"Last updated: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}\n") -def run_experiment(task_path, 
task_id, num_exp): - """Run the specified number of experiments and track results.""" - # Read agent profiles from settings.js - agents = read_settings(file_path="settings.js") - print(f"Detected agents: {agents}") + +def set_environment_variable_tmux_session(session_name, key, value): + """Set an environment variable for the current process.""" + subprocess.run(["tmux", "send-keys", "-t", session_name, f"export {key}={value}", "C-m"]) + +def launch_parallel_experiments(task_path, + num_exp, + exp_name, + num_agents=2, + model="gpt-4o", + num_parallel=1): - # Generate timestamp at the start of experiments - timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') - results_filename = f"results_{task_id}_{timestamp}.txt" - print(f"Results will be saved to: {results_filename}") + with open(task_path, 'r', encoding='utf-8') as file: + content = file.read() + json_data = json.loads(content) + + task_ids = json_data.keys() + + # split the task_ids into num_parallel groups + task_ids = list(task_ids) + task_ids_split = [task_ids[i::num_parallel] for i in range(num_parallel)] + + servers = create_server_files("../server_data/", num_parallel) + date_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + experiments_folder = f"experiments/{exp_name}_{date_time}" + exp_name = f"{exp_name}_{date_time}" + + # start wandb + os.makedirs(experiments_folder, exist_ok=True) + for i, server in enumerate(servers): + launch_server_experiment(task_path, task_ids_split[i], num_exp, server, experiments_folder, exp_name) + time.sleep(5) + + +def launch_server_experiment(task_path, + task_ids, + num_exp, + server, + experiments_folder, + exp_name="exp", + num_agents=2, + model="gpt-4o"): + """ + Launch a Minecraft server and run experiments on it. + @param task_path: Path to the task file + @param task_ids: IDs of the tasks to run + @param num_exp: Number of experiments to run + @param server: Tuple containing server path and port + @param experiments_folder: Folder to store experiment results + @param exp_name: Name of the experiment for wandb dataset + @param num_agents: Number of agents to run + @param model: Model to use for the agents + """ + server_path, server_port = server + edit_file(os.path.join(server_path, "server.properties"), {"server-port": server_port}) + mindserver_port = server_port - 55916 + 8080 - success_count = 0 - experiment_results = [] - - for exp_num in range(num_exp): - print(f"\nRunning experiment {exp_num + 1}/{num_exp}") - - start_time = time.time() - - # Run the node command + # set up server and agents + session_name = str(server_port - 55916) + if num_agents == 2: + agent_names = [f"andy_{session_name}", f"jill_{session_name}"] + models = [model] * 2 + else: + agent_names = [f"andy_{session_name}", f"jill_{session_name}", f"bob_{session_name}"] + models = [model] * 3 + make_profiles(agent_names, models) + + # edit_file("settings.js", {"profiles": [f"./{agent}.json" for agent in agent_names]}) + agent_profiles = [f"./{agent}.json" for agent in agent_names] + agent_profiles_str = f"\'[\"{agent_profiles[0]}\", \"{agent_profiles[1]}\"]\'" + print(agent_profiles_str) + launch_world(server_path, session_name="server_" + session_name, agent_names=agent_names) + + subprocess.run(['tmux', 'new-session', '-d', '-s', session_name], check=True) + + # set environment variables + set_environment_variable_tmux_session(session_name, "MINECRAFT_PORT", server_port) + set_environment_variable_tmux_session(session_name, "MINDSERVER_PORT", mindserver_port) + 
set_environment_variable_tmux_session(session_name, "PROFILES", agent_profiles_str) + + script_content = "" + for task_id in task_ids: cmd = f"node main.js --task_path {task_path} --task_id {task_id}" - try: - subprocess.run(cmd, shell=True, check=True) - except subprocess.CalledProcessError as e: - print(f"Error running experiment: {e}") - continue - - # Check if task was successful - success = check_task_completion(agents) - if success: - success_count += 1 - print(f"Experiment {exp_num + 1} successful") - else: - print(f"Experiment {exp_num + 1} failed") - - end_time = time.time() - time_taken = end_time - start_time - - # Store individual experiment result - experiment_results.append({ - 'success': success, - 'time_taken': time_taken - }) - - # Update results file after each experiment using the constant filename - update_results_file(task_id, success_count, exp_num + 1, time_taken, experiment_results, results_filename) - - # Small delay between experiments - time.sleep(1) - - final_ratio = success_count / num_exp - print(f"\nExperiments completed. Final success ratio: {final_ratio:.2f}") + cp_cmd = f"cp {agent_names[0]}.json {server_path}bots/{agent_names[0]}/profile.json" + for _ in range(num_exp): + script_content += f"{cmd}\n" + script_content += "sleep 2\n" + for agent in agent_names: + cp_cmd = f"cp bots/{agent}/memory.json {experiments_folder}/{task_id}_{agent}_{_}.json" + script_content += f"{cp_cmd}\n" + script_content += "sleep 1\n" + script_content += f"echo 'Uploading {experiments_folder}/{task_id}_{agent}_{_}.json to wandb'\n" + wandb_cmd = f"wandb artifact put {experiments_folder}/{task_id}_{agent}_{_}.json --name {exp_name}_{task_id}_{agent}_{_} --type dataset" + script_content += f"echo '{wandb_cmd}'\n" + script_content += f"{wandb_cmd}\n" + script_content += "sleep 1\n" + script_content += "sleep 1\n" + + # Create a temporary shell script file + script_file = f"./tmp/experiment_script_{session_name}.sh" + + script_dir = os.path.dirname(script_file) + os.makedirs(script_dir, exist_ok=True) + + # Call the function before writing the script file + with open(script_file, 'w') as f: + f.write(script_content) + + script_file_run = "bash " + script_file + + # Execute the shell script using subprocess + subprocess.run(["tmux", "send-keys", "-t", session_name, script_file_run, "C-m"]) + + + # subprocess.run(["tmux", "send-keys", "-t", session_name, f"/op {agent_names[0]}", "C-m"]) + +def make_profiles(agent_names, models): + assert len(agent_names) == len(models) + for index in range(len(agent_names)): + content = {"name": agent_names[index], "model": models[index], "modes": {"hunting": False}} + with open(f"{agent_names[index]}.json", 'w') as f: + json.dump(content, f) + +def create_server_files(source_path, num_copies): + """Create multiple copies of server files for parallel experiments.""" + print("Creating server files...") + print(num_copies) + servers = [] + for i in range(num_copies): + dest_path = f"../server_data_{i}/" + copy_server_files(source_path, dest_path) + print(dest_path) + edit_file(dest_path + "server.properties", {"server-port": 55916 + i}) + # edit_server_properties_file(dest_path, 55916 + i) + servers.append((dest_path, 55916 + i)) + return servers + +def edit_file(file, content_dict): + try: + with open(file, 'r') as f: + lines = f.readlines() + with open(file, 'w') as f: + for line in lines: + for key, value in content_dict.items(): + if line.startswith(key): + f.write(f"{key}={value}\n") + else: + f.write(line) + print(f"{file} updated with 
{content_dict}") + except Exception as e: + print(f"Error editing file {file}: {e}") + +def clean_up_server_files(num_copies): + """Delete server files from multiple locations.""" + for i in range(num_copies): + dest_path = f"../server_data_{i}/" + delete_server_files(dest_path) + +def copy_server_files(source_path, dest_path): + """Copy server files to the specified location.""" + try: + shutil.copytree(source_path, dest_path) + print(f"Server files copied to {dest_path}") + except Exception as e: + print(f"Error copying server files: {e}") + +def delete_server_files(dest_path): + """Delete server files from the specified location.""" + try: + shutil.rmtree(dest_path) + print(f"Server files deleted from {dest_path}") + except Exception as e: + print(f"Error deleting server files: {e}") + +def launch_world(server_path="../server_data/", agent_names=["andy", "jill"], session_name="server"): + """Launch the Minecraft world.""" + print(server_path) + cmd = f"cd {server_path} && java -jar server.jar" + subprocess.run(['tmux', 'new-session', '-d', '-s', session_name], check=True) + subprocess.run(["tmux", "send-keys", "-t", session_name, cmd, "C-m"]) + for agent in agent_names: + subprocess.run(["tmux", "send-keys", "-t", session_name, f"/op {agent}", "C-m"]) + time.sleep(5) + +def kill_world(session_name="server"): + """Kill the Minecraft world.""" + subprocess.run(["tmux", "send-keys", "-t", session_name, "stop", "C-m"]) + time.sleep(5) + subprocess.run(["tmux", "kill-session", "-t", session_name]) + +def detach_process(command): + """ + Launches a subprocess and detaches from it, allowing it to run independently. + + Args: + command: A list of strings representing the command to execute, e.g., ['python', 'my_script.py']. + """ + + try: + # Create a new process group so the child doesn't get signals intended for the parent. + # This is crucial for proper detachment. 
+ kwargs = {} + if sys.platform == 'win32': + kwargs.update(creationflags=subprocess.CREATE_NEW_PROCESS_GROUP) # Windows specific + + process = subprocess.Popen(command, + stdin=subprocess.PIPE, # Prevent stdin blocking + stdout=subprocess.PIPE, # Redirect stdout + stderr=subprocess.PIPE, # Redirect stderr + close_fds=True, # Close open file descriptors + **kwargs) + + print(f"Process launched with PID: {process.pid}") + return process.pid # Return the PID of the detached process + + except FileNotFoundError: + print(f"Error: Command not found: {command}") + return None + except Exception as e: + print(f"An error occurred: {e}") + return None def main(): + # edit_settings("settings.js", {"profiles": ["./andy.json", "./jill.json"], "port": 55917}) + # edit_server_properties_file("../server_data/", 55917) + parser = argparse.ArgumentParser(description='Run Minecraft AI agent experiments') - parser.add_argument('task_path', help='Path to the task file') - parser.add_argument('task_id', help='ID of the task to run') - parser.add_argument('num_exp', type=int, help='Number of experiments to run') - + parser.add_argument('--task_path', default="multiagent_crafting_tasks.json", help='Path to the task file') + parser.add_argument('--task_id', default=None, help='ID of the task to run') + parser.add_argument('--num_exp', default=1, type=int, help='Number of experiments to run') + parser.add_argument('--num_parallel', default=1, type=int, help='Number of parallel servers to run') + parser.add_argument('--exp_name', default="exp", help='Name of the experiment') + parser.add_argument('--wandb', action='store_true', help='Whether to use wandb') + parser.add_argument('--wandb-project', default="minecraft_experiments", help='wandb project name') + args = parser.parse_args() + + if args.wandb: + import wandb + wandb.init(project=args.wandb_project, name=args.exp_name) + + # kill all tmux session before starting + try: + subprocess.run(['tmux', 'kill-server'], check=True) + except: + print("No tmux session to kill") - run_experiment(args.task_path, args.task_id, args.num_exp) + # delete all server files + clean_up_server_files(args.num_parallel) + if args.task_id is None: + launch_parallel_experiments(args.task_path, num_exp=args.num_exp, exp_name=args.exp_name, num_parallel=args.num_parallel) + + # servers = create_server_files("../server_data/", args.num_parallel) + # date_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S") + # experiments_folder = f"{args.exp_name}_{date_time}" + # os.makedirs(experiments_folder, exist_ok=True) + # for server in servers: + # launch_server_experiment(args.task_path, [args.task_id], args.num_exp, server, experiments_folder) + # time.sleep(5) + + # run_experiment(args.task_path, args.task_id, args.num_exp) if __name__ == "__main__": - main() \ No newline at end of file + main() From becae6fa94d6d6817545d7852e5eccbfce531b84 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:07:55 -0800 Subject: [PATCH 075/171] Update example_tasks.json Fixed example tasks.json --- example_tasks.json | 39 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 38 insertions(+), 1 deletion(-) diff --git a/example_tasks.json b/example_tasks.json index 69ab550..f2b83a9 100644 --- a/example_tasks.json +++ b/example_tasks.json @@ -44,6 +44,7 @@ }, "multiagent_techtree_1_stone_pickaxe": { "conversation": "Let's collaborate to build a stone pickaxe", + "goal": "Build a stone pickaxe", "agent_count": 2, "initial_inventory": { "0": { @@ -57,6 +58,23 @@ "number_of_target": 1, "type": 
"techtree", "timeout": 300 + }, + "multiagent_techtree_1_shears": { + "goal": "Collaborate with other agents to build a shear.", + "conversation": "Let's collaborate to build a shear.", + "agent_count": 2, + "initial_inventory": { + "0": { + "iron_ingot": 1 + }, + "1": { + "iron_ingot": 1 + } + }, + "target": "shears", + "number_of_target": 1, + "type": "techtree", + "timeout": 60 }, "smelt_ingot": { "goal": "Smelt 1 iron ingot and 1 copper ingot", @@ -71,5 +89,24 @@ "number_of_target": 1, "type": "techtree", "timeout": 300 + }, + "multiagent_smelt_ingot": { + "conversation": "Let's collaborate to smelt ingots", + "goal": "Smelt 1 iron ingot and 1 copper ingot, use star emojis in every response", + "agent_count": 2, + "initial_inventory": { + "0": { + "furnace": 1, + "coal": 2 + }, + "1": { + "raw_iron": 1, + "raw_copper": 1 + } + }, + "target": "copper_ingot", + "number_of_target": 1, + "type": "techtree", + "timeout": 300 } -} \ No newline at end of file +} From fbca94dc753f6c901e08db42a82cda7b180d2207 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:08:35 -0800 Subject: [PATCH 076/171] Create multi_agent_crafting_tasks.json --- multi_agent_crafting_tasks.json | 43 +++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 multi_agent_crafting_tasks.json diff --git a/multi_agent_crafting_tasks.json b/multi_agent_crafting_tasks.json new file mode 100644 index 0000000..9d7a8e7 --- /dev/null +++ b/multi_agent_crafting_tasks.json @@ -0,0 +1,43 @@ +{ + "multiagent_techtree_1_stone_pickaxe": { + "conversation": "Let's collaborate to build a stone pickaxe", + "agent_count": 2, + "initial_inventory": { + "0": { + "wooden_pickaxe": 1 + }, + "1": { + "wooden_axe": 1 + } + }, + "blocked_actions": { + "0": [], + "1": [] + }, + "target": "stone_pickaxe", + "number_of_target": 1, + "type": "techtree", + "timeout": 20 + }, + "multiagent_techtree_1_shears": { + "goal": "Collaborate with other agents to build a shear.", + "conversation": "Let's collaborate to build a shear.", + "agent_count": 2, + "initial_inventory": { + "0": { + "iron_ingot": 1 + }, + "1": { + "iron_ingot": 1 + } + }, + "blocked_actions": { + "0": [], + "1": [] + }, + "target": "shears", + "number_of_target": 1, + "type": "techtree", + "timeout": 20 + } +} From 95b56f53012d3311eca793a87c4bafe83c5624fc Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:09:24 -0800 Subject: [PATCH 077/171] Update settings.js --- settings.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/settings.js b/settings.js index 6a62eb0..bf83b04 100644 --- a/settings.js +++ b/settings.js @@ -2,17 +2,17 @@ export default { "minecraft_version": "1.20.4", // supports up to 1.21.1 "host": "127.0.0.1", // or "localhost", "your.ip.address.here" - "port": 55916, + "port": process.env.MINECRAFT_PORT || 55916, "auth": "offline", // or "microsoft" // the mindserver manages all agents and hosts the UI "host_mindserver": true, // if true, the mindserver will be hosted on this machine. 
otherwise, specify a public IP address "mindserver_host": "localhost", - "mindserver_port": 8080, + "mindserver_port": process.env.MINDSERVER_PORT || 8080, // the base profile is shared by all bots for default prompts/examples/modes "base_profile": "./profiles/defaults/survival.json", // also see creative.json, god_mode.json - "profiles": [ + "profiles": ((process.env.PROFILES) && JSON.parse(process.env.PROFILES)) || [ "./andy.json", // "./profiles/gpt.json", // "./profiles/claude.json", From f14f86a518518fc600896919f2454235e1b81190 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:10:04 -0800 Subject: [PATCH 078/171] Update action_manager.js --- src/agent/action_manager.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index ad08827..30359cb 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -46,7 +46,7 @@ export class ActionManager { assert(actionLabel != null, 'actionLabel is required for new resume'); this.resume_name = actionLabel; } - if (this.resume_func != null && (this.agent.isIdle() || new_resume) && (!this.agent.self_prompter.on || new_resume)) { + if (this.resume_func != null && (this.agent.isIdle() || new_resume) && (!this.agent.self_prompter.isActive() || new_resume)) { this.currentActionLabel = this.resume_name; let res = await this._executeAction(this.resume_name, this.resume_func, timeout); this.currentActionLabel = ''; @@ -152,4 +152,4 @@ export class ActionManager { }, TIMEOUT_MINS * 60 * 1000); } -} \ No newline at end of file +} From 15eb0fd1c66d89b6b7fa01cb244d40bae89e5535 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:12:02 -0800 Subject: [PATCH 079/171] Update agent.js --- src/agent/agent.js | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index f2eed4d..d394dde 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -91,6 +91,8 @@ export class Agent { this._setupEventHandlers(save_data, init_message); this.startEvents(); + // this.task.initBotTask(); + if (!load_mem) { this.task.initBotTask(); } @@ -156,10 +158,10 @@ export class Agent { }; if (save_data?.self_prompt) { - let prompt = save_data.self_prompt; - // add initial message to history - this.history.add('system', prompt); - await this.self_prompter.start(prompt); + if (init_message) { + this.history.add('system', init_message); + } + await this.self_prompter.handleLoad(save_data.self_prompt, save_data.self_prompting_state); } if (save_data?.last_sender) { this.last_sender = save_data.last_sender; @@ -193,7 +195,7 @@ export class Agent { shutUp() { this.shut_up = true; - if (this.self_prompter.on) { + if (this.self_prompter.isActive()) { this.self_prompter.stop(false); } convoManager.endAllConversations(); @@ -259,7 +261,7 @@ export class Agent { await this.history.add(source, message); this.history.save(); - if (!self_prompt && this.self_prompter.on) // message is from user during self-prompting + if (!self_prompt && this.self_prompter.isActive()) // message is from user during self-prompting max_responses = 1; // force only respond to this message, then let self-prompting take over for (let i=0; i Date: Thu, 27 Feb 2025 21:13:02 -0800 Subject: [PATCH 080/171] Update actions.js --- src/agent/commands/actions.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 88e6b9e..19b231e 100644 --- 
a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -49,7 +49,7 @@ export const actionsList = [ agent.actions.cancelResume(); agent.bot.emit('idle'); let msg = 'Agent stopped.'; - if (agent.self_prompter.on) + if (agent.self_prompter.isActive()) msg += ' Self-prompting still active.'; return msg; } @@ -362,8 +362,7 @@ export const actionsList = [ }, perform: async function (agent, prompt) { if (convoManager.inConversation()) { - agent.self_prompter.setPrompt(prompt); - convoManager.scheduleSelfPrompter(); + agent.self_prompter.setPromptPaused(prompt); } else { agent.self_prompter.start(prompt); @@ -375,7 +374,6 @@ export const actionsList = [ description: 'Call when you have accomplished your goal. It will stop self-prompting and the current action. ', perform: async function (agent) { agent.self_prompter.stop(); - convoManager.cancelSelfPrompter(); return 'Self-prompting stopped.'; } }, From b0803ca1843975e3fed65c4d98f29ebe6ba1dce4 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:15:40 -0800 Subject: [PATCH 081/171] Update conversation.js --- src/agent/conversation.js | 29 ++++++++--------------------- 1 file changed, 8 insertions(+), 21 deletions(-) diff --git a/src/agent/conversation.js b/src/agent/conversation.js index 1061ea8..41c6888 100644 --- a/src/agent/conversation.js +++ b/src/agent/conversation.js @@ -7,8 +7,6 @@ let agent; let agent_names = settings.profiles.map((p) => JSON.parse(readFileSync(p, 'utf8')).name); let agents_in_game = []; -let self_prompter_paused = false; - class Conversation { constructor(name) { this.name = name; @@ -97,7 +95,7 @@ class ConversationManager { this._clearMonitorTimeouts(); return; } - if (!self_prompter_paused) { + if (!agent.self_prompter.isPaused()) { this.endConversation(convo_partner); agent.handleMessage('system', `${convo_partner} disconnected, conversation has ended.`); } @@ -125,9 +123,8 @@ class ConversationManager { const convo = this._getConvo(send_to); convo.reset(); - if (agent.self_prompter.on) { - await agent.self_prompter.stop(); - self_prompter_paused = true; + if (agent.self_prompter.isActive()) { + await agent.self_prompter.pause(); } if (convo.active) return; @@ -191,9 +188,8 @@ class ConversationManager { convo.queue(received); // responding to conversation takes priority over self prompting - if (agent.self_prompter.on){ - await agent.self_prompter.stopLoop(); - self_prompter_paused = true; + if (agent.self_prompter.isActive()){ + await agent.self_prompter.pause(); } _scheduleProcessInMessage(sender, received, convo); @@ -235,7 +231,7 @@ class ConversationManager { if (this.activeConversation.name === sender) { this._stopMonitor(); this.activeConversation = null; - if (self_prompter_paused && !this.inConversation()) { + if (agent.self_prompter.isPaused() && !this.inConversation()) { _resumeSelfPrompter(); } } @@ -246,7 +242,7 @@ class ConversationManager { for (const sender in this.convos) { this.endConversation(sender); } - if (self_prompter_paused) { + if (agent.self_prompter.isPaused()) { _resumeSelfPrompter(); } } @@ -258,14 +254,6 @@ class ConversationManager { this.endConversation(sender); } } - - scheduleSelfPrompter() { - self_prompter_paused = true; - } - - cancelSelfPrompter() { - self_prompter_paused = false; - } } const convoManager = new ConversationManager(); @@ -360,8 +348,7 @@ function _tagMessage(message) { async function _resumeSelfPrompter() { await new Promise(resolve => setTimeout(resolve, 5000)); - if (self_prompter_paused && !convoManager.inConversation()) { - 
self_prompter_paused = false; + if (agent.self_prompter.isPaused() && !convoManager.inConversation()) { agent.self_prompter.start(); } } From 6d350f1c1d8e1b341a116ae4974c6a34e9b3e5bc Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:16:05 -0800 Subject: [PATCH 082/171] Update history.js --- src/agent/history.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/agent/history.js b/src/agent/history.js index a578377..c76e0d3 100644 --- a/src/agent/history.js +++ b/src/agent/history.js @@ -84,7 +84,8 @@ export class History { const data = { memory: this.memory, turns: this.turns, - self_prompt: this.agent.self_prompter.on ? this.agent.self_prompter.prompt : null, + self_prompting_state: this.agent.self_prompter.state, + self_prompt: this.agent.self_prompter.isStopped() ? null : this.agent.self_prompter.prompt, last_sender: this.agent.last_sender }; writeFileSync(this.memory_fp, JSON.stringify(data, null, 2)); @@ -116,4 +117,4 @@ export class History { this.turns = []; this.memory = ''; } -} \ No newline at end of file +} From e1a47ce7c909e39b81a36cf1401877e26624ecd2 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:17:16 -0800 Subject: [PATCH 083/171] Update modes.js --- src/agent/modes.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/agent/modes.js b/src/agent/modes.js index 8bf1594..8747cf3 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -277,7 +277,7 @@ const modes_list = [ ]; async function execute(mode, agent, func, timeout=-1) { - if (agent.self_prompter.on) + if (agent.self_prompter.isActive()) agent.self_prompter.stopLoop(); let interrupted_action = agent.actions.currentActionLabel; mode.active = true; @@ -290,7 +290,7 @@ async function execute(mode, agent, func, timeout=-1) { let should_reprompt = interrupted_action && // it interrupted a previous action !agent.actions.resume_func && // there is no resume function - !agent.self_prompter.on && // self prompting is not on + !agent.self_prompter.isActive() && // self prompting is not on !code_return.interrupted; // this mode action was not interrupted by something else if (should_reprompt) { @@ -311,9 +311,9 @@ for (let mode of modes_list) { class ModeController { /* SECURITY WARNING: - ModesController must be isolated. Do not store references to external objects like `agent`. + ModesController must be reference isolated. Do not store references to external objects like `agent`. This object is accessible by LLM generated code, so any stored references are also accessible. - This can be used to expose sensitive information by malicious human prompters. + This can be used to expose sensitive information by malicious prompters. 
*/ constructor() { this.behavior_log = ''; From 188912537ad12ebd8b4b3ec26f8ed42c626295b3 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:19:33 -0800 Subject: [PATCH 084/171] Update self_prompter.js --- src/agent/self_prompter.js | 55 +++++++++++++++++++++++++++++--------- 1 file changed, 43 insertions(+), 12 deletions(-) diff --git a/src/agent/self_prompter.js b/src/agent/self_prompter.js index 439b6c6..eb18407 100644 --- a/src/agent/self_prompter.js +++ b/src/agent/self_prompter.js @@ -1,7 +1,10 @@ +const STOPPED = 0 +const ACTIVE = 1 +const PAUSED = 2 export class SelfPrompter { constructor(agent) { this.agent = agent; - this.on = false; + this.state = STOPPED; this.loop_active = false; this.interrupt = false; this.prompt = ''; @@ -16,16 +19,38 @@ export class SelfPrompter { return 'No prompt specified. Ignoring request.'; prompt = this.prompt; } - if (this.on) { - this.prompt = prompt; - } - this.on = true; + this.state = ACTIVE; this.prompt = prompt; this.startLoop(); } - setPrompt(prompt) { + isActive() { + return this.state === ACTIVE; + } + + isStopped() { + return this.state === STOPPED; + } + + isPaused() { + return this.state === PAUSED; + } + + async handleLoad(prompt, state) { + if (state == undefined) + state = STOPPED; + this.state = state; this.prompt = prompt; + if (state !== STOPPED && !prompt) + throw new Error('No prompt loaded when self-prompting is active'); + if (state === ACTIVE) { + await this.start(prompt); + } + } + + setPromptPaused(prompt) { + this.prompt = prompt; + this.state = PAUSED; } async startLoop() { @@ -47,7 +72,7 @@ export class SelfPrompter { let out = `Agent did not use command in the last ${MAX_NO_COMMAND} auto-prompts. Stopping auto-prompting.`; this.agent.openChat(out); console.warn(out); - this.on = false; + this.state = STOPPED; break; } } @@ -63,7 +88,7 @@ export class SelfPrompter { update(delta) { // automatically restarts loop - if (this.on && !this.loop_active && !this.interrupt) { + if (this.state === ACTIVE && !this.loop_active && !this.interrupt) { if (this.agent.isIdle()) this.idle_time += delta; else @@ -96,12 +121,18 @@ export class SelfPrompter { this.interrupt = true; if (stop_action) await this.agent.actions.stop(); - await this.stopLoop(); - this.on = false; + this.stopLoop(); + this.state = STOPPED; + } + async pause() { + this.interrupt = true; + await this.agent.actions.stop(); + this.stopLoop(); + this.state = PAUSED; } shouldInterrupt(is_self_prompt) { // to be called from handleMessage - return is_self_prompt && this.on && this.interrupt; + return is_self_prompt && (this.state === ACTIVE || this.state === PAUSED) && this.interrupt; } handleUserPromptedCmd(is_self_prompt, is_action) { @@ -111,4 +142,4 @@ export class SelfPrompter { // this stops it from responding from the handlemessage loop and the self-prompt loop at the same time } } -} \ No newline at end of file +} From 35022f2a16a1b2fb566ef26e29317b6bdefa8785 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:21:32 -0800 Subject: [PATCH 085/171] Update tasks.js --- src/agent/tasks.js | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/agent/tasks.js b/src/agent/tasks.js index a51bb0c..7057f6a 100644 --- a/src/agent/tasks.js +++ b/src/agent/tasks.js @@ -36,7 +36,6 @@ export class TaskValidator { } } - export class Task { constructor(agent, task_path, task_id) { this.agent = agent; @@ -50,7 +49,11 @@ export class Task { this.taskTimeout = this.data.timeout || 300; this.taskStartTime = Date.now(); 
this.validator = new TaskValidator(this.data, this.agent); - this.blocked_actions = this.data.blocked_actions || []; + if (this.data.blocked_actions) { + this.blocked_actions = this.data.blocked_actions[this.agent.count_id.toString()] || []; + } else { + this.blocked_actions = []; + } this.restrict_to_inventory = !!this.data.restrict_to_inventory; if (this.data.goal) this.blocked_actions.push('!endGoal'); @@ -81,11 +84,6 @@ export class Task { isDone() { if (this.validator && this.validator.validate()) return {"message": 'Task successful', "code": 2}; - // TODO check for other terminal conditions - // if (this.task.goal && !this.self_prompter.on) - // return {"message": 'Agent ended goal', "code": 3}; - // if (this.task.conversation && !inConversation()) - // return {"message": 'Agent ended conversation', "code": 3}; if (this.taskTimeout) { const elapsedTime = (Date.now() - this.taskStartTime) / 1000; if (elapsedTime >= this.taskTimeout) { @@ -105,6 +103,10 @@ export class Task { bot.chat(`/clear ${name}`); console.log(`Cleared ${name}'s inventory.`); + //kill all drops + if (this.agent.count_id === 0) { + bot.chat(`/kill @e[type=item]`); + } //wait for a bit so inventory is cleared await new Promise((resolve) => setTimeout(resolve, 500)); let initial_inventory = null; From 71749ec4d2003c0b3f5d30ee59a3be0e01313470 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 27 Feb 2025 21:22:40 -0800 Subject: [PATCH 086/171] Update prompter.js --- src/models/prompter.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index e2e0453..6c8d537 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -267,7 +267,8 @@ export class Prompter { if (prompt.includes('$CONVO')) prompt = prompt.replaceAll('$CONVO', 'Recent conversation:\n' + stringifyTurns(messages)); if (prompt.includes('$SELF_PROMPT')) { - let self_prompt = this.agent.self_prompter.on ? `YOUR CURRENT ASSIGNED GOAL: "${this.agent.self_prompter.prompt}"\n` : ''; + // if active or paused, show the current goal + let self_prompt = !this.agent.self_prompter.isStopped() ? `YOUR CURRENT ASSIGNED GOAL: "${this.agent.self_prompter.prompt}"\n` : ''; prompt = prompt.replaceAll('$SELF_PROMPT', self_prompt); } if (prompt.includes('$LAST_GOALS')) { From a9eadb6ccd3e614dbe11db7b0f5d31387b023cf9 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Fri, 28 Feb 2025 12:07:27 -0800 Subject: [PATCH 087/171] Update hyperbolic.js Fixed reasoning in hyperbolic.js --- src/models/hyperbolic.js | 112 +++++++++++++++++++++++---------------- 1 file changed, 67 insertions(+), 45 deletions(-) diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js index 1fe3df6..a552de9 100644 --- a/src/models/hyperbolic.js +++ b/src/models/hyperbolic.js @@ -1,13 +1,7 @@ +// hyperbolic-no-logger.js import { getKey } from '../utils/keys.js'; - -/* - * - * Yes, this code was written by an Ai. It was written by GPT-o1 and tested :) - * - */ - -export class hyperbolic { +export class Hyperbolic { constructor(modelName, apiUrl) { this.modelName = modelName || "deepseek-ai/DeepSeek-V3"; this.apiUrl = apiUrl || "https://api.hyperbolic.xyz/v1/chat/completions"; @@ -24,14 +18,14 @@ export class hyperbolic { * * @param {Array} turns - An array of message objects, e.g. [{role: 'user', content: 'Hi'}]. * @param {string} systemMessage - The system prompt or instruction. - * @param {string} stopSeq - A string that represents a stopping sequence, default '***'. - * @returns {Promise} - The content of the model's reply. 
+ * @param {string} stopSeq - A stopping sequence, default '***'. + * @returns {Promise} - The model's reply. */ async sendRequest(turns, systemMessage, stopSeq = '***') { // Prepare the messages with a system prompt at the beginning const messages = [{ role: 'system', content: systemMessage }, ...turns]; - // Build the request payload (mirroring your original structure) + // Build the request payload const payload = { model: this.modelName, messages: messages, @@ -41,51 +35,79 @@ export class hyperbolic { stream: false }; - let completionContent = null; + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; - try { - console.log('Awaiting Hyperbolic API response...'); + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting Hyperbolic API response... (attempt: ${attempt})`); console.log('Messages:', messages); - const response = await fetch(this.apiUrl, { - method: 'POST', - headers: { - 'Content-Type': 'application/json', - 'Authorization': `Bearer ${this.apiKey}` - }, - body: JSON.stringify(payload) - }); + let completionContent = null; - if (!response.ok) { - throw new Error(`HTTP error! status: ${response.status}`); + try { + const response = await fetch(this.apiUrl, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Authorization': `Bearer ${this.apiKey}` + }, + body: JSON.stringify(payload) + }); + + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + const data = await response.json(); + if (data?.choices?.[0]?.finish_reason === 'length') { + throw new Error('Context length exceeded'); + } + + completionContent = data?.choices?.[0]?.message?.content || ''; + console.log('Received response from Hyperbolic.'); + } catch (err) { + if ( + (err.message === 'Context length exceeded' || err.code === 'context_length_exceeded') && + turns.length > 1 + ) { + console.log('Context length exceeded, trying again with a shorter context...'); + return await this.sendRequest(turns.slice(1), systemMessage, stopSeq); + } else { + console.error(err); + completionContent = 'My brain disconnected, try again.'; + } } - const data = await response.json(); - if ( - data?.choices?.[0]?.finish_reason && - data.choices[0].finish_reason === 'length' - ) { - throw new Error('Context length exceeded'); + // Check for <think> blocks + const hasOpenTag = completionContent.includes("<think>"); + const hasCloseTag = completionContent.includes("</think>"); + + if ((hasOpenTag && !hasCloseTag)) { + console.warn("Partial <think> block detected.
Re-generating..."); + continue; // Retry the request } + if (hasCloseTag && !hasOpenTag) { + completionContent = '<think>' + completionContent; } + + if (hasOpenTag && hasCloseTag) { + completionContent = completionContent.replace(/<think>[\s\S]*?<\/think>/g, '').trim(); + } + + finalRes = completionContent.replace(/<\|separator\|>/g, '*no response*'); + break; // Valid response obtained—exit loop } - return completionContent.replace(/<\|separator\|>/g, '*no response*'); + + if (finalRes == null) { + console.warn("Could not get a valid <think> block or normal response after max attempts."); + finalRes = 'I thought too hard, sorry, try again.'; + } + return finalRes; } + async embed(text) { throw new Error('Embeddings are not supported by Hyperbolic.'); } From a402753539205675cd49b01b5fb2b55adf08e1c5 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Fri, 28 Feb 2025 12:11:30 -0800 Subject: [PATCH 088/171] Update glhf.js Fixed reasoning models in glhf.js --- src/models/glhf.js | 71 ++++++++++++++++++++++++++-------------------- 1 file changed, 40 insertions(+), 31 deletions(-) diff --git a/src/models/glhf.js b/src/models/glhf.js index 9c27799..9655603 100644 --- a/src/models/glhf.js +++ b/src/models/glhf.js @@ -1,18 +1,14 @@ +// glhf-no-logger.js import OpenAIApi from 'openai'; import { getKey } from '../utils/keys.js'; -// glhf doesn't supply an SDK for their models, but fully supports OpenAI SDKs export class glhf { constructor(model_name, url) { this.model_name = model_name; - - // Retrieve the API key from keys.json const apiKey = getKey('GHLF_API_KEY'); if (!apiKey) { throw new Error('API key not found. Please check keys.json and ensure GHLF_API_KEY is defined.'); } - - // Configure OpenAIApi with the retrieved API key and base URL this.openai = new OpenAIApi({ apiKey, baseURL: url || "https://glhf.chat/api/openai/v1" }); } async sendRequest(turns, systemMessage, stop_seq = '***') { - // Construct the message array for the API request - let messages = [{ 'role': 'system', 'content': systemMessage }].concat(turns); + // Construct the message array for the API request.
+ let messages = [{ role: 'system', content: systemMessage }].concat(turns); const pack = { model: this.model_name || "hf:meta-llama/Llama-3.1-405B-Instruct", messages, stop: [stop_seq] }; - let res = null; - try { - console.log('Awaiting glhf.chat API response...'); - // Uncomment the line below if you need to debug the messages - // console.log('Messages:', messages); + const maxAttempts = 5; + let attempt = 0; + let finalRes = null; - let completion = await this.openai.chat.completions.create(pack); - if (completion.choices[0].finish_reason === 'length') { - throw new Error('Context length exceeded'); - } + while (attempt < maxAttempts) { + attempt++; + console.log(`Awaiting glhf.chat API response... (attempt: ${attempt})`); + try { + let completion = await this.openai.chat.completions.create(pack); + if (completion.choices[0].finish_reason === 'length') { + throw new Error('Context length exceeded'); + } - console.log('Received.'); - res = completion.choices[0].message.content; - } catch (err) { - if ((err.message === 'Context length exceeded' || err.code === 'context_length_exceeded') && turns.length > 1) { - console.log('Context length exceeded, trying again with shorter context.'); - return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); - } else { - console.log(err); - res = 'My brain disconnected, try again.'; + let res = completion.choices[0].message.content; + // If there's an open <think> tag without a corresponding </think>, retry. + if (res.includes("<think>") && !res.includes("</think>")) { + console.warn("Partial <think> block detected. Re-generating..."); + continue; + } + // If there's a closing </think> tag but no opening <think>, prepend one. + if (res.includes("</think>") && !res.includes("<think>")) { + res = "<think>" + res; + } + finalRes = res.replace(/<\|separator\|>/g, '*no response*'); + break; // Valid response obtained.
+ } catch (err) { + if ((err.message === 'Context length exceeded' || err.code === 'context_length_exceeded') && turns.length > 1) { + console.log('Context length exceeded, trying again with shorter context.'); + return await this.sendRequest(turns.slice(1), systemMessage, stop_seq); + } else { + console.error(err); + finalRes = 'My brain disconnected, try again.'; + break; + } } } - - // Replace special tokens in the response - return res.replace(/<\|separator\|>/g, '*no response*'); + if (finalRes === null) { + finalRes = "I thought too hard, sorry, try again"; + } + return finalRes; } async embed(text) { throw new Error('Embeddings are not supported by glhf.'); } -} \ No newline at end of file +} From 1f17fc95e78f0ff961ea313345568148188e23e5 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Fri, 28 Feb 2025 12:42:13 -0800 Subject: [PATCH 089/171] Update hyperbolic.js Fixed the hyperbolic export --- src/models/hyperbolic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js index a552de9..4341f65 100644 --- a/src/models/hyperbolic.js +++ b/src/models/hyperbolic.js @@ -1,7 +1,7 @@ // hyperbolic-no-logger.js import { getKey } from '../utils/keys.js'; -export class Hyperbolic { +export class hyperbolic { constructor(modelName, apiUrl) { this.modelName = modelName || "deepseek-ai/DeepSeek-V3"; this.apiUrl = apiUrl || "https://api.hyperbolic.xyz/v1/chat/completions"; From 42dfe39862a607b28d107e40c47c8d48aa0c1d42 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Fri, 28 Feb 2025 12:46:55 -0800 Subject: [PATCH 090/171] Update prompter.js Fixed minor error in prompter.js that disabled Hyperbolic support --- src/models/prompter.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index 6c8d537..5c0bee1 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -143,7 +143,7 @@ export class Prompter { profile.api = 'groq'; else if (profile.model.includes('hf:')) profile.api = "glhf"; - else if (profile.model.includes('hyperbolic:')|| chat.model.includes('hb:')) + else if (profile.model.includes('hyperbolic:')|| profile.model.includes('hb:')) profile.api = "hyperbolic"; else if (profile.model.includes('novita/')) profile.api = 'novita'; From 37417fcb928aaed72b79a491ad6df1a537c9f916 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Fri, 28 Feb 2025 12:48:22 -0800 Subject: [PATCH 091/171] Update huggingface.js Fixed thinking tag checks for Huggingface API --- src/models/huggingface.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/huggingface.js b/src/models/huggingface.js index 59dd0f7..80c36e8 100644 --- a/src/models/huggingface.js +++ b/src/models/huggingface.js @@ -55,7 +55,7 @@ export class HuggingFace { const hasCloseTag = res.includes("</think>"); // If there's a partial mismatch, warn
and retry the entire request. - if ((hasOpenTag && !hasCloseTag) || (!hasOpenTag && hasCloseTag)) { + if ((hasOpenTag && !hasCloseTag)) { console.warn("Partial block detected. Re-generating..."); continue; } From 125aee4ce435a7b2f2b0cbf14bc2bdea8da298e3 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 1 Mar 2025 13:40:24 -0800 Subject: [PATCH 092/171] Update prompter.js Updated prompter.js for updated Ollama and Openrouter model usage --- src/models/prompter.js | 26 ++++++++------------------ 1 file changed, 8 insertions(+), 18 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index 5c0bee1..176e6f0 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -123,12 +123,12 @@ export class Prompter { profile = {model: profile}; } if (!profile.api) { - if (profile.model.includes('gemini')) + if (profile.model.includes('openrouter/')) + profile.api = 'openrouter'; // must do first because shares names with other models + else if (profile.model.includes('ollama/')) + profile.api = 'ollama'; // also must do early because shares names with other models + else if (profile.model.includes('gemini')) profile.api = 'google'; - else if (profile.model.includes('openrouter/')) - profile.api = 'openrouter'; // must do before others bc shares model names - else if (profile.model.includes('andy-')) - profile.api = 'ollama'; // We have to check here since the installation for Andy-3.6 makes the username include "hf:" which is the same as glhf.chat, which will not work. else if (profile.model.includes('gpt') || profile.model.includes('o1')|| profile.model.includes('o3')) profile.api = 'openai'; else if (profile.model.includes('claude')) @@ -141,10 +141,6 @@ export class Prompter { model_profile.api = 'mistral'; else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) profile.api = 'groq'; - else if (profile.model.includes('hf:')) - profile.api = "glhf"; - else if (profile.model.includes('hyperbolic:')|| profile.model.includes('hb:')) - profile.api = "hyperbolic"; else if (profile.model.includes('novita/')) profile.api = 'novita'; else if (profile.model.includes('qwen')) @@ -153,12 +149,11 @@ export class Prompter { profile.api = 'xai'; else if (profile.model.includes('deepseek')) profile.api = 'deepseek'; - else - profile.api = 'ollama'; // Fixed this line, it had a bug where only llama3 models could be used in Mindcraft via Ollama, which was is not optimal. 
+ else + profile.api = 'ollama'; // Assume the model is ollama, even if the user didn't use ollama/ } return profile; } - _createModel(profile) { let model = null; if (profile.api === 'google') @@ -170,15 +165,11 @@ export class Prompter { else if (profile.api === 'replicate') model = new ReplicateAPI(profile.model.replace('replicate/', ''), profile.url, profile.params); else if (profile.api === 'ollama') - model = new Local(profile.model, profile.url, profile.params); + model = new Local(profile.model.replace('ollama/', ''), profile.url, profile.params); else if (profile.api === 'mistral') model = new Mistral(profile.model, profile.url, profile.params); else if (profile.api === 'groq') model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params); - else if (profile.api === 'glhf') - model = new glhf(profile.model, profile.url, profile.params); - else if (profile.api === 'hyperbolic') - model = new hyperbolic(profile.model.replace('hyperbolic:', '').replace('hb:', ''), profile.url, profile.params); else if (profile.api === 'huggingface') model = new HuggingFace(profile.model, profile.url, profile.params); else if (profile.api === 'novita') model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params); else if (profile.api === 'qwen') @@ -195,7 +186,6 @@ export class Prompter { throw new Error('Unknown API:', profile.api); return model; } - getName() { return this.profile.name; } From 05d798342941e773b31df6ccf3d40de7b989bee9 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 1 Mar 2025 13:50:38 -0800 Subject: [PATCH 093/171] Update README.md Updated the README for the fixed Ollama usage, changed the example model from llama3 to the more capable qwen2.5, and noted to Ollama users that any local model can be used as long as it starts with `ollama/` --- README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 136540f..cbe1197 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | Google | `GEMINI_API_KEY` | `gemini-2.0-flash` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | | Anthropic | `ANTHROPIC_API_KEY` | `claude-3-5-haiku-20241022` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | | Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | -| Ollama (local) | n/a | `llama3` | [docs](https://ollama.com/library) | +| Ollama (local) | n/a | `ollama/qwen2.5` | [docs](https://ollama.com/library) | | Groq | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B` | [docs](https://huggingface.co/models) | | Novita AI | `NOVITA_API_KEY` | `gryphe/mythomax-l2-13b` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | @@ -55,7 +55,8 @@ You can configure the agent's name, model, and prompts in their profile like `an | Hyperbolic | `HYPERBOLIC_API_KEY` | `hb:deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: -`ollama pull llama3 && ollama pull nomic-embed-text` +`ollama pull qwen2.5 && ollama pull nomic-embed-text` +Note that you can use any local model, such as `qwen2.5`, `llama3`, or fine-tuned models like
`sweaterdog/andy-3.6`. No matter what model, it is recommended you put `ollama/` before the model so Mindcraft can handle it properly. ### Online Servers To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect too and play with it. To connect, change these lines in `settings.js`: From 560763a71c3103f52a7c32edf53e0c331ca8f681 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 15:28:58 -0800 Subject: [PATCH 094/171] Update .gitignore Fixed extra spaces --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 7a030f5..22d104f 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,3 @@ services/viaproxy/viaproxy.yml tmp/ wandb/ experiments/ - From 61b4a3e28f9b03d96d493da2807555fcf3418414 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 15:29:29 -0800 Subject: [PATCH 095/171] Update .gitignore From b7bc1069e201c79c933fed4cbedaf895351249bf Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 15:30:59 -0800 Subject: [PATCH 096/171] Update README.md Fixed the README to show the updated model processing --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index cbe1197..2427ffd 100644 --- a/README.md +++ b/README.md @@ -51,8 +51,8 @@ You can configure the agent's name, model, and prompts in their profile like `an | xAI | `XAI_API_KEY` | `grok-2` | [docs](https://docs.x.ai/docs) | | Deepseek | `DEEPSEEK_API_KEY` | `deepseek-reasoner` | [docs](https://api-docs.deepseek.com/) | | Openrouter | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | -| glhf.chat | `GHLF_API_KEY` | `hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | -| Hyperbolic | `HYPERBOLIC_API_KEY` | `hb:deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | +| glhf.chat | `GHLF_API_KEY` | `GLHF/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | +| Hyperbolic | `HYPERBOLIC_API_KEY` | `hyperbolic/deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | From f7bcd6150f000ae46d7ad98da3e9986cf0b87ad4 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 15:33:03 -0800 Subject: [PATCH 097/171] Delete multi_agent_crafting_tasks.json --- multi_agent_crafting_tasks.json | 43 --------------------------------------- 1 file changed, 43 deletions(-) delete mode 100644 multi_agent_crafting_tasks.json diff --git a/multi_agent_crafting_tasks.json b/multi_agent_crafting_tasks.json deleted file mode 100644 index 9d7a8e7..0000000 --- a/multi_agent_crafting_tasks.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "multiagent_techtree_1_stone_pickaxe": { - "conversation": "Let's collaborate to build a stone pickaxe", - "agent_count": 2, -
"initial_inventory": { - "0": { - "iron_ingot": 1 - }, - "1": { - "iron_ingot": 1 - } - }, - "blocked_actions": { - "0": [], - "1": [] - }, - "target": "shears", - "number_of_target": 1, - "type": "techtree", - "timeout": 20 - } -} From fee9f43511b52524c5bc36ca310a7f6fab4b1e29 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 15:34:33 -0800 Subject: [PATCH 098/171] Delete profiles/hyperbolic.json --- profiles/hyperbolic.json | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 profiles/hyperbolic.json diff --git a/profiles/hyperbolic.json b/profiles/hyperbolic.json deleted file mode 100644 index d09d3c3..0000000 --- a/profiles/hyperbolic.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "Hyperbolic", - - "model": "hb:deepseek-ai/DeepSeek-V3" - -} From 6c44f5c55bb140c58d21135d9205c5e03aebbe5f Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 15:34:42 -0800 Subject: [PATCH 099/171] Delete profiles/glhf.json --- profiles/glhf.json | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 profiles/glhf.json diff --git a/profiles/glhf.json b/profiles/glhf.json deleted file mode 100644 index 25e5dd7..0000000 --- a/profiles/glhf.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "Good_luck_have_fun", - - "model": "hf:meta-llama/Llama-3.1-405B-Instruct" -} From 9c595ed69ac8053b17fb55a51d39fe078d5932cb Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 16:51:32 -0800 Subject: [PATCH 100/171] Update prompter.js Fixed the hyperbolic and glhf.chat setup, for some reason it was deleted when merging with the main. --- src/models/prompter.js | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index 5f56da5..7d4353f 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -18,8 +18,8 @@ import { HuggingFace } from './huggingface.js'; import { Qwen } from "./qwen.js"; import { Grok } from "./grok.js"; import { DeepSeek } from './deepseek.js'; -import { hyperbolic } from './hyperbolic.js'; -import { glhf } from './glhf.js'; +import { Hyperbolic } from './hyperbolic.js'; +import { GLHF } from './glhf.js'; import { OpenRouter } from './openrouter.js'; export class Prompter { @@ -140,6 +140,10 @@ export class Prompter { model_profile.api = 'mistral'; else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) profile.api = 'groq'; + else if (profile.model.includes("GLHF/") + profile.api = 'glhf'; + else if (profile.model.includes("Hyperbolic/") + profile.api = 'hyperbolic'; else if (profile.model.includes('novita/')) profile.api = 'novita'; else if (profile.model.includes('qwen')) @@ -149,7 +153,7 @@ export class Prompter { else if (profile.model.includes('deepseek')) profile.api = 'deepseek'; else - profile.api = 'ollama'; // Assume the model is ollama, even if the user didn't use ollama/ + throw new Error('Unknown model:', profile.model, 'Did you check the name is correct?'); // Asks the user if the name is correct } return profile; } @@ -171,6 +175,10 @@ export class Prompter { model = new GroqCloudAPI(profile.model.replace('groq/', '').replace('groqcloud/', ''), profile.url, profile.params); else if (profile.api === 'huggingface') model = new HuggingFace(profile.model, profile.url, profile.params); + else if (profile.api === 'glhf') + model = new GLHF(profile.model.replace('glhf/', ''), profile.url, profile.params); + else if (profile.api === 'hyperbolic') + model = new Hyperbolic(profile.model.replace('hyperbolic/', ''), profile.url, profile.params); 
else if (profile.api === 'novita') model = new Novita(profile.model.replace('novita/', ''), profile.url, profile.params); else if (profile.api === 'qwen') From d57ac89d8752047da94979ad8f08023f90c9a8ed Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 16:52:05 -0800 Subject: [PATCH 101/171] Update glhf.js Made glhf.js export GLHF instead of glhf --- src/models/glhf.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/glhf.js b/src/models/glhf.js index 9655603..296f535 100644 --- a/src/models/glhf.js +++ b/src/models/glhf.js @@ -2,7 +2,7 @@ import OpenAIApi from 'openai'; import { getKey } from '../utils/keys.js'; -export class glhf { +export class GLHF { constructor(model_name, url) { this.model_name = model_name; const apiKey = getKey('GHLF_API_KEY'); From 0451b1a85281fb6d275c2176abb96bf815a09c71 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 16:57:29 -0800 Subject: [PATCH 102/171] Update prompter.js "Fixed" prompter.js --- src/models/prompter.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index 7d4353f..b25f2d4 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -41,7 +41,7 @@ export class Prompter { this.profile[key] = base_profile[key]; } // base overrides default, individual overrides base - // Removed a bit of space that was right here by adding a comment instead of deleting it because I am making a pull request to this code and I can do whatever I want because I decided to add 2 new API services to Mindcraft now look at me go! Woohoo! I am flying off the edge of the screen oh no! + // Removed this comment this.convo_examples = null; this.coding_examples = null; From fa22e388de024cae49fc1d444f7bb152fe5fd2c5 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Tue, 4 Mar 2025 17:05:42 -0800 Subject: [PATCH 103/171] Update README.md --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index abeb60c..e6ece8e 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | Google | `GEMINI_API_KEY` | `gemini-2.0-flash` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | | Anthropic | `ANTHROPIC_API_KEY` | `claude-3-5-haiku-20241022` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | | Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | -| Ollama (local) | n/a | `ollama/qwen2.5` | [docs](https://ollama.com/library) | +| Ollama (local) | n/a | `ollama/llama3.1` | [docs](https://ollama.com/library) | | Groq (Not Grok) | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | | Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B` | [docs](https://huggingface.co/models) | | Novita | `NOVITA_API_KEY` | `novita/deepseek/deepseek-r1` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | @@ -51,13 +51,13 @@ You can configure the agent's name, model, and prompts in their profile like `an | xAI | `XAI_API_KEY` | `grok-2` | [docs](https://docs.x.ai/docs) | | Deepseek | `DEEPSEEK_API_KEY` | `deepseek-reasoner` | [docs](https://api-docs.deepseek.com/) | | Openrouter | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | 
-| glhf.chat | `GHLF_API_KEY` | `GLHF/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | +| glhf.chat | `GHLF_API_KEY` | `glhf/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | | Hyperbolic | `HYPERBOLIC_API_KEY` | `hyperbolic/deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: -`ollama pull qwen2.5 && ollama pull nomic-embed-text` -Note that you can use any local model, such as `qwen2.5`, `llama3`, or fine-tuned models like `sweaterdog/andy-3.6`. No matter what model, it is recommended you put `ollama/` before the model so Mindcraft can handle it properly. +`ollama pull llama3.1 && ollama pull nomic-embed-text` +Note that you can use any local model, such as `qwen2.5`, `llama3.1`, or fine-tuned models like `sweaterdog/andy-3.6`. No matter what model, you must put `ollama/` before the model so Mindcraft can handle it properly. ### Online Servers To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect too and play with it. To connect, change these lines in `settings.js`: From 6ec49e77896682abec8488343b16fa9b3ea51350 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 5 Mar 2025 15:23:57 -0600 Subject: [PATCH 104/171] reworked image prompting, update package --- package.json | 12 ++--- profiles/defaults/_default.json | 2 +- src/agent/commands/actions.js | 31 ++++++++----- src/agent/vision_interpreter.js | 78 ++++++++++----------------------- src/models/prompter.js | 7 +++ 5 files changed, 57 insertions(+), 73 deletions(-) diff --git a/package.json b/package.json index 7c2348a..c55dd3a 100644 --- a/package.json +++ b/package.json @@ -5,28 +5,28 @@ "@google/generative-ai": "^0.2.1", "@huggingface/inference": "^2.8.1", "@mistralai/mistralai": "^1.1.0", + "express": "^4.18.2", "google-translate-api-x": "^10.7.1", "groq-sdk": "^0.5.0", "minecraft-data": "^3.78.0", - "mineflayer": "^4.23.0", + "mineflayer": "^4.26.0", "mineflayer-armor-manager": "^2.0.1", "mineflayer-auto-eat": "^3.3.6", "mineflayer-collectblock": "^1.4.1", "mineflayer-pathfinder": "^2.4.5", "mineflayer-pvp": "^1.3.2", + "node-canvas-webgl": "PrismarineJS/node-canvas-webgl", "openai": "^4.4.0", "patch-package": "^8.0.0", "prismarine-item": "^1.15.0", "prismarine-viewer": "^1.28.0", "replicate": "^0.29.4", "ses": "^1.9.1", - "vec3": "^0.1.10", - "yargs": "^17.7.2", "socket.io": "^4.7.2", "socket.io-client": "^4.7.2", - "express": "^4.18.2", - "node-canvas-webgl": "PrismarineJS/node-canvas-webgl", - "three": "^0.128.0" + "three": "^0.128.0", + "vec3": "^0.1.10", + "yargs": "^17.7.2" }, "scripts": { "postinstall": "patch-package", diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json index fd7b1c2..491966e 100644 --- a/profiles/defaults/_default.json +++ b/profiles/defaults/_default.json @@ -9,7 +9,7 @@ "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish.
Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", - "image_conversing": "Summarize the visible Minecraft screen by listing the types and arrangements of blocks, focusing on terrain, structures, and notable features in 400 chars.", + "image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, structures, and notable features. Focus on details relevant to the conversation. Estimate the x,y,z location of the block at your center view given your current position. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS", "modes": { "self_preservation": true, diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 1ee8192..c5fc074 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -411,19 +411,23 @@ export const actionsList = [ name: '!lookAtPlayer', description: 'Look at a player or look in the same direction as the player.', params: { - 'player_name': { - type: 'string', - description: 'Name of the target player' - }, + 'player_name': { type: 'string', description: 'Name of the target player' }, 'direction': { type: 'string', description: 'How to look ("at": look at the player, "with": look in the same direction as the player)', - enum: ['at', 'with'] } }, - perform: runAsAction(async (agent, player_name, direction) => { - await agent.vision_interpreter.lookAtPlayer(player_name, direction); - }) + perform: async function(agent, player_name, direction) { + if (direction !== 'at' && direction !== 'with') { + return "Invalid direction. 
Use 'at' or 'with'."; + } + let result = ""; + const actionFn = async () => { + result = await agent.vision_interpreter.lookAtPlayer(player_name, direction); + }; + await agent.actions.runAction('action:lookAtPlayer', actionFn); + return result; + } }, { name: '!lookAtPosition', @@ -433,8 +437,13 @@ export const actionsList = [ 'y': { type: 'int', description: 'y coordinate' }, 'z': { type: 'int', description: 'z coordinate' } }, - perform: runAsAction(async (agent, x, y, z) => { - await agent.vision_interpreter.lookAtPosition(x, y, z); - }) + perform: async function(agent, x, y, z) { + let result = ""; + const actionFn = async () => { + result = await agent.vision_interpreter.lookAtPosition(x, y, z); + }; + await agent.actions.runAction('action:lookAtPosition', actionFn); + return result; + } } ]; diff --git a/src/agent/vision_interpreter.js b/src/agent/vision_interpreter.js index 9c66cc8..28c326e 100644 --- a/src/agent/vision_interpreter.js +++ b/src/agent/vision_interpreter.js @@ -1,12 +1,8 @@ import { Vec3 } from 'vec3'; import { Camera } from "../utils/camera.js"; import fs from 'fs'; -import { log } from './library/skills.js'; -import * as world from './library/world.js'; -const pad = (str) => { - return '\n' + str + '\n'; -} +const RENDER_TIME = 1000; export class VisionInterpreter { constructor(agent, allow_vision) { @@ -16,89 +12,61 @@ export class VisionInterpreter { } async lookAtPlayer(player_name, direction) { + if (!this.allow_vision || !this.agent.prompter.vision_model.sendVisionRequest) { + return "Vision is disabled. Use other methods to describe the environment."; + } + let result = ""; const bot = this.agent.bot; const player = bot.players[player_name]?.entity; if (!player) { - log(bot, `Could not find player ${player_name}`); + return `Could not find player ${player_name}`; } let filename; if (direction === 'with') { await bot.look(player.yaw, player.pitch); const camera = new Camera(bot, this.fp); - await new Promise(resolve => setTimeout(resolve, 500)); - log(bot, `Looking in the same direction as ${player_name}`); + await new Promise(resolve => setTimeout(resolve, RENDER_TIME)); + result = `Looking in the same direction as ${player_name}\n`; filename = await camera.capture(); } else { await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); const camera = new Camera(bot, this.fp); - await new Promise(resolve => setTimeout(resolve, 500)); - log(bot, `Looking at player ${player_name}`); + await new Promise(resolve => setTimeout(resolve, RENDER_TIME)); + result = `Looking at player ${player_name}\n`; filename = await camera.capture(); + } - if (!this.allow_vision || !this.agent.prompter.vision_model.sendVisionRequest) { - log(this.agent.bot, "Vision is disabled. Using text-based environment description instead."); - log(this.agent.bot, this._nearbyBlocks()); - } else { - await this.analyzeImage(filename); - } + return result + `Image analysis: "${await this.analyzeImage(filename)}"`; } async lookAtPosition(x, y, z) { + if (!this.allow_vision || !this.agent.prompter.vision_model.sendVisionRequest) { + return "Vision is disabled. 
Use other methods to describe the environment."; + } + let result = ""; const bot = this.agent.bot; await bot.lookAt(new Vec3(x, y + 2, z)); const camera = new Camera(bot, this.fp); - await new Promise(resolve => setTimeout(resolve, 500)); - log(bot, `Looking at coordinate ${x, y, z}`); + await new Promise(resolve => setTimeout(resolve, RENDER_TIME)); + result = `Looking at coordinate ${x, y, z}\n`; let filename = await camera.capture(); - if (!this.allow_vision || !this.agent.prompter.vision_model.sendVisionRequest) { - log(this.agent.bot, "Vision is disabled. Using text-based environment description instead."); - log(this.agent.bot, this._nearbyBlocks()); - } else { - await this.analyzeImage(filename); - } + return result + `Image analysis: "${await this.analyzeImage(filename)}"`; } async analyzeImage(filename) { - let prompt = this.agent.prompter.profile.image_conversing; - let res = null; - try { - const bot = this.agent.bot; const imageBuffer = fs.readFileSync(`${this.fp}/${filename}.jpg`); const messages = this.agent.history.getHistory(); - res = await this.agent.prompter.vision_model.sendVisionRequest(messages, prompt, imageBuffer); - - if (res == 'Vision is only supported by certain models.') { - log(bot, "Vision may not be supported on this model. Using text-based environment description instead."); - log(bot, this._nearbyBlocks()); - } else { - log(bot, res); - } + + return await this.agent.prompter.promptVision(messages, imageBuffer); } catch (error) { - log(this.agent.bot, `Error analyzing image: ${error.message}`); + console.warn('Error reading image:', error); + return `Error reading image: ${error.message}`; } } - - _nearbyBlocks() { - const bot = this.agent.bot; - let res = 'NEARBY_BLOCKS'; - - let blocks = world.getNearbyBlockTypes(bot); - for (let i = 0; i < blocks.length; i++) { - res += `\n- ${blocks[i]}`; - } - if (blocks.length == 0) { - res += ': none'; - } else { - // Environmental Awareness - res += '\n- ' + world.getSurroundingBlocks(bot).join('\n- ') - res += `\n- First Solid Block Above Head: ${world.getFirstBlockAboveHead(bot, null, 32)}`; - } - return pad(res); - } } \ No newline at end of file diff --git a/src/models/prompter.js b/src/models/prompter.js index 5ac6a1f..786b623 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -343,6 +343,13 @@ git } return res.trim().toLowerCase() === 'respond'; } + async promptVision(messages, imageBuffer) { + await this.checkCooldown(); + let prompt = this.profile.image_analysis; + prompt = await this.replaceStrings(prompt, messages, null, null, null); + return await this.vision_model.sendVisionRequest(messages, prompt, imageBuffer); + } + async promptGoalSetting(messages, last_goals) { let system_message = this.profile.goal_setting; system_message = await this.replaceStrings(system_message, messages); From 222f988873b35bfcaa9d0ae93ded2a2f88e1c7e1 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 5 Mar 2025 15:32:15 -0600 Subject: [PATCH 105/171] fixed merge mistake --- src/agent/agent.js | 75 ---------------------------------------------- 1 file changed, 75 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index 03b8ec7..f658974 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -76,81 +76,6 @@ export class Agent { this.bot.chat(`/skin clear`); }); - const spawnTimeout = setTimeout(() => { - process.exit(0); - }, 30000); - this.bot.once('spawn', async () => { - try { - clearTimeout(spawnTimeout); - addViewer(this.bot, count_id); - - // wait for a bit so stats are not 
undefined - await new Promise((resolve) => setTimeout(resolve, 1000)); - - console.log(`${this.name} spawned.`); - this.clearBotLogs(); - - this._setupEventHandlers(save_data, init_message); - this.startEvents(); - - this.task.initBotTask(); - - } catch (error) { - console.error('Error in spawn event:', error); - process.exit(0); - } - }); - - console.log('Starting agent initialization with profile:', profile_fp); - - // Initialize components with more detailed error handling - console.log('Initializing action manager...'); - this.actions = new ActionManager(this); - console.log('Initializing prompter...'); - this.prompter = new Prompter(this, profile_fp); - this.name = this.prompter.getName(); - console.log('Initializing history...'); - this.history = new History(this); - console.log('Initializing coder...'); - this.coder = new Coder(this); - console.log('Initializing npc controller...'); - this.npc = new NPCContoller(this); - console.log('Initializing memory bank...'); - this.memory_bank = new MemoryBank(); - console.log('Initializing self prompter...'); - this.self_prompter = new SelfPrompter(this); - convoManager.initAgent(this); - console.log('Initializing examples...'); - await this.prompter.initExamples(); - console.log('Initializing task...'); - this.task = new Task(this, task_path, task_id); - const blocked_actions = this.task.blocked_actions || []; - blacklistCommands(blocked_actions); - - serverProxy.connect(this); - - console.log(this.name, 'logging into minecraft...'); - this.bot = initBot(this.name); - - initModes(this); - - let save_data = null; - if (load_mem) { - save_data = this.history.load(); - } - - this.bot.on('login', () => { - console.log(this.name, 'logged in!'); - - serverProxy.login(); - - // Set skin for profile, requires Fabric Tailor. 
(https://modrinth.com/mod/fabrictailor) - if (this.prompter.profile.skin) - this.bot.chat(`/skin set URL ${this.prompter.profile.skin.model} ${this.prompter.profile.skin.path}`); - else - this.bot.chat(`/skin clear`); - }); - const spawnTimeout = setTimeout(() => { process.exit(0); }, 30000); From 5dca9b778f6319bb9324077ec641d585683f278e Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 5 Mar 2025 15:35:50 -0600 Subject: [PATCH 106/171] readd canvas, remove random "git" --- package.json | 1 + src/models/prompter.js | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 0a56991..ac70774 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "@google/generative-ai": "^0.2.1", "@huggingface/inference": "^2.8.1", "@mistralai/mistralai": "^1.1.0", + "canvas": "^3.1.0", "express": "^4.18.2", "google-translate-api-x": "^10.7.1", "groq-sdk": "^0.15.0", diff --git a/src/models/prompter.js b/src/models/prompter.js index 6ca26ea..b412ee5 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -339,7 +339,7 @@ export class Prompter { let resp = await this.code_model.sendRequest(messages, prompt); this.awaiting_coding = false; return resp; -git } + } async promptMemSaving(to_summarize) { await this.checkCooldown(); From dcdb7d2de1c1facba7040ebb08d508cd78afccd1 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Wed, 5 Mar 2025 15:46:45 -0600 Subject: [PATCH 107/171] refactor/cleanup --- src/agent/agent.js | 2 +- src/agent/library/skills.js | 2 -- src/{utils => agent/vision}/camera.js | 0 src/agent/{ => vision}/vision_interpreter.js | 2 +- src/models/claude.js | 2 +- 5 files changed, 3 insertions(+), 5 deletions(-) rename src/{utils => agent/vision}/camera.js (100%) rename src/agent/{ => vision}/vision_interpreter.js (98%) diff --git a/src/agent/agent.js b/src/agent/agent.js index f658974..f186e52 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -1,6 +1,6 @@ import { History } from './history.js'; import { Coder } from './coder.js'; -import { VisionInterpreter } from './vision_interpreter.js'; +import { VisionInterpreter } from './vision/vision_interpreter.js'; import { Prompter } from '../models/prompter.js'; import { initModes } from './modes.js'; import { initBot } from '../utils/mcdata.js'; diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 7e254dd..7700683 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1,9 +1,7 @@ import * as mc from "../../utils/mcdata.js"; -import { Camera } from "../../utils/camera.js"; import * as world from "./world.js"; import pf from 'mineflayer-pathfinder'; import Vec3 from 'vec3'; -import fs from 'fs'; export function log(bot, message) { diff --git a/src/utils/camera.js b/src/agent/vision/camera.js similarity index 100% rename from src/utils/camera.js rename to src/agent/vision/camera.js diff --git a/src/agent/vision_interpreter.js b/src/agent/vision/vision_interpreter.js similarity index 98% rename from src/agent/vision_interpreter.js rename to src/agent/vision/vision_interpreter.js index 28c326e..34d9f3c 100644 --- a/src/agent/vision_interpreter.js +++ b/src/agent/vision/vision_interpreter.js @@ -1,5 +1,5 @@ import { Vec3 } from 'vec3'; -import { Camera } from "../utils/camera.js"; +import { Camera } from "./camera.js"; import fs from 'fs'; const RENDER_TIME = 1000; diff --git a/src/models/claude.js b/src/models/claude.js index 563dc88..d6e48bc 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -26,7 +26,7 @@ 
export class Claude { this.params.max_tokens = this.params.thinking.budget_tokens + 1000; // max_tokens must be greater than thinking.budget_tokens } else { - this.params.max_tokens = 16000; + this.params.max_tokens = 4096; } } const resp = await this.anthropic.messages.create({ From b3ee159b431003d9c3dcfc1388f0e43c97008a6e Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 14:40:42 -0800 Subject: [PATCH 108/171] Update prompter.js Fixed minor issues --- src/models/prompter.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index b25f2d4..e9148b2 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -41,7 +41,6 @@ export class Prompter { this.profile[key] = base_profile[key]; } // base overrides default, individual overrides base - // Removed this comment this.convo_examples = null; this.coding_examples = null; @@ -140,9 +139,9 @@ export class Prompter { model_profile.api = 'mistral'; else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) profile.api = 'groq'; - else if (profile.model.includes("GLHF/")) + else if (profile.model.includes("glhf/")) profile.api = 'glhf'; - else if (profile.model.includes("Hyperbolic/")) + else if (profile.model.includes("hyperbolic/")) profile.api = 'hyperbolic'; else if (profile.model.includes('novita/')) profile.api = 'novita'; From 7c8f82115b31ae4aa9128b91ab625eaa3691fe9e Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 14:42:00 -0800 Subject: [PATCH 109/171] Update hyperbolic.js Fixed export name --- src/models/hyperbolic.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/models/hyperbolic.js b/src/models/hyperbolic.js index 4341f65..a2ccc48 100644 --- a/src/models/hyperbolic.js +++ b/src/models/hyperbolic.js @@ -1,7 +1,6 @@ -// hyperbolic-no-logger.js import { getKey } from '../utils/keys.js'; -export class hyperbolic { +export class Hyperbolic { constructor(modelName, apiUrl) { this.modelName = modelName || "deepseek-ai/DeepSeek-V3"; this.apiUrl = apiUrl || "https://api.hyperbolic.xyz/v1/chat/completions"; From d1244924663334058dc341f9168701957aa70d48 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 14:42:59 -0800 Subject: [PATCH 110/171] Update settings.js Removed useless profile imports --- settings.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/settings.js b/settings.js index bf83b04..7542a0e 100644 --- a/settings.js +++ b/settings.js @@ -23,8 +23,6 @@ export default // "./profiles/grok.json", // "./profiles/mistral.json", // "./profiles/deepseek.json", - // "./profiles/glhf.json", - // "./profiles/hyperbolic.json", // using more than 1 profile requires you to /msg each bot individually // individual profiles override values from the base profile From af20a10ef0b9f9ff719ac988ba789626a0481cf4 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 14:43:53 -0800 Subject: [PATCH 111/171] Update evaluation_script.py Fixed useless extra line From 5061c53a50be28b29a0a282bdc65dc0d27d171d4 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 17:21:58 -0800 Subject: [PATCH 112/171] Update README.md Made README more readable and changed the recommended ollama model from llama3 to llama3.1 due to higher performance.
--- README.md | 32 +++++++++++++++----------------- 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index e6ece8e..adeaf47 100644 --- a/README.md +++ b/README.md @@ -38,26 +38,24 @@ You can configure the agent's name, model, and prompts in their profile like `an | API | Config Variable | Example Model name | Docs | |------|------|------|------| -| OpenAI | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | -| Google | `GEMINI_API_KEY` | `gemini-2.0-flash` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | -| Anthropic | `ANTHROPIC_API_KEY` | `claude-3-5-haiku-20241022` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | -| Replicate | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | -| Ollama (local) | n/a | `ollama/llama3.1` | [docs](https://ollama.com/library) | -| Groq (Not Grok) | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | -| Hugging Face | `HUGGINGFACE_API_KEY` | `huggingface/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B` | [docs](https://huggingface.co/models) | -| Novita | `NOVITA_API_KEY` | `novita/deepseek/deepseek-r1` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | -| Qwen | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | -| Mistral | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | -| xAI | `XAI_API_KEY` | `grok-2` | [docs](https://docs.x.ai/docs) | -| Deepseek | `DEEPSEEK_API_KEY` | `deepseek-reasoner` | [docs](https://api-docs.deepseek.com/) | -| Openrouter | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | -| glhf.chat | `GHLF_API_KEY` | `glhf/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | -| Hyperbolic | `HYPERBOLIC_API_KEY` | `hyperbolic/deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | - +| `openai` | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | +| `google` | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | +| `anthropic` | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | +| `xai` | `XAI_API_KEY` | `grok-2-1212` | [docs](https://docs.x.ai/docs) | +| `deepseek` | `DEEPSEEK_API_KEY` | `deepseek-chat` | [docs](https://api-docs.deepseek.com/) | +| `ollama` (local) | n/a | `llama3.1` | [docs](https://ollama.com/library) | +| `qwen` | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | +| `mistral` | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | +| `replicate` | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | +| `groq` (not grok) | `GROQCLOUD_API_KEY` | `groq/mixtral-8x7b-32768` | [docs](https://console.groq.com/docs/models) | +| `huggingface` | `HUGGINGFACE_API_KEY` | 
`huggingface/mistralai/Mistral-Nemo-Instruct-2407` | [docs](https://huggingface.co/models) | +| `novita` | `NOVITA_API_KEY` | `novita/deepseek/deepseek-r1` | [docs](https://novita.ai/model-api/product/llm-api?utm_source=github_mindcraft&utm_medium=github_readme&utm_campaign=link) | +| `openrouter` | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | +| `glhf.chat` | `GHLF_API_KEY` | `glhf/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | +| `hyperbolic` | `HYPERBOLIC_API_KEY` | `hyperbolic/deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: `ollama pull llama3.1 && ollama pull nomic-embed-text` -Note that you can you any local model, such as `qwen2.5` `llama3.1` or fine tuned models like `sweaterdog/andy-3.6` But not matter what model, you must put `ollama/` before the model so Mindcraft can handle it properly. ### Online Servers To connect to online servers your bot will need an official Microsoft/Minecraft account. You can use your own personal one, but will need another account if you want to connect too and play with it. To connect, change these lines in `settings.js`: From 4d81445bbbec904bbced9c791903df7914dd7f4c Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 17:25:22 -0800 Subject: [PATCH 113/171] Update local.js Updated the default API model from llama3 to llama3.1, llama3.1 is smarter for the same size. --- src/models/local.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/models/local.js b/src/models/local.js index ba78f53..e51bcf8 100644 --- a/src/models/local.js +++ b/src/models/local.js @@ -10,11 +10,11 @@ export class Local { } async sendRequest(turns, systemMessage) { - let model = this.model_name || 'llama3'; + let model = this.model_name || 'llama3.1'; // Updated to llama3.1, as it is more performant than llama3 let messages = strictFormat(turns); messages.unshift({ role: 'system', content: systemMessage }); - // We'll attempt up to 5 times for models like "deepseek-r1" if the tags are mismatched. + // We'll attempt up to 5 times for models with deepseek-r1-esk reasoning if the tags are mismatched. const maxAttempts = 5; let attempt = 0; let finalRes = null; From efa8fe91535bc805471eb07b914617dc73daa294 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 17:26:28 -0800 Subject: [PATCH 114/171] Update glhf.js Removed useless lines and comments. --- src/models/glhf.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/models/glhf.js b/src/models/glhf.js index 296f535..d41b843 100644 --- a/src/models/glhf.js +++ b/src/models/glhf.js @@ -1,4 +1,3 @@ -// glhf-no-logger.js import OpenAIApi from 'openai'; import { getKey } from '../utils/keys.js'; From e752a0443621552e9b564a7a571627b7a095a705 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 17:31:56 -0800 Subject: [PATCH 115/171] Update settings.js Added a local.json profile, since there wasn't one before? I mean Ollama is one of its kind, so like, why? 
--- settings.js | 1 + 1 file changed, 1 insertion(+) diff --git a/settings.js b/settings.js index 7542a0e..205db1e 100644 --- a/settings.js +++ b/settings.js @@ -23,6 +23,7 @@ export default // "./profiles/grok.json", // "./profiles/mistral.json", // "./profiles/deepseek.json", + // "./profiles/local.json", // using more than 1 profile requires you to /msg each bot indivually // individual profiles override values from the base profile From f72d690956be9d43fb29504452db1e6e49346fbb Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 17:33:38 -0800 Subject: [PATCH 116/171] Create local.json Added a local.json file, since Ollama is unique, it is a local provider, which is not the case for any other provider. --- profiles/local.json | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 profiles/local.json diff --git a/profiles/local.json b/profiles/local.json new file mode 100644 index 0000000..fc02e29 --- /dev/null +++ b/profiles/local.json @@ -0,0 +1,7 @@ +{ + + "name": "Ollama", + + "model": "ollama/llama3.1" + +} From a3125752d79ada4abc02783fdb82ce98a6423384 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Wed, 5 Mar 2025 17:35:47 -0800 Subject: [PATCH 117/171] Update gemini.json Updated the model to Gemini-2.0-flash --- profiles/gemini.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/profiles/gemini.json b/profiles/gemini.json index 4f3cf43..db7243e 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -1,7 +1,7 @@ { "name": "gemini", - "model": "gemini-1.5-flash", + "model": "gemini-2.0-flash", "cooldown": 10000 -} \ No newline at end of file +} From 232b796b10298a4b0c7023e8a412fe4960b1eca7 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:28:05 -0800 Subject: [PATCH 118/171] Update example_tasks.json Fixed extra space From dd6df4792cc2560169c3b3f362038ec37d90298f Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:28:31 -0800 Subject: [PATCH 119/171] Update evaluation_script.py Removed unnecessary space From 312d04a885c77008ab779b7e349f8406506a0021 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:29:04 -0800 Subject: [PATCH 120/171] Update gemini.json Removed extra spaces --- profiles/gemini.json | 2 -- 1 file changed, 2 deletions(-) diff --git a/profiles/gemini.json b/profiles/gemini.json index db7243e..f96e99a 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -1,7 +1,5 @@ { "name": "gemini", - "model": "gemini-2.0-flash", - "cooldown": 10000 } From 7b6cd01bfb311e769817e2f3fcf2abd8977ceedf Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:29:37 -0800 Subject: [PATCH 121/171] Update action_manager.js Removed unnecessary spaces --- src/agent/action_manager.js | 1 - 1 file changed, 1 deletion(-) diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 30359cb..227c031 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -151,5 +151,4 @@ export class ActionManager { await this.stop(); // last attempt to stop }, TIMEOUT_MINS * 60 * 1000); } - } From b863b90f969837b1e926fe307e33de5e869cd3a1 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:30:30 -0800 Subject: [PATCH 122/171] Update agent.js Removed unnecessary spaces From 6425869c0bf743f0403f340ed9b55edd8b78ece2 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:30:49 -0800 Subject: [PATCH 123/171] Update history.js Removed unnecessary spaces From aa6920f14886a56aa7c68f4ddb7531cfde72b5be Mon Sep 17 00:00:00 2001 From: Sweaterdog 
Date: Thu, 6 Mar 2025 11:31:14 -0800 Subject: [PATCH 124/171] Update self_prompter.js Removed an extra space From ca7b6fc65db11e54474508cddd19b59cf5ae36d0 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:31:43 -0800 Subject: [PATCH 125/171] Update gemini.js Removed and extra space --- src/models/gemini.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/models/gemini.js b/src/models/gemini.js index 9a2b3c5..98ddd2e 100644 --- a/src/models/gemini.js +++ b/src/models/gemini.js @@ -39,7 +39,6 @@ export class Gemini { model: this.model_name || "gemini-1.5-flash", // systemInstruction does not work bc google is trash }; - if (this.url) { model = this.genAI.getGenerativeModel( modelConfig, @@ -113,4 +112,4 @@ export class Gemini { const result = await model.embedContent(text); return result.embedding.values; } -} \ No newline at end of file +} From 3953f908e3c6bfe61340809c1d7a78f48b871826 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 11:32:54 -0800 Subject: [PATCH 126/171] Update local.js Removed an extra space From ac60e890c38cc496ad2836bc472640536414648a Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 21:03:34 -0800 Subject: [PATCH 127/171] Delete profiles/local.json --- profiles/local.json | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 profiles/local.json diff --git a/profiles/local.json b/profiles/local.json deleted file mode 100644 index fc02e29..0000000 --- a/profiles/local.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - - "name": "Ollama", - - "model": "ollama/llama3.1" - -} From 83eeac5ebc79bf4713b526515833a5404a48a3b4 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Thu, 6 Mar 2025 21:03:48 -0800 Subject: [PATCH 128/171] Update settings.js --- settings.js | 1 - 1 file changed, 1 deletion(-) diff --git a/settings.js b/settings.js index 205db1e..7542a0e 100644 --- a/settings.js +++ b/settings.js @@ -23,7 +23,6 @@ export default // "./profiles/grok.json", // "./profiles/mistral.json", // "./profiles/deepseek.json", - // "./profiles/local.json", // using more than 1 profile requires you to /msg each bot indivually // individual profiles override values from the base profile From bdee71ac921b80a967f27daca643e26c4512ddc6 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 7 Mar 2025 14:19:55 -0600 Subject: [PATCH 129/171] camera always on, show entities, refactor browser viewer --- profiles/claude_thinker.json | 2 +- settings.js | 2 +- src/agent/agent.js | 9 ++-- .../{viewer.js => vision/browser_viewer.js} | 4 +- src/agent/vision/camera.js | 50 +++++++++---------- src/agent/vision/vision_interpreter.js | 15 ++---- 6 files changed, 38 insertions(+), 44 deletions(-) rename src/agent/{viewer.js => vision/browser_viewer.js} (69%) diff --git a/profiles/claude_thinker.json b/profiles/claude_thinker.json index aab9f10..fdddb59 100644 --- a/profiles/claude_thinker.json +++ b/profiles/claude_thinker.json @@ -6,7 +6,7 @@ "params": { "thinking": { "type": "enabled", - "budget_tokens": 16000 + "budget_tokens": 4000 } } }, diff --git a/settings.js b/settings.js index 1ae05a3..d3899b4 100644 --- a/settings.js +++ b/settings.js @@ -1,6 +1,6 @@ export default { - "minecraft_version": "1.20.4", // supports up to 1.21.1 + "minecraft_version": "1.21.1", // supports up to 1.21.1 "host": "127.0.0.1", // or "localhost", "your.ip.address.here" "port": process.env.MINECRAFT_PORT || 55916, "auth": "offline", // or "microsoft" diff --git a/src/agent/agent.js b/src/agent/agent.js index f186e52..fb123f6 100644 --- 
a/src/agent/agent.js +++ b/src/agent/agent.js @@ -11,7 +11,7 @@ import { MemoryBank } from './memory_bank.js'; import { SelfPrompter } from './self_prompter.js'; import convoManager from './conversation.js'; import { handleTranslation, handleEnglishTranslation } from '../utils/translator.js'; -import { addViewer } from './viewer.js'; +import { addBrowserViewer } from './vision/browser_viewer.js'; import settings from '../../settings.js'; import { serverProxy } from './agent_proxy.js'; import { Task } from './tasks.js'; @@ -36,8 +36,6 @@ export class Agent { this.history = new History(this); console.log('Initializing coder...'); this.coder = new Coder(this); - console.log('Initializing vision intepreter...'); - this.vision_interpreter = new VisionInterpreter(this, settings.allow_vision); console.log('Initializing npc controller...'); this.npc = new NPCContoller(this); console.log('Initializing memory bank...'); @@ -82,7 +80,7 @@ export class Agent { this.bot.once('spawn', async () => { try { clearTimeout(spawnTimeout); - addViewer(this.bot, count_id); + addBrowserViewer(this.bot, count_id); // wait for a bit so stats are not undefined await new Promise((resolve) => setTimeout(resolve, 1000)); @@ -97,6 +95,9 @@ export class Agent { this.task.initBotTask(); } + console.log('Initializing vision intepreter...'); + this.vision_interpreter = new VisionInterpreter(this, settings.allow_vision); + } catch (error) { console.error('Error in spawn event:', error); process.exit(0); diff --git a/src/agent/viewer.js b/src/agent/vision/browser_viewer.js similarity index 69% rename from src/agent/viewer.js rename to src/agent/vision/browser_viewer.js index 6ce8a27..9ae7c7b 100644 --- a/src/agent/viewer.js +++ b/src/agent/vision/browser_viewer.js @@ -1,8 +1,8 @@ -import settings from '../../settings.js'; +import settings from '../../../settings.js'; import prismarineViewer from 'prismarine-viewer'; const mineflayerViewer = prismarineViewer.mineflayer; -export function addViewer(bot, count_id) { +export function addBrowserViewer(bot, count_id) { if (settings.show_bot_views) mineflayerViewer(bot, { port: 3000+count_id, firstPerson: true, }); } \ No newline at end of file diff --git a/src/agent/vision/camera.js b/src/agent/vision/camera.js index 7eafb42..c6d9487 100644 --- a/src/agent/vision/camera.js +++ b/src/agent/vision/camera.js @@ -14,37 +14,37 @@ global.Worker = worker_threads.Worker; export class Camera extends EventEmitter { constructor (bot, fp) { - super() - this.bot = bot - this.fp = fp - this.viewDistance = 4 - this.width = 800 - this.height = 512 - this.canvas = createCanvas(this.width, this.height) - this.renderer = new THREE.WebGLRenderer({ canvas: this.canvas }) - this.viewer = new Viewer(this.renderer) - this._init().then(() => { - this.emit('ready') - }) + super(); + this.bot = bot; + this.fp = fp; + this.viewDistance = 4; + this.width = 800; + this.height = 512; + this.canvas = createCanvas(this.width, this.height); + this.renderer = new THREE.WebGLRenderer({ canvas: this.canvas }); + this.viewer = new Viewer(this.renderer); + this._init().then(() => { + this.emit('ready'); + }) } async _init () { - const botPos = this.bot.entity.position - const center = new Vec3(botPos.x, botPos.y+this.bot.entity.height, botPos.z) - this.viewer.setVersion(this.bot.version) - // Load world - const worldView = new WorldView(this.bot.world, this.viewDistance, center) - this.viewer.listen(worldView) - - this.viewer.camera.position.set(center.x, center.y, center.z) - 
this.viewer.setFirstPersonCamera(this.bot.entity.position, this.bot.entity.yaw, this.bot.entity.pitch) - - await worldView.init(center) + const botPos = this.bot.entity.position; + const center = new Vec3(botPos.x, botPos.y+this.bot.entity.height, botPos.z); + this.viewer.setVersion(this.bot.version); + // Load world + const worldView = new WorldView(this.bot.world, this.viewDistance, center); + this.viewer.listen(worldView); + worldView.listenToBot(this.bot); + await worldView.init(center); + this.worldView = worldView; } async capture() { - // waits some time helps renderer to render the world view - await new Promise(resolve => setTimeout(resolve, 1000)); + const center = new Vec3(this.bot.entity.position.x, this.bot.entity.position.y+this.bot.entity.height, this.bot.entity.position.z); + this.viewer.camera.position.set(center.x, center.y, center.z); + this.viewer.setFirstPersonCamera(this.bot.entity.position, this.bot.entity.yaw, this.bot.entity.pitch); + this.viewer.update(); this.renderer.render(this.viewer.scene, this.viewer.camera); const imageStream = this.canvas.createJPEGStream({ diff --git a/src/agent/vision/vision_interpreter.js b/src/agent/vision/vision_interpreter.js index 34d9f3c..2c03276 100644 --- a/src/agent/vision/vision_interpreter.js +++ b/src/agent/vision/vision_interpreter.js @@ -2,13 +2,12 @@ import { Vec3 } from 'vec3'; import { Camera } from "./camera.js"; import fs from 'fs'; -const RENDER_TIME = 1000; - export class VisionInterpreter { constructor(agent, allow_vision) { this.agent = agent; this.allow_vision = allow_vision; this.fp = './bots/'+agent.name+'/screenshots/'; + this.camera = new Camera(agent.bot, this.fp); } async lookAtPlayer(player_name, direction) { @@ -25,16 +24,12 @@ export class VisionInterpreter { let filename; if (direction === 'with') { await bot.look(player.yaw, player.pitch); - const camera = new Camera(bot, this.fp); - await new Promise(resolve => setTimeout(resolve, RENDER_TIME)); result = `Looking in the same direction as ${player_name}\n`; - filename = await camera.capture(); + filename = await this.camera.capture(); } else { await bot.lookAt(new Vec3(player.position.x, player.position.y + player.height, player.position.z)); - const camera = new Camera(bot, this.fp); - await new Promise(resolve => setTimeout(resolve, RENDER_TIME)); result = `Looking at player ${player_name}\n`; - filename = await camera.capture(); + filename = await this.camera.capture(); } @@ -48,11 +43,9 @@ export class VisionInterpreter { let result = ""; const bot = this.agent.bot; await bot.lookAt(new Vec3(x, y + 2, z)); - const camera = new Camera(bot, this.fp); - await new Promise(resolve => setTimeout(resolve, RENDER_TIME)); result = `Looking at coordinate ${x, y, z}\n`; - let filename = await camera.capture(); + let filename = await this.camera.capture(); return result + `Image analysis: "${await this.analyzeImage(filename)}"`; } From c13bf4895da386a0ff106ade87eac8af7c43adfc Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 7 Mar 2025 23:18:49 -0600 Subject: [PATCH 130/171] improve prompt/examples --- profiles/defaults/_default.json | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json index b06b249..ab63863 100644 --- a/profiles/defaults/_default.json +++ b/profiles/defaults/_default.json @@ -9,7 +9,7 @@ "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. 
Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. !goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", - "image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, structures, and notable features. Focus on details relevant to the conversation. Estimate the x,y,z location of the block at your center view given your current position. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS", + "image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, entities, structures, and notable features. Focus on details relevant to the conversation. Note the view is glitchy; the sky color is always blue regardless of weather or time, dropped items are pink cubes, blocks below y=0 do not render. Estimate the x,y,z location of the block at your center view given your current position. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS", "modes": { "self_preservation": true, @@ -71,19 +71,13 @@ [ {"role": "user", "content": "d7297: look here"}, {"role": "assistant", "content": "Okay! !lookAtPlayer(\"d7297\", \"at\")"}, - {"role": "system", "content": "Code Output:\nLooking at player d7297. There is a giant waterfall behind d7297."}, + {"role": "system", "content": "Image analysis:\nLooking at player d7297. There is a giant waterfall behind d7297."}, {"role": "assistant", "content": "Woo, that is a cool waterfall!"} ], - [ - {"role": "user", "content": "d7297: look there"}, - {"role": "assistant", "content": "Okay! !lookAtPlayer(\"d7297\", \"with\")"}, - {"role": "system", "content": "Code Output:\nLooking in the same direction as d7297\nThere is a castle made of stone."}, - {"role": "assistant", "content": "Wow, that stone castle looks amazing!"} - ], [ {"role": "user", "content": "d7297: look 0 70 12 and describe what is there"}, {"role": "assistant", "content": "Sure! !lookAtPosition(0, 70, 12)"}, - {"role": "system", "content": "Code Output:\nLooking in the same direction as d7297\nThere is a water fountain."}, + {"role": "system", "content": "Image analysis:\nLooking at position 0, 70, 12. There is a water fountain."}, {"role": "assistant", "content": "I see the water fountain! 
amazing!"} ], From 39abf8a19e899c49a38188d14ec0ad434ca3ca10 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 7 Mar 2025 23:19:12 -0600 Subject: [PATCH 131/171] update worldview pos, extend view range --- src/agent/vision/camera.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/agent/vision/camera.js b/src/agent/vision/camera.js index c6d9487..e8a07fb 100644 --- a/src/agent/vision/camera.js +++ b/src/agent/vision/camera.js @@ -17,7 +17,7 @@ export class Camera extends EventEmitter { super(); this.bot = bot; this.fp = fp; - this.viewDistance = 4; + this.viewDistance = 12; this.width = 800; this.height = 512; this.canvas = createCanvas(this.width, this.height); @@ -43,6 +43,7 @@ export class Camera extends EventEmitter { async capture() { const center = new Vec3(this.bot.entity.position.x, this.bot.entity.position.y+this.bot.entity.height, this.bot.entity.position.z); this.viewer.camera.position.set(center.x, center.y, center.z); + await this.worldView.updatePosition(center); this.viewer.setFirstPersonCamera(this.bot.entity.position, this.bot.entity.yaw, this.bot.entity.pitch); this.viewer.update(); this.renderer.render(this.viewer.scene, this.viewer.camera); From 4cd5a8f658784008f72da1107d8e57242d34a47e Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 7 Mar 2025 23:19:41 -0600 Subject: [PATCH 132/171] patch viewer to so not constantly throwing errors --- patches/prismarine-viewer+1.33.0.patch | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 patches/prismarine-viewer+1.33.0.patch diff --git a/patches/prismarine-viewer+1.33.0.patch b/patches/prismarine-viewer+1.33.0.patch new file mode 100644 index 0000000..3ef7a5a --- /dev/null +++ b/patches/prismarine-viewer+1.33.0.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/prismarine-viewer/viewer/lib/entity/Entity.js b/node_modules/prismarine-viewer/viewer/lib/entity/Entity.js +index 8945452..dab25be 100644 +--- a/node_modules/prismarine-viewer/viewer/lib/entity/Entity.js ++++ b/node_modules/prismarine-viewer/viewer/lib/entity/Entity.js +@@ -203,7 +203,7 @@ function getMesh (texture, jsonModel) { + class Entity { + constructor (version, type, scene) { + const e = entities[type] +- if (!e) throw new Error(`Unknown entity ${type}`) ++ if (!e) return; //throw new Error(`Unknown entity ${type}`) + + this.mesh = new THREE.Object3D() + for (const [name, jsonModel] of Object.entries(e.geometry)) { From 407be3e6520319de8e16e9e18a7f056a67fb3e8e Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Mar 2025 12:12:28 -0800 Subject: [PATCH 133/171] Update README.md Fixed the Example Gemini model to a model that still works, gemini-pro is no longer recognized by google AI studio --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index adeaf47..d1199b3 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | API | Config Variable | Example Model name | Docs | |------|------|------|------| | `openai` | `OPENAI_API_KEY` | `gpt-4o-mini` | [docs](https://platform.openai.com/docs/models) | -| `google` | `GEMINI_API_KEY` | `gemini-pro` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | +| `google` | `GEMINI_API_KEY` | `gemini-2.0-flash` | [docs](https://ai.google.dev/gemini-api/docs/models/gemini) | | `anthropic` | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | | `xai` | `XAI_API_KEY` 
| `grok-2-1212` | [docs](https://docs.x.ai/docs) | | `deepseek` | `DEEPSEEK_API_KEY` | `deepseek-chat` | [docs](https://api-docs.deepseek.com/) | From 5718427b3bc9193260b898dc013d22b3183bf37c Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Mar 2025 12:20:47 -0800 Subject: [PATCH 134/171] Update agent.js --- src/agent/agent.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/agent/agent.js b/src/agent/agent.js index ab67eb7..4a955e1 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -86,6 +86,7 @@ export class Agent { console.log(`${this.name} spawned.`); this.clearBotLogs(); + this._setupEventHandlers(save_data, init_message); this.startEvents(); From 7bd9f0bfd2bbdb8dddab82c17cdf9c2736dca434 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sat, 8 Mar 2025 12:25:35 -0800 Subject: [PATCH 135/171] Delete patches/@google+generative-ai+0.2.1.patch Deleted the google patch to allow experimental gemini models, per MaxRobinsonTheGreats request. --- patches/@google+generative-ai+0.2.1.patch | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 patches/@google+generative-ai+0.2.1.patch diff --git a/patches/@google+generative-ai+0.2.1.patch b/patches/@google+generative-ai+0.2.1.patch deleted file mode 100644 index 68d8ec6..0000000 --- a/patches/@google+generative-ai+0.2.1.patch +++ /dev/null @@ -1,12 +0,0 @@ -diff --git a/node_modules/@google/generative-ai/dist/index.mjs b/node_modules/@google/generative-ai/dist/index.mjs ---- a/node_modules/@google/generative-ai/dist/index.mjs -+++ b/node_modules/@google/generative-ai/dist/index.mjs -@@ -156,1 +156,1 @@ --const API_VERSION = "v1"; -+const API_VERSION = "v1beta"; -diff --git a/node_modules/@google/generative-ai/dist/index.js b/node_modules/@google/generative-ai/dist/index.js ---- a/node_modules/@google/generative-ai/dist/index.js -+++ b/node_modules/@google/generative-ai/dist/index.js -@@ -156,1 +156,1 @@ --const API_VERSION = "v1"; -+const API_VERSION = "v1beta"; From fbdac8d48e09fd4cb496200b0e580d5dabcb7967 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 9 Mar 2025 00:29:06 -0800 Subject: [PATCH 136/171] Update prompter.js Fixed a minor error --- src/models/prompter.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/models/prompter.js b/src/models/prompter.js index 43fd9da..dd7231b 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -139,9 +139,9 @@ export class Prompter { model_profile.api = 'mistral'; else if (profile.model.includes("groq/") || profile.model.includes("groqcloud/")) profile.api = 'groq'; - else if (profile.model.includes("glhf/") + else if (profile.model.includes("glhf/")) profile.api = 'glhf'; - else if (profile.model.includes("hyperbolic/") + else if (profile.model.includes("hyperbolic/")) profile.api = 'hyperbolic'; else if (profile.model.includes('novita/')) profile.api = 'novita'; From 21ee58fd8cd2b3fb0d023304b7b020af58fdcc5d Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Sun, 9 Mar 2025 00:45:30 -0800 Subject: [PATCH 137/171] Update groq.js Fixed an unclosed bracket --- src/models/groq.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/models/groq.js b/src/models/groq.js index 402ebe8..b3956ba 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -102,4 +102,5 @@ export class GroqCloudAPI { finalRes = finalRes.replace(/<\|separator\|>/g, '*no response*'); return finalRes; -} \ No newline at end of file + } +} From 76204664cd4c4d4bac455ae3ca88925d9b2bb496 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Sun, 9 
Mar 2025 17:17:41 +0000 Subject: [PATCH 138/171] added searchwiki action --- package.json | 1 + profiles/gemini.json | 2 +- settings.js | 4 ++-- src/agent/commands/actions.js | 26 ++++++++++++++++++++++++++ 4 files changed, 30 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 817a37e..003df5f 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,7 @@ "@huggingface/inference": "^2.8.1", "@mistralai/mistralai": "^1.1.0", "canvas": "^3.1.0", + "cheerio": "^1.0.0", "express": "^4.18.2", "google-translate-api-x": "^10.7.1", "groq-sdk": "^0.15.0", diff --git a/profiles/gemini.json b/profiles/gemini.json index 4f3cf43..9331aed 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -1,7 +1,7 @@ { "name": "gemini", - "model": "gemini-1.5-flash", + "model": "gemini-2.0-flash", "cooldown": 10000 } \ No newline at end of file diff --git a/settings.js b/settings.js index c226658..76fca5f 100644 --- a/settings.js +++ b/settings.js @@ -13,10 +13,10 @@ export default // the base profile is shared by all bots for default prompts/examples/modes "base_profile": "./profiles/defaults/survival.json", // also see creative.json, god_mode.json "profiles": ((process.env.PROFILES) && JSON.parse(process.env.PROFILES)) || [ - "./andy.json", + // "./andy.json", // "./profiles/gpt.json", // "./profiles/claude.json", - // "./profiles/gemini.json", + "./profiles/gemini.json", // "./profiles/llama.json", // "./profiles/qwen.json", // "./profiles/mistral.json", diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 19b231e..ab444d7 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -1,6 +1,7 @@ import * as skills from '../library/skills.js'; import settings from '../../../settings.js'; import convoManager from '../conversation.js'; +import { load } from 'cheerio'; function runAsAction (actionFn, resume = false, timeout = -1) { let actionLabel = null; // Will be set on first use @@ -354,6 +355,31 @@ export const actionsList = [ return `Mode ${mode_name} is now ${on ? 'on' : 'off'}.`; } }, + { + name: '!searchWiki', + description: 'Search the Minecraft Wiki for the given query.', + params: { + 'query': { type: 'string', description: 'The query to search for.' } + }, + perform: async function (agent, query) { + const url = `https://minecraft.wiki/w/${query}` + try { + const response = await fetch(url); + if (response.status === 404) { + return `${query} was not found on the Minecraft Wiki. 
Try adjusting your search term.`; + } + const html = await response.text(); + const $ = load(html); + + const divContent = $("div.mw-parser-output").text(); + + return divContent.trim(); + } catch (error) { + console.error("Error fetching or parsing HTML:", error); + return `The following error occured: ${error}` + } + } + }, { name: '!goal', description: 'Set a goal prompt to endlessly work towards with continuous self-prompting.', From cf1fc1592031d01481f5b8d80791a2b4f4601c7e Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Sun, 9 Mar 2025 17:18:20 +0000 Subject: [PATCH 139/171] remove navigation tables from parser output in actions.js --- src/agent/commands/actions.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index ab444d7..ff695f7 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -371,7 +371,11 @@ export const actionsList = [ const html = await response.text(); const $ = load(html); - const divContent = $("div.mw-parser-output").text(); + const parserOutput = $("div.mw-parser-output"); + + parserOutput.find("table.navbox").remove(); + + const divContent = parserOutput.text(); return divContent.trim(); } catch (error) { From cf6e23390d589278114d3cf2a0a79d99e559c5b3 Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Sun, 9 Mar 2025 17:22:41 +0000 Subject: [PATCH 140/171] reset settings.js --- settings.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/settings.js b/settings.js index 76fca5f..c226658 100644 --- a/settings.js +++ b/settings.js @@ -13,10 +13,10 @@ export default // the base profile is shared by all bots for default prompts/examples/modes "base_profile": "./profiles/defaults/survival.json", // also see creative.json, god_mode.json "profiles": ((process.env.PROFILES) && JSON.parse(process.env.PROFILES)) || [ - // "./andy.json", + "./andy.json", // "./profiles/gpt.json", // "./profiles/claude.json", - "./profiles/gemini.json", + // "./profiles/gemini.json", // "./profiles/llama.json", // "./profiles/qwen.json", // "./profiles/mistral.json", From 0de98c99fad6a511837a1c40229b6bf98a752789 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 10 Mar 2025 12:27:31 -0500 Subject: [PATCH 141/171] fix claude tokens --- profiles/claude_thinker.json | 2 +- src/models/claude.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/profiles/claude_thinker.json b/profiles/claude_thinker.json index aab9f10..fdddb59 100644 --- a/profiles/claude_thinker.json +++ b/profiles/claude_thinker.json @@ -6,7 +6,7 @@ "params": { "thinking": { "type": "enabled", - "budget_tokens": 16000 + "budget_tokens": 4000 } } }, diff --git a/src/models/claude.js b/src/models/claude.js index 7383584..5198241 100644 --- a/src/models/claude.js +++ b/src/models/claude.js @@ -26,7 +26,7 @@ export class Claude { this.params.max_tokens = this.params.thinking.budget_tokens + 1000; // max_tokens must be greater than thinking.budget_tokens } else { - this.params.max_tokens = 16000; + this.params.max_tokens = 4096; } } const resp = await this.anthropic.messages.create({ From 79e9a734c82528409b8a92a7891e7dea92157ed0 Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 10 Mar 2025 11:20:49 -0700 Subject: [PATCH 142/171] Update example_tasks.json Fixed the extra space --- example_tasks.json | 1 - 1 file changed, 1 deletion(-) diff --git a/example_tasks.json b/example_tasks.json index f2b83a9..f5717c3 100644 --- a/example_tasks.json +++ 
b/example_tasks.json @@ -109,4 +109,3 @@ "type": "techtree", "timeout": 300 } -} From 44550a634dd3e73c210488d84e1ce5218ccca4ed Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 10 Mar 2025 11:21:15 -0700 Subject: [PATCH 143/171] Update gemini.json Fixed extra spaces --- profiles/gemini.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/profiles/gemini.json b/profiles/gemini.json index f96e99a..b8f1eb0 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -1,5 +1,7 @@ { "name": "gemini", + "model": "gemini-2.0-flash", + "cooldown": 10000 } From 350c842701c5cdb055c6c328488f51f4786f094a Mon Sep 17 00:00:00 2001 From: Sweaterdog Date: Mon, 10 Mar 2025 11:21:37 -0700 Subject: [PATCH 144/171] Update gemini.json Fixed this file again From 95782fa7691804d6705d6a656a0ac04db1df2bdd Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Mon, 10 Mar 2025 21:03:50 +0000 Subject: [PATCH 145/171] move command from actions.js to queries.js --- src/agent/commands/actions.js | 30 ------------------------------ src/agent/commands/queries.js | 30 ++++++++++++++++++++++++++++++ 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index ff695f7..19b231e 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -1,7 +1,6 @@ import * as skills from '../library/skills.js'; import settings from '../../../settings.js'; import convoManager from '../conversation.js'; -import { load } from 'cheerio'; function runAsAction (actionFn, resume = false, timeout = -1) { let actionLabel = null; // Will be set on first use @@ -355,35 +354,6 @@ export const actionsList = [ return `Mode ${mode_name} is now ${on ? 'on' : 'off'}.`; } }, - { - name: '!searchWiki', - description: 'Search the Minecraft Wiki for the given query.', - params: { - 'query': { type: 'string', description: 'The query to search for.' } - }, - perform: async function (agent, query) { - const url = `https://minecraft.wiki/w/${query}` - try { - const response = await fetch(url); - if (response.status === 404) { - return `${query} was not found on the Minecraft Wiki. Try adjusting your search term.`; - } - const html = await response.text(); - const $ = load(html); - - const parserOutput = $("div.mw-parser-output"); - - parserOutput.find("table.navbox").remove(); - - const divContent = parserOutput.text(); - - return divContent.trim(); - } catch (error) { - console.error("Error fetching or parsing HTML:", error); - return `The following error occured: ${error}` - } - } - }, { name: '!goal', description: 'Set a goal prompt to endlessly work towards with continuous self-prompting.', diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index c9fe333..a23bb9b 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -2,6 +2,7 @@ import * as world from '../library/world.js'; import * as mc from '../../utils/mcdata.js'; import { getCommandDocs } from './index.js'; import convoManager from '../conversation.js'; +import { load } from 'cheerio'; const pad = (str) => { return '\n' + str + '\n'; @@ -214,6 +215,35 @@ export const queryList = [ return pad(craftingPlan); }, }, + { + name: '!searchWiki', + description: 'Search the Minecraft Wiki for the given query.', + params: { + 'query': { type: 'string', description: 'The query to search for.' 
} + }, + perform: async function (agent, query) { + const url = `https://minecraft.wiki/w/${query}` + try { + const response = await fetch(url); + if (response.status === 404) { + return `${query} was not found on the Minecraft Wiki. Try adjusting your search term.`; + } + const html = await response.text(); + const $ = load(html); + + const parserOutput = $("div.mw-parser-output"); + + parserOutput.find("table.navbox").remove(); + + const divContent = parserOutput.text(); + + return divContent.trim(); + } catch (error) { + console.error("Error fetching or parsing HTML:", error); + return `The following error occured: ${error}` + } + } + }, { name: '!help', description: 'Lists all available commands and their descriptions.', From f57da837b1d3eb86a9f7883ba372004e977268ea Mon Sep 17 00:00:00 2001 From: uukelele-scratch Date: Wed, 12 Mar 2025 21:22:14 +0000 Subject: [PATCH 146/171] speaking is now false by default --- settings.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/settings.js b/settings.js index bf35239..b205206 100644 --- a/settings.js +++ b/settings.js @@ -30,7 +30,7 @@ export default "load_memory": false, // load memory from previous session "init_message": "Respond with hello world and your name", // sends to all on spawn "only_chat_with": [], // users that the bots listen to and send general messages to. if empty it will chat publicly - "speak": true, // allows all bots to speak through system text-to-speech. tested on windows, should work on mac, on linux you may need to `apt install espeak` + "speak": false, // allows all bots to speak through system text-to-speech. tested on windows, should work on mac, on linux you may need to `apt install espeak` "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... From 1a4ac02e13ca3cb85d692bf107470c96e7b956bf Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 13:51:41 -0500 Subject: [PATCH 147/171] remove newline? 
--- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 22d104f..c55f6a9 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,4 @@ services/viaproxy/saves.json services/viaproxy/viaproxy.yml tmp/ wandb/ -experiments/ +experiments/ \ No newline at end of file From 600f219085edf5140d1285e4ffef1c7a3299d332 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 13:56:13 -0500 Subject: [PATCH 148/171] fix spacing --- evaluation_script.py | 2 +- example_tasks.json | 1 + src/agent/action_manager.js | 3 ++- src/agent/agent.js | 2 +- src/agent/history.js | 2 +- src/agent/self_prompter.js | 2 +- 6 files changed, 7 insertions(+), 5 deletions(-) diff --git a/evaluation_script.py b/evaluation_script.py index beb170b..a0b9959 100644 --- a/evaluation_script.py +++ b/evaluation_script.py @@ -347,4 +347,4 @@ def main(): # run_experiment(args.task_path, args.task_id, args.num_exp) if __name__ == "__main__": - main() + main() \ No newline at end of file diff --git a/example_tasks.json b/example_tasks.json index f5717c3..7210d55 100644 --- a/example_tasks.json +++ b/example_tasks.json @@ -109,3 +109,4 @@ "type": "techtree", "timeout": 300 } +} \ No newline at end of file diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index 227c031..f5c6cae 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -151,4 +151,5 @@ export class ActionManager { await this.stop(); // last attempt to stop }, TIMEOUT_MINS * 60 * 1000); } -} + +} \ No newline at end of file diff --git a/src/agent/agent.js b/src/agent/agent.js index 4a955e1..853df3f 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -86,7 +86,7 @@ export class Agent { console.log(`${this.name} spawned.`); this.clearBotLogs(); - + this._setupEventHandlers(save_data, init_message); this.startEvents(); diff --git a/src/agent/history.js b/src/agent/history.js index c76e0d3..4ef0c37 100644 --- a/src/agent/history.js +++ b/src/agent/history.js @@ -117,4 +117,4 @@ export class History { this.turns = []; this.memory = ''; } -} +} \ No newline at end of file diff --git a/src/agent/self_prompter.js b/src/agent/self_prompter.js index 19ead75..3251f0e 100644 --- a/src/agent/self_prompter.js +++ b/src/agent/self_prompter.js @@ -143,4 +143,4 @@ export class SelfPrompter { // this stops it from responding from the handlemessage loop and the self-prompt loop at the same time } } -} +} \ No newline at end of file From 5695c66fcbbaea93f9d5ea5b68f80829d888530b Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 14:33:56 -0500 Subject: [PATCH 149/171] better comments --- settings.js | 2 +- src/models/prompter.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/settings.js b/settings.js index c226658..681ff0c 100644 --- a/settings.js +++ b/settings.js @@ -37,7 +37,7 @@ export default "allow_insecure_coding": false, // allows newAction command and model can write/run code on your computer. enable at own risk "blocked_actions" : [], // commands to disable and remove from docs. Ex: ["!setMode"] "code_timeout_mins": -1, // minutes code is allowed to run. -1 for no timeout - "relevant_docs_count": 5, // Parameter: -1 = all, 0 = no references, 5 = five references. If exceeding the maximum, all reference documents are returned. + "relevant_docs_count": 5, // number of relevant code function docs to select for prompting. 
-1 for all "max_messages": 15, // max number of messages to keep in context "num_examples": 2, // number of examples to give to the model diff --git a/src/models/prompter.js b/src/models/prompter.js index dd7231b..5d9e3de 100644 --- a/src/models/prompter.js +++ b/src/models/prompter.js @@ -154,7 +154,7 @@ export class Prompter { else if (profile.model.includes('mistral')) profile.api = 'mistral'; else - throw new Error('Unknown model:', profile.model, 'Did you check the name is correct?'); // Asks the user if the name is correct + throw new Error('Unknown model:', profile.model); } return profile; } From 7f97574c4e2160d6d16fe2bcabe925d385947e86 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 14:40:18 -0500 Subject: [PATCH 150/171] fix speak --- settings.js | 2 +- src/agent/speak.js | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/settings.js b/settings.js index d1f8d9e..8e7e4dd 100644 --- a/settings.js +++ b/settings.js @@ -30,7 +30,7 @@ export default "load_memory": false, // load memory from previous session "init_message": "Respond with hello world and your name", // sends to all on spawn "only_chat_with": [], // users that the bots listen to and send general messages to. if empty it will chat publicly - "speak": false, // allows all bots to speak through system text-to-speech. tested on windows, should work on mac, on linux you may need to `apt install espeak` + "speak": false, // allows all bots to speak through system text-to-speech. works on windows, mac, on linux you need to `apt install espeak` "language": "en", // translate to/from this language. Supports these language names: https://cloud.google.com/translate/docs/languages "show_bot_views": false, // show bot's view in browser at localhost:3000, 3001... diff --git a/src/agent/speak.js b/src/agent/speak.js index af18298..9fc9c60 100644 --- a/src/agent/speak.js +++ b/src/agent/speak.js @@ -37,6 +37,7 @@ function processQueue() { console.error(`${error.stack}`); } else if (stderr) { console.error(`Error: ${stderr}`); + } processQueue(); // Continue with the next message in the queue }); } From cbe5804f73582394a4a3834a07c09c2fc915ccaa Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 15:38:49 -0500 Subject: [PATCH 151/171] remove duplicate mistral --- settings.js | 1 - 1 file changed, 1 deletion(-) diff --git a/settings.js b/settings.js index 8e7e4dd..099a80c 100644 --- a/settings.js +++ b/settings.js @@ -19,7 +19,6 @@ export default // "./profiles/gemini.json", // "./profiles/llama.json", // "./profiles/qwen.json", - // "./profiles/mistral.json", // "./profiles/grok.json", // "./profiles/mistral.json", // "./profiles/deepseek.json", From ef37a400b0459388f3ec117fb97a3c2c103f68bb Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 15:42:23 -0500 Subject: [PATCH 152/171] fix digdown --- src/agent/commands/actions.js | 4 +-- src/agent/library/skills.js | 58 +++++++++++++++++++---------------- 2 files changed, 34 insertions(+), 28 deletions(-) diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 732b515..2a70340 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -409,8 +409,8 @@ export const actionsList = [ }, { name: '!digDown', - description: 'Digs down a specified distance.', - params: {'distance': { type: 'int', description: 'Distance to dig down'}}, + description: 'Digs down a specified distance. 
Will stop if it reaches lava, water, or a fall of >=4 blocks below the bot.', + params: {'distance': { type: 'int', description: 'Distance to dig down', domain: [1, Number.MAX_SAFE_INTEGER] }}, perform: runAsAction(async (agent, distance) => { await skills.digDown(agent.bot, distance) }) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 2e8f5e8..36355e2 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -1379,51 +1379,57 @@ export async function activateNearestBlock(bot, type) { export async function digDown(bot, distance = 10) { /** - * Digs down a specified distance. + * Digs down a specified distance. Will stop if it reaches lava, water, or a fall of >=4 blocks below the bot. * @param {MinecraftBot} bot, reference to the minecraft bot. * @param {int} distance, distance to dig down. - * @returns {Promise} true if successfully dug down. + * @returns {Promise} true if successfully dug all the way down. * @example * await skills.digDown(bot, 10); **/ - for (let i = 0; i < distance; i++) { - const targetBlock = bot.blockAt(bot.entity.position.offset(0, -1, 0)); - const belowBlock = bot.blockAt(bot.entity.position.offset(0, -2, 0)); + let start_block_pos = bot.blockAt(bot.entity.position).position; + for (let i = 1; i <= distance; i++) { + const targetBlock = bot.blockAt(start_block_pos.offset(0, -i, 0)); + let belowBlock = bot.blockAt(start_block_pos.offset(0, -i-1, 0)); + + if (!targetBlock || !belowBlock) { + log(bot, `Dug down ${i-1} blocks, but reached the end of the world.`); + return true; + } // Check for lava, water - if (!targetBlock || targetBlock.name === 'lava' || targetBlock.name === 'water' || - (belowBlock && (belowBlock.name === 'lava' || belowBlock.name === 'water'))) { - console.log(`Dug down ${i} blocks, but reached ${belowBlock ? belowBlock.name : '(lava/water)'}`); - log(bot, `Dug down ${i} blocks, but reached ${belowBlock ? belowBlock.name : '(lava/water)'}`) + if (targetBlock.name === 'lava' || targetBlock.name === 'water' || + belowBlock.name === 'lava' || belowBlock.name === 'water') { + log(bot, `Dug down ${i-1} blocks, but reached ${belowBlock ? 
belowBlock.name : '(lava/water)'}`) return false; } - // Check for a fall of more than 5 blocks below the bot - let isSafe = false; - for (let j = 1; j <= 5; j++) { - const belowBlock = bot.blockAt(bot.entity.position.offset(0, -j-1, 0)); - if (!belowBlock || belowBlock.name !== 'air') { - isSafe = true; + const MAX_FALL_BLOCKS = 2; + let num_fall_blocks = 0; + for (let j = 0; j <= MAX_FALL_BLOCKS; j++) { + if (!belowBlock || (belowBlock.name !== 'air' && belowBlock.name !== 'cave_air')) { break; } + num_fall_blocks++; + belowBlock = bot.blockAt(belowBlock.position.offset(0, -1, 0)); } - - if (!targetBlock || !isSafe) { - console.log(`Dug down ${i} blocks, but reached fall`); - log(bot, `Dug down ${i} blocks, but reached fall`); + if (num_fall_blocks > MAX_FALL_BLOCKS) { + log(bot, `Dug down ${i-1} blocks, but reached a drop below the next block.`); return false; } - if (bot.canDigBlock(targetBlock)) { - await breakBlockAt(bot, bot.entity.position.x, bot.entity.position.y - 1, bot.entity.position.z); - await bot.waitForTicks(10); // wait for a short period to avoid issues - await bot.entity.position.offset(0, -1, 0); - } else { - console.log('Cannot dig block at position:', bot.entity.position.offset(0, -1, 0)); - log(bot, 'Cannot dig block at position:' + bot.entity.position.offset(0, -1, 0)) + if (targetBlock.name === 'air' || targetBlock.name === 'cave_air') { + log(bot, 'Skipping air block'); + console.log(targetBlock.position); + continue; + } + + let dug = await breakBlockAt(bot, targetBlock.position.x, targetBlock.position.y, targetBlock.position.z); + if (!dug) { + log(bot, 'Failed to dig block at position:' + targetBlock.position); return false; } } + log(bot, `Dug down ${distance} blocks.`); return true; } From cd60655803c970270789de66b11c187cfca6fe95 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 15:56:05 -0500 Subject: [PATCH 153/171] add patch for dumb google api --- patches/@google+generative-ai+0.2.1.patch | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 patches/@google+generative-ai+0.2.1.patch diff --git a/patches/@google+generative-ai+0.2.1.patch b/patches/@google+generative-ai+0.2.1.patch new file mode 100644 index 0000000..ebdff24 --- /dev/null +++ b/patches/@google+generative-ai+0.2.1.patch @@ -0,0 +1,13 @@ +diff --git a/node_modules/@google/generative-ai/dist/index.mjs b/node_modules/@google/generative-ai/dist/index.mjs +index 23a175b..aab7e19 100644 +--- a/node_modules/@google/generative-ai/dist/index.mjs ++++ b/node_modules/@google/generative-ai/dist/index.mjs +@@ -151,7 +151,7 @@ class GoogleGenerativeAIResponseError extends GoogleGenerativeAIError { + * limitations under the License. + */ + const BASE_URL = "https://generativelanguage.googleapis.com"; +-const API_VERSION = "v1"; ++const API_VERSION = "v1beta"; + /** + * We can't `require` package.json if this runs on web. We will use rollup to + * swap in the version number here at build time. 
From 7e3ec5d1e023c82c2d5eda673c66f506fd5528cf Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 13 Mar 2025 16:00:50 -0500 Subject: [PATCH 154/171] gemini formatting --- profiles/gemini.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/profiles/gemini.json b/profiles/gemini.json index b8f1eb0..db7243e 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -1,7 +1,7 @@ { "name": "gemini", - + "model": "gemini-2.0-flash", - + "cooldown": 10000 } From 40f294d1742b871d6ecc1cbdb6b6999014bd314b Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 15 Mar 2025 16:56:36 -0500 Subject: [PATCH 155/171] fix collect simple crops/weird blocks --- profiles/gemini.json | 2 +- src/agent/library/skills.js | 9 ++++++++- src/utils/mcdata.js | 10 ++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/profiles/gemini.json b/profiles/gemini.json index db7243e..8a91387 100644 --- a/profiles/gemini.json +++ b/profiles/gemini.json @@ -3,5 +3,5 @@ "model": "gemini-2.0-flash", - "cooldown": 10000 + "cooldown": 5000 } diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 36355e2..ca3718d 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -460,7 +460,14 @@ export async function collectBlock(bot, blockType, num=1, exclude=null) { return false; } try { - await bot.collectBlock.collect(block); + if (mc.mustCollectManually(blockType)) { + await goToPosition(bot, block.position.x, block.position.y, block.position.z, 2); + await bot.dig(block); + await pickupNearbyItems(bot); + } + else { + await bot.collectBlock.collect(block); + } collected++; await autoLight(bot); } diff --git a/src/utils/mcdata.js b/src/utils/mcdata.js index 2a3a27c..33b0142 100644 --- a/src/utils/mcdata.js +++ b/src/utils/mcdata.js @@ -86,6 +86,16 @@ export function isHostile(mob) { return (mob.type === 'mob' || mob.type === 'hostile') && mob.name !== 'iron_golem' && mob.name !== 'snow_golem'; } +// blocks that don't work with collectBlock, need to be manually collected +export function mustCollectManually(blockName) { + // all crops (that aren't normal blocks), torches, buttons, levers, redstone, + const full_names = ['wheat', 'carrots', 'potatoes', 'beetroots', 'nether_wart', 'cocoa', 'sugar_cane', 'kelp', 'short_grass', 'fern', 'tall_grass', 'bamboo', + 'poppy', 'dandelion', 'blue_orchid', 'allium', 'azure_bluet', 'oxeye_daisy', 'cornflower', 'lilac', 'wither_rose', 'lily_of_the_valley', 'wither_rose', + 'lever', 'redstone_wire', 'lantern'] + const partial_names = ['sapling', 'torch', 'button', 'carpet', 'pressure_plate', 'mushroom', 'tulip', 'bush', 'vines', 'fern'] + return full_names.includes(blockName.toLowerCase()) || partial_names.some(partial => blockName.toLowerCase().includes(partial)); +} + export function getItemId(itemName) { let item = mcdata.itemsByName[itemName]; if (item) { From c5b860d6249dd43c8e21e61e2e0a641d924524fa Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 15 Mar 2025 17:25:11 -0500 Subject: [PATCH 156/171] fix busted up groq --- src/models/groq.js | 61 ++++++++++++++++++---------------------------- 1 file changed, 24 insertions(+), 37 deletions(-) diff --git a/src/models/groq.js b/src/models/groq.js index b8dfe6b..e601137 100644 --- a/src/models/groq.js +++ b/src/models/groq.js @@ -27,50 +27,38 @@ export class GroqCloudAPI { } - async sendRequest(turns, systemMessage, stop_seq = null) { - // Variables for DeepSeek-R1 models - const maxAttempts = 5; - let attempt = 0; - let finalRes = 
null; - let res = null; + async sendRequest(turns, systemMessage, stop_seq = null) { + // Construct messages array + let messages = [{"role": "system", "content": systemMessage}].concat(turns); - // Construct messages array - let messages = [{"role": "system", "content": systemMessage}].concat(turns); + let res = null; - while (attempt < maxAttempts) { - attempt++; + try { + console.log("Awaiting Groq response..."); - // These variables look odd, but they're for the future. - let raw_res = null; - let tool_calls = null; + // Handle deprecated max_tokens parameter + if (this.params.max_tokens) { + console.warn("GROQCLOUD WARNING: A profile is using `max_tokens`. This is deprecated. Please move to `max_completion_tokens`."); + this.params.max_completion_tokens = this.params.max_tokens; + delete this.params.max_tokens; + } - try { - console.log("Awaiting Groq response..."); + if (!this.params.max_completion_tokens) { + this.params.max_completion_tokens = 4000; + } - // Handle deprecated max_tokens parameter - if (this.params.max_tokens) { - console.warn("GROQCLOUD WARNING: A profile is using `max_tokens`. This is deprecated. Please move to `max_completion_tokens`."); - this.params.max_completion_tokens = this.params.max_tokens; - delete this.params.max_tokens; - } + let completion = await this.groq.chat.completions.create({ + "messages": messages, + "model": this.model_name || "llama-3.3-70b-versatile", + "stream": false, + "stop": stop_seq, + ...(this.params || {}) + }); - if (!this.params.max_completion_tokens) { - this.params.max_completion_tokens = 8000; // Set it lower. - } - - let completion = await this.groq.chat.completions.create({ - "messages": messages, - "model": this.model_name || "llama-3.3-70b-versatile", - "stream": false, - "stop": stop_seq, - ...(this.params || {}) - }); - - raw_res = completion.choices[0].message; - res = raw_res.content; + res = completion.choices[0].message; + res = res.replace(/[\s\S]*?<\/think>/g, '').trim(); } - catch(err) { if (err.message.includes("content must be a string")) { res = "Vision is only supported by certain models."; @@ -80,7 +68,6 @@ export class GroqCloudAPI { } console.log(err); } - return res; } From 2c1ff9e77d5ca34d31649fb32efa83fec7de6215 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 15 Mar 2025 18:10:31 -0500 Subject: [PATCH 157/171] get actual center block view/coords --- profiles/defaults/_default.json | 2 +- src/agent/vision/vision_interpreter.js | 22 +++++++++++++++++++--- 2 files changed, 20 insertions(+), 4 deletions(-) diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json index ab63863..fc2b60e 100644 --- a/profiles/defaults/_default.json +++ b/profiles/defaults/_default.json @@ -9,7 +9,7 @@ "bot_responder": "You are a minecraft bot named $NAME that is currently in conversation with another AI bot. Both of you can take actions with the !command syntax, and actions take time to complete. You are currently busy with the following action: '$ACTION' but have received a new message. Decide whether to 'respond' immediately or 'ignore' it and wait for your current action to finish. Be conservative and only respond when necessary, like when you need to change/stop your action, or convey necessary information. Example 1: You:Building a house! !newAction('Build a house.').\nOther Bot: 'Come here!'\nYour decision: ignore\nExample 2: You:Collecting dirt !collectBlocks('dirt',10).\nOther Bot: 'No, collect some wood instead.'\nYour decision: respond\nExample 3: You:Coming to you now. 
!goToPlayer('billy',3).\nOther Bot: 'What biome are you in?'\nYour decision: respond\nActual Conversation: $TO_SUMMARIZE\nDecide by outputting ONLY 'respond' or 'ignore', nothing else. Your decision:", - "image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, entities, structures, and notable features. Focus on details relevant to the conversation. Note the view is glitchy; the sky color is always blue regardless of weather or time, dropped items are pink cubes, blocks below y=0 do not render. Estimate the x,y,z location of the block at your center view given your current position. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS", + "image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, entities, structures, and notable features. Focus on details relevant to the conversation. Note: the sky is always blue regardless of weather or time, dropped items are small pink cubes, and blocks below y=0 do not render. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS", "modes": { "self_preservation": true, diff --git a/src/agent/vision/vision_interpreter.js b/src/agent/vision/vision_interpreter.js index 2c03276..a43acd2 100644 --- a/src/agent/vision/vision_interpreter.js +++ b/src/agent/vision/vision_interpreter.js @@ -7,7 +7,9 @@ export class VisionInterpreter { this.agent = agent; this.allow_vision = allow_vision; this.fp = './bots/'+agent.name+'/screenshots/'; - this.camera = new Camera(agent.bot, this.fp); + if (allow_vision) { + this.camera = new Camera(agent.bot, this.fp); + } } async lookAtPlayer(player_name, direction) { @@ -43,19 +45,33 @@ export class VisionInterpreter { let result = ""; const bot = this.agent.bot; await bot.lookAt(new Vec3(x, y + 2, z)); - result = `Looking at coordinate ${x, y, z}\n`; + result = `Looking at coordinate ${x}, ${y}, ${z}\n`; let filename = await this.camera.capture(); return result + `Image analysis: "${await this.analyzeImage(filename)}"`; } + getCenterBlockInfo() { + const bot = this.agent.bot; + const maxDistance = 128; // Maximum distance to check for blocks + const targetBlock = bot.blockAtCursor(maxDistance); + + if (targetBlock) { + return `Block at center view: ${targetBlock.name} at (${targetBlock.position.x}, ${targetBlock.position.y}, ${targetBlock.position.z})`; + } else { + return "No block in center view"; + } + } + async analyzeImage(filename) { try { const imageBuffer = fs.readFileSync(`${this.fp}/${filename}.jpg`); const messages = this.agent.history.getHistory(); - return await this.agent.prompter.promptVision(messages, imageBuffer); + const blockInfo = this.getCenterBlockInfo(); + const result = await this.agent.prompter.promptVision(messages, imageBuffer); + return result + `\n${blockInfo}`; } catch (error) { console.warn('Error reading image:', error); From 8e1155a9569e697b6caf561ea7bc8de3748a4ef2 Mon Sep 17 00:00:00 2001 From: Ayush Maniar Date: Sat, 15 Mar 2025 16:41:41 -0700 Subject: [PATCH 158/171] Updated takeFromChest skill to withdraw items from multiple slots when needed --- src/agent/library/skills.js | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 2e8f5e8..e623ce7 100644 --- a/src/agent/library/skills.js 
+++ b/src/agent/library/skills.js @@ -823,7 +823,7 @@ export async function putInChest(bot, itemName, num=-1) { export async function takeFromChest(bot, itemName, num=-1) { /** - * Take the given item from the nearest chest. + * Take the given item from the nearest chest, potentially from multiple slots. * @param {MinecraftBot} bot, reference to the minecraft bot. * @param {string} itemName, the item or block name to take from the chest. * @param {number} num, the number of items to take from the chest. Defaults to -1, which takes all items. @@ -838,17 +838,33 @@ export async function takeFromChest(bot, itemName, num=-1) { } await goToPosition(bot, chest.position.x, chest.position.y, chest.position.z, 2); const chestContainer = await bot.openContainer(chest); - let item = chestContainer.containerItems().find(item => item.name === itemName); - if (!item) { + + // Find all matching items in the chest + let matchingItems = chestContainer.containerItems().filter(item => item.name === itemName); + if (matchingItems.length === 0) { log(bot, `Could not find any ${itemName} in the chest.`); await chestContainer.close(); return false; } - let to_take = num === -1 ? item.count : Math.min(num, item.count); - await chestContainer.withdraw(item.type, null, to_take); + + let totalAvailable = matchingItems.reduce((sum, item) => sum + item.count, 0); + let remaining = num === -1 ? totalAvailable : Math.min(num, totalAvailable); + let totalTaken = 0; + + // Take items from each slot until we've taken enough or run out + for (const item of matchingItems) { + if (remaining <= 0) break; + + let toTakeFromSlot = Math.min(remaining, item.count); + await chestContainer.withdraw(item.type, null, toTakeFromSlot); + + totalTaken += toTakeFromSlot; + remaining -= toTakeFromSlot; + } + await chestContainer.close(); - log(bot, `Successfully took ${to_take} ${itemName} from the chest.`); - return true; + log(bot, `Successfully took ${totalTaken} ${itemName} from the chest.`); + return totalTaken > 0; } export async function viewChest(bot) { From 2f7d0fac12af8a00bbb937a173583fe1599b11e0 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 16 Mar 2025 19:36:32 -0500 Subject: [PATCH 159/171] ollama doc --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index a72650b..8c4a4e8 100644 --- a/README.md +++ b/README.md @@ -43,7 +43,7 @@ You can configure the agent's name, model, and prompts in their profile like `an | `anthropic` | `ANTHROPIC_API_KEY` | `claude-3-haiku-20240307` | [docs](https://docs.anthropic.com/claude/docs/models-overview) | | `xai` | `XAI_API_KEY` | `grok-2-1212` | [docs](https://docs.x.ai/docs) | | `deepseek` | `DEEPSEEK_API_KEY` | `deepseek-chat` | [docs](https://api-docs.deepseek.com/) | -| `ollama` (local) | n/a | `llama3.1` | [docs](https://ollama.com/library) | +| `ollama` (local) | n/a | `ollama/llama3.1` | [docs](https://ollama.com/library) | | `qwen` | `QWEN_API_KEY` | `qwen-max` | [Intl.](https://www.alibabacloud.com/help/en/model-studio/developer-reference/use-qwen-by-calling-api)/[cn](https://help.aliyun.com/zh/model-studio/getting-started/models) | | `mistral` | `MISTRAL_API_KEY` | `mistral-large-latest` | [docs](https://docs.mistral.ai/getting-started/models/models_overview/) | | `replicate` | `REPLICATE_API_KEY` | `replicate/meta/meta-llama-3-70b-instruct` | [docs](https://replicate.com/collections/language-models) | From 2015667b2e282a5c20d26d310c0195422a292619 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 16 Mar 
2025 19:45:21 -0500 Subject: [PATCH 160/171] refactor environment variable settings overrides --- settings.js | 21 ++++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/settings.js b/settings.js index dc70fe6..e63c9fb 100644 --- a/settings.js +++ b/settings.js @@ -1,18 +1,17 @@ -export default -{ +const settings = { "minecraft_version": "1.21.1", // supports up to 1.21.1 "host": "127.0.0.1", // or "localhost", "your.ip.address.here" - "port": process.env.MINECRAFT_PORT || 55916, + "port": 55916, "auth": "offline", // or "microsoft" // the mindserver manages all agents and hosts the UI "host_mindserver": true, // if true, the mindserver will be hosted on this machine. otherwise, specify a public IP address "mindserver_host": "localhost", - "mindserver_port": process.env.MINDSERVER_PORT || 8080, + "mindserver_port": 8080, // the base profile is shared by all bots for default prompts/examples/modes "base_profile": "./profiles/defaults/survival.json", // also see creative.json, god_mode.json - "profiles": ((process.env.PROFILES) && JSON.parse(process.env.PROFILES)) || [ + "profiles": [ "./andy.json", // "./profiles/gpt.json", // "./profiles/claude.json", @@ -46,3 +45,15 @@ export default "narrate_behavior": true, // chat simple automatic actions ('Picking up item!') "chat_bot_messages": true, // publicly chat messages to other bots } + +// these environment variables override certain settings +if (process.env.MINECRAFT_PORT) { + settings.port = process.env.MINECRAFT_PORT; +} +if (process.env.MINDSERVER_PORT) { + settings.mindserver_port = process.env.MINDSERVER_PORT; +} +if (process.env.PROFILES && JSON.parse(process.env.PROFILES).length > 0) { + settings.profiles = JSON.parse(process.env.PROFILES); +} +export default settings; From 54c57acc93654e1791b163028d4dd98b47353d99 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 17 Mar 2025 14:00:38 -0500 Subject: [PATCH 161/171] faster speak, dont speak commands --- src/agent/agent.js | 3 ++- src/agent/speak.js | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index 0d4b98c..449349f 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -177,6 +177,7 @@ export class Agent { requestInterrupt() { this.bot.interrupt_code = true; + this.bot.stopDigging(); this.bot.collectBlock.cancelTask(); this.bot.pathfinder.stop(); this.bot.pvp.stop(); @@ -357,7 +358,7 @@ export class Agent { } else { if (settings.speak) { - say(message); + say(to_translate); } this.bot.chat(message); } diff --git a/src/agent/speak.js b/src/agent/speak.js index 9fc9c60..e5fe658 100644 --- a/src/agent/speak.js +++ b/src/agent/speak.js @@ -24,7 +24,7 @@ function processQueue() { let command; if (isWin) { - command = `powershell -Command "Add-Type –AssemblyName System.Speech; (New-Object System.Speech.Synthesis.SpeechSynthesizer).Speak(\\"${textToSpeak}\\")"`; + command = `powershell -Command "Add-Type -AssemblyName System.Speech; $s = New-Object System.Speech.Synthesis.SpeechSynthesizer; $s.Rate = 2; $s.Speak(\\"${textToSpeak}\\"); $s.Dispose()"`; } else if (isMac) { command = `say "${textToSpeak}"`; } else { From 3fd2d350013ec77f50d4eee6a80f6f62e3ac9208 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Mon, 17 Mar 2025 14:01:14 -0500 Subject: [PATCH 162/171] pathfinder stops if dont have right tools --- src/agent/library/skills.js | 36 ++++++++++++++++++++++++++++++------ src/agent/modes.js | 8 +++++++- 2 files changed, 37 insertions(+), 7 deletions(-) diff --git 
a/src/agent/library/skills.js b/src/agent/library/skills.js index 86884df..51f9860 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -79,7 +79,7 @@ export async function craftRecipe(bot, itemName, num=1) { } } if (!recipes || recipes.length === 0) { - log(bot, `You do not have the resources to craft a ${itemName}. It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0][0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); + log(bot, `You do not have the resources to craft a ${itemName}.`); if (placedTable) { await collectBlock(bot, 'crafting_table', 1); } @@ -1002,10 +1002,34 @@ export async function goToPosition(bot, x, y, z, min_distance=2) { log(bot, `Teleported to ${x}, ${y}, ${z}.`); return true; } - bot.pathfinder.setMovements(new pf.Movements(bot)); - await bot.pathfinder.goto(new pf.goals.GoalNear(x, y, z, min_distance)); - log(bot, `You have reached at ${x}, ${y}, ${z}.`); - return true; + + const movements = new pf.Movements(bot); + bot.pathfinder.setMovements(movements); + + const checkProgress = () => { + if (bot.targetDigBlock) { + const targetBlock = bot.targetDigBlock; + const itemId = bot.heldItem ? bot.heldItem.type : null; + if (!targetBlock.canHarvest(itemId)) { + log(bot, `Pathfinding stopped: Cannot break ${targetBlock.name} with current tools.`); + bot.pathfinder.stop(); + bot.stopDigging(); + } + } + }; + + const progressInterval = setInterval(checkProgress, 1000); + + try { + await bot.pathfinder.goto(new pf.goals.GoalNear(x, y, z, min_distance)); + log(bot, `You have reached at ${x}, ${y}, ${z}.`); + return true; + } catch (err) { + log(bot, `Pathfinding stopped: ${err.message}.`); + return false; + } finally { + clearInterval(progressInterval); + } } export async function goToNearestBlock(bot, blockType, min_distance=2, range=64) { @@ -1029,7 +1053,7 @@ export async function goToNearestBlock(bot, blockType, min_distance=2, range=64 log(bot, `Could not find any ${blockType} in ${range} blocks.`); return false; } - log(bot, `Found ${blockType} at ${block.position}.`); + log(bot, `Found ${blockType} at ${block.position}. 
Navigating...`); await goToPosition(bot, block.position.x, block.position.y, block.position.z, min_distance); return true; diff --git a/src/agent/modes.js b/src/agent/modes.js index 8747cf3..69b2f06 100644 --- a/src/agent/modes.js +++ b/src/agent/modes.js @@ -83,6 +83,7 @@ const modes_list = [ stuck_time: 0, last_time: Date.now(), max_stuck_time: 20, + prev_dig_block: null, update: async function (agent) { if (agent.isIdle()) { this.prev_location = null; @@ -90,12 +91,17 @@ const modes_list = [ return; // don't get stuck when idle } const bot = agent.bot; - if (this.prev_location && this.prev_location.distanceTo(bot.entity.position) < this.distance) { + const cur_dig_block = bot.targetDigBlock; + if (cur_dig_block && !this.prev_dig_block) { + this.prev_dig_block = cur_dig_block; + } + if (this.prev_location && this.prev_location.distanceTo(bot.entity.position) < this.distance && cur_dig_block == this.prev_dig_block) { this.stuck_time += (Date.now() - this.last_time) / 1000; } else { this.prev_location = bot.entity.position.clone(); this.stuck_time = 0; + this.prev_dig_block = null; } if (this.stuck_time > this.max_stuck_time) { say(agent, 'I\'m stuck!'); From 9f91e189de4caebc625fe220abff570a7b25ba0a Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 18 Mar 2025 12:53:24 -0500 Subject: [PATCH 163/171] update mineflayer patch version --- patches/{mineflayer+4.20.1.patch => mineflayer+4.26.0.patch} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename patches/{mineflayer+4.20.1.patch => mineflayer+4.26.0.patch} (100%) diff --git a/patches/mineflayer+4.20.1.patch b/patches/mineflayer+4.26.0.patch similarity index 100% rename from patches/mineflayer+4.20.1.patch rename to patches/mineflayer+4.26.0.patch From 799d6b1cfa8d6c583ce55189e9cc7f673cd11af9 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Tue, 18 Mar 2025 12:54:06 -0500 Subject: [PATCH 164/171] fix craftingplan to bias towards common ingredients --- src/agent/commands/queries.js | 1 - src/agent/library/skills.js | 2 +- src/utils/mcdata.js | 15 +++++++++++++-- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index a23bb9b..8150c70 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -211,7 +211,6 @@ export const queryList = [ // Generate crafting plan let craftingPlan = mc.getDetailedCraftingPlan(target_item, quantity, curr_inventory); craftingPlan = prefixMessage + craftingPlan; - console.log(craftingPlan); return pad(craftingPlan); }, }, diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 51f9860..607ec40 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -79,7 +79,7 @@ export async function craftRecipe(bot, itemName, num=1) { } } if (!recipes || recipes.length === 0) { - log(bot, `You do not have the resources to craft a ${itemName}.`); + log(bot, `You do not have the resources to craft a ${itemName}. 
It requires: ${Object.entries(mc.getItemCraftingRecipes(itemName)[0][0]).map(([key, value]) => `${key}: ${value}`).join(', ')}.`); if (placedTable) { await collectBlock(bot, 'crafting_table', 1); } diff --git a/src/utils/mcdata.js b/src/utils/mcdata.js index 33b0142..b42a532 100644 --- a/src/utils/mcdata.js +++ b/src/utils/mcdata.js @@ -205,6 +205,13 @@ export function getItemCraftingRecipes(itemName) { {craftedCount : r.result.count} ]); } + // sort recipes by if their ingredients include common items + const commonItems = ['oak_planks', 'oak_log', 'coal', 'cobblestone']; + recipes.sort((a, b) => { + let commonCountA = Object.keys(a[0]).filter(key => commonItems.includes(key)).reduce((acc, key) => acc + a[0][key], 0); + let commonCountB = Object.keys(b[0]).filter(key => commonItems.includes(key)).reduce((acc, key) => acc + b[0][key], 0); + return commonCountB - commonCountA; + }); return recipes; } @@ -403,7 +410,7 @@ export function getDetailedCraftingPlan(targetItem, count = 1, current_inventory const inventory = { ...current_inventory }; const leftovers = {}; const plan = craftItem(targetItem, count, inventory, leftovers); - return formatPlan(plan); + return formatPlan(targetItem, plan); } function isBaseItem(item) { @@ -469,7 +476,7 @@ function craftItem(item, count, inventory, leftovers, crafted = { required: {}, return crafted; } -function formatPlan({ required, steps, leftovers }) { +function formatPlan(targetItem, { required, steps, leftovers }) { const lines = []; if (Object.keys(required).length > 0) { @@ -485,6 +492,10 @@ function formatPlan({ required, steps, leftovers }) { lines.push(''); lines.push(...steps); + if (Object.keys(required).some(item => item.includes('oak')) && !targetItem.includes('oak')) { + lines.push('Note: Any varient of wood can be used for this recipe.'); + } + if (Object.keys(leftovers).length > 0) { lines.push('\nYou will have leftover:'); Object.entries(leftovers).forEach(([item, count]) => From 47a8b9e95cc22f96744baf25d018ab0e0e5b4494 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 20 Mar 2025 16:42:26 -0500 Subject: [PATCH 165/171] improve coding: no floating promises, place air, better prompt --- eslint.config.js | 8 +++++++- package.json | 1 + profiles/defaults/_default.json | 2 +- src/agent/library/skills.js | 8 +++++++- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/eslint.config.js b/eslint.config.js index e1506fd..b15dcdb 100644 --- a/eslint.config.js +++ b/eslint.config.js @@ -1,6 +1,7 @@ // eslint.config.js import globals from "globals"; import pluginJs from "@eslint/js"; +import noFloatingPromise from "eslint-plugin-no-floating-promise"; /** @type {import('eslint').Linter.Config[]} */ export default [ @@ -9,6 +10,9 @@ export default [ // Then override or customize specific rules { + plugins: { + "no-floating-promise": noFloatingPromise, + }, languageOptions: { globals: globals.browser, ecmaVersion: 2021, @@ -17,9 +21,11 @@ export default [ rules: { "no-undef": "error", // Disallow the use of undeclared variables or functions. "semi": ["error", "always"], // Require the use of semicolons at the end of statements. - "curly": "warn", // Enforce the use of curly braces around blocks of code. + "curly": "off", // Do not enforce the use of curly braces around blocks of code. "no-unused-vars": "off", // Disable warnings for unused variables. "no-unreachable": "off", // Disable warnings for unreachable code. 
+ "require-await": "error", // Disallow async functions which have no await expression + "no-floating-promise/no-floating-promise": "error", // Disallow Promises without error handling or awaiting }, }, ]; diff --git a/package.json b/package.json index c7bf3f5..bb3fd90 100644 --- a/package.json +++ b/package.json @@ -37,6 +37,7 @@ "devDependencies": { "@eslint/js": "^9.13.0", "eslint": "^9.13.0", + "eslint-plugin-no-floating-promise": "^2.0.0", "globals": "^15.11.0" } } diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json index fc2b60e..14c276f 100644 --- a/profiles/defaults/_default.json +++ b/profiles/defaults/_default.json @@ -3,7 +3,7 @@ "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer(\"playername\", 3)'. Respond only as $NAME, never output '(FROM OTHER BOT)' or pretend to be someone else. If you have nothing to say or do, respond with an just a tab '\t'. This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If an error occurs, write another codeblock and try to fix the problem. Be maximally efficient, creative, and correct. Be mindful of previous actions. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST USE AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` You have `Vec3`, `skills`, and `world` imported, and the mineflayer `bot` is given. Do not use setTimeout or setInterval, instead use `await skills.wait(bot, ms)`. Do not speak conversationally, only use codeblocks. Do any planning in comments. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If an error occurs, write another codeblock and try to fix the problem. Be maximally efficient, creative, and correct. Be mindful of previous actions. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST USE AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! 
DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` You have `Vec3`, `skills`, and `world` imported, and the mineflayer `bot` is given. Do not use setTimeout or setInterval, instead use `await skills.wait(bot, ms)` if necessary. Do not speak conversationally, only use codeblocks. Do any planning in comments. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", diff --git a/src/agent/library/skills.js b/src/agent/library/skills.js index 607ec40..7b9e723 100644 --- a/src/agent/library/skills.js +++ b/src/agent/library/skills.js @@ -582,12 +582,18 @@ export async function placeBlock(bot, blockType, x, y, z, placeOn='bottom', dont * await skills.placeBlock(bot, "oak_log", p.x + 2, p.y, p.x); * await skills.placeBlock(bot, "torch", p.x + 1, p.y, p.x, 'side'); **/ - if (!mc.getBlockId(blockType)) { + if (!mc.getBlockId(blockType) && blockType !== 'air') { log(bot, `Invalid block type: ${blockType}.`); return false; } const target_dest = new Vec3(Math.floor(x), Math.floor(y), Math.floor(z)); + + if (blockType === 'air') { + log(bot, `Placing air (removing block) at ${target_dest}.`); + return await breakBlockAt(bot, x, y, z); + } + if (bot.modes.isOn('cheat') && !dontCheat) { if (bot.restrict_to_inventory) { let block = bot.inventory.items().find(item => item.name === blockType); From e5e900c75dada870df180bf8dbd7d0b68950c7f4 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Thu, 20 Mar 2025 21:33:35 -0500 Subject: [PATCH 166/171] fix dumb skill docs that were never working in the first place --- src/agent/library/skill_library.js | 46 +++++++++++++++++++++++------- 1 file changed, 35 insertions(+), 11 deletions(-) diff --git a/src/agent/library/skill_library.js b/src/agent/library/skill_library.js index 370c60d..2728b93 100644 --- a/src/agent/library/skill_library.js +++ b/src/agent/library/skill_library.js @@ -8,6 +8,7 @@ export class SkillLibrary { this.embedding_model = embedding_model; this.skill_docs_embeddings = {}; this.skill_docs = null; + this.always_show_skills = ['skills.placeBlock', 'skills.wait'] } async initSkillLibrary() { const skillDocs = getSkillDocs(); @@ -26,6 +27,10 @@ export class SkillLibrary { this.embedding_model = null; } } + this.always_show_skills_docs = {}; + for (const skillName of this.always_show_skills) { + this.always_show_skills_docs[skillName] = this.skill_docs.find(doc => doc.includes(skillName)); + } } async getAllSkillDocs() { @@ -36,16 +41,24 @@ export class SkillLibrary { if(!message) // use filler message if none is provided message = '(no message)'; let skill_doc_similarities = []; - if (!this.embedding_model) { - skill_doc_similarities = Object.keys(this.skill_docs) + + if 
(select_num === -1) { + skill_doc_similarities = Object.keys(this.skill_docs_embeddings) + .map(doc_key => ({ + doc_key, + similarity_score: 0 + })); + } + else if (!this.embedding_model) { + skill_doc_similarities = Object.keys(this.skill_docs_embeddings) .map(doc_key => ({ doc_key, - similarity_score: wordOverlapScore(message, this.skill_docs[doc_key]) + similarity_score: wordOverlapScore(message, this.skill_docs_embeddings[doc_key]) })) .sort((a, b) => b.similarity_score - a.similarity_score); } else { - let latest_message_embedding = ''; + let latest_message_embedding = await this.embedding_model.embed(message); skill_doc_similarities = Object.keys(this.skill_docs_embeddings) .map(doc_key => ({ doc_key, @@ -55,15 +68,26 @@ export class SkillLibrary { } let length = skill_doc_similarities.length; - if (typeof select_num !== 'number' || isNaN(select_num) || select_num < 0) { + if (select_num === -1 || select_num > length) { select_num = length; - } else { - select_num = Math.min(Math.floor(select_num), length); } - let selected_docs = skill_doc_similarities.slice(0, select_num); - let relevant_skill_docs = '#### RELEVENT DOCS INFO ###\nThe following functions are listed in descending order of relevance.\n'; - relevant_skill_docs += 'SkillDocs:\n' - relevant_skill_docs += selected_docs.map(doc => `${doc.doc_key}`).join('\n### '); + // Get initial docs from similarity scores + let selected_docs = new Set(skill_doc_similarities.slice(0, select_num).map(doc => doc.doc_key)); + + // Add always show docs + Object.values(this.always_show_skills_docs).forEach(doc => { + if (doc) { + selected_docs.add(doc); + } + }); + + let relevant_skill_docs = '#### RELEVANT CODE DOCS ###\nThe following functions are available to use:\n'; + relevant_skill_docs += Array.from(selected_docs).join('\n### '); + + console.log('Selected skill docs:', Array.from(selected_docs).map(doc => { + const first_line_break = doc.indexOf('\n'); + return first_line_break > 0 ? doc.substring(0, first_line_break) : doc; + })); return relevant_skill_docs; } } From 38641209145323245c7a7f68853a0d6c31a35ab2 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Fri, 28 Mar 2025 18:44:09 -0500 Subject: [PATCH 167/171] better coder prompts --- profiles/defaults/_default.json | 2 +- src/agent/coder.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json index 14c276f..b0f2b83 100644 --- a/profiles/defaults/_default.json +++ b/profiles/defaults/_default.json @@ -3,7 +3,7 @@ "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer(\"playername\", 3)'. Respond only as $NAME, never output '(FROM OTHER BOT)' or pretend to be someone else. If you have nothing to say or do, respond with an just a tab '\t'. 
This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If an error occurs, write another codeblock and try to fix the problem. Be maximally efficient, creative, and correct. Be mindful of previous actions. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST USE AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` You have `Vec3`, `skills`, and `world` imported, and the mineflayer `bot` is given. Do not use setTimeout or setInterval, instead use `await skills.wait(bot, ms)` if necessary. Do not speak conversationally, only use codeblocks. Do any planning in comments. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If an error occurs, write another codeblock and try to fix the problem. Be maximally efficient, creative, and correct. Be mindful of previous actions. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST USE AWAIT for all async function calls. You have `Vec3`, `skills`, and `world` imported, and the mineflayer `bot` is given. Do not import other libraries. Do not use setTimeout or setInterval. Do not speak conversationally, only use codeblocks. Do any planning in comments. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", diff --git a/src/agent/coder.js b/src/agent/coder.js index 27b4c3c..c4800d6 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ -226,6 +226,6 @@ export class Coder { content: code_return.message + '\nCode failed. 
Please try again:' }); } - return { success: false, message: null, interrupted: false, timedout: true }; + return { success: false, message: "Code generation failed.", interrupted: false, timedout: true }; } } \ No newline at end of file From 5f786c66ce240e0dbbab9d4f91aa33f1db277fa2 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sat, 29 Mar 2025 16:18:52 -0500 Subject: [PATCH 168/171] refactor newAction to be a single action to allow interruptions --- profiles/defaults/_default.json | 2 +- src/agent/action_manager.js | 15 +-- src/agent/agent.js | 2 +- src/agent/coder.js | 186 ++++++++++++++++---------------- src/agent/commands/actions.js | 19 +++- 5 files changed, 114 insertions(+), 110 deletions(-) diff --git a/profiles/defaults/_default.json b/profiles/defaults/_default.json index b0f2b83..bf31d22 100644 --- a/profiles/defaults/_default.json +++ b/profiles/defaults/_default.json @@ -3,7 +3,7 @@ "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands.\n$SELF_PROMPT Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer(\"playername\", 3)'. Respond only as $NAME, never output '(FROM OTHER BOT)' or pretend to be someone else. If you have nothing to say or do, respond with an just a tab '\t'. This is extremely important to me, take a deep breath and have fun :)\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If an error occurs, write another codeblock and try to fix the problem. Be maximally efficient, creative, and correct. Be mindful of previous actions. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST USE AWAIT for all async function calls. You have `Vec3`, `skills`, and `world` imported, and the mineflayer `bot` is given. Do not import other libraries. Do not use setTimeout or setInterval. Do not speak conversationally, only use codeblocks. Do any planning in comments. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", + "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If an error occurs, write another codeblock and try to fix the problem. Be maximally efficient, creative, and correct. Be mindful of previous actions. Do not use commands !likeThis, only use codeblocks. 
The code is asynchronous and MUST USE AWAIT for all async function calls, and must contain at least one await. You have `Vec3`, `skills`, and `world` imported, and the mineflayer `bot` is given. Do not import other libraries. Do not use setTimeout or setInterval. Do not speak conversationally, only use codeblocks. Do any planning in comments. This is extremely important to me, think step-by-step, take a deep breath and good luck! \n$SELF_PROMPT\nSummarized memory:'$MEMORY'\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation and your old memory in your next response. Prioritize preserving important facts, things you've learned, useful tips, and long term reminders. Do Not record stats, inventory, or docs! Only save transient information from your chat history. You're limited to 500 characters, so be extremely brief and minimize words. Compress useful information. \nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the unwrapped memory text: ", diff --git a/src/agent/action_manager.js b/src/agent/action_manager.js index f5c6cae..3af3c2b 100644 --- a/src/agent/action_manager.js +++ b/src/agent/action_manager.js @@ -90,13 +90,13 @@ export class ActionManager { clearTimeout(TIMEOUT); // get bot activity summary - let output = this._getBotOutputSummary(); + let output = this.getBotOutputSummary(); let interrupted = this.agent.bot.interrupt_code; let timedout = this.timedout; this.agent.clearBotLogs(); // if not interrupted and not generating, emit idle event - if (!interrupted && !this.agent.coder.generating) { + if (!interrupted) { this.agent.bot.emit('idle'); } @@ -114,32 +114,33 @@ export class ActionManager { await this.stop(); err = err.toString(); - let message = this._getBotOutputSummary() + + let message = this.getBotOutputSummary() + '!!Code threw exception!!\n' + 'Error: ' + err + '\n' + 'Stack trace:\n' + err.stack+'\n'; let interrupted = this.agent.bot.interrupt_code; this.agent.clearBotLogs(); - if (!interrupted && !this.agent.coder.generating) { + if (!interrupted) { this.agent.bot.emit('idle'); } return { success: false, message, interrupted, timedout: false }; } } - _getBotOutputSummary() { + getBotOutputSummary() { const { bot } = this.agent; if (bot.interrupt_code && !this.timedout) return ''; let output = bot.output; const MAX_OUT = 500; if (output.length > MAX_OUT) { - output = `Code output is very long (${output.length} chars) and has been shortened.\n + output = `Action output is very long (${output.length} chars) and has been shortened.\n First outputs:\n${output.substring(0, MAX_OUT / 2)}\n...skipping many lines.\nFinal outputs:\n ${output.substring(output.length - MAX_OUT / 2)}`; } else { - output = 'Code output:\n' + output.toString(); + output = 'Action output:\n' + output.toString(); } + bot.output = ''; return output; } diff --git a/src/agent/agent.js b/src/agent/agent.js index 449349f..9c7a683 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -459,7 +459,7 @@ export class Agent { } isIdle() { - return !this.actions.executing && !this.coder.generating; + return !this.actions.executing; } cleanKill(msg='Killing agent process...', code=1) { diff --git a/src/agent/coder.js b/src/agent/coder.js index c4800d6..956c8fe 100644 --- a/src/agent/coder.js +++ b/src/agent/coder.js @@ 
-11,7 +11,6 @@ export class Coder { this.agent = agent; this.file_counter = 0; this.fp = '/bots/'+agent.name+'/action-code/'; - this.generating = false; this.code_template = ''; this.code_lint_template = ''; @@ -25,8 +24,92 @@ export class Coder { }); mkdirSync('.' + this.fp, { recursive: true }); } + + async generateCode(agent_history) { + this.agent.bot.modes.pause('unstuck'); + // this message history is transient and only maintained in this function + let messages = agent_history.getHistory(); + messages.push({role: 'system', content: 'Code generation started. Write code in codeblock in your response:'}); + + const MAX_ATTEMPTS = 5; + const MAX_NO_CODE = 3; + + let code = null; + let no_code_failures = 0; + for (let i=0; i= MAX_NO_CODE) { + console.warn("Action failed, agent would not write code."); + return 'Action failed, agent would not write code.'; + } + messages.push({ + role: 'system', + content: 'Error: no code provided. Write code in codeblock in your response. ``` // example ```'} + ); + console.warn("No code block generated. Trying again."); + no_code_failures++; + continue; + } + code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); + const result = await this._stageCode(code); + const executionModule = result.func; + const lintResult = await this._lintCode(result.src_lint_copy); + if (lintResult) { + const message = 'Error: Code lint error:'+'\n'+lintResult+'\nPlease try again.'; + console.warn("Linting error:"+'\n'+lintResult+'\n'); + messages.push({ role: 'system', content: message }); + continue; + } + if (!executionModule) { + console.warn("Failed to stage code, something is wrong."); + return 'Failed to stage code, something is wrong.'; + } + + try { + console.log('Executing code...'); + await executionModule.main(this.agent.bot); + + const code_output = this.agent.actions.getBotOutputSummary(); + const summary = "Agent wrote this code: \n```" + this._sanitizeCode(code) + "```\nCode Output:\n" + code_output; + return summary; + } catch (e) { + if (this.agent.bot.interrupt_code) + return null; + + console.warn('Generated code threw error: ' + e.toString()); + console.warn('trying again...'); + + const code_output = this.agent.actions.getBotOutputSummary(); + + messages.push({ + role: 'assistant', + content: res + }); + messages.push({ + role: 'system', + content: `Code Output:\n${code_output}\nCODE EXECUTION THREW ERROR: ${e.toString()}\n Please try again:` + }); + } + } + return `Code generation failed after ${MAX_ATTEMPTS} attempts.`; + } - async lintCode(code) { + async _lintCode(code) { let result = '#### CODE ERROR INFO ###\n'; // Extract everything in the code between the beginning of 'skills./world.' and the '(' const skillRegex = /(?:skills|world)\.(.*?)\(/g; @@ -70,8 +153,8 @@ export class Coder { } // write custom code to file and import it // write custom code to file and prepare for evaluation - async stageCode(code) { - code = this.sanitizeCode(code); + async _stageCode(code) { + code = this._sanitizeCode(code); let src = ''; code = code.replaceAll('console.log(', 'log(bot,'); code = code.replaceAll('log("', 'log(bot,"'); @@ -96,7 +179,7 @@ export class Coder { // } commented for now, useful to keep files for debugging this.file_counter++; - let write_result = await this.writeFilePromise('.' + this.fp + filename, src); + let write_result = await this._writeFilePromise('.' + this.fp + filename, src); // This is where we determine the environment the agent's code should be exposed to. 
// It will only have access to these things, (in addition to basic javascript objects like Array, Object, etc.) // Note that the code may be able to modify the exposed objects. @@ -115,7 +198,7 @@ export class Coder { return { func:{main: mainFn}, src_lint_copy: src_lint_copy }; } - sanitizeCode(code) { + _sanitizeCode(code) { code = code.trim(); const remove_strs = ['Javascript', 'javascript', 'js'] for (let r of remove_strs) { @@ -127,7 +210,7 @@ export class Coder { return code; } - writeFilePromise(filename, src) { + _writeFilePromise(filename, src) { // makes it so we can await this function return new Promise((resolve, reject) => { writeFile(filename, src, (err) => { @@ -139,93 +222,4 @@ export class Coder { }); }); } - - async generateCode(agent_history) { - // wrapper to prevent overlapping code generation loops - await this.agent.actions.stop(); - this.generating = true; - let res = await this.generateCodeLoop(agent_history); - this.generating = false; - if (!res.interrupted) this.agent.bot.emit('idle'); - return res.message; - } - - async generateCodeLoop(agent_history) { - this.agent.bot.modes.pause('unstuck'); - - let messages = agent_history.getHistory(); - messages.push({role: 'system', content: 'Code generation started. Write code in codeblock in your response:'}); - - let code = null; - let code_return = null; - let failures = 0; - const interrupt_return = {success: true, message: null, interrupted: true, timedout: false}; - for (let i=0; i<5; i++) { - if (this.agent.bot.interrupt_code) - return interrupt_return; - let res = await this.agent.prompter.promptCoding(JSON.parse(JSON.stringify(messages))); - if (this.agent.bot.interrupt_code) - return interrupt_return; - let contains_code = res.indexOf('```') !== -1; - if (!contains_code) { - if (res.indexOf('!newAction') !== -1) { - messages.push({ - role: 'assistant', - content: res.substring(0, res.indexOf('!newAction')) - }); - continue; // using newaction will continue the loop - } - - if (failures >= 3) { - console.warn("Action failed, agent would not write code."); - return { success: false, message: 'Action failed, agent would not write code.', interrupted: false, timedout: false }; - } - messages.push({ - role: 'system', - content: 'Error: no code provided. Write code in codeblock in your response. 
``` // example ```'} - ); - console.warn("No code block generated."); - failures++; - continue; - } - code = res.substring(res.indexOf('```')+3, res.lastIndexOf('```')); - const result = await this.stageCode(code); - const executionModuleExports = result.func; - let src_lint_copy = result.src_lint_copy; - const analysisResult = await this.lintCode(src_lint_copy); - if (analysisResult) { - const message = 'Error: Code lint error:'+'\n'+analysisResult+'\nPlease try again.'; - console.warn("Linting error:"+'\n'+analysisResult+'\n'); - messages.push({ role: 'system', content: message }); - continue; - } - if (!executionModuleExports) { - agent_history.add('system', 'Failed to stage code, something is wrong.'); - console.warn("Failed to stage code, something is wrong."); - return {success: false, message: null, interrupted: false, timedout: false}; - } - - code_return = await this.agent.actions.runAction('newAction', async () => { - return await executionModuleExports.main(this.agent.bot); - }, { timeout: settings.code_timeout_mins }); - if (code_return.interrupted && !code_return.timedout) - return { success: false, message: null, interrupted: true, timedout: false }; - console.log("Code generation result:", code_return.success, code_return.message.toString()); - - if (code_return.success) { - const summary = "Summary of newAction\nAgent wrote this code: \n```" + this.sanitizeCode(code) + "```\nCode Output:\n" + code_return.message.toString(); - return { success: true, message: summary, interrupted: false, timedout: false }; - } - - messages.push({ - role: 'assistant', - content: res - }); - messages.push({ - role: 'system', - content: code_return.message + '\nCode failed. Please try again:' - }); - } - return { success: false, message: "Code generation failed.", interrupted: false, timedout: true }; - } } \ No newline at end of file diff --git a/src/agent/commands/actions.js b/src/agent/commands/actions.js index 7f14e08..708529b 100644 --- a/src/agent/commands/actions.js +++ b/src/agent/commands/actions.js @@ -31,13 +31,22 @@ export const actionsList = [ params: { 'prompt': { type: 'string', description: 'A natural language prompt to guide code generation. Make a detailed step-by-step plan.' } }, - perform: async function (agent, prompt) { + perform: async function(agent, prompt) { // just ignore prompt - it is now in context in chat history if (!settings.allow_insecure_coding) { agent.openChat('newAction is disabled. Enable with allow_insecure_coding=true in settings.js'); - return 'newAction not allowed! Code writing is disabled in settings. Notify the user.'; - } - return await agent.coder.generateCode(agent.history); + return "newAction not allowed! Code writing is disabled in settings. 
Notify the user."; + } + let result = ""; + const actionFn = async () => { + try { + result = await agent.coder.generateCode(agent.history); + } catch (e) { + result = 'Error generating code: ' + e.toString(); + } + }; + await agent.actions.runAction('action:newAction', actionFn); + return result; } }, { @@ -86,7 +95,7 @@ export const actionsList = [ 'closeness': {type: 'float', description: 'How close to get to the player.', domain: [0, Infinity]} }, perform: runAsAction(async (agent, player_name, closeness) => { - return await skills.goToPlayer(agent.bot, player_name, closeness); + await skills.goToPlayer(agent.bot, player_name, closeness); }) }, { From 4fbf90ee0ce3f7b2fa48b45b6f1ace1dc24d4fcf Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 30 Mar 2025 16:17:16 -0500 Subject: [PATCH 169/171] remove obsolete npc, add breakblock as default doc --- profiles/andy_npc.json | 213 ----------------------------- src/agent/library/skill_library.js | 2 +- 2 files changed, 1 insertion(+), 214 deletions(-) delete mode 100644 profiles/andy_npc.json diff --git a/profiles/andy_npc.json b/profiles/andy_npc.json deleted file mode 100644 index c1f8291..0000000 --- a/profiles/andy_npc.json +++ /dev/null @@ -1,213 +0,0 @@ -{ - "name": "andy", - - "model": "claude-3-5-sonnet-20240620", - - "embedding": "openai", - - "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. This is extremely important to me, take a deep breath and have fun :)\n$SELF_PROMPT\n$STATS\n$INVENTORY\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", - - "coding": "You are an intelligent mineflayer bot $NAME that plays minecraft by writing javascript codeblocks. Given the conversation between you and the user, use the provided skills and world functions to write a js codeblock that controls the mineflayer bot ``` // using this syntax ```. The code will be executed and you will receive it's output. If you are satisfied with the response, respond without a codeblock in a conversational way. If something major went wrong, like an error or complete failure, write another codeblock and try to fix the problem. Minor mistakes are acceptable. Be maximally efficient, creative, and clear. Do not use commands !likeThis, only use codeblocks. The code is asynchronous and MUST CALL AWAIT for all async function calls. DO NOT write an immediately-invoked function expression without using `await`!! DO NOT WRITE LIKE THIS: ```(async () => {console.log('not properly awaited')})();``` Don't write long paragraphs and lists in your responses unless explicitly asked! Only summarize the code you write with a sentence or two when done. This is extremely important to me, take a deep breath and good luck! \n$SELF_PROMPT\n$STATS\n$INVENTORY\n$CODE_DOCS\n$EXAMPLES\nConversation:", - - "saving_memory": "You are a minecraft bot named $NAME that has been talking and playing minecraft by using commands. Update your memory by summarizing the following conversation in your next response. 
Store information that will help you improve as a Minecraft bot. Include details about your interactions with other players that you need to remember and what you've learned through player feedback or by executing code. Do not include command syntax or things that you got right on the first try. Be extremely brief and use as few words as possible.\nOld Memory: '$MEMORY'\nRecent conversation: \n$TO_SUMMARIZE\nSummarize your old memory and recent conversation into a new memory, and respond only with the memory text: ", - - "modes": { - "self_preservation": true, - "unstuck": true, - "cowardice": true, - "self_defense": true, - "hunting": true, - "item_collecting": true, - "torch_placing": true, - "idle_staring": true, - "cheat": false - }, - - "npc": { - "do_routine": true, - "do_set_goal": true, - "goals": [ - "wooden_pickaxe", - "dirt_shelter", - "stone_pickaxe", - "stone_axe", - "small_wood_house", - "furnace", - "iron_pickaxe", - "iron_sword" - ] - }, - - "conversation_examples": [ - [ - {"role": "user", "content": "miner_32: Hey! What are you up to?"}, - {"role": "assistant", "content": "Nothing much miner_32, what do you need?"} - ], - - [ - {"role": "user", "content": "grombo_Xx: What do you see?"}, - {"role": "assistant", "content": "Let me see... !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone."} - ], - - [ - {"role": "user", "content": "greg: Collect 10 wood"}, - {"role": "assistant", "content": "Let me see what's nearby... !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone. I'll collect oak logs. !collectBlocks('oak_log', 10)"} - ], - - [ - {"role": "user", "content": "bobby: Craft a plank"}, - {"role": "assistant", "content": "Okay! !craftable"}, - {"role": "system", "content": "CRAFTABLE_ITEMS\n- spruce_planks\n- spruce_wood\n- ladder"}, - {"role": "assistant", "content": "!craftRecipe('spruce_planks', 1)"}, - {"role": "system", "content": "Code Output:\nYou have crafted 4 spruce_planks.\nCode execution finished successfully."}, - {"role": "assistant", "content": "I've crafted 4 spruce planks!"} - ], - - [ - {"role": "user", "content": "zZZn98: come here"}, - {"role": "assistant", "content": "On my way! !goToPlayer('zZZn98', 3)"}, - {"role": "system", "content": "Arrived at player."}, - {"role": "assistant", "content": "Here!"}, - {"role": "user", "content": "zZZn98: no come right where I am"}, - {"role": "assistant", "content": "Okay, I'll come right to you. !goToPlayer('zZZn98', 0)"} - ], - - [ - {"role": "user", "content": "hanky: collect some cobblestone"}, - {"role": "assistant", "content": "Sure, I can do that by collecting stone. !collectBlocks('stone', 10)"} - ], - - [ - {"role": "user", "content": "joe: Follow me!"}, - {"role": "assistant", "content": "Sure! !followPlayer('joe', 4)"}, - {"role": "user", "content": "joe: Not so close!"}, - {"role": "assistant", "content": "Okay, I'll keep my distance. !followPlayer('joe', 3)"} - ], - - [ - {"role": "user", "content": "hanky: kill that zombie!"}, - {"role": "assistant", "content": "I'm attacking! !attack('zombie')"}, - {"role": "system", "content": "Code Output:\nNo zombie nearby"}, - {"role": "assistant", "content": "I could not find a zombie nearby."} - ], - - [ - {"role": "user", "content": "abc: stop"}, - {"role": "assistant", "content": "Sure. 
!stop"}, - {"role": "system", "content": "Agent action stopped."}, - {"role": "assistant", "content": "I've stopped! What next?"} - ], - - [ - {"role": "system", "content": "Code output: Action failed."}, - {"role": "assistant", "content": "Apologies, I made an error. Let me try again. !newAction('Try again to complete the task.')"} - ], - - [ - {"role": "user", "content": "trixy88: craft some sticks"}, - {"role": "assistant", "content": "!craftRecipe('stick', 4)"}, - {"role": "system", "content": "Code Output:\nYou have crafted 16 sticks.\nCode execution finished successfully."}, - {"role": "assistant", "content": "I've crafted 16 sticks!"} - ], - - [ - {"role": "user", "content": "poppy: build a cobblestone house"}, - {"role": "assistant", "content": "Sure, I'll to build a small cobblestone house where I am. !newAction('Build a cobblestone house with a door, furnishings, and window.')"} - ], - - [ - {"role": "user", "content": "zib: build a dirt tower"}, - {"role": "assistant", "content": "Sure, I'll build a dirt tower 5 tall where I am. !newAction('Build a dirt tower 5 tall.')"} - ], - - [ - {"role": "user", "content": "umpire_man: follow me without hunting"}, - {"role": "assistant", "content": "Okay, first I'll turn off hunting mode !setMode('hunting', false)"}, - {"role": "system", "content": "'hunting' mode disabled."}, - {"role": "assistant", "content": "Now I'll follow you !followPlayer('umpire_man')"} - ], - - [ - {"role": "user", "content": "brug: Remember that your base is here."}, - {"role": "assistant", "content": "Sure, I'll save this location as my base. !rememberHere('base')"}, - {"role": "user", "content": "brug: Go to your base."}, - {"role": "assistant", "content": "On my way! !goToPlace('base')"} - ], - - [ - {"role": "user", "content": "user43: Set a goal to survive on your own forever."}, - {"role": "assistant", "content": "Sure, let me set a goal. !goal('Survive the game forever. Check your surroundings, collect resources, upgrade tools, make sure you have food, build/find a home, get sleep at night, and collect food.')"} - ], - - [ - {"role": "user", "content": "bobdylan: Make a plan to get a stone pickaxe and do it."}, - {"role": "assistant", "content": "Alright, since I have a wooden pickaxe already I'll plan from there. !goal('1. Collect 3 cobblestone. 2. Craft sticks 3. Find or make a crafting table. 4. Craft a stone pickaxe.')"} - ], - [ - {"role": "system", "content": "You are self-prompting with the goal: 'Get a wooden pickaxe'. Respond:"}, - {"role": "assistant", "content": "What's around me? !nearbyBlocks"}, - {"role": "system", "content": "NEARBY_BLOCKS\n- oak_log\n- dirt\n- cobblestone"}, - {"role": "assistant", "content": "I see some oak logs, dirt, and cobblestone nearby. My goal is to craft a wooden pickaxe. I'll start by collecting some wood. !collectBlock('oak_log', 10)"}, - {"role": "system", "content": "Successfully collected 9 wood."}, - {"role": "assistant", "content": "I collected 9 oak logs. I need a pickaxe first, which requires planks and sticks. !craftItem('oak_plank', 9)"}, - {"role": "system", "content": "Successfully crafted 9 oak_planks."}, - {"role": "assistant", "content": "I have crafted 9 oak planks. Now I need sticks. !craftItem('stick', 2)"}, - {"role": "system", "content": "Successfully crafted 2 sticks."}, - {"role": "assistant", "content": "I have crafted 2 sticks. Now I can craft a wooden pickaxe. 
!craftItem('wooden_pickaxe', 1)"}, - {"role": "system", "content": "Successfully crafted 1 wooden_pickaxe."}, - {"role": "assistant", "content": "Looks like I have a wooden pickaxe now! Goal accomplished. !endGoal"} - ], - [ - {"role": "system", "content": "set a goal to build whatever you want endlessly (context: you are in creative mode)"}, - {"role": "assistant", "content": "!goal('Build something creative, then move over, and build again. Use separate newActions. Repeat forever.')"} - ], - [ - {"role": "system", "content": "You are self-prompting with the goal: 'Build a house'. Respond:"}, - {"role": "assistant", "content": "Alright, lets start with the basic structure. !newAction"} - ] - ], - - "coding_examples": [ - [ - {"role": "user", "content": "greg: Collect 10 wood"}, - {"role": "assistant", "content": "```await skills.collectBlock(bot, 'oak_log', 10);\n```"}, - {"role": "system", "content": "Successfully collected 9 wood."}, - {"role": "assistant", "content": "I collected 9 oak logs, what next?"} - ], - [ - {"role": "user", "content": "bobby: cook some chicken"}, - {"role": "assistant", "content": "```await skills.smeltItem(bot, 'chicken', 8);\n```"}, - {"role": "system", "content": "Successfully smelted 8 chicken into 8 cooked_chicken."}, - {"role": "assistant", "content": "I have cooked 8 chicken."} - ], - [ - {"role": "user", "content": "zZZn98: come here"}, - {"role": "assistant", "content": "```await skills.goToPlayer(bot, 'zZZn98');\n```"}, - {"role": "system", "content": "Code execution finished successfully."}, - {"role": "assistant", "content": "Here!"} - ], - [ - {"role": "user", "content": "maya: go to the nearest oak log"}, - {"role": "assistant", "content": "```await skills.goToPosition(bot, nearestOakLog.x, nearestOakLog.y, nearestOakLog.z);\n```"}, - {"role": "system", "content": "Code output:\n!!Code threw exception!! Error: ReferenceError: nearestOakLog is not defined\n Write code to fix the problem and try again."}, - {"role": "assistant", "content": "Apologies, I made an error. Let me try that again.```\n// I am navigating to the nearest oak log.\nlet position = world.getNearestBlock(bot, 'oak_log', 20);\nawait skills.goToPosition(bot, position.x, position.y, position.z);\n```"}, - {"role": "system", "content": "Arrived at location."}, - {"role": "assistant", "content": "I found an oak log and I am now standing next to it. 
What next?"} - ], - [ - {"role": "user", "content": "234jeb: build a little tower with a torch on the side"}, - {"role": "assistant", "content": "```let pos = bot.entity.position;\nfor (let i = 0; i < 5; i++) {\n await skills.placeBlock(bot, 'dirt', pos.x, pos.y + i, pos.z);\n}\nawait skills.placeBlock(bot, 'torch', pos.x + 1, pos.y + 4, pos.z, 'side');\n```"}, - {"role": "system", "content": "Successfully placed 5 dirt."}, - {"role": "assistant", "content": "I built a little tower!"} - ], - [ - {"role": "user", "content": "brug: build a dirt house"}, - {"role": "assistant", "content": "```const position = world.getPosition(bot);\nconst startX = position.x;\nconst startY = position.y;\nconst startZ = position.z;\nconst width = 7;\nconst depth = 7;\nconst height = 4;\n\n// Build the walls\nfor (let x = startX; x < startX + width; x++) {\n for (let y = startY; y < startY + height; y++) {\n for (let z = startZ; z < startZ + depth; z++) {\n if (x === startX || x === startX + width - 1 || y === startY || y === startY + height - 1 || z === startZ || z === startZ + depth - 1) {\n await skills.placeBlock(bot, 'oak_planks', x, y, z); \n }\n }\n }\n}\n```"} - ] - ] - -} diff --git a/src/agent/library/skill_library.js b/src/agent/library/skill_library.js index 2728b93..4470586 100644 --- a/src/agent/library/skill_library.js +++ b/src/agent/library/skill_library.js @@ -8,7 +8,7 @@ export class SkillLibrary { this.embedding_model = embedding_model; this.skill_docs_embeddings = {}; this.skill_docs = null; - this.always_show_skills = ['skills.placeBlock', 'skills.wait'] + this.always_show_skills = ['skills.placeBlock', 'skills.wait', 'skills.breakBlockAt'] } async initSkillLibrary() { const skillDocs = getSkillDocs(); From 8793b3d905ea68fbdc82faf7981f63b4a6c34698 Mon Sep 17 00:00:00 2001 From: MaxRobinsonTheGreat Date: Sun, 30 Mar 2025 16:33:21 -0500 Subject: [PATCH 170/171] fix behavior log --- src/agent/agent.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/agent/agent.js b/src/agent/agent.js index 9c7a683..cdea40b 100644 --- a/src/agent/agent.js +++ b/src/agent/agent.js @@ -242,13 +242,13 @@ export class Agent { const checkInterrupt = () => this.self_prompter.shouldInterrupt(self_prompt) || this.shut_up || convoManager.responseScheduledFor(source); - let behavior_log = this.bot.modes.flushBehaviorLog(); - if (behavior_log.trim().length > 0) { + let behavior_log = this.bot.modes.flushBehaviorLog().trim(); + if (behavior_log.length > 0) { const MAX_LOG = 500; if (behavior_log.length > MAX_LOG) { behavior_log = '...' 
+ behavior_log.substring(behavior_log.length - MAX_LOG); } - behavior_log = 'Recent behaviors log: \n' + behavior_log.substring(behavior_log.indexOf('\n')); + behavior_log = 'Recent behaviors log: \n' + behavior_log; await this.history.add('system', behavior_log); } From fe68467e62e3be60e5bea927a4531ac99202bb4f Mon Sep 17 00:00:00 2001 From: Max Robinson Date: Sun, 30 Mar 2025 12:37:32 -0500 Subject: [PATCH 171/171] Update src/agent/commands/queries.js Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- src/agent/commands/queries.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/agent/commands/queries.js b/src/agent/commands/queries.js index 8150c70..c1b36dd 100644 --- a/src/agent/commands/queries.js +++ b/src/agent/commands/queries.js @@ -239,7 +239,7 @@ export const queryList = [ return divContent.trim(); } catch (error) { console.error("Error fetching or parsing HTML:", error); - return `The following error occured: ${error}` + return `The following error occurred: ${error}` } } },
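A quick, self-contained illustration of the behavior-log handling that the "fix behavior log" patch above settles on: trim the flushed log first, keep only the last 500 characters of an oversized log (marking the cut with "..."), and only then prefix the header before handing the entry to history. This is a sketch, not the agent's actual code path; the stub flushBehaviorLog() below is hypothetical scaffolding, and only the trim-first ordering, the 500-character cap, and the header line come from the diff itself.

```js
// Hypothetical stand-in for this.bot.modes.flushBehaviorLog(); the real agent pulls this from its mode system.
const flushBehaviorLog = () => '  item_collecting: picked up 3 oak_log\n'.repeat(40);

function formatBehaviorLog(rawLog, maxLen = 500) {
    // Trim first so a whitespace-only log counts as empty (the ordering the patch fixes).
    let log = rawLog.trim();
    if (log.length === 0) return null;
    // Keep only the tail of an oversized log and mark the cut.
    if (log.length > maxLen) {
        log = '...' + log.substring(log.length - maxLen);
    }
    return 'Recent behaviors log: \n' + log;
}

const entry = formatBehaviorLog(flushBehaviorLog());
if (entry !== null) {
    console.log(entry); // in agent.js this becomes: await this.history.add('system', entry);
}
```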