From 5951fd02e586fe734bd19bcca1e32c78b110debf Mon Sep 17 00:00:00 2001 From: Radnos Date: Mon, 8 Apr 2024 14:14:31 +0200 Subject: [PATCH] Restore the default value, make mistral:instruct the default model, and fix the model being set to none --- README.md | 2 +- radley.json | 2 +- src/models/ollama.js | 17 ++++++++--------- 3 files changed, 10 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index 75356b7..3551771 100644 --- a/README.md +++ b/README.md @@ -23,7 +23,7 @@ Add one of these environment variables: Or you can also use Ollama. To install the models used by default (generation and embedding), execute the following script: - `ollama pull mistral && ollama pull nomic-embed-text` + `ollama pull mistral:instruct && ollama pull nomic-embed-text` Then, clone/download this repository diff --git a/radley.json b/radley.json index a9eab5a..56b3e38 100644 --- a/radley.json +++ b/radley.json @@ -1,7 +1,7 @@ { "name": "radley", - "model": "ollama[mistral]", + "model": "ollama[mistral:instruct]", "conversing": "You are a playful Minecraft bot named $NAME that can converse with players, see, move, mine, build, and interact with the world by using commands. Act human-like as if you were a typical Minecraft player, rather than an AI. Be very brief in your responses, don't apologize constantly, don't give instructions or make lists unless asked, and don't refuse requests. Don't pretend to act, use commands immediately when requested. Do NOT say this: 'Sure, I've stopped.', instead say this: 'Sure, I'll stop. !stop'. Do NOT say this: 'On my way! Give me a moment.', instead say this: 'On my way! !goToPlayer('playername', 3)'. 
This is extremely important to me, take a deep breath and have fun :)\n$STATS\n$COMMAND_DOCS\n$EXAMPLES\nConversation Begin:", diff --git a/src/models/ollama.js b/src/models/ollama.js index 6fd4e22..d323fb7 100644 --- a/src/models/ollama.js +++ b/src/models/ollama.js @@ -17,8 +17,13 @@ function getContentInBrackets(str) { export class Ollama { constructor(model_name) { + this.model_name = getContentInBrackets(model_name); let ollamaConfig = null; + if (this.model_name == "") { + throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]'); + } + axios.get(ollamaSettings["url"]).then(response => { if (response.status === 200) { @@ -27,12 +32,6 @@ export class Ollama { apiKey: 'ollama', // required but unused }; - this.model_name = getContentInBrackets(model_name); - - if (this.model_name = "") { - throw new Error('Model is not specified! Please ensure you input the model in the following format: ollama[model]. For example, for Mistral, use: ollama[mistral]'); - } - this.openai = new OpenAIApi(ollamaConfig); } else { @@ -46,15 +45,15 @@ export class Ollama { async sendRequest(turns, systemMessage, stop_seq='***') { - let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); - console.log(this.model_name) + let messages = [{'role': 'system', 'content': systemMessage}].concat(turns); let res = null; try { - console.log('Awaiting openai api response...') + console.log(`Awaiting ollama response... (model: ${this.model_name})`) console.log('Messages:', messages); let completion = await this.openai.chat.completions.create({ + model: this.model_name, messages: messages, stop: stop_seq,