remove pollinations

This commit is contained in:
uukelele-scratch 2025-08-27 07:22:08 +01:00
parent 3557215e29
commit 96e68cc865
6 changed files with 8 additions and 134 deletions

View file

@ -63,7 +63,6 @@ You can configure the agent's name, model, and prompts in their profile like `an
| `openrouter` | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) | | `openrouter` | `OPENROUTER_API_KEY` | `openrouter/anthropic/claude-3.5-sonnet` | [docs](https://openrouter.ai/models) |
| `glhf.chat` | `GHLF_API_KEY` | `glhf/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) | | `glhf.chat` | `GHLF_API_KEY` | `glhf/hf:meta-llama/Llama-3.1-405B-Instruct` | [docs](https://glhf.chat/user-settings/api) |
| `hyperbolic` | `HYPERBOLIC_API_KEY` | `hyperbolic/deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) | | `hyperbolic` | `HYPERBOLIC_API_KEY` | `hyperbolic/deepseek-ai/DeepSeek-V3` | [docs](https://docs.hyperbolic.xyz/docs/getting-started) |
| `pollinations` | n/a | `pollinations/openai-large` | [docs](https://github.com/pollinations/pollinations/blob/master/APIDOCS.md) |
| `vllm` | n/a | `vllm/llama3` | n/a | | `vllm` | n/a | `vllm/llama3` | n/a |
If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command: If you use Ollama, to install the models used by default (generation and embedding), execute the following terminal command:
@ -142,9 +141,9 @@ You can pass a string or an object for these fields. A model object must specify
"model": "text-embedding-ada-002" "model": "text-embedding-ada-002"
}, },
"speak_model": { "speak_model": {
"api": "pollinations", "api": "openai",
"url": "https://text.pollinations.ai/openai", "url": "https://api.openai.com/v1/",
"model": "openai-audio", "model": "tts-1",
"voice": "echo" "voice": "echo"
} }

View file

@ -11,7 +11,7 @@
"image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, entities, structures, and notable features. Focus on details relevant to the conversation. Note: the sky is always blue regardless of weather or time, dropped items are small pink cubes, and blocks below y=0 do not render. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS", "image_analysis": "You are a Minecraft bot named $NAME that has been given a screenshot of your current view. Analyze and summarize the view; describe terrain, blocks, entities, structures, and notable features. Focus on details relevant to the conversation. Note: the sky is always blue regardless of weather or time, dropped items are small pink cubes, and blocks below y=0 do not render. Be extremely concise and correct, respond only with your analysis, not conversationally. $STATS",
"speak_model": "pollinations/openai-audio/echo", "speak_model": "openai/tts-1/echo",
"modes": { "modes": {
"self_preservation": true, "self_preservation": true,

View file

@ -1,5 +1,4 @@
import { exec, spawn } from 'child_process'; import { exec, spawn } from 'child_process';
import { TTSConfig as pollinationsTTSConfig } from '../models/pollinations.js';
import { TTSConfig as gptTTSConfig } from '../models/gpt.js'; import { TTSConfig as gptTTSConfig } from '../models/gpt.js';
import { TTSConfig as geminiTTSConfig } from '../models/gemini.js'; import { TTSConfig as geminiTTSConfig } from '../models/gemini.js';
@ -41,9 +40,7 @@ $s.Speak('${txt.replace(/'/g,"''")}'); $s.Dispose()"`
} else { } else {
function getModelUrl(prov) { function getModelUrl(prov) {
if (prov === 'pollinations') { if (prov === 'openai') {
return pollinationsTTSConfig.baseUrl;
} else if (prov === 'openai') {
return gptTTSConfig.baseUrl; return gptTTSConfig.baseUrl;
} else if (prov === 'google') { } else if (prov === 'google') {
return geminiTTSConfig.baseUrl; return geminiTTSConfig.baseUrl;
@ -67,9 +64,7 @@ $s.Speak('${txt.replace(/'/g,"''")}'); $s.Dispose()"`
try { try {
let audioData; let audioData;
if (prov === "pollinations") { if (prov === "openai") {
audioData = await pollinationsTTSConfig.sendAudioRequest(txt, mdl, voice, url);
} else if (prov === "openai") {
audioData = await gptTTSConfig.sendAudioRequest(txt, mdl, voice, url); audioData = await gptTTSConfig.sendAudioRequest(txt, mdl, voice, url);
} else if (prov === "google") { } else if (prov === "google") {
audioData = await geminiTTSConfig.sendAudioRequest(txt, mdl, voice, url); audioData = await geminiTTSConfig.sendAudioRequest(txt, mdl, voice, url);

View file

@ -1,5 +1,5 @@
import { GoogleGenAI, VideoCompressionQuality } from '@google/genai'; import { GoogleGenAI } from '@google/genai';
import { toSinglePrompt, strictFormat } from '../utils/text.js'; import { strictFormat } from '../utils/text.js';
import { getKey } from '../utils/keys.js'; import { getKey } from '../utils/keys.js';
import { lamejs } from 'lamejs/lame.all.js'; import { lamejs } from 'lamejs/lame.all.js';

View file

@ -1,115 +0,0 @@
import { strictFormat } from "../utils/text.js";
/**
 * Chat-completion client for the pollinations OpenAI-compatible endpoint.
 * API reference: https://github.com/pollinations/pollinations/blob/master/APIDOCS.md
 */
export class Pollinations {
    // models: https://text.pollinations.ai/models
    /**
     * @param {string} [model_name] - pollinations model id; falls back to "openai-large" per request.
     * @param {string} [url] - endpoint override; defaults to the public pollinations endpoint.
     * @param {object} [params] - extra payload fields merged into every request body.
     */
    constructor(model_name, url, params) {
        this.model_name = model_name;
        this.params = params;
        this.url = url || "https://text.pollinations.ai/openai";
    }

    /**
     * Send a chat request and return the assistant's reply text.
     * Never throws: on any HTTP or network failure it logs the error and
     * returns a fallback string instead.
     * @param {Array<object>} turns - prior conversation messages ({role, content}).
     * @param {string} systemMessage - system prompt prepended to the turns.
     * @returns {Promise<string>} reply text, or the fallback string on failure.
     */
    async sendRequest(turns, systemMessage) {
        const messages = [{'role': 'system', 'content': systemMessage}].concat(turns);
        const payload = {
            model: this.model_name || "openai-large",
            messages: strictFormat(messages),
            // random seed so retried prompts are not served an identical reply
            seed: Math.floor(Math.random() * (99999)),
            referrer: "mindcraft",
            ...(this.params || {})
        };
        let res = null;
        try {
            console.log(`Awaiting pollinations response from model`, this.model_name);
            const response = await fetch(this.url, {
                method: "POST",
                headers: {
                    "Content-Type": "application/json"
                },
                body: JSON.stringify(payload)
            });
            if (!response.ok) {
                console.error(`Failed to receive response. Status`, response.status, (await response.text()));
                res = "My brain disconnected, try again.";
            } else {
                const result = await response.json();
                res = result.choices[0].message.content;
            }
        } catch (err) {
            // FIX: was `err || err.message`, which always logged the bare error
            // object and would throw on a falsy err; prefer the message when present.
            console.error(`Failed to receive response.`, err?.message ?? err);
            res = "My brain disconnected, try again.";
        }
        return res;
    }

    /**
     * Send a vision request: appends the prompt plus the image (as a base64
     * JPEG data URL) as one final user message, then delegates to sendRequest.
     * @param {Array<object>} messages - prior conversation messages (not mutated).
     * @param {string} systemMessage - prompt text paired with the image.
     * @param {Buffer} imageBuffer - raw JPEG bytes.
     * @returns {Promise<string>} reply text, or the fallback string on failure.
     */
    async sendVisionRequest(messages, systemMessage, imageBuffer) {
        const imageMessages = [...messages];
        imageMessages.push({
            role: "user",
            content: [
                { type: "text", text: systemMessage },
                {
                    type: "image_url",
                    image_url: {
                        url: `data:image/jpeg;base64,${imageBuffer.toString('base64')}`
                    }
                }
            ]
        });
        return this.sendRequest(imageMessages, systemMessage);
    }
}
/**
 * Request text-to-speech audio from a pollinations OpenAI-compatible endpoint.
 * Posts a chat-completion payload asking for an mp3 audio modality and returns
 * the base64-encoded audio data, or null on any HTTP/network failure (never throws).
 * @param {string} text - exact text to speak.
 * @param {string} model - audio-capable model id (e.g. "openai-audio").
 * @param {string} voice - voice name to synthesize with.
 * @param {string} url - full endpoint URL to POST to.
 * @returns {Promise<string|null>} base64 mp3 data, or null on failure.
 */
async function sendAudioRequest(text, model, voice, url) {
    const requestBody = {
        model: model,
        modalities: ["text", "audio"],
        audio: {
            voice: voice,
            format: "mp3",
        },
        messages: [
            {
                // Pollinations otherwise replies to the text instead of just
                // saying it, so force a verbatim-echo contract up front.
                role: "developer",
                content: "You are an AI that echoes. Your sole function is to repeat back everything the user says to you exactly as it is written. This includes punctuation, grammar, language, and text formatting. Do not add, remove, or alter anything in the user's input in any way. Respond only with an exact duplicate of the user’s query."
            },
            {
                role: "user",
                content: text
            }
        ]
    };
    try {
        const response = await fetch(url, {
            method: "POST",
            headers: {
                "Content-Type": "application/json"
            },
            body: JSON.stringify(requestBody)
        });
        if (!response.ok) {
            console.error("Failed to get text transcription. Status", response.status, (await response.text()));
            return null;
        }
        const result = await response.json();
        return result.choices[0].message.audio.data;
    } catch (err) {
        console.error("TTS fetch failed:", err);
        return null;
    }
}

/** TTS adapter consumed by the speech layer: request function + default endpoint. */
export const TTSConfig = {
    sendAudioRequest: sendAudioRequest,
    baseUrl: 'https://text.pollinations.ai/openai',
};

View file

@ -21,7 +21,6 @@ import { DeepSeek } from './deepseek.js';
import { Hyperbolic } from './hyperbolic.js'; import { Hyperbolic } from './hyperbolic.js';
import { GLHF } from './glhf.js'; import { GLHF } from './glhf.js';
import { OpenRouter } from './openrouter.js'; import { OpenRouter } from './openrouter.js';
import { Pollinations } from './pollinations.js';
import { VLLM } from './vllm.js'; import { VLLM } from './vllm.js';
import { promises as fs } from 'fs'; import { promises as fs } from 'fs';
import path from 'path'; import path from 'path';
@ -150,8 +149,6 @@ export class Prompter {
profile.api = 'openrouter'; // must do first because shares names with other models profile.api = 'openrouter'; // must do first because shares names with other models
else if (profile.model.includes('ollama/')) else if (profile.model.includes('ollama/'))
profile.api = 'ollama'; // also must do early because shares names with other models profile.api = 'ollama'; // also must do early because shares names with other models
else if (profile.model.includes('pollinations/'))
profile.api = 'pollinations'; // also shares some model names like llama
else if (profile.model.includes('gemini')) else if (profile.model.includes('gemini'))
profile.api = 'google'; profile.api = 'google';
else if (profile.model.includes('vllm/')) else if (profile.model.includes('vllm/'))
@ -219,8 +216,6 @@ export class Prompter {
model = new DeepSeek(profile.model, profile.url, profile.params); model = new DeepSeek(profile.model, profile.url, profile.params);
else if (profile.api === 'openrouter') else if (profile.api === 'openrouter')
model = new OpenRouter(profile.model.replace('openrouter/', ''), profile.url, profile.params); model = new OpenRouter(profile.model.replace('openrouter/', ''), profile.url, profile.params);
else if (profile.api === 'pollinations')
model = new Pollinations(profile.model.replace('pollinations/', ''), profile.url, profile.params);
else if (profile.api === 'vllm') else if (profile.api === 'vllm')
model = new VLLM(profile.model.replace('vllm/', ''), profile.url, profile.params); model = new VLLM(profile.model.replace('vllm/', ''), profile.url, profile.params);
else else