Merge pull request #130 from Ninot1Quyi/main

Load sample text in parallel for faster loading
Max Robinson 2024-08-26 12:04:48 -05:00 committed by GitHub
commit ce5bfbc3bc
2 changed files with 10 additions and 6 deletions
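
The speedup comes from starting the two example loads at the same time instead of awaiting them one after the other. As a rough sketch of the pattern (loadA and loadB are hypothetical placeholders, not functions from this repository):

    // Sequential: total wait is roughly timeA + timeB
    await loadA();
    await loadB();

    // Concurrent: total wait is roughly max(timeA, timeB)
    await Promise.all([loadA(), loadB()]);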

@@ -93,12 +93,15 @@ export class Prompter {
     }
     async initExamples() {
         console.log('Loading examples...')
+        // Using Promise.all to implement concurrent processing
+        // Create Examples instances
         this.convo_examples = new Examples(this.embedding_model);
-        await this.convo_examples.load(this.profile.conversation_examples);
         this.coding_examples = new Examples(this.embedding_model);
-        await this.coding_examples.load(this.profile.coding_examples);
-        console.log('Examples loaded.');
+        // Use Promise.all to load examples concurrently
+        await Promise.all([
+            this.convo_examples.load(this.profile.conversation_examples),
+            this.coding_examples.load(this.profile.coding_examples),
+        ]);
     }

     async replaceStrings(prompt, messages, examples=null, prev_memory=null, to_summarize=[], last_goals=null) {
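With this change, both Examples instances are constructed first and their load() calls start together; initExamples() resolves only once both have finished. One behavioral detail: Promise.all rejects as soon as either load() rejects, so a failure in one example set still surfaces from initExamples(). A sketch of how a caller could make that explicit (the try/catch wrapper is illustrative, not part of this commit):

    try {
        await Promise.all([
            this.convo_examples.load(this.profile.conversation_examples),
            this.coding_examples.load(this.profile.coding_examples),
        ]);
    } catch (err) {
        // Whichever load() fails first causes the whole Promise.all to reject.
        console.error('Failed to load examples:', err);
        throw err;
    }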

@@ -32,10 +32,11 @@ export class Examples {
     async load(examples) {
         this.examples = examples;
         if (this.model !== null) {
-            for (let example of this.examples) {
+            const embeddingPromises = this.examples.map(async (example) => {
                 let turn_text = this.turnsToText(example);
                 this.embeddings[turn_text] = await this.model.embed(turn_text);
-            }
+            });
+            await Promise.all(embeddingPromises);
         }
     }
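
The examples.js change applies the same idea one level down: the old for...of loop awaited each this.model.embed() call before starting the next, while the new map() callback issues every embed request up front and Promise.all waits for the whole batch. Since results are stored under turn_text keys in this.embeddings, completion order does not matter. A minimal standalone sketch of the difference, where fakeEmbed is a hypothetical stand-in for this.model.embed (run inside an async function or an ES module with top-level await):

    const fakeEmbed = (text) =>
        new Promise((resolve) => setTimeout(() => resolve([text.length]), 100));

    const texts = ['hello', 'world', '!'];
    const embeddings = {};

    // Serial, like the old loop: roughly 300 ms for three texts.
    // for (const t of texts) { embeddings[t] = await fakeEmbed(t); }

    // Concurrent, like the new code: roughly 100 ms, since all three
    // fakeEmbed calls are in flight at once.
    await Promise.all(texts.map(async (t) => {
        embeddings[t] = await fakeEmbed(t);
    }));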