talemate/talemate_frontend/src/components/AIClient.vue
FInalWombat 72202dee02
Prep 0.12.0 (#26)
* no " or * just treat as spoken words

* chromadb persist to db

* collection name should contain the embedding so switching between chromadb configurations doesn't brick your scenes

* fix save-as long term memory transfer

* add chroma

* director agent refactor

* tweak director command, prompt reset, ux display

* tweak director message ux

* allow clearing of prompt log

* remove auto adding of quotes if neither quote or * are present

* command to reset long term memory for the scene

* improve summarization template as it would cause some llms to add extra details

* rebuilding history will now also rebuild long term memory

* direct scene template

* fix scene time reset

* dialogue template tweaks

* better dialog format fixing

* some dialogue template adjustments

* adjust default values of director agent

* keep track of scene saved/unsaved status and confirm loading a different scene if current scene is unsaved

* prompt fixes

* remove the collection on recommitting the scene to memory, as the embeddings may have changed

* change to the official python api for the openai client and make it async

* prompt tweaks

* world state prompt parsing fixes

* improve handling of json responses

* 0 seconds ago changed to moments ago

* move memory context closer to scene

* token counts for openai client

* narrator agent option: narrate passage of time

* gitignore

* remove memory id

* refactor world state with persistence to chromadb (wip)

* remove world state update instructions

* don't display blank emotion in world state

* openai gpt-4 turbo support

* conversation agent extra instructions

* track prompt response times

* Yi and UtopiaXL

* long term memory retrieval improvements during conversations

* narrate scene tweaks

* conversation ltm augment tweaks

* hide subconfig if parent config isn't enabled

* ai assisted memory recall during conversation defaults to off

* openai json_object coercion only on models that support it

* openai client emit prompt processing time

* 0.12.0

* remove prompt number from prompt debug list

* add prompt number back in but shift it to the upper row

* narrate time passage: hard content limit restriction for now, as gpt-4 would just write a whole chapter

* relock
2023-11-10 22:45:50 +02:00


<template>
<div v-if="isConnected()">
<v-list v-for="(client, index) in state.clients" :key="index">
<v-list-item>
<v-divider v-if="index !== 0" class="mb-3"></v-divider>
<v-list-item-title>
<v-progress-circular v-if="client.status === 'busy'" indeterminate color="primary"
size="14"></v-progress-circular>
<v-icon v-else-if="client.status === 'warning'" color="orange" size="14">mdi-checkbox-blank-circle</v-icon>
<v-icon v-else-if="client.status === 'error'" color="red" size="14">mdi-checkbox-blank-circle</v-icon>
<v-icon v-else-if="client.status === 'disabled'" color="grey-darken-2" size="14">mdi-checkbox-blank-circle</v-icon>
<v-icon v-else color="green" size="14">mdi-checkbox-blank-circle</v-icon>
{{ client.name }}
</v-list-item-title>
<v-list-item-subtitle class="text-caption">
{{ client.model_name }}
</v-list-item-subtitle>
<v-list-item-subtitle class="text-caption">
{{ client.type }}
<v-chip label size="x-small" variant="outlined" class="ml-1">ctx {{ client.max_token_length }}</v-chip>
</v-list-item-subtitle>
<v-list-item-content density="compact">
<v-slider
hide-details
v-model="client.max_token_length"
:min="1024"
:max="128000"
:step="512"
@update:modelValue="saveClient(client)"
@click.stop
density="compact"
></v-slider>
</v-list-item-content>
<v-list-item-subtitle class="text-center">
<v-tooltip text="Edit client">
<template v-slot:activator="{ props }">
<v-btn size="x-small" class="mr-1" v-bind="props" variant="tonal" density="comfortable" rounded="sm" @click.stop="editClient(index)" icon="mdi-cogs"></v-btn>
</template>
</v-tooltip>
<v-tooltip text="Assign to all agents">
<template v-slot:activator="{ props }">
<v-btn size="x-small" class="mr-1" v-bind="props" variant="tonal" density="comfortable" rounded="sm" @click.stop="assignClientToAllAgents(index)" icon="mdi-transit-connection-variant"></v-btn>
</template>
</v-tooltip>
<v-tooltip text="Delete client">
<template v-slot:activator="{ props }">
<v-btn size="x-small" class="mr-1" v-bind="props" variant="tonal" density="comfortable" rounded="sm" @click.stop="deleteClient(index)" icon="mdi-close-thick"></v-btn>
</template>
</v-tooltip>
</v-list-item-subtitle>
</v-list-item>
</v-list>
<ClientModal :dialog="dialog" :formTitle="formTitle" @save="saveClient" @update:dialog="updateDialog"></ClientModal>
<v-alert type="warning" variant="tonal" v-if="state.clients.length === 0">You have no LLM clients configured. Add one.</v-alert>
<v-btn @click="openModal" prepend-icon="mdi-plus-box">Add client</v-btn>
</div>
</template>
<script>
import ClientModal from './ClientModal.vue';
export default {
components: {
ClientModal,
},
data() {
return {
clientStatusCheck: null,
state: {
clients: [],
dialog: false,
currentClient: {
name: '',
type: '',
apiUrl: '',
model_name: '',
max_token_length: 2048,
}, // default values backing the add/edit client form
formTitle: ''
}
}
},
inject: [
'getWebsocket',
'registerMessageHandler',
'isConnected',
'chekcingStatus',
'getAgents',
],
provide() {
return {
state: this.state
};
},
methods: {
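// true when no clients are configured or any client reports an 'error' or 'warning' status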
configurationRequired() {
if(this.state.clients.length === 0) {
return true;
}
// cycle through clients and check if any are status 'error' or 'warning'
for (let i = 0; i < this.state.clients.length; i++) {
if (this.state.clients[i].status === 'error' || this.state.clients[i].status === 'warning') {
return true;
}
}
return false;
},
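// returns the first client currently reporting a 'busy' status, if any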
getActive() {
return this.state.clients.find(a => a.status === 'busy');
},
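// open the client modal pre-filled with defaults for a new textgenwebui client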
openModal() {
this.state.currentClient = {
name: 'TextGenWebUI',
type: 'textgenwebui',
apiUrl: 'http://localhost:5000/api',
model_name: '',
max_token_length: 4096,
};
this.state.formTitle = 'Add Client';
this.state.dialog = true;
},
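// add the client if its name is new, otherwise update the existing entry, then notify the parent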
saveClient(client) {
const index = this.state.clients.findIndex(c => c.name === client.name);
if (index === -1) {
this.state.clients.push(client);
} else {
this.state.clients[index] = client;
}
console.log("Saving client", client)
this.state.dialog = false; // Close the dialog after saving the client
this.$emit('clients-updated', this.state.clients);
},
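// load the selected client into the form and open the edit dialog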
editClient(index) {
this.state.currentClient = { ...this.state.clients[index] };
this.state.formTitle = 'Edit AI Client';
this.state.dialog = true;
},
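// remove a client after user confirmation and notify the parent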
deleteClient(index) {
if (window.confirm('Are you sure you want to delete this client?')) {
this.state.clients.splice(index, 1);
this.$emit('clients-updated', this.state.clients);
}
},
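// point every agent returned by getAgents() at the selected client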
assignClientToAllAgents(index) {
let agents = this.getAgents();
let client = this.state.clients[index];
for (let i = 0; i < agents.length; i++) {
agents[i].client = client.name;
}
// emit once after all agents have been reassigned, rather than once per agent
this.$emit('client-assigned', agents);
},
updateDialog(newVal) {
this.state.dialog = newVal;
},
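// websocket message handler; 'client_status' payloads are expected to carry
// name, model_name, message (used as the client type), status, max_token_length
// and apiUrl (field names inferred from the access below)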
handleMessage(data) {
// Handle client_status message type
if (data.type === 'client_status') {
// Find the client with the given name
const client = this.state.clients.find(client => client.name === data.name);
if (client) {
// Update the existing client's details from the status payload
client.model_name = data.model_name;
client.type = data.message;
client.status = data.status;
client.max_token_length = data.max_token_length;
client.apiUrl = data.apiUrl;
} else {
console.log("Adding new client", data);
this.state.clients.push({
name: data.name,
model_name: data.model_name,
type: data.message,
status: data.status,
max_token_length: data.max_token_length,
apiUrl: data.apiUrl,
});
// sort the clients by name
this.state.clients.sort((a, b) => (a.name > b.name) ? 1 : -1);
}
return;
}
}
},
created() {
this.registerMessageHandler(this.handleMessage);
},
}
</script>