Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
63 changes: 46 additions & 17 deletions js/chat/chat-core.js
Original file line number Diff line number Diff line change
Expand Up @@ -87,10 +87,34 @@ document.addEventListener("DOMContentLoaded", () => {
let autoSpeakEnabled = localStorage.getItem("autoSpeakEnabled") === "true";
let currentlySpeakingMessage = null;
let activeUtterance = null;
let recognition = null;
let isListening = false;
let voiceInputBtn = null;
let slideshowInterval = null;
let recognition = null;
let isListening = false;
let voiceInputBtn = null;
let slideshowInterval = null;

// Capability map keyed by service ({ image, text, audio, tools }); seeded from a
// previous fetch cached on window, otherwise null until ensureCapabilities() runs.
let capabilities = window.pollinationsCaps || null;
// In-flight fetch promise; lets concurrent callers share a single network request.
let capabilitiesPromise = null;

async function ensureCapabilities() {
  // Already resolved, or polliLib is not loaded — nothing to do.
  if (capabilities || !window.polliLib?.modelCapabilities) return;
  // Memoize the fetch so overlapping calls do not issue duplicate requests.
  capabilitiesPromise ??= window.polliLib.modelCapabilities()
    .then((caps) => {
      capabilities = caps;
      window.pollinationsCaps = caps;
    })
    .catch((e) => {
      // Best-effort: degrade to an empty map rather than breaking the chat UI.
      console.warn('capabilities fetch failed', e);
      capabilities = {};
    });
  await capabilitiesPromise;
}

// Enable/disable the voice buttons based on whether the given text model
// advertises audio support in the fetched capability map.
function applyCapabilities(model) {
  const modelInfo = (capabilities && capabilities.text && capabilities.text[model]) || {};
  const supportsAudio = Boolean(modelInfo.audio);
  if (voiceToggleBtn) voiceToggleBtn.disabled = !supportsAudio;
  if (voiceInputBtn) voiceInputBtn.disabled = !supportsAudio;
}

window.updateCapabilityUI = applyCapabilities;
ensureCapabilities().then(() => applyCapabilities(modelSelect?.value));

function normalize(str) {
return str?.toLowerCase().trim() || "";
Expand Down Expand Up @@ -481,12 +505,13 @@ document.addEventListener("DOMContentLoaded", () => {
return executeCommand(text);
}

function setVoiceInputButton(btn) {
voiceInputBtn = btn;
if (window._chatInternals) {
window._chatInternals.voiceInputBtn = btn;
}
}
// Register the voice-input button, mirror it into the shared chat internals,
// and immediately sync its enabled state with the current model's capabilities.
function setVoiceInputButton(btn) {
  voiceInputBtn = btn;
  const internals = window._chatInternals;
  if (internals) {
    internals.voiceInputBtn = btn;
  }
  if (modelSelect) applyCapabilities(modelSelect.value);
}

function loadVoices() {
return new Promise((resolve) => {
Expand Down Expand Up @@ -752,8 +777,10 @@ document.addEventListener("DOMContentLoaded", () => {
}

try {
// Use polliLib OpenAI-compatible chat endpoint
const data = await (window.polliLib?.chat?.({ model, messages, tools: toolDefinitions }) ?? Promise.reject(new Error('polliLib not loaded')));
const capsInfo = capabilities?.text?.[model];
const chatParams = { model, messages };
if (capsInfo?.tools) chatParams.tools = toolDefinitions;
const data = await (window.polliLib?.chat?.(chatParams) ?? Promise.reject(new Error('polliLib not loaded')));
loadingDiv.remove();

const messageObj = data?.choices?.[0]?.message || {};
Expand Down Expand Up @@ -1012,11 +1039,13 @@ document.addEventListener("DOMContentLoaded", () => {
currentlySpeakingMessage,
recognition,
isListening,
voiceInputBtn,
slideshowInterval,
setVoiceInputButton,
toggleAutoSpeak,
updateVoiceToggleUI,
voiceInputBtn,
slideshowInterval,
setVoiceInputButton,
applyCapabilities,
capabilities,
toggleAutoSpeak,
updateVoiceToggleUI,
speakMessage,
stopSpeaking,
speakSentences,
Expand Down
20 changes: 18 additions & 2 deletions js/polliLib/polliLib-web.global.js
Original file line number Diff line number Diff line change
Expand Up @@ -284,12 +284,28 @@
async function listTextModels(client) { return await textModels(client); }
async function listAudioVoices(client) { const models = await textModels(client); return models?.['openai-audio']?.voices ?? []; }

// GET {textBase}/tools — returns the per-model tool-support map as parsed JSON.
async function toolModels(client) {
  const res = await client.get(`${client.textBase}/tools`);
  if (!res.ok) {
    throw new Error(`toolModels error ${res.status}`);
  }
  return await res.json();
}
// GET {textBase}/audio — returns the per-model audio-support map as parsed JSON.
async function audioModels(client) {
  const res = await client.get(`${client.textBase}/audio`);
  if (!res.ok) {
    throw new Error(`audioModels error ${res.status}`);
  }
  return await res.json();
}

// Aggregate capability info from all four endpoints into one structure:
// { image, text, audio, tools }. Each fetch degrades to {} on failure, and
// per-model audio/tool info is folded into the text map as `audio`/`tools` keys.
async function modelCapabilities(client = getDefaultClient()) {
  const [imgRaw, textRaw, audioRaw, toolsRaw] = await Promise.all([
    imageModels(client).catch(() => ({})),
    textModels(client).catch(() => ({})),
    audioModels(client).catch(() => ({})),
    toolModels(client).catch(() => ({})),
  ]);

  // Annotate each image model with whether it supports JSON output.
  const image = {};
  for (const [name, info] of Object.entries(imgRaw ?? {})) {
    image[name] = { ...(info || {}), json: imageModelSupportsJson(info) };
  }

  // Start from the raw text map, then merge in per-model audio/tool support.
  const textCaps = { ...(textRaw ?? {}) };
  const annotate = (source, key) => {
    for (const [name, info] of Object.entries(source ?? {})) {
      textCaps[name] = { ...(textCaps[name] || {}), [key]: info };
    }
  };
  annotate(audioRaw, 'audio');
  annotate(toolsRaw, 'tools');

  return { image, text: textCaps, audio: audioRaw, tools: toolsRaw };
}

// --- pipeline.js ---
Expand Down
25 changes: 23 additions & 2 deletions js/polliLib/src/models.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,15 +13,36 @@ export function imageModelSupportsJson(info) {
return false;
}

// Fetch the tool-support map from the text service's /tools endpoint.
// Throws when the HTTP response is not OK.
async function toolModels(client) {
  const response = await client.get(`${client.textBase}/tools`);
  if (!response.ok) throw new Error(`toolModels error ${response.status}`);
  return await response.json();
}

// Fetch the audio-support map from the text service's /audio endpoint.
// Throws when the HTTP response is not OK.
async function audioModels(client) {
  const response = await client.get(`${client.textBase}/audio`);
  if (!response.ok) throw new Error(`audioModels error ${response.status}`);
  return await response.json();
}

/**
 * Aggregate model capabilities from all four capability endpoints.
 * Fetches run in parallel and each individually degrades to {} on error.
 *
 * @param {object} client - HTTP client with `get(url)`, `imageBase`, `textBase`.
 * @returns {Promise<{image: object, text: object, audio: object, tools: object}>}
 *   `image` is annotated with a `json` flag per model; `text` carries per-model
 *   `audio`/`tools` sub-objects merged from the dedicated endpoints.
 */
export async function modelCapabilities(client = getDefaultClient()) {
  const [imgRaw, textRaw, audioRaw, toolsRaw] = await Promise.all([
    imageModels(client).catch(() => ({})),
    textModels(client).catch(() => ({})),
    audioModels(client).catch(() => ({})),
    toolModels(client).catch(() => ({})),
  ]);

  const image = {};
  for (const [name, info] of Object.entries(imgRaw ?? {})) {
    image[name] = { ...(info || {}), json: imageModelSupportsJson(info) };
  }

  // Merge audio and tool support into the text-model map under fixed keys.
  const textCaps = { ...(textRaw ?? {}) };
  const merge = (source, key) => {
    for (const [name, info] of Object.entries(source ?? {})) {
      textCaps[name] = { ...(textCaps[name] || {}), [key]: info };
    }
  };
  merge(audioRaw, 'audio');
  merge(toolsRaw, 'tools');

  return { image, text: textCaps, audio: audioRaw, tools: toolsRaw };
}

94 changes: 49 additions & 45 deletions js/ui/ui.js
Original file line number Diff line number Diff line change
Expand Up @@ -110,38 +110,39 @@ document.addEventListener("DOMContentLoaded", () => {

async function fetchPollinationsModels() {
try {
// Use polliLib to list text models instead of direct endpoint
const models = await (window.polliLib?.textModels?.() ?? Promise.reject(new Error('polliLib not loaded')));
const caps = await (window.polliLib?.modelCapabilities?.() ?? Promise.reject(new Error('polliLib not loaded')));
window.pollinationsCaps = caps;
const models = Object.entries(caps.text || {}).map(([name, info]) => ({ name, ...(info || {}) }));
modelSelect.innerHTML = "";
let hasValidModel = false;
if (!Array.isArray(models) || models.length === 0) {
console.error("Models response is not a valid array or is empty:", models);
throw new Error("Invalid models response");
}
models.forEach(m => {
if (m && m.name) {
const opt = document.createElement("option");
opt.value = m.name;
opt.textContent = m.description || m.name;
let tooltip = m.description || m.name;
if (m.censored !== undefined) {
tooltip += m.censored ? " (Censored)" : " (Uncensored)";
}
if (m.reasoning) tooltip += " | Reasoning";
if (m.vision) tooltip += " | Vision";
if (m.audio) tooltip += " | Audio: " + (m.voices ? m.voices.join(", ") : "N/A");
if (m.provider) tooltip += " | Provider: " + m.provider;
opt.title = tooltip;
modelSelect.appendChild(opt);
hasValidModel = true;
} else {
console.warn("Skipping invalid model entry:", m);
}
});

if (!Array.isArray(models) || models.length === 0) {
console.error("Models response is not a valid array or is empty:", models);
throw new Error("Invalid models response");
}

models.forEach(m => {
if (m && m.name) {
const opt = document.createElement("option");
opt.value = m.name;
opt.textContent = m.description || m.name;

let tooltip = m.description || m.name;
if (m.censored !== undefined) {
tooltip += m.censored ? " (Censored)" : " (Uncensored)";
}
if (m.reasoning) tooltip += " | Reasoning";
if (m.vision) tooltip += " | Vision";
if (m.audio) tooltip += " | Audio: " + (m.audio.voices ? m.audio.voices.join(", ") : "N/A");
if (m.provider) tooltip += " | Provider: " + m.provider;

opt.title = tooltip;
modelSelect.appendChild(opt);
hasValidModel = true;
} else {
console.warn("Skipping invalid model entry:", m);
}
});

const currentSession = Storage.getCurrentSession();
const preferredModel = currentSession?.model || Storage.getDefaultModel();
Expand All @@ -160,6 +161,8 @@ document.addEventListener("DOMContentLoaded", () => {
}
}

if (window.updateCapabilityUI) window.updateCapabilityUI(modelSelect.value);

if (!modelSelect.value && modelSelect.options.length > 0) {
const unityOption = Array.from(modelSelect.options).find(opt => opt.value === "unity");
const firstModel = unityOption ? unityOption.value : modelSelect.options[0].value;
Expand Down Expand Up @@ -197,21 +200,22 @@ document.addEventListener("DOMContentLoaded", () => {
window.showToast("New chat session created");
});

modelSelect.addEventListener("change", () => {
const currentSession = Storage.getCurrentSession();
if (currentSession) {
const newModel = modelSelect.value;
Storage.setSessionModel(currentSession.id, newModel);
const originalBg = modelSelect.style.backgroundColor;
modelSelect.style.backgroundColor = "#4CAF50";
modelSelect.style.color = "white";
setTimeout(() => {
modelSelect.style.backgroundColor = originalBg;
modelSelect.style.color = "";
}, 500);
window.showToast(`Model updated to: ${newModel}`);
}
});
// Persist the newly selected model on the current session, flash the select
// green as visual confirmation, and refresh capability-dependent UI state.
modelSelect.addEventListener("change", () => {
  const session = Storage.getCurrentSession();
  if (!session) return;
  const selectedModel = modelSelect.value;
  Storage.setSessionModel(session.id, selectedModel);
  const previousBg = modelSelect.style.backgroundColor;
  modelSelect.style.backgroundColor = "#4CAF50";
  modelSelect.style.color = "white";
  setTimeout(() => {
    modelSelect.style.backgroundColor = previousBg;
    modelSelect.style.color = "";
  }, 500);
  window.showToast(`Model updated to: ${selectedModel}`);
  if (window.updateCapabilityUI) window.updateCapabilityUI(selectedModel);
});

donationOpenBtn.addEventListener("click", () => {
donationModal.classList.remove("hidden");
Expand Down
17 changes: 14 additions & 3 deletions tests/pollilib-capabilities.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,25 @@ const client = {
imageBase: 'https://img.example',
textBase: 'https://txt.example',
async get(url) {
if (url.startsWith('https://img.example')) {
if (url === 'https://img.example/models') {
return { ok: true, async json() { return { foo: {} }; }, headers: { get: () => 'application/json' } };
}
return { ok: true, async json() { return { bar: {}, 'openai-audio': { voices: ['a'] } }; }, headers: { get: () => 'application/json' } };
if (url === 'https://txt.example/models') {
return { ok: true, async json() { return { bar: {} }; }, headers: { get: () => 'application/json' } };
}
if (url === 'https://txt.example/audio') {
return { ok: true, async json() { return { bar: { voices: ['a'] } }; }, headers: { get: () => 'application/json' } };
}
if (url === 'https://txt.example/tools') {
return { ok: true, async json() { return { bar: { toolA: true } }; }, headers: { get: () => 'application/json' } };
}
}
};

const caps = await modelCapabilities(client);
assert('foo' in caps.image);
assert('bar' in caps.text);
assert.deepEqual(caps.audio.voices, ['a']);
assert.deepEqual(caps.audio.bar.voices, ['a']);
assert(caps.text.bar.audio.voices[0] === 'a');
assert(caps.text.bar.tools.toolA);
assert('bar' in caps.tools);
32 changes: 32 additions & 0 deletions tests/pollilib-capability-usage.mjs
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import assert from 'assert/strict';
import { modelCapabilities } from '../js/polliLib/src/models.js';

// Stub HTTP client: serves a fixed JSON payload for each capability endpoint.
const stubClient = {
  imageBase: 'https://img.example',
  textBase: 'https://txt.example',
  async get(url) {
    const okJson = (payload) => ({
      ok: true,
      async json() { return payload; },
      headers: { get: () => 'application/json' },
    });
    if (url === 'https://img.example/models') return okJson({ foo: {} });
    if (url === 'https://txt.example/models') return okJson({ bar: {}, baz: {} });
    if (url === 'https://txt.example/audio') return okJson({ bar: { voices: ['a'] } });
    if (url === 'https://txt.example/tools') return okJson({ bar: { toolA: true } });
  }
};

const caps = await modelCapabilities(stubClient);

// Mirrors how the chat code builds request options: attach tool definitions
// only when the selected model reports tool support.
function buildOptions(model) {
  const options = { model, messages: [] };
  if (caps.text?.[model]?.tools) options.tools = ['toolA'];
  return options;
}

assert('tools' in buildOptions('bar'));
assert(!('tools' in buildOptions('baz')));