This commit is contained in:
2026-03-12 13:59:33 +01:00
parent 228391f280
commit 93c197e8e4

View File

@@ -112,7 +112,7 @@ function resetPracticeUI() {
skipBtn.disabled = false; skipBtn.disabled = false;
downloadBtn.disabled = true; downloadBtn.disabled = true;
audioBlob = null; audioBlob = null;
stopRecording(); stopRecording(true);
} }
// ── Audio playback ──────────────────────────────────────────────────────────── // ── Audio playback ────────────────────────────────────────────────────────────
@@ -138,160 +138,82 @@ speakBtn.addEventListener('click', () => {
autoSpeak(); autoSpeak();
}); });
// ── Device detection ────────────────────────────────────────────────────────── // ── MediaRecorder → Whisper ───────────────────────────────────────────────────
// ── Device detection ──────────────────────────────────────────────────────────
// Coarse user-agent sniff; true on Android/iOS. Used below to choose the
// transcription strategy (mobile: MediaRecorder → server; desktop: Web Speech).
const isMobile = /Android|iPhone|iPad/i.test(navigator.userAgent);
// ── Speech recognition (desktop only) ────────────────────────────────────────
// Standard name with the webkit-prefixed fallback for Chromium-based browsers;
// undefined where the Web Speech API is unsupported.
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
// Active recognition session, or null when not recording.
let recognition = null;
// Creates a single-phrase German speech-recognition session bound to the
// practice UI. Returns the configured recognizer, or null when the Web Speech
// API is unavailable in this browser. Each session streams interim results,
// commits at most one final transcript segment, and (via onend) chains a
// fresh session while state.isRecording remains true.
function createRecognition() {
  if (!SpeechRecognition) return null;
  const r = new SpeechRecognition();
  r.lang = 'de-DE';
  r.continuous = false; // one phrase per session — avoids all Android e.resultIndex bugs
  r.interimResults = true; // stream words within the phrase
  r.maxAlternatives = 1;
  let committed = false; // guard: each session commits at most one final result
  // Reflect "recording" in the UI only once the engine has actually started.
  r.onstart = () => {
    recordBtn.classList.add('recording');
    recordHint.textContent = 'Tippen zum Stoppen';
    state.isRecording = true;
  };
  r.onresult = (e) => {
    if (committed) return;
    // continuous=false ⇒ each session yields a single result entry, so
    // indexing e.results[0] is safe here.
    const result = e.results[0];
    const text = result[0].transcript;
    if (result.isFinal) {
      committed = true;
      // Append to the transcript accumulated across chained sessions.
      state.finalTranscript += (state.finalTranscript ? ' ' : '') + text;
      state.transcript = state.finalTranscript;
    } else {
      // interim — show but don't commit
      state.transcript = (state.finalTranscript ? state.finalTranscript + ' ' : '') + text;
    }
    // Hard cap on answer length: stop listening once the word budget is hit.
    if (state.transcript.trim().split(/\s+/).length >= MAX_RECORD_WORDS) {
      stopRecording();
      return;
    }
    updateTranscriptBox(state.transcript);
    checkBtn.disabled = !state.transcript.trim();
  };
  // The engine ends itself after each phrase; restart a fresh session unless
  // recording was stopped (by the user, the word cap, or the overall timer).
  r.onend = () => {
    if (!state.isRecording) return;
    setTimeout(() => {
      if (!state.isRecording) return; // re-check: state may flip during the delay
      recognition = createRecognition();
      try { recognition.start(); } catch (_) { stopRecording(); }
    }, 100);
  };
  // 'no-speech' and 'aborted' are routine between phrases; any other error
  // ends the recording session entirely.
  r.onerror = (e) => {
    if (e.error !== 'no-speech' && e.error !== 'aborted') stopRecording();
  };
  return r;
}
// ── MediaRecorder ─────────────────────────────────────────────────────────────
let recordingTimer = null; let recordingTimer = null;
let mediaRecorder = null; let mediaRecorder = null;
let audioChunks = []; let audioChunks = [];
let audioBlob = null; let audioBlob = null;
let _discardNext = false; // set to true when stopping without intent to transcribe
async function startRecording() { async function startRecording() {
stopAudio(); stopAudio();
audioBlob = null; _discardNext = false;
audioChunks = []; audioBlob = null;
audioChunks = [];
downloadBtn.disabled = true; downloadBtn.disabled = true;
transcriptBox.contentEditable = 'false'; transcriptBox.contentEditable = 'false';
if (isMobile) { try {
// ── Mobile: MediaRecorder → Whisper ─────────────────────────────────────── const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
try { const mimeType = MediaRecorder.isTypeSupported('audio/webm;codecs=opus') ? 'audio/webm;codecs=opus'
const stream = await navigator.mediaDevices.getUserMedia({ audio: true }); : MediaRecorder.isTypeSupported('audio/webm') ? 'audio/webm'
const mimeType = MediaRecorder.isTypeSupported('audio/mp4') ? 'audio/mp4' : MediaRecorder.isTypeSupported('audio/mp4') ? 'audio/mp4'
: MediaRecorder.isTypeSupported('audio/webm') ? 'audio/webm' : '';
: ''; mediaRecorder = new MediaRecorder(stream, mimeType ? { mimeType } : {});
mediaRecorder = new MediaRecorder(stream, mimeType ? { mimeType } : {}); mediaRecorder.ondataavailable = e => { if (e.data.size > 0) audioChunks.push(e.data); };
mediaRecorder.ondataavailable = e => { if (e.data.size > 0) audioChunks.push(e.data); }; mediaRecorder.onstop = async () => {
mediaRecorder.onstop = async () => { stream.getTracks().forEach(t => t.stop());
stream.getTracks().forEach(t => t.stop()); if (_discardNext) { _discardNext = false; return; }
const blob = new Blob(audioChunks, { type: mediaRecorder._mimeType || mimeType || 'audio/webm' }); const blob = new Blob(audioChunks, { type: mimeType || 'audio/webm' });
await transcribeAudio(blob); audioBlob = blob;
}; if (blob.size > 0) downloadBtn.disabled = false;
mediaRecorder._mimeType = mimeType; // stash for onstop await transcribeAudio(blob);
mediaRecorder.start(); };
state.isRecording = true; mediaRecorder.start();
recordBtn.classList.add('recording'); } catch (err) {
recordHint.textContent = 'Tippen zum Stoppen'; alert('Mikrofon nicht verfügbar: ' + err.message);
} catch (e) { return;
alert('Mikrofon nicht verfügbar: ' + e.message);
return;
}
} else {
// ── Desktop: Web Speech API ────────────────────────────────────────────────
if (!SpeechRecognition) {
alert('Spracherkennung wird in diesem Browser nicht unterstützt. Bitte nutze Chrome oder Edge.');
return;
}
recognition = createRecognition();
state.isRecording = true;
try { recognition.start(); } catch (_) {}
try {
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
const mimeType = MediaRecorder.isTypeSupported('audio/webm;codecs=opus')
? 'audio/webm;codecs=opus' : 'audio/webm';
mediaRecorder = new MediaRecorder(stream, { mimeType });
mediaRecorder.ondataavailable = e => { if (e.data.size > 0) audioChunks.push(e.data); };
mediaRecorder.onstop = () => {
audioBlob = new Blob(audioChunks, { type: mimeType });
stream.getTracks().forEach(t => t.stop());
if (audioBlob.size > 0) downloadBtn.disabled = false;
};
mediaRecorder.start();
} catch (e) {
console.warn('MediaRecorder unavailable:', e);
}
} }
state.isRecording = true;
recordBtn.classList.add('recording');
recordHint.textContent = 'Tippen zum Stoppen';
// Show listening indicator — not editable, no interim text
transcriptBox.textContent = 'Слушаю…';
transcriptBox.classList.remove('empty');
clearTimeout(recordingTimer); clearTimeout(recordingTimer);
recordingTimer = setTimeout(stopRecording, MAX_RECORD_SECONDS * 1000); recordingTimer = setTimeout(stopRecording, MAX_RECORD_SECONDS * 1000);
} }
function stopRecording() { function stopRecording(discard = false) {
clearTimeout(recordingTimer); clearTimeout(recordingTimer);
state.isRecording = false; state.isRecording = false;
if (recognition) { if (discard) _discardNext = true;
try { recognition.stop(); } catch (_) {}
recognition = null;
}
if (mediaRecorder && mediaRecorder.state !== 'inactive') { if (mediaRecorder && mediaRecorder.state !== 'inactive') {
mediaRecorder.stop(); // onstop handles the rest (incl. transcribeAudio on mobile) mediaRecorder.stop(); // onstop fires → transcribeAudio (unless _discardNext)
mediaRecorder = null; mediaRecorder = null;
} }
recordBtn.classList.remove('recording'); recordBtn.classList.remove('recording');
if (!isMobile) { if (discard) {
recordHint.textContent = 'Tippen zum Aufnehmen'; recordHint.textContent = 'Tippen zum Aufnehmen';
transcriptBox.contentEditable = 'true'; transcriptBox.contentEditable = 'true';
} }
// On mobile, recordHint and contentEditable are updated inside transcribeAudio // When not discarding: UI updated inside transcribeAudio after server responds
} }
async function transcribeAudio(blob) { async function transcribeAudio(blob) {
recordBtn.disabled = true; recordBtn.disabled = true;
recordHint.textContent = 'Transkribiere…'; recordHint.textContent = 'Transkribiere…';
transcriptBox.textContent = 'Transkribiere…';
transcriptBox.classList.remove('empty');
try { try {
const res = await authFetch('api/transcribe', { const res = await authFetch('api/transcribe', {
method: 'POST', method: 'POST',
headers: { 'Content-Type': blob.type || 'audio/webm' }, headers: { 'Content-Type': blob.type || 'audio/webm' },
body: blob, body: blob,
}); });
const data = await res.json(); const data = await res.json();
if (!res.ok) throw new Error(data.error || 'Fehler'); if (!res.ok) throw new Error(data.error || 'Fehler');
@@ -308,8 +230,7 @@ async function transcribeAudio(blob) {
recordHint.textContent = 'Fehler nochmal versuchen'; recordHint.textContent = 'Fehler nochmal versuchen';
} finally { } finally {
recordBtn.disabled = false; recordBtn.disabled = false;
if (!state.transcript) recordHint.textContent = 'Tippen zum Aufnehmen'; recordHint.textContent = 'Tippen zum Aufnehmen';
else recordHint.textContent = 'Tippen zum Aufnehmen';
} }
} }
@@ -342,7 +263,7 @@ downloadBtn.addEventListener('click', () => {
// ── Clear ───────────────────────────────────────────────────────────────────── // ── Clear ─────────────────────────────────────────────────────────────────────
clearBtn.addEventListener('click', () => { clearBtn.addEventListener('click', () => {
stopRecording(); stopRecording(true);
state.transcript = ''; state.transcript = '';
state.finalTranscript = ''; state.finalTranscript = '';
transcriptBox.contentEditable = 'false'; transcriptBox.contentEditable = 'false';
@@ -357,7 +278,7 @@ checkBtn.addEventListener('click', async () => {
const answer = state.transcript.trim(); const answer = state.transcript.trim();
if (!answer || state.isChecking) return; if (!answer || state.isChecking) return;
stopRecording(); stopRecording(true);
state.isChecking = true; state.isChecking = true;
checkBtn.disabled = true; checkBtn.disabled = true;
feedbackBox.classList.add('visible'); feedbackBox.classList.add('visible');
@@ -406,7 +327,7 @@ function simpleMarkdown(text) {
// ── Skip / Retry / Next / Done ──────────────────────────────────────────────── // ── Skip / Retry / Next / Done ────────────────────────────────────────────────
skipBtn.addEventListener('click', () => { skipBtn.addEventListener('click', () => {
stopRecording(); stopAudio(); stopRecording(true); stopAudio();
state.questionQueue.shift(); state.questionQueue.shift();
state.questionQueue.length === 0 ? showDone() : loadQuestion(); state.questionQueue.length === 0 ? showDone() : loadQuestion();
}); });
@@ -425,7 +346,7 @@ function showDone() {
showScreen('done'); showScreen('done');
} }
backBtn.addEventListener('click', () => { stopRecording(); stopAudio(); showScreen('topics'); }); backBtn.addEventListener('click', () => { stopRecording(true); stopAudio(); showScreen('topics'); });
document.getElementById('restart-btn').addEventListener('click', () => showScreen('topics')); document.getElementById('restart-btn').addEventListener('click', () => showScreen('topics'));
// ── History ─────────────────────────────────────────────────────────────────── // ── History ───────────────────────────────────────────────────────────────────