// jsPsych plugin "mark-call": replays the audio recorded on the previous
// trial and lets the participant mark a precise "lifting point" in it.
// The previous trial must have stored base64 audio in its `response` field.
import { ParameterType } from 'jspsych';
import html from '../utils/html.js';

// Plugin metadata consumed by jsPsych (no trial parameters are declared).
const info = { name: "mark-call", parameters: { }, };

class jsPsychMarkCall {
  constructor(jsPsych) {
    this.jsPsych = jsPsych;
  }

  // Expose plugin info on the class, as the jsPsych plugin API expects.
  static { this.info = info; }

  /**
   * Trial entry point. Reads the base64 audio from the previous trial's data,
   * rebuilds it as a Blob + object URL, renders the marking UI, then delegates
   * all event wiring to setupMarkingEvents().
   * @param {HTMLElement} display_element - container jsPsych renders into
   * @param {Object} trial - trial parameters (unused by this plugin)
   */
  trial(display_element, trial) {
    // Get the last recording from the previous trial
    const lastTrialData = this.jsPsych.data.getLastTrialData();
    const recordingData = lastTrialData.values()[0];
    if (!recordingData || !recordingData.response) {
      display_element.innerHTML = `

No recording found from the previous trial.

`;
      return;
    }
    // Convert base64 back to audio blob with error handling
    let audioData;
    let audioBlob;
    try {
      // Create blob from base64 data
      const byteCharacters = atob(recordingData.response);
      const byteNumbers = new Array(byteCharacters.length);
      for (let i = 0; i < byteCharacters.length; i++) {
        byteNumbers[i] = byteCharacters.charCodeAt(i);
      }
      const byteArray = new Uint8Array(byteNumbers);
      audioBlob = new Blob([byteArray], { type: 'audio/ogg' });
      audioData = URL.createObjectURL(audioBlob);
      console.log('Audio blob created, size:', audioBlob.size);
    } catch (error) {
      console.error('Error creating audio blob:', error);
      display_element.innerHTML = `

Error loading audio data.

`;
      return;
    }
    // Check if waveform should be shown
    // NOTE(review): these are Vite build-time env flags — presumably set in a
    // .env file; string comparison means anything other than 'true' disables.
    const showWaveform = import.meta.env.VITE_SHOW_WAVEFORM === 'true';
    const allowSkip = import.meta.env.VITE_ALLOW_SKIP_BUTTONS === 'true';
    display_element.innerHTML = `

🎧 Listen to your recording and mark where you would lift:

  • Press SPACE to play the audio and SPACE again to pause at the lifting point.
  • You may adjust the lifting point by clicking and dragging the red line on the playback indicator.
  • Once you have paused at the lifting point, press ENTER to confirm your selection before submitting.
Playing at 0.5x speed
${allowSkip ? `
` : ''}
${showWaveform ? `
0:00 0:00 0:00
` : `
Current time: 0:00 / 0:00
Click and drag the red line to adjust the lifting point.
`}
`;
    // Set the audio source after creating the element
    const audio = document.getElementById('playback-audio');
    audio.src = audioData;
    console.log('Audio source set to blob URL:', audioData);
    console.log('Audio blob size:', audioBlob.size, 'bytes');
    this.setupMarkingEvents(recordingData, audioBlob, showWaveform);
  }

  /**
   * Wires all playback, scrubbing, marking and submission behaviour onto the
   * DOM rendered by trial(). Ends the trial via jsPsych.finishTrial() when
   * the participant submits a confirmed lifting point.
   * @param {Object} recordingData - previous trial's data row (base64 audio in
   *   `.response`, optional `.audio_duration`, plus stimulus metadata)
   * @param {Blob} audioBlob - decoded audio blob (used for the waveform and as
   *   a duration-estimation fallback)
   * @param {boolean} [showWaveform=true] - whether the waveform UI is enabled
   */
  setupMarkingEvents(recordingData, audioBlob, showWaveform = true) {
    const audio = document.getElementById('playback-audio');
    const playPauseBtn = document.getElementById('play-pause-btn');
    // Progress bar elements
    const startTimeDisplay = document.getElementById('start-time');
    const currentTimeDisplay = document.getElementById('current-time');
    const endTimeDisplay = document.getElementById('end-time');
    const progressBar = document.getElementById('progress-bar');
    const skipBackBtn = document.getElementById('skip-back-btn');
    const skipForwardBtn = document.getElementById('skip-forward-btn');
    const markLiftPointBtn = document.getElementById('mark-lift-point-btn');
    const markedPointDisplay = document.getElementById('marked-point-display');
    const submitButton = document.getElementById('submit-lift-point-btn');
    // Waveform elements
    const waveformCanvas = document.getElementById('waveform-canvas');
    const playbackIndicator = document.getElementById('playback-indicator');
    const ctx = waveformCanvas ? waveformCanvas.getContext('2d') : null;
    // Scrubbing area elements (for when waveform is disabled)
    const scrubArea = document.getElementById('scrub-area');
    const scrubIndicator = document.getElementById('scrub-indicator');
    // Mutable playback/marking state shared by the closures below.
    let audioDuration = 0;
    let liftPointTime = null;
    let isPlaying = false;
    let waveformData = null;
    let canvasWidth = 0;
    let canvasHeight = 0;
    let isDragging = false;
    let wasPlayingBeforeDrag = false;
    // Format time as MM:SS
    const formatTime = (seconds) => {
      const minutes = Math.floor(seconds / 60);
      const secs = Math.floor(seconds % 60);
      return `${minutes}:${secs.toString().padStart(2, '0')}`;
    };
    // Format time with precision for lift point
    const formatPreciseTime = (seconds) => {
      const minutes = Math.floor(seconds / 60);
      const secs = (seconds % 60).toFixed(2);
      return `${minutes}:${secs.padStart(5, '0')}s`;
    };
    // Setup canvas dimensions
    const setupCanvas = () => {
      if (!waveformCanvas || !ctx) {
        console.error('Canvas or context not available');
        return false;
      }
      const rect = waveformCanvas.getBoundingClientRect();
      if (rect.width === 0 || rect.height === 0) {
        console.warn('Canvas has zero dimensions, retrying...');
        return false;
      }
      // Scale the canvas backing store for high-DPI displays.
      const dpr = window.devicePixelRatio || 1;
      canvasWidth = rect.width * dpr;
      canvasHeight = rect.height * dpr;
      waveformCanvas.width = canvasWidth;
      waveformCanvas.height = canvasHeight;
      ctx.scale(dpr, dpr);
      return true;
    };
    // Generate waveform data from audio
    // Decodes the blob via Web Audio, then averages |amplitude| over fixed-size
    // blocks (one value per ~2 device pixels) into waveformData.
    const generateWaveform = async () => {
      try {
        // Ensure canvas is properly set up
        if (!setupCanvas()) {
          console.error('Failed to setup canvas for waveform');
          throw new Error('Canvas setup failed');
        }
        const audioContext = new (window.AudioContext || window.webkitAudioContext)();
        const arrayBuffer = await audioBlob.arrayBuffer();
        const audioBuffer = await audioContext.decodeAudioData(arrayBuffer);
        const rawData = audioBuffer.getChannelData(0);
        const samples = Math.floor(canvasWidth / 2); // Sample every 2 pixels
        // Guard against invalid sample count
        if (samples <= 0) {
          throw new Error('Invalid canvas width for waveform generation');
        }
        const blockSize = Math.floor(rawData.length / samples);
        const filteredData = [];
        for (let i = 0; i < samples; i++) {
          let blockStart = blockSize * i;
          let sum = 0;
          for (let j = 0; j < blockSize; j++) {
            sum += Math.abs(rawData[blockStart + j]);
          }
          filteredData.push(sum / blockSize);
        }
        waveformData = filteredData;
        drawWaveform();
      } catch (error) {
        console.error('Error generating waveform:', error);
        // Fallback to progress bar if waveform fails
        document.getElementById('fallback-progress').style.display = 'block';
        waveformCanvas.parentElement.style.display = 'none';
      }
    };
    // Draw waveform on canvas
    const drawWaveform = () => {
      if (!waveformData || !ctx) return;
      // Use the container's logical dimensions for consistent rendering
      const rect = waveformCanvas.getBoundingClientRect();
      const logicalWidth = rect.width;
      const logicalHeight = rect.height;
      // Clear the entire canvas using its actual dimensions
      ctx.clearRect(0, 0, logicalWidth, logicalHeight);
      ctx.fillStyle = '#e0e0e0';
      ctx.fillRect(0, 0, logicalWidth, logicalHeight);
      // Guard against invalid dimensions
      if (logicalWidth <= 0 || logicalHeight <= 0 || waveformData.length === 0) return;
      const barWidth = logicalWidth / waveformData.length;
      const maxAmplitude = Math.max(...waveformData);
      // Guard against invalid amplitude data
      if (maxAmplitude <= 0) return;
      ctx.fillStyle = '#007cba';
      // Bars are normalized to the loudest block and vertically centered.
      for (let i = 0; i < waveformData.length; i++) {
        const barHeight = (waveformData[i] / maxAmplitude) * logicalHeight * 0.8;
        const x = i * barWidth;
        const y = (logicalHeight - barHeight) / 2;
        ctx.fillRect(x, y, Math.max(1, barWidth - 1), barHeight);
      }
    };
    // Seek to specific time (clamped to [0, audioDuration]); no-op until a
    // duration has been established.
    const seekTo = (seconds) => {
      if (audioDuration > 0) {
        audio.currentTime = Math.max(0, Math.min(seconds, audioDuration));
        updateDisplay();
      }
    };
    // Update display function — refreshes time text, fallback progress bar,
    // and whichever position indicator (waveform or scrub area) is active.
    const updateDisplay = () => {
      const currentTime = audio.currentTime;
      const progress = audioDuration > 0 ? (currentTime / audioDuration * 100) : 0;
      if (currentTimeDisplay) {
        currentTimeDisplay.textContent = formatTime(currentTime);
      }
      // Update progress bar (fallback)
      if (progressBar) {
        progressBar.style.width = `${progress}%`;
      }
      // Update waveform playback indicator (only if not dragging)
      if (showWaveform && playbackIndicator && waveformCanvas && !isDragging && audioDuration > 0) {
        const rect = waveformCanvas.getBoundingClientRect();
        const indicatorPosition = (progress / 100) * rect.width;
        playbackIndicator.style.left = `${Math.max(0, Math.min(indicatorPosition, rect.width - 2))}px`;
      }
      // Update scrub area indicator when waveform is disabled
      if (!showWaveform && scrubIndicator && scrubArea && !isDragging && audioDuration > 0) {
        const rect = scrubArea.getBoundingClientRect();
        const indicatorPosition = (progress / 100) * rect.width;
        scrubIndicator.style.left = `${Math.max(0, Math.min(indicatorPosition, rect.width - 2))}px`;
      }
    };
    // Update submit button state — submission requires both a marked point
    // AND a completed confirmation playback (isPlaybackConfirmed).
    let isPlaybackConfirmed = false;
    const updateSubmitButton = () => {
      if (liftPointTime !== null && isPlaybackConfirmed) {
        submitButton.disabled = false;
        submitButton.style.opacity = '1';
        submitButton.style.cursor = 'pointer';
      } else {
        submitButton.disabled = true;
        submitButton.style.opacity = '0.5';
        submitButton.style.cursor = 'not-allowed';
      }
    };
    // Function to play back from marked point for confirmation
    const playbackFromMarkedPoint = () => {
      if (liftPointTime === null) return;
      // Store current position to restore later
      const originalPosition = audio.currentTime;
      // Show confirmation message
      markedPointDisplay.innerHTML = `

🔊 Playing back from marked point...

If this sounds right, press Submit. Otherwise, choose a new mark point.

`;
      // Set audio to marked point and play
      audio.currentTime = liftPointTime;
      updateDisplay();
      // Play for a few seconds (3 seconds or until end)
      const playbackDuration = 3;
      const endTime = Math.min(liftPointTime + playbackDuration, audioDuration);
      audio.play();
      // Set up event to stop playback after duration and restore position
      const onTimeUpdate = () => {
        if (audio.currentTime >= endTime) {
          audio.pause();
          audio.removeEventListener('timeupdate', onTimeUpdate);
          // Restore original position
          audio.currentTime = originalPosition;
          updateDisplay();
          // Update display to show confirmation
          markedPointDisplay.innerHTML = `

✓ Lifting point confirmed at: ${formatPreciseTime(liftPointTime)}

Preview completed. Press Submit to continue, ENTER to play again, or mark a new point.

`;
          // Enable submit button
          isPlaybackConfirmed = true;
          updateSubmitButton();
        }
      };
      audio.addEventListener('timeupdate', onTimeUpdate);
    };
    // Check if we have a stored duration from the recording
    const storedDuration = recordingData.audio_duration;
    // Wait for audio to load with better error handling.
    // Duration resolution order: stored duration → audio element metadata
    // (retried up to maxRetries) → blob-size estimate.
    let retryCount = 0;
    const maxRetries = 10; // Reduced since we might have stored duration
    const setupAudioControls = () => {
      console.log(`Setup attempt ${retryCount + 1}, duration:`, audio.duration, 'ready state:', audio.readyState, 'stored duration:', storedDuration);
      console.log('Audio duration isFinite:', isFinite(audio.duration), 'Audio duration !== Infinity:', audio.duration !== Infinity);
      // Use stored duration if available and valid
      if (storedDuration && isFinite(storedDuration) && storedDuration > 0) {
        audioDuration = storedDuration;
        if (startTimeDisplay) startTimeDisplay.textContent = '0:00';
        if (endTimeDisplay) endTimeDisplay.textContent = formatTime(audioDuration);
        updateDisplay();
        updateSubmitButton();
        // Initialize waveform only if enabled
        if (showWaveform) {
          generateWaveform();
        }
        console.log('Using stored duration:', audioDuration);
        return;
      }
      // Otherwise try to get duration from audio element (reject Infinity)
      // NOTE(review): blob/MediaRecorder audio often reports Infinity until
      // fully seeked, hence the explicit Infinity rejection and retries.
      if (audio.duration && isFinite(audio.duration) && audio.duration > 0 && audio.duration !== Infinity) {
        audioDuration = audio.duration;
        if (startTimeDisplay) startTimeDisplay.textContent = '0:00';
        if (endTimeDisplay) endTimeDisplay.textContent = formatTime(audioDuration);
        updateDisplay();
        updateSubmitButton();
        // Initialize waveform only if enabled
        if (showWaveform) {
          generateWaveform();
        }
        console.log('Audio controls setup complete, duration:', audioDuration);
      } else if (retryCount < maxRetries) {
        // Retry after a short delay if duration is not available
        retryCount++;
        setTimeout(setupAudioControls, 100);
      } else {
        console.warn('Using fallback duration estimation');
        // Fallback: estimate duration based on blob size (rough approximation)
        const estimatedDuration = Math.max(1, audioBlob.size / 8000); // ~8KB per second rough estimate
        audioDuration = estimatedDuration;
        if (startTimeDisplay) startTimeDisplay.textContent = '0:00';
        if (endTimeDisplay) endTimeDisplay.textContent = formatTime(audioDuration);
        updateDisplay();
        updateSubmitButton();
        // Initialize waveform even with estimated duration (only if enabled)
        if (showWaveform) {
          generateWaveform();
        }
        console.log('Using estimated duration:', audioDuration);
      }
    };
    audio.addEventListener('loadedmetadata', setupAudioControls);
    audio.addEventListener('loadeddata', setupAudioControls);
    audio.addEventListener('canplay', setupAudioControls);
    // Try to set up immediately if we have stored duration
    if (storedDuration) {
      setupAudioControls();
    }
    // Add error handling for audio loading
    audio.addEventListener('error', (e) => {
      console.error('Audio loading error:', e);
      console.log('Trying fallback with data URL...');
      // Fallback to data URL if blob URL fails
      audio.src = `data:audio/ogg;base64,${recordingData.response}`;
      audio.load();
    });
    // Set default playback speed to 0.5x
    audio.playbackRate = 0.5;
    // Force load the audio
    audio.load();
    // Handle window resize to redraw waveform (only if waveform is enabled)
    if (showWaveform) {
      window.addEventListener('resize', () => {
        if (waveformData) {
          if (setupCanvas()) {
            drawWaveform();
          }
        }
      });
    }
    // Mouse event handlers for dragging the playback indicator.
    // Playback is paused while dragging and resumed on mouseup if it was
    // playing before the drag started.
    const handleMouseDown = (e) => {
      isDragging = true;
      wasPlayingBeforeDrag = isPlaying;
      if (isPlaying) {
        audio.pause();
      }
      if (showWaveform && playbackIndicator) {
        playbackIndicator.style.cursor = 'grabbing';
      }
      e.preventDefault();
      handleMouseMove(e); // Immediately update position
    };
    const handleMouseMove = (e) => {
      if (!isDragging || audioDuration === 0) return;
      let rect, indicator;
      if (showWaveform && waveformCanvas) {
        rect = waveformCanvas.getBoundingClientRect();
        indicator = playbackIndicator;
      } else if (!showWaveform && scrubArea) {
        rect = scrubArea.getBoundingClientRect();
        indicator = scrubIndicator;
      } else {
        return;
      }
      const x = Math.max(0, Math.min(e.clientX - rect.left, rect.width - 2));
      const progress = x / rect.width;
      const newTime = progress * audioDuration;
      // Update indicator position immediately
      if (indicator) {
        indicator.style.left = `${x}px`;
      }
      // Update time display
      if (currentTimeDisplay) {
        currentTimeDisplay.textContent = formatTime(newTime);
      }
      // Update audio position
      seekTo(newTime);
    };
    const handleMouseUp = () => {
      if (!isDragging) return;
      isDragging = false;
      if (showWaveform && playbackIndicator) {
        playbackIndicator.style.cursor = 'grab';
      }
      // Resume playback if it was playing before drag
      if (wasPlayingBeforeDrag) {
        audio.play();
      }
    };
    // Add event listeners for dragging
    if (showWaveform && playbackIndicator) {
      playbackIndicator.addEventListener('mousedown', handleMouseDown);
      document.addEventListener('mousemove', handleMouseMove);
      document.addEventListener('mouseup', handleMouseUp);
    } else if (!showWaveform && scrubIndicator) {
      scrubIndicator.addEventListener('mousedown', handleMouseDown);
      document.addEventListener('mousemove', handleMouseMove);
      document.addEventListener('mouseup', handleMouseUp);
    }
    // Also allow clicking anywhere to seek
    if (showWaveform && waveformCanvas) {
      waveformCanvas.addEventListener('click', (e) => {
        if (isDragging) return; // Don't handle click if we're dragging
        const rect = waveformCanvas.getBoundingClientRect();
        const x = Math.max(0, Math.min(e.clientX - rect.left, rect.width - 2));
        const progress = x / rect.width;
        const newTime = progress * audioDuration;
        seekTo(newTime);
      });
    } else if (!showWaveform && scrubArea) {
      scrubArea.addEventListener('click', (e) => {
        if (isDragging) return; // Don't handle click if we're dragging
        const rect = scrubArea.getBoundingClientRect();
        const x = Math.max(0, Math.min(e.clientX - rect.left, rect.width - 2));
        const progress = x / rect.width;
        const newTime = progress * audioDuration;
        seekTo(newTime);
      });
    }
    // Speed control buttons (commented out - using fixed 0.5x speed)
    /* const speedButtons = document.querySelectorAll('.speed-btn'); speedButtons.forEach(btn => { btn.addEventListener('click', () => { const speed = parseFloat(btn.dataset.speed); audio.playbackRate = speed; // Update button styles speedButtons.forEach(b => { b.style.backgroundColor = '#ddd'; b.style.color = '#333'; }); btn.style.backgroundColor = '#007cba'; btn.style.color = 'white'; console.log('Playback speed set to:', speed); }); }); */
    // Skip buttons (0.1 second increments) - only if waveform is enabled
    if (showWaveform && skipBackBtn) {
      skipBackBtn.addEventListener('click', () => {
        const skipAmount = 0.1; // 0.1 seconds
        audio.currentTime = Math.max(0, audio.currentTime - skipAmount);
        updateDisplay();
      });
    }
    if (showWaveform && skipForwardBtn) {
      skipForwardBtn.addEventListener('click', () => {
        const skipAmount = 0.1; // 0.1 seconds
        audio.currentTime = Math.min(audioDuration, audio.currentTime + skipAmount);
        updateDisplay();
      });
    }
    // Play/Pause button
    playPauseBtn.addEventListener('click', () => {
      if (isPlaying) {
        audio.pause();
      } else {
        audio.play();
      }
    });
    // Audio play event
    audio.addEventListener('play', () => {
      isPlaying = true;
      playPauseBtn.innerHTML = '⏸ Pause SPACE';
      playPauseBtn.style.backgroundColor = '#f44336';
    });
    // Audio pause event
    audio.addEventListener('pause', () => {
      isPlaying = false;
      playPauseBtn.innerHTML = '▶ Play SPACE';
      playPauseBtn.style.backgroundColor = '#4caf50';
    });
    // Update display during playback
    audio.addEventListener('timeupdate', updateDisplay);
    // Keyboard controls
    // NOTE(review): this keydown listener — like the mousemove/mouseup and
    // resize listeners above — is attached to document/window and never
    // removed before finishTrial, so it persists into later trials; consider
    // removing these handlers in the submit handler.
    document.addEventListener('keydown', (e) => {
      // Only handle keys if we're not typing in a text field
      if (e.target.tagName === 'INPUT' || e.target.tagName === 'TEXTAREA') {
        return;
      }
      switch(e.code) {
        case 'Space':
          e.preventDefault();
          // Play/Pause with spacebar
          if (isPlaying) {
            audio.pause();
          } else {
            audio.play();
          }
          break;
        case 'ArrowLeft':
          if (showWaveform) {
            e.preventDefault();
            // Skip back with left arrow (0.1 seconds)
            const skipBackAmount = 0.1;
            audio.currentTime = Math.max(0, audio.currentTime - skipBackAmount);
            updateDisplay();
          }
          break;
        case 'ArrowRight':
          if (showWaveform) {
            e.preventDefault();
            // Skip forward with right arrow (0.1 seconds)
            const skipForwardAmount = 0.1;
            audio.currentTime = Math.min(audioDuration, audio.currentTime + skipForwardAmount);
            updateDisplay();
          }
          break;
        case 'Enter':
          e.preventDefault();
          // Mark lift point with Enter (resets confirmation, replays preview)
          liftPointTime = audio.currentTime;
          isPlaybackConfirmed = false;
          updateSubmitButton();
          playbackFromMarkedPoint();
          break;
      }
    });
    // Mark lift point button
    // NOTE(review): assumes #mark-lift-point-btn exists in the rendered HTML
    // regardless of showWaveform — confirm, else this throws on null.
    markLiftPointBtn.addEventListener('click', () => {
      liftPointTime = audio.currentTime;
      isPlaybackConfirmed = false;
      updateSubmitButton();
      playbackFromMarkedPoint();
    });
    // Submit button — packages the marked point plus the previous trial's
    // stimulus metadata and ends the trial.
    submitButton.addEventListener('click', () => {
      if (liftPointTime === null) return;
      const trialData = {
        lift_point_seconds: liftPointTime,
        lift_point_formatted: formatPreciseTime(liftPointTime),
        audio_duration: audioDuration,
        original_recording_data: {
          spelling: recordingData.spelling,
          language: recordingData.language,
          translation: recordingData.translation,
          meaning: recordingData.meaning,
          rt: recordingData.rt,
          stimulus: recordingData.stimulus
        }
      };
      // Clean up object URL to prevent memory leaks
      if (audio.src && audio.src.startsWith('blob:')) {
        URL.revokeObjectURL(audio.src);
      }
      this.jsPsych.finishTrial(trialData);
    });
  }
}

export default jsPsychMarkCall;