You can use keyboard controls (space and arrow keys) to play and skip the recording.
Adjust playback speed for more precise control.
+
+ 🎧 Listen to your recording and mark where you would lift:
+
+
+
+ -
+ Pause the audio at the lift point and click
+ 'Mark this as lifting point' or press
+ ENTER to save it.
+
+ -
+ You can use the keyboard or the buttons below to mark lift points.
+
+ -
+ Keyboard shortcuts:
+
+ - SPACE – Play/Pause
+ - ← – Skip back 0.1s
+ - → – Skip forward 0.1s
+ - ENTER – Mark lift point
+
+
+ -
+ Adjust playback speed if you need more precise control.
+
+
+
-
-
+
+
+ Playing at 0.5x speed
+
+
+
+
+
+
@@ -141,7 +201,7 @@ import html from '../utils/html.js';
border: none;
border-radius: 5px;
cursor: pointer;
- ">⏪ Skip Back 5%
+ ">⏪ Skip Back 0.1s
+ ">Skip Forward 0.1s ⏩
+
-
- Keyboard shortcuts: Spacebar = Play/Pause | ← → = Skip Back/Forward | Enter = Mark Lift Point
-
-
-
-
+
+
+
+
+ 0:00
+ 0:00
+ 0:00
+
+
+
+
+
+
+
+
🔊 Playing back from marked point...
+
If this sounds right, press Submit. Otherwise, choose a new mark point.
+
+ `;
+
+ // Set audio to marked point and play
+ audio.currentTime = liftPointTime;
+ updateDisplay();
+
+ // Play for a few seconds (3 seconds or until end)
+ const playbackDuration = 3;
+ const endTime = Math.min(liftPointTime + playbackDuration, audioDuration);
+
+ audio.play();
+
+ // Set up event to stop playback after duration and restore position
+ const onTimeUpdate = () => {
+ if (audio.currentTime >= endTime) {
+ audio.pause();
+ audio.removeEventListener('timeupdate', onTimeUpdate);
+
+ // Restore original position
+ audio.currentTime = originalPosition;
+ updateDisplay();
+
+ // Update display to show confirmation
+ markedPointDisplay.innerHTML = `
+
+
✓ Lift point marked at: ${formatPreciseTime(liftPointTime)}
+
Preview completed. Press Submit to confirm, ENTER to play again, or mark a new point.
+
+ `;
+
+ // Enable submit button
+ isPlaybackConfirmed = true;
+ updateSubmitButton();
+ }
+ };
+
+ audio.addEventListener('timeupdate', onTimeUpdate);
+ };
+
// Check if we have a stored duration from the recording
const storedDuration = recordingData.audio_duration;
@@ -282,7 +409,8 @@ import html from '../utils/html.js';
// Use stored duration if available and valid
if (storedDuration && isFinite(storedDuration) && storedDuration > 0) {
audioDuration = storedDuration;
- durationDisplay.textContent = formatTime(audioDuration);
+ startTimeDisplay.textContent = '0:00';
+ endTimeDisplay.textContent = formatTime(audioDuration);
updateDisplay();
updateSubmitButton();
console.log('Using stored duration:', audioDuration);
@@ -292,7 +420,8 @@ import html from '../utils/html.js';
// Otherwise try to get duration from audio element
if (audio.duration && isFinite(audio.duration) && audio.duration > 0) {
audioDuration = audio.duration;
- durationDisplay.textContent = formatTime(audioDuration);
+ startTimeDisplay.textContent = '0:00';
+ endTimeDisplay.textContent = formatTime(audioDuration);
updateDisplay();
updateSubmitButton();
console.log('Audio controls setup complete, duration:', audioDuration);
@@ -305,7 +434,8 @@ import html from '../utils/html.js';
// Fallback: estimate duration based on blob size (rough approximation)
const estimatedDuration = Math.max(1, audioBlob.size / 8000); // ~8KB per second rough estimate
audioDuration = estimatedDuration;
- durationDisplay.textContent = formatTime(audioDuration) + ' (est)';
+ startTimeDisplay.textContent = '0:00';
+ endTimeDisplay.textContent = formatTime(audioDuration);
updateDisplay();
updateSubmitButton();
console.log('Using estimated duration:', audioDuration);
@@ -336,7 +466,8 @@ import html from '../utils/html.js';
// Force load the audio
audio.load();
- // Speed control buttons
+ // Speed control buttons (commented out - using fixed 0.5x speed)
+ /*
const speedButtons = document.querySelectorAll('.speed-btn');
speedButtons.forEach(btn => {
btn.addEventListener('click', () => {
@@ -354,16 +485,17 @@ import html from '../utils/html.js';
console.log('Playback speed set to:', speed);
});
});
+ */
- // Skip buttons (5% of total duration)
+ // Skip buttons (0.1 second increments)
skipBackBtn.addEventListener('click', () => {
- const skipAmount = audioDuration * 0.05; // 5% of duration
+ const skipAmount = 0.1; // 0.1 seconds
audio.currentTime = Math.max(0, audio.currentTime - skipAmount);
updateDisplay();
});
skipForwardBtn.addEventListener('click', () => {
- const skipAmount = audioDuration * 0.05; // 5% of duration
+ const skipAmount = 0.1; // 0.1 seconds
audio.currentTime = Math.min(audioDuration, audio.currentTime + skipAmount);
updateDisplay();
});
@@ -380,14 +512,14 @@ import html from '../utils/html.js';
// Audio play event
audio.addEventListener('play', () => {
isPlaying = true;
- playPauseBtn.textContent = '⏸ Pause';
+ playPauseBtn.innerHTML = '⏸ Pause
SPACE';
playPauseBtn.style.backgroundColor = '#f44336';
});
// Audio pause event
audio.addEventListener('pause', () => {
isPlaying = false;
- playPauseBtn.textContent = '▶ Play';
+ playPauseBtn.innerHTML = '▶ Play
SPACE';
playPauseBtn.style.backgroundColor = '#4caf50';
});
@@ -414,16 +546,16 @@ import html from '../utils/html.js';
case 'ArrowLeft':
e.preventDefault();
- // Skip back with left arrow
- const skipBackAmount = audioDuration * 0.05;
+ // Skip back with left arrow (0.1 seconds)
+ const skipBackAmount = 0.1;
audio.currentTime = Math.max(0, audio.currentTime - skipBackAmount);
updateDisplay();
break;
case 'ArrowRight':
e.preventDefault();
- // Skip forward with right arrow
- const skipForwardAmount = audioDuration * 0.05;
+ // Skip forward with right arrow (0.1 seconds)
+ const skipForwardAmount = 0.1;
audio.currentTime = Math.min(audioDuration, audio.currentTime + skipForwardAmount);
updateDisplay();
break;
@@ -432,8 +564,9 @@ import html from '../utils/html.js';
e.preventDefault();
// Mark lift point with Enter
liftPointTime = audio.currentTime;
- markedPointDisplay.textContent = `✓ Lift point marked at: ${formatPreciseTime(liftPointTime)}`;
+ isPlaybackConfirmed = false;
updateSubmitButton();
+ playbackFromMarkedPoint();
break;
}
});
@@ -441,8 +574,9 @@ import html from '../utils/html.js';
// Mark lift point button
markLiftPointBtn.addEventListener('click', () => {
liftPointTime = audio.currentTime;
- markedPointDisplay.textContent = `✓ Lift point marked at: ${formatPreciseTime(liftPointTime)}`;
+ isPlaybackConfirmed = false;
updateSubmitButton();
+ playbackFromMarkedPoint();
});
// Submit button
diff --git a/scripts/record-call.js b/scripts/record-call.js
index d204c18..d72b8b6 100644
--- a/scripts/record-call.js
+++ b/scripts/record-call.js
@@ -90,63 +90,6 @@ import { ParameterType } from 'jspsych';
margin: 5px;
">Accept Recording
-
-
-
Please answer the following questions:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
`;
@@ -295,84 +238,10 @@ import { ParameterType } from 'jspsych';
});
acceptButton.addEventListener("click", () => {
- document.getElementById("recording-controls").style.display = "none";
- document.getElementById("questions-section").style.display = "block";
-
- const finalAudio = document.getElementById("final-playback-audio");
- const originalAudio = document.getElementById("playback-audio");
- finalAudio.src = originalAudio.src;
-
- document.getElementById("recording-status").textContent = "Please answer all questions to continue:";
-
- this.setupQuestionEvents(trial);
+ this.endTrial(trial);
});
}
- setupQuestionEvents(trial) {
- const spellingInput = document.getElementById("spelling-input");
- const languageSelect = document.getElementById("language-select");
- const otherLanguageSection = document.getElementById("other-language-section");
- const otherLanguageInput = document.getElementById("other-language-input");
- const translationSection = document.getElementById("translation-section");
- const translationInput = document.getElementById("translation-input");
- const meaningInput = document.getElementById("meaning-input");
- const submitButton = document.getElementById("submit-answers-button");
-
- const validateForm = () => {
- const spelling = spellingInput.value.trim();
- const language = languageSelect.value;
- const otherLanguage = otherLanguageInput.value.trim();
- const meaning = meaningInput.value.trim();
- const needsOtherLanguage = language === "Other";
- const needsTranslation = language && language !== "English";
- const translation = translationInput.value.trim();
-
- const isValid = spelling && language && meaning &&
- (!needsOtherLanguage || otherLanguage) &&
- (!needsTranslation || translation);
-
- submitButton.disabled = !isValid;
- submitButton.style.opacity = isValid ? "1" : "0.5";
- submitButton.style.cursor = isValid ? "pointer" : "not-allowed";
- };
-
- languageSelect.addEventListener("change", () => {
- const selectedLanguage = languageSelect.value;
- const isEnglish = selectedLanguage === "English";
- const isOther = selectedLanguage === "Other";
-
- translationSection.style.display = isEnglish ? "none" : "block";
- otherLanguageSection.style.display = isOther ? "block" : "none";
-
- if (isEnglish) {
- translationInput.value = "";
- }
- if (!isOther) {
- otherLanguageInput.value = "";
- }
- validateForm();
- });
-
- [spellingInput, languageSelect, otherLanguageInput, translationInput, meaningInput].forEach(element => {
- element.addEventListener("input", validateForm);
- element.addEventListener("change", validateForm);
- });
-
- submitButton.addEventListener("click", () => {
- if (!submitButton.disabled) {
- const finalLanguage = languageSelect.value === "Other" ?
- otherLanguageInput.value.trim() :
- languageSelect.value;
-
- this.endTrialWithAnswers(trial, {
- spelling: spellingInput.value.trim(),
- language: finalLanguage,
- translation: translationInput.value.trim() || null,
- meaning: meaningInput.value.trim()
- });
- }
- });
- }
startRecording() {
try {
@@ -396,7 +265,7 @@ import { ParameterType } from 'jspsych';
}
}
- endTrialWithAnswers(trial, answers) {
+ endTrial(trial) {
this.recorder.removeEventListener("dataavailable", this.data_available_handler);
this.recorder.removeEventListener("start", this.start_event_handler);
this.recorder.removeEventListener("stop", this.stop_event_handler);
@@ -412,10 +281,6 @@ import { ParameterType } from 'jspsych';
stimulus: trial.stimulus,
response: response,
estimated_stimulus_onset: this.recorder_start_time ? Math.round(this.stimulus_start_time - this.recorder_start_time) : null,
- spelling: answers.spelling,
- language: answers.language,
- translation: answers.translation,
- meaning: answers.meaning,
audio_duration: this.audio_duration
};
@@ -430,47 +295,11 @@ import { ParameterType } from 'jspsych';
stimulus: trial.stimulus,
response: null,
estimated_stimulus_onset: null,
- spelling: answers.spelling,
- language: answers.language,
- translation: answers.translation,
- meaning: answers.meaning,
audio_duration: this.audio_duration
});
}
}
- endTrial(display_element, trial) {
- this.recorder.removeEventListener("dataavailable", this.data_available_handler);
- this.recorder.removeEventListener("start", this.start_event_handler);
- this.recorder.removeEventListener("stop", this.stop_event_handler);
-
- this.jsPsych.pluginAPI.clearAllTimeouts();
-
- const reader = new FileReader();
- reader.addEventListener("load", () => {
- const response = reader.result.split(",")[1];
-
- let trial_data = {
- rt: this.stimulus_start_time ? Date.now() - this.stimulus_start_time : null,
- stimulus: trial.stimulus,
- response: response,
- estimated_stimulus_onset: this.recorder_start_time ? Math.round(this.stimulus_start_time - this.recorder_start_time) : null,
- };
-
- this.jsPsych.finishTrial(trial_data);
- });
-
- if (this.current_recording) {
- reader.readAsDataURL(this.current_recording);
- } else {
- this.jsPsych.finishTrial({
- rt: null,
- stimulus: trial.stimulus,
- response: null,
- estimated_stimulus_onset: null,
- });
- }
- }
}
export default jsPsychRecordCall;
\ No newline at end of file
diff --git a/scripts/text-stimuli.js b/scripts/text-stimuli.js
index aedea78..3b4136f 100644
--- a/scripts/text-stimuli.js
+++ b/scripts/text-stimuli.js
@@ -12,7 +12,7 @@ export const textStimuli = {
Lifting calls are phrases, words or sounds that people say or make when they want to lift a heavy object together with someone else. To build this online collection of lifting calls, we will ask you to record one or more lifting calls that you know. We will also ask you to provide us with a written version of the recorded calls and information about the region and language that the calls are used in.
We process this data in accordance with the Austrian Forschungsorganisationsgesetz – FOG and Consent under Art 6 (1) (a) GDPR, public interest (e) and (f) legitimate interest. You have the right to withdraw your consent at any time. To do so and to request the deletion of your data, please contact CEU’s data protection officer at privacy@ceu.edu.
More information about your rights can be found at the controller’s website https://www.ceu.edu/privacy. If you have any questions regarding data protection, please contact CEU's data protection officer at privacy@ceu.edu.
-In case you are accessing this online form through via Prolific, Prolific acts as a data processor and has access to personal data. You can download Prolific’s full privacy notices here: https://prolific.notion.site/Privacy-and-Legal-at-Prolific-395a0b3414cd4d84a2557566256e3d58
+In case you are accessing this online form through Prolific, Prolific acts as a data processor and has access to personal data. You can download Prolific’s full privacy notices here: https://prolific.notion.site/Privacy-and-Legal-at-Prolific-395a0b3414cd4d84a2557566256e3d58
By checking the “I agree” box, you agree to participate in this study. You also confirm you are 18 years or older. To agree: Check the “I agree” box below and then click next to participate in the study. If you do not wish to participate in this study, simply close out of this browser window.