first demo

This commit is contained in:
2025-07-20 01:59:08 +02:00
parent 544b5d2b1e
commit 85ffeb480d
11 changed files with 2561 additions and 22 deletions

11
data.json Normal file
View File

@@ -0,0 +1,11 @@
[
{
"rt": 1506,
"stimulus": "<div class=\"max-w-3xl mx-auto space-y-6 text-left\">\n <p class=\"font-semibold\">\n Please specify which of the following statements you agree with by checking the corresponding “I agree” box:\n </p>\n\n <div class=\"space-y-4\">\n <div>\n <p class=\"font-semibold\">Statement 1:</p>\n <p>\n I agree that the audio recording of my lifting call will be made available to the public as part of the \n Lifting Call Collection together with the geographic location and language that this call is associated with. \n I understand that, although no personal data will be attached to the recording, my voice in the audio \n recording remains identifiable.\n </p>\n </div>\n\n <div class=\"mx-auto my-6\">\n<input type=\"checkbox\" id=\"statement_1_checkbox\" />\n<label for=\"statement_1_checkbox\">I agree</label>\n</div>\n\n <div>\n <p class=\"font-semibold\">Statement 2:</p>\n <p>\n I agree that a written transcript of my lifting call will be made available to the public as part of the \n Lifting Call Collection together with the geographic location and language that this call is associated with.\n </p>\n </div>\n\n <div class=\"mx-auto my-6\">\n<input type=\"checkbox\" id=\"statement_2_checkbox\" />\n<label for=\"statement_2_checkbox\">I agree</label>\n</div>\n\n </div>\n </div>",
"response": 0,
"trial_type": "html-button-response",
"trial_index": 0,
"plugin_version": "2.1.0",
"time_elapsed": 1509
}
]

174
index.js
View File

@@ -3,13 +3,25 @@ import "jspsych/css/jspsych.css";
import "./styles.css";
import { delayed_redirect } from "./utils/helpers.js";
import jsPsychHtmlKeyboardResponse from "@jspsych/plugin-html-keyboard-response";
import { textStimuli } from './scripts/text_stimuli';
import { textStimuli } from './scripts/text-stimuli.js';
import jsPsychHtmlButtonResponse from '@jspsych/plugin-html-button-response';
import jsPsychSurvey from '@jspsych/plugin-survey';
import jsPsychRecordCall from './scripts/record-call.js';
import jsPsychMarkCall from './scripts/mark-call.js';
import '@jspsych/plugin-survey/css/survey.css'
import jsPsychInitializeMicrophone from '@jspsych/plugin-initialize-microphone';
import jsPsychBrowserCheck from '@jspsych/plugin-browser-check';
const debug = import.meta.env.VITE_DEBUG === 'true';
const debug = import.meta.env.VITE_DEBUG;
const jsPsych = initJsPsych({
on_finish: function() {
jsPsych.getDisplayElement().innerHTML = textStimuli.complete;
if(debug) {
jsPsych.data.displayData('json');
} else {
jsPsych.getDisplayElement().innerHTML = textStimuli.complete;
}
},
on_close: function() {
delayed_redirect(import.meta.env.VITE_CLOSED_URL);
@@ -21,12 +33,158 @@ const jsPsych = initJsPsych({
},
});
const demo_trial = {
type: jsPsychHtmlKeyboardResponse,
stimulus: `<h1 class="text-2xl font-bold">Hello, world!</h1>`,
choices: [''],
// Gate the experiment on a Chromium-based browser: the recording plugins
// depend on MediaRecorder behaviour verified only on Chrome/Edge.
const browser_check = {
  type: jsPsychBrowserCheck,
  // data.browser is the browser name detected by the plugin.
  inclusion_function: data => ['chrome', 'edge-chromium'].includes(data.browser),
  exclusion_message: () => {
    return `<p>You must use Google Chrome or Microsoft Edge to complete this experiment.</p>`;
  },
};
const timeline = [demo_trial];
// Information/consent page: Continue stays disabled until the participant
// ticks the consent checkbox, and re-disables if the box is unticked again
// (the original only ever enabled the button).
const info_consent = {
  type: jsPsychHtmlButtonResponse,
  stimulus: textStimuli.info_consent,
  choices: ['Continue'],
  on_load: function() {
    const continue_button = document.getElementsByClassName('jspsych-btn')[0];
    continue_button.disabled = true;
    const info_consent_checkbox = document.getElementById('info_consent_checkbox');
    info_consent_checkbox.addEventListener('change', function() {
      // Mirror the checkbox state so unticking revokes the ability to continue.
      continue_button.disabled = !info_consent_checkbox.checked;
    });
  },
};
// Publication-consent page. Two independent consent statements are mirrored
// into the jsPsych data properties statement_1_consent / statement_2_consent
// on every change. Continue is held disabled for a 15 s reading period;
// debug runs skip the wait entirely (the original still waited one interval
// tick in debug and briefly showed "Continue (-1s)").
const publication_consent = {
  type: jsPsychHtmlButtonResponse,
  stimulus: textStimuli.publication_consent,
  choices: ['Continue'],
  on_load: function() {
    const continue_button = document.getElementsByClassName('jspsych-btn')[0];
    continue_button.disabled = true;
    const originalText = continue_button.textContent;
    const statement_1_checkbox = document.getElementById('statement_1_checkbox');
    const statement_2_checkbox = document.getElementById('statement_2_checkbox');
    // Record the current checkbox states on the jsPsych data object.
    const recordConsent = () => {
      jsPsych.data.addProperties({
        statement_1_consent: statement_1_checkbox.checked,
        statement_2_consent: statement_2_checkbox.checked,
      });
    };
    recordConsent(); // store the initial (unchecked) state
    statement_1_checkbox.addEventListener('change', recordConsent);
    statement_2_checkbox.addEventListener('change', recordConsent);
    // Enforce the reading period before Continue becomes available.
    let timeLeft = debug ? 0 : 15;
    if (timeLeft <= 0) {
      // Debug mode: no waiting period, enable immediately.
      continue_button.disabled = false;
      return;
    }
    // Show the remaining time right away instead of after the first tick.
    continue_button.textContent = `Continue (${timeLeft}s)`;
    const countdown = setInterval(() => {
      timeLeft--;
      if (timeLeft <= 0) {
        clearInterval(countdown);
        continue_button.disabled = false;
        continue_button.textContent = originalText;
      } else {
        continue_button.textContent = `Continue (${timeLeft}s)`;
      }
    }, 1000);
  },
};
// Microphone-permission trial: asks the participant to grant mic access.
const initialize_microphone = {
  type: jsPsychInitializeMicrophone
};
// Records one lifting call (custom plugin, see scripts/record-call.js).
const recording_page = {
  type: jsPsychRecordCall,
};
// Lets the participant mark the lift point in the recording just made
// (custom plugin, see scripts/mark-call.js).
const mark_page = {
  type: jsPsychMarkCall,
};
// Static warning page about data quality, acknowledged with Continue.
const data_quality_warning = {
  type: jsPsychHtmlButtonResponse,
  stimulus: textStimuli.data_quality_warning,
  choices: ['Continue'],
};
// Post-recording choice: press 'r' to record another call or 'q' to quit.
const exit_page = {
  type: jsPsychHtmlKeyboardResponse,
  stimulus: textStimuli.recording_saved,
  choices: ['r', 'q'],
};
// Repeat the record → mark → exit cycle until the participant answers the
// exit page with 'q'.
const recording_loop = {
  timeline: [recording_page, mark_page, exit_page],
  loop_function: function(data) {
    // The exit page is the last trial in the loop; keep looping unless the
    // response to it was the quit key.
    const lastResponse = jsPsych.data.getLastTrialData().trials[0].response;
    return !jsPsych.pluginAPI.compareKeys(lastResponse, 'q');
  }
}
// Free-text feedback survey shown at the very end of the experiment.
// The comment field is required for real runs but optional in debug mode.
const post_experiment_survey = {
  type: jsPsychSurvey,
  survey_json: {
    elements:
      [
        {
          type: 'comment',
          title: "Did you have any thoughts or observations about this study?",
          name: 'thoughts_observations',
          isRequired: !debug,
        },
      ]
  }
};
// Assemble the experiment timeline and start jsPsych. Debug runs skip both
// consent pages and front-load the data-quality warning; real runs show the
// full sequence. (Same trial order as before, built in one expression.)
const timeline = debug
  ? [
      data_quality_warning,
      browser_check,
      initialize_microphone,
      recording_loop,
      post_experiment_survey,
    ]
  : [
      browser_check,
      info_consent,
      publication_consent,
      initialize_microphone,
      data_quality_warning,
      recording_loop,
      post_experiment_survey,
    ];
jsPsych.run(timeline);

1014
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -3,19 +3,26 @@
"private": true,
"version": "1.0.0",
"scripts": {
"dev": "vite",
"dev": "vite --host",
"build": "vite build",
"preview": "vite preview",
"deploy": "npm run build && NODE_ENV=production node scripts/deploy.js",
"deploy-dev": "npm run build && NODE_ENV=development node scripts/deploy.js"
},
"devDependencies": {
"@vitejs/plugin-basic-ssl": "^2.1.0",
"terser": "^5.39.0",
"vite": "^6.3.1"
},
"dependencies": {
"@jspsych/plugin-browser-check": "^2.1.0",
"@jspsych/plugin-html-button-response": "^2.1.0",
"@jspsych/plugin-html-keyboard-response": "^2.1.0",
"@jspsych/plugin-initialize-microphone": "^2.1.0",
"@jspsych/plugin-survey": "^3.0.0",
"@tailwindcss/vite": "^4.1.4",
"archiver": "^7.0.1",
"dotenv": "^17.2.0",
"jspsych": "^8.2.1",
"prettier-plugin-html-template-literals": "^1.0.5",
"tailwindcss": "^4.1.4",

87
scripts/deploy.js Normal file
View File

@@ -0,0 +1,87 @@
const { execSync } = require('child_process');
const path = require('path');
const fs = require('fs');
const archiver = require('archiver');
const dotenv = require('dotenv');

// Load environment variables based on NODE_ENV.
const envFile =
  process.env.NODE_ENV === 'production'
    ? '.env.production'
    : '.env.development';
dotenv.config({ path: path.join(__dirname, '..', envFile) });

/**
 * Zip the built dist/ directory and upload it to VITE_DEPLOY_URL via curl.
 *
 * @returns {Promise<void>} resolves after a successful upload and cleanup;
 *   rejects on archiver errors. Exits the process on missing configuration,
 *   missing build output, or upload failure.
 */
function sendZipFile() {
  return new Promise((resolve, reject) => {
    const distPath = path.join(__dirname, '../dist');
    const zipPath = path.join(__dirname, '../dist.zip');

    // Fail fast when there is no build output to deploy.
    if (!fs.existsSync(distPath)) {
      console.error(`❌ Build output not found at ${distPath} — run the build first`);
      process.exit(1);
    }

    // Stream the archive to dist.zip at maximum compression.
    const output = fs.createWriteStream(zipPath);
    const archive = archiver('zip', {
      zlib: { level: 9 }, // Sets the compression level
    });

    // Once all archive data has been flushed to disk, upload it.
    output.on('close', () => {
      console.log(`✅ Archive created: ${archive.pointer()} total bytes`);
      const deployUrl = process.env.VITE_DEPLOY_URL;
      if (!deployUrl) {
        console.error('❌ VITE_DEPLOY_URL not found in environment variables');
        process.exit(1);
      }
      try {
        console.log(`📤 Sending zip file to ${deployUrl}...`);
        // Quote the interpolated URL so characters like '&' or spaces in it
        // cannot break (or be interpreted by) the shell command.
        execSync(`curl -X POST -F "file=@${zipPath}" "${deployUrl}"`, {
          stdio: 'inherit',
        });
        console.log('✅ Zip file sent successfully');
      } catch (error) {
        console.error('❌ Failed to send zip file:', error.message);
        process.exit(1);
      }
      // Clean up the temporary zip file.
      fs.unlinkSync(zipPath);
      resolve();
    });

    // ENOENT warnings (missing files) are non-fatal; anything else aborts.
    archive.on('warning', err => {
      if (err.code === 'ENOENT') {
        console.warn('⚠️ Archive warning:', err);
      } else {
        reject(err);
      }
    });
    archive.on('error', err => {
      reject(err);
    });

    // Pipe archive data to the file and add dist/ contents at the root.
    archive.pipe(output);
    archive.directory(distPath, false);
    archive.finalize();
  });
}

// Run the deployment.
sendZipFile()
  .then(() => {
    console.log('✅ Deployment successful!');
  })
  .catch(error => {
    console.error('❌ Deployment failed:', error.message);
    process.exit(1);
  });

476
scripts/mark-call.js Normal file
View File

@@ -0,0 +1,476 @@
import { ParameterType } from 'jspsych';
import html from '../utils/html.js';
// Plugin metadata consumed by jsPsych; this plugin takes no parameters —
// it operates entirely on the previous trial's recorded audio.
const info = {
  name: "mark-call",
  parameters: {
  },
};
/**
 * Custom jsPsych plugin: plays back the audio captured by the preceding
 * record-call trial and lets the participant mark the moment of the lift.
 * Finishes the trial with { lift_point_seconds, lift_point_formatted,
 * audio_duration, original_recording_data }.
 */
class jsPsychMarkCall {
  constructor(jsPsych) {
    this.jsPsych = jsPsych;
  }

  static {
    this.info = info;
  }

  trial(display_element, trial) {
    // Render a fatal-error screen whose Continue button ends the trial.
    // NOTE: the previous inline onclick="this.jsPsych.finishTrial()" was
    // broken — inside an inline handler `this` is the button element, not
    // the plugin instance, so clicking threw. Wire the listener instead.
    const showError = (message) => {
      display_element.innerHTML = `
        <div style="text-align: center; padding: 20px;">
          <p style="color: red;">${message}</p>
          <button id="mark-call-error-continue" style="
            padding: 10px 20px;
            font-size: 14px;
            background-color: #007cba;
            color: white;
            border: none;
            border-radius: 5px;
            cursor: pointer;
          ">Continue</button>
        </div>
      `;
      document
        .getElementById('mark-call-error-continue')
        .addEventListener('click', () => this.jsPsych.finishTrial({}));
    };

    // Get the last recording (base64 audio) from the previous trial.
    const lastTrialData = this.jsPsych.data.getLastTrialData();
    const recordingData = lastTrialData.values()[0];
    if (!recordingData || !recordingData.response) {
      showError('No recording found from the previous trial.');
      return;
    }

    // Convert the base64 payload back into a playable audio blob.
    let audioData;
    let audioBlob;
    try {
      const byteCharacters = atob(recordingData.response);
      const byteNumbers = new Array(byteCharacters.length);
      for (let i = 0; i < byteCharacters.length; i++) {
        byteNumbers[i] = byteCharacters.charCodeAt(i);
      }
      const byteArray = new Uint8Array(byteNumbers);
      audioBlob = new Blob([byteArray], { type: 'audio/ogg' });
      audioData = URL.createObjectURL(audioBlob);
      console.log('Audio blob created, size:', audioBlob.size);
    } catch (error) {
      console.error('Error creating audio blob:', error);
      showError('Error loading audio data.');
      return;
    }

    display_element.innerHTML = `
      <div style="text-align: center; padding: 20px;">
        <div style="margin: 20px 0;">
          <audio style="display: none;" id="playback-audio">
            Your browser does not support the audio element.
          </audio>
        </div>
        <div style="margin: 30px 0;">
          <p style="font-weight: bold; margin-bottom: 15px;">Listen to your recording and mark when you would lift:</p>
          <p style="margin: 10px 0; font-size: 14px; color: #666;">You can use keyboard controls (space and arrow keys) to play and skip the recording.</p>
          <p style="margin: 10px 0; font-size: 14px; color: #666;">Adjust playback speed for more precise control.</p>
          <div style="max-width: 500px; margin: 0 auto; background: #f9f9f9; padding: 20px; border-radius: 10px;">
            <div style="margin-bottom: 20px; display: flex; gap: 10px; align-items: center; justify-content: center; flex-wrap: wrap;">
              <button id="play-pause-btn" style="
                padding: 12px 24px;
                font-size: 16px;
                background-color: #4caf50;
                color: white;
                border: none;
                border-radius: 5px;
                cursor: pointer;
                min-width: 100px;
              ">▶ Play</button>
              <div style="display: flex; gap: 5px; align-items: center;">
                <span style="font-size: 14px; color: #666;">Speed:</span>
                <button id="speed-025" class="speed-btn" data-speed="0.25" style="
                  padding: 5px 10px;
                  font-size: 12px;
                  background-color: #ddd;
                  color: #333;
                  border: none;
                  border-radius: 3px;
                  cursor: pointer;
                ">0.25x</button>
                <button id="speed-05" class="speed-btn" data-speed="0.5" style="
                  padding: 5px 10px;
                  font-size: 12px;
                  background-color: #007cba;
                  color: white;
                  border: none;
                  border-radius: 3px;
                  cursor: pointer;
                ">0.5x</button>
                <button id="speed-1" class="speed-btn" data-speed="1" style="
                  padding: 5px 10px;
                  font-size: 12px;
                  background-color: #ddd;
                  color: #333;
                  border: none;
                  border-radius: 3px;
                  cursor: pointer;
                ">1x</button>
              </div>
            </div>
            <div style="margin: 20px 0; display: flex; gap: 10px; align-items: center; justify-content: center; flex-wrap: wrap;">
              <button id="skip-back-btn" style="
                padding: 8px 16px;
                font-size: 14px;
                background-color: #2196f3;
                color: white;
                border: none;
                border-radius: 5px;
                cursor: pointer;
              ">⏪ Skip Back 5%</button>
              <button id="skip-forward-btn" style="
                padding: 8px 16px;
                font-size: 14px;
                background-color: #2196f3;
                color: white;
                border: none;
                border-radius: 5px;
                cursor: pointer;
              ">Skip Forward 5% ⏩</button>
            </div>
            <div id="current-time-display" style="
              font-size: 18px;
              font-weight: bold;
              color: #007cba;
              margin: 20px 0;
              text-align: center;
              min-height: 25px;
            ">Current position: 0:00s</div>
            <div style="display: flex; justify-content: space-between; font-size: 12px; color: #666; margin-bottom: 20px;">
              <span>Duration: <span id="duration-display">0:00</span></span>
              <span>Progress: <span id="progress-display">0%</span></span>
            </div>
            <div style="font-size: 11px; color: #888; text-align: center; margin-bottom: 15px; line-height: 1.4;">
              <strong>Keyboard shortcuts:</strong> Spacebar = Play/Pause | ← → = Skip Back/Forward | Enter = Mark Lift Point
            </div>
            <div style="margin: 20px 0; text-align: center;">
              <button id="mark-lift-point-btn" style="
                padding: 12px 24px;
                font-size: 16px;
                background-color: #ff9800;
                color: white;
                border: none;
                border-radius: 5px;
                cursor: pointer;
              ">Mark This as Lift Point</button>
            </div>
            <div id="marked-point-display" style="
              font-size: 16px;
              font-weight: bold;
              color: #e91e63;
              margin: 15px 0;
              text-align: center;
              min-height: 20px;
            "></div>
          </div>
        </div>
        <button id="submit-lift-point-btn" style="
          padding: 12px 24px;
          font-size: 16px;
          background-color: #007cba;
          color: white;
          border: none;
          border-radius: 5px;
          cursor: pointer;
          margin-top: 20px;
        ">Submit Lift Point</button>
      </div>
    `;

    // Set the audio source after creating the element.
    const audio = document.getElementById('playback-audio');
    audio.src = audioData;
    console.log('Audio source set to blob URL:', audioData);
    console.log('Audio blob size:', audioBlob.size, 'bytes');
    this.setupMarkingEvents(recordingData, audioBlob);
  }

  // Wires up all playback, marking, and submission behaviour for the
  // screen rendered by trial().
  setupMarkingEvents(recordingData, audioBlob) {
    const audio = document.getElementById('playback-audio');
    const playPauseBtn = document.getElementById('play-pause-btn');
    const currentTimeDisplay = document.getElementById('current-time-display');
    const durationDisplay = document.getElementById('duration-display');
    const progressDisplay = document.getElementById('progress-display');
    const skipBackBtn = document.getElementById('skip-back-btn');
    const skipForwardBtn = document.getElementById('skip-forward-btn');
    const markLiftPointBtn = document.getElementById('mark-lift-point-btn');
    const markedPointDisplay = document.getElementById('marked-point-display');
    const submitButton = document.getElementById('submit-lift-point-btn');

    let audioDuration = 0;
    let liftPointTime = null;
    let isPlaying = false;

    // Format time as M:SS.
    const formatTime = (seconds) => {
      const minutes = Math.floor(seconds / 60);
      const secs = Math.floor(seconds % 60);
      return `${minutes}:${secs.toString().padStart(2, '0')}`;
    };
    // Format time with 10 ms precision for the lift point display.
    const formatPreciseTime = (seconds) => {
      const minutes = Math.floor(seconds / 60);
      const secs = (seconds % 60).toFixed(2);
      return `${minutes}:${secs.padStart(5, '0')}s`;
    };

    // Refresh the position/progress readouts from the audio element.
    const updateDisplay = () => {
      const currentTime = audio.currentTime;
      const progress = audioDuration > 0 ? (currentTime / audioDuration * 100).toFixed(1) : 0;
      currentTimeDisplay.textContent = `Current position: ${formatPreciseTime(currentTime)}`;
      progressDisplay.textContent = `${progress}%`;
    };
    // Submit is only usable once a lift point has been marked.
    const updateSubmitButton = () => {
      if (liftPointTime !== null) {
        submitButton.disabled = false;
        submitButton.style.opacity = '1';
        submitButton.style.cursor = 'pointer';
      } else {
        submitButton.disabled = true;
        submitButton.style.opacity = '0.5';
        submitButton.style.cursor = 'not-allowed';
      }
    };
    // Disable submit immediately instead of waiting for audio metadata.
    updateSubmitButton();

    // Mark the current playback position as the lift point.
    const markLiftPoint = () => {
      liftPointTime = audio.currentTime;
      markedPointDisplay.textContent = `✓ Lift point marked at: ${formatPreciseTime(liftPointTime)}`;
      updateSubmitButton();
    };
    // Skip by 5% of the total duration in either direction.
    const skipBy = (direction) => {
      const skipAmount = audioDuration * 0.05;
      audio.currentTime = direction < 0
        ? Math.max(0, audio.currentTime - skipAmount)
        : Math.min(audioDuration, audio.currentTime + skipAmount);
      updateDisplay();
    };

    // Duration may be unavailable on streamed/blob OGG audio; prefer the
    // duration the recording trial stored, retry the element a few times,
    // then fall back to a rough size-based estimate.
    const storedDuration = recordingData.audio_duration;
    let retryCount = 0;
    const maxRetries = 10; // Reduced since we might have stored duration
    const setupAudioControls = () => {
      console.log(`Setup attempt ${retryCount + 1}, duration:`, audio.duration, 'ready state:', audio.readyState, 'stored duration:', storedDuration);
      if (storedDuration && isFinite(storedDuration) && storedDuration > 0) {
        audioDuration = storedDuration;
        durationDisplay.textContent = formatTime(audioDuration);
        updateDisplay();
        updateSubmitButton();
        console.log('Using stored duration:', audioDuration);
        return;
      }
      if (audio.duration && isFinite(audio.duration) && audio.duration > 0) {
        audioDuration = audio.duration;
        durationDisplay.textContent = formatTime(audioDuration);
        updateDisplay();
        updateSubmitButton();
        console.log('Audio controls setup complete, duration:', audioDuration);
      } else if (retryCount < maxRetries) {
        retryCount++;
        setTimeout(setupAudioControls, 100);
      } else {
        console.warn('Using fallback duration estimation');
        // ~8KB per second is a rough OGG bitrate approximation.
        const estimatedDuration = Math.max(1, audioBlob.size / 8000);
        audioDuration = estimatedDuration;
        durationDisplay.textContent = formatTime(audioDuration) + ' (est)';
        updateDisplay();
        updateSubmitButton();
        console.log('Using estimated duration:', audioDuration);
      }
    };
    audio.addEventListener('loadedmetadata', setupAudioControls);
    audio.addEventListener('loadeddata', setupAudioControls);
    audio.addEventListener('canplay', setupAudioControls);
    if (storedDuration) {
      setupAudioControls();
    }

    // If the blob URL fails to load, retry with an inline data URL.
    audio.addEventListener('error', (e) => {
      console.error('Audio loading error:', e);
      console.log('Trying fallback with data URL...');
      audio.src = `data:audio/ogg;base64,${recordingData.response}`;
      audio.load();
    });

    // Default to half speed for more precise marking, then force a load.
    audio.playbackRate = 0.5;
    audio.load();

    // Speed selector: apply the chosen rate and highlight the active button.
    const speedButtons = document.querySelectorAll('.speed-btn');
    speedButtons.forEach(btn => {
      btn.addEventListener('click', () => {
        const speed = parseFloat(btn.dataset.speed);
        audio.playbackRate = speed;
        speedButtons.forEach(b => {
          b.style.backgroundColor = '#ddd';
          b.style.color = '#333';
        });
        btn.style.backgroundColor = '#007cba';
        btn.style.color = 'white';
        console.log('Playback speed set to:', speed);
      });
    });

    skipBackBtn.addEventListener('click', () => skipBy(-1));
    skipForwardBtn.addEventListener('click', () => skipBy(1));

    playPauseBtn.addEventListener('click', () => {
      if (isPlaying) {
        audio.pause();
      } else {
        audio.play();
      }
    });
    audio.addEventListener('play', () => {
      isPlaying = true;
      playPauseBtn.textContent = '⏸ Pause';
      playPauseBtn.style.backgroundColor = '#f44336';
    });
    audio.addEventListener('pause', () => {
      isPlaying = false;
      playPauseBtn.textContent = '▶ Play';
      playPauseBtn.style.backgroundColor = '#4caf50';
    });
    audio.addEventListener('timeupdate', updateDisplay);

    // Keyboard controls. Kept as a named handler so it can be removed when
    // the trial finishes — the original anonymous listener stayed attached
    // to document forever and leaked into later trials.
    const handleKeydown = (e) => {
      // Ignore keys while typing in a text field.
      if (e.target.tagName === 'INPUT' || e.target.tagName === 'TEXTAREA') {
        return;
      }
      switch (e.code) {
        case 'Space':
          e.preventDefault();
          if (isPlaying) {
            audio.pause();
          } else {
            audio.play();
          }
          break;
        case 'ArrowLeft':
          e.preventDefault();
          skipBy(-1);
          break;
        case 'ArrowRight':
          e.preventDefault();
          skipBy(1);
          break;
        case 'Enter':
          e.preventDefault();
          markLiftPoint();
          break;
      }
    };
    document.addEventListener('keydown', handleKeydown);

    markLiftPointBtn.addEventListener('click', markLiftPoint);

    // Submit: package the result, release resources, and end the trial.
    submitButton.addEventListener('click', () => {
      if (liftPointTime === null) return;
      const trialData = {
        lift_point_seconds: liftPointTime,
        lift_point_formatted: formatPreciseTime(liftPointTime),
        audio_duration: audioDuration,
        original_recording_data: {
          spelling: recordingData.spelling,
          language: recordingData.language,
          translation: recordingData.translation,
          meaning: recordingData.meaning,
          rt: recordingData.rt,
          stimulus: recordingData.stimulus
        }
      };
      // Detach the document-level key handler so it cannot fire in
      // subsequent trials.
      document.removeEventListener('keydown', handleKeydown);
      // Clean up the object URL to prevent memory leaks.
      if (audio.src && audio.src.startsWith('blob:')) {
        URL.revokeObjectURL(audio.src);
      }
      this.jsPsych.finishTrial(trialData);
    });
  }
}
export default jsPsychMarkCall;

View File

@@ -0,0 +1,254 @@
let jsPsychVerbalResponse = (function (jspsych) {
  'use strict';

  // Plugin metadata: transcription endpoint, push-to-talk key code,
  // display fragments, and the transcription locale.
  const info = {
    name: "verbal-response",
    parameters: {
      transcription_server_url: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      recording_key: {
        type: jspsych.ParameterType.INT,
        default: 71,
      },
      stimulus: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      start_image: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      video_html: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      red_points_html: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      blue_points_html: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      locale: {
        type: jspsych.ParameterType.STRING,
        default: "en-US",
      }
    },
  };

  /**
   * Push-to-talk verbal-response plugin: the participant holds the
   * recording key while asking for the red or blue object; the audio is
   * sent to an external transcription server and the transcript validated
   * with the global `nlp` (compromise) before the trial can end.
   */
  class VerbalResponsePlugin {
    constructor(jsPsych) {
      this.jsPsych = jsPsych;
      this.rt = null;
      this.recorded_data_chunks = [];
      this.recording_key_down = false;
    }

    trial(display_element, trial) {
      display_element.innerHTML = trial.video_html;
      const pointsContainer = document.getElementById("points-container");
      pointsContainer.innerHTML = pointsContainer.innerHTML + trial.red_points_html + trial.blue_points_html;
      document.getElementById("prompts").innerHTML = trial.stimulus;
      this.recorder = this.jsPsych.pluginAPI.getMicrophoneRecorder();
      this.setupRecordingEvents(display_element, trial);
      // Honour the configurable push-to-talk key (default 71 = 'G');
      // the original hard-coded 71 despite declaring the parameter.
      this.recording_key = trial.recording_key;
      // Bind the key handlers before registering them: passing the raw
      // methods made the browser invoke them with `this` === window, so
      // this.checkRecordingKey threw and recording never worked.
      this.bound_start_recording = this.startRecording.bind(this);
      this.bound_stop_recording = this.stopRecording.bind(this);
      window.addEventListener("keydown", this.bound_start_recording, false);
      window.addEventListener("keyup", this.bound_stop_recording, false);
    }

    // Attach dataavailable/stop/start handlers to the shared recorder.
    setupRecordingEvents(display_element, trial) {
      this.data_available_handler = (e) => {
        if (e.data.size > 0) {
          this.recorded_data_chunks.push(e.data);
        }
      };
      this.stop_event_handler = () => {
        // Assemble the chunks, base64-encode them, then validate.
        const data = new Blob(this.recorded_data_chunks, {type: "audio/ogg"});
        const reader = new FileReader();
        reader.addEventListener("load", () => {
          this.response = reader.result.split(",")[1];
          this.load_resolver();
          this.checkResponse(this.response, trial, display_element)
        });
        reader.readAsDataURL(data);
      };
      this.start_event_handler = (e) => {
        this.recorded_data_chunks.length = 0;
        this.recorder_start_time = e.timeStamp;
      };
      this.recorder.addEventListener("dataavailable", this.data_available_handler);
      this.recorder.addEventListener("stop", this.stop_event_handler);
      this.recorder.addEventListener("start", this.start_event_handler);
    }

    // True when the event is for the configured push-to-talk key.
    checkRecordingKey(e) {
      return e.keyCode === this.recording_key;
    }

    startRecording(e) {
      if (!this.checkRecordingKey(e)) {
        return;
      }
      // Ignore auto-repeated keydown while the key is already held.
      if (this.recording_key_down) {
        return;
      }
      document.getElementById("recording").innerHTML = "Listening..."
      this.recording_key_down = true;
      this.recorder.start();
      this.stimulus_start_time = Date.now();
    }

    delay = ms => new Promise(res => setTimeout(res, ms));

    async stopRecording(e) {
      if (!this.checkRecordingKey(e)) {
        return;
      }
      if (!this.recording_key_down) {
        return;
      }
      // Let the tail of the utterance finish before stopping the recorder.
      await this.delay(200);
      this.recorder.stop();
      this.recording_key_down = false;
      document.getElementById("recording").innerHTML = "Processing..."
      return new Promise((resolve) => {
        this.load_resolver = resolve;
      });
    }

    // Transcribe the recording and validate that it is a well-formed,
    // unambiguous request; on success store the result and end the trial,
    // otherwise prompt the participant to try again.
    async checkResponse(responseAudio, trial, display_element) {
      let transcription = await this.transcribeResponse(responseAudio, trial);
      document.getElementById("recording").innerHTML = ""
      if (!transcription) {
        document.getElementById("prompts").innerHTML = `Your request wasn't detected. Hold down the 'G' key and try again`;
        return;
      }
      const processedResponse = nlp(transcription)
      const requested = processedResponse.match('~give~', null, {fuzzy: 0.75})
      const negated = processedResponse.match('(~not~|~dont~|~keep~)', null, {fuzzy: 0.75})
      const objectMentioned = processedResponse.match('~object~', null, {fuzzy: 0.75})
      // Logical OR here — the original used bitwise `|` for the first term.
      if (negated.found || !requested.found || !objectMentioned.found) {
        document.getElementById("prompts").innerHTML = `Request not registered Be sure to say 'Give me the (red/blue) object' in full. Hold down the 'G' key and try again.`;
        document.getElementById("recording").innerHTML = "Your request: " + transcription;
        return;
      }
      const redMentioned = processedResponse.match('~red~', null, {fuzzy: 0.75})
      const blueMentioned = processedResponse.match('~blue~', null, {fuzzy: 0.75})
      if (redMentioned.found && blueMentioned.found) {
        document.getElementById("prompts").innerHTML = `You mentioned both colours of objects. Please ask for only one. Hold down the 'G' key and try again`;
        document.getElementById("recording").innerHTML = "Your request: " + transcription;
        return;
      }
      if (!redMentioned.found && !blueMentioned.found) {
        document.getElementById("prompts").innerHTML = `You mentioned neither colour of object. Hold down the 'G' key and try again`;
        document.getElementById("recording").innerHTML = "Your request: " + transcription;
        return;
      }
      let requestedColour;
      if (redMentioned.found) {
        requestedColour = "red";
      }
      if (blueMentioned.found) {
        requestedColour = "blue";
      }
      document.getElementById("prompts").innerHTML = `You asked for the ` + requestedColour + ` object.`
      this.transcribedResponse = transcription;
      this.chosenObject = requestedColour;
      this.endTrial(display_element, trial);
    }

    // POST the base64 audio to the transcription server; resolves with the
    // transcript text, rejects with { status, statusText } on failure.
    transcribeResponse(responseAudio, trial) {
      return new Promise(function (resolve, reject) {
        const xhr = new XMLHttpRequest();
        const requestBody = "clip=" + encodeURIComponent(responseAudio) + "&locale=" + trial.locale;
        xhr.open("POST", trial.transcription_server_url);
        xhr.onload = function () {
          if (this.status >= 200 && this.status < 300) {
            resolve(xhr.response);
          } else {
            reject({
              status: this.status,
              statusText: xhr.statusText
            });
          }
        };
        xhr.onerror = function () {
          reject({
            status: this.status,
            statusText: xhr.statusText
          });
        };
        xhr.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
        xhr.send(requestBody);
      });
    }

    endTrial(display_element, trial) {
      this.recorder.removeEventListener("dataavailable", this.data_available_handler);
      this.recorder.removeEventListener("start", this.start_event_handler);
      this.recorder.removeEventListener("stop", this.stop_event_handler);
      // Remove the exact bound handlers that trial() registered.
      window.removeEventListener("keydown", this.bound_start_recording);
      window.removeEventListener("keyup", this.bound_stop_recording);
      document.getElementById("redpoints").innerHTML = "";
      document.getElementById("bluepoints").innerHTML = "";
      // Kill any remaining setTimeout handlers.
      this.jsPsych.pluginAPI.clearAllTimeouts();
      // Gather the data to store for the trial.
      let trial_data = {
        rt: this.rt,
        stimulus: trial.stimulus,
        response: this.response,
        estimated_stimulus_onset: Math.round(this.stimulus_start_time - this.recorder_start_time),
        transcribed_response: this.transcribedResponse,
        chosen_object: this.chosenObject,
      };
      this.jsPsych.finishTrial(trial_data);
    }
  }
  VerbalResponsePlugin.info = info;

  return VerbalResponsePlugin;
})(jsPsychModule);

476
scripts/record-call.js Normal file
View File

@@ -0,0 +1,476 @@
import { ParameterType } from 'jspsych';
// Plugin metadata for the record-call plugin: one optional HTML/text
// stimulus shown above the built-in recording instructions.
const info = {
  name: "record-call",
  parameters: {
    stimulus: {
      type: ParameterType.STRING,
      default: null,
    },
  },
};
/**
 * jsPsych plugin "record-call": records one audio "lifting call" with a
 * hold-to-record button, lets the participant review/re-record the take,
 * then collects follow-up questions (spelling, language, optional English
 * translation, meaning) and finishes the trial with the audio encoded as a
 * base64 string.
 *
 * NOTE(review): object URLs from URL.createObjectURL are never revoked, so
 * each re-record leaks a blob URL for the page lifetime — consider calling
 * URL.revokeObjectURL when a take is discarded.
 */
class jsPsychRecordCall {
// jsPsych injects the running instance; remaining fields are per-trial state.
constructor(jsPsych) {
this.jsPsych = jsPsych;
this.recorded_data_chunks = []; // MediaRecorder "dataavailable" chunks for the current take
this.recording = false; // true while the recorder is running
this.recorder = null; // MediaRecorder obtained from jsPsych's pluginAPI
this.current_recording = null; // Blob of the last completed take (null until accepted/stopped)
this.audio_duration = null; // duration (s) read from the playback element, if metadata loads
}
// Expose plugin metadata as the static `info` property jsPsych expects.
static {
this.info = info;
}
// Entry point called by jsPsych: wires up the microphone, renders the UI,
// and attaches button handlers. Shows a reload screen if no microphone.
trial(display_element, trial) {
try {
this.recorder = this.jsPsych.pluginAPI.getMicrophoneRecorder();
if (!this.recorder) {
throw new Error("Microphone not initialized");
}
this.setupRecordingEvents();
} catch (error) {
// No usable microphone: replace the trial UI with an error + reload button
// and bail out without finishing the trial.
console.error("Microphone setup failed:", error);
display_element.innerHTML = `
<div style="text-align: center; padding: 20px;">
<p style="color: red;">Microphone access is required for this experiment.</p>
<p>Please refresh the page and allow microphone access when prompted.</p>
<button onclick="location.reload()" style="
padding: 10px 20px;
font-size: 14px;
background-color: #007cba;
color: white;
border: none;
border-radius: 5px;
cursor: pointer;
">Refresh Page</button>
</div>
`;
return;
}
// Main trial UI: prompt, optional stimulus, hold-to-record button, hidden
// review controls, and a hidden question form revealed after acceptance.
display_element.innerHTML = `
<div style="text-align: center; padding: 20px;">
<div style="background-color: #f0f0f0; padding: 15px; margin-bottom: 20px; border-radius: 8px; text-align: left; max-width: 600px; margin-left: auto; margin-right: auto;">
<p style="margin: 0; font-weight: bold; font-size: 16px;">Imagine two people lifting something heavy together. In your culture, is there a typical phrase, word or utterance that is used in such a situation? Please record one such a phrase, word or utterance!</p>
</div>
${trial.stimulus ? `<p>${trial.stimulus}</p>` : ''}
<button id="record-button" style="
padding: 15px 30px;
font-size: 16px;
background-color: #007cba;
color: white;
border: none;
border-radius: 8px;
cursor: pointer;
margin: 10px;
">Hold to Record</button>
<div id="recording-status" style="margin: 10px; font-weight: bold;"></div>
<div id="recording-controls" style="display: none; margin: 20px;">
<audio id="playback-audio" controls style="display: block; margin: 10px auto;"></audio>
<button id="re-record-button" style="
padding: 10px 20px;
font-size: 14px;
background-color: #f44336;
color: white;
border: none;
border-radius: 5px;
cursor: pointer;
margin: 5px;
">Re-record</button>
<button id="accept-recording-button" style="
padding: 10px 20px;
font-size: 14px;
background-color: #4caf50;
color: white;
border: none;
border-radius: 5px;
cursor: pointer;
margin: 5px;
">Accept Recording</button>
</div>
<div id="questions-section" style="display: none; margin: 20px; text-align: left; max-width: 600px; margin-left: auto; margin-right: auto;">
<audio id="final-playback-audio" controls style="display: block; margin: 10px auto;"></audio>
<h3 style="text-align: center; margin-bottom: 20px;">Please answer the following questions:</h3>
<div style="margin-bottom: 15px;">
<label for="spelling-input" style="display: block; margin-bottom: 5px; font-weight: bold;">How would you spell what you have just recorded?</label>
<input type="text" id="spelling-input" style="width: 100%; padding: 8px; border: 1px solid #ddd; border-radius: 4px;" required>
</div>
<div style="margin-bottom: 15px;">
<label for="language-select" style="display: block; margin-bottom: 5px; font-weight: bold;">What language is it in?</label>
<select id="language-select" style="width: 100%; padding: 8px; border: 1px solid #ddd; border-radius: 4px;" required>
<option value="">Select a language...</option>
<option value="English">English</option>
<option value="Spanish">Spanish</option>
<option value="French">French</option>
<option value="German">German</option>
<option value="Italian">Italian</option>
<option value="Portuguese">Portuguese</option>
<option value="Russian">Russian</option>
<option value="Chinese">Chinese</option>
<option value="Japanese">Japanese</option>
<option value="Korean">Korean</option>
<option value="Arabic">Arabic</option>
<option value="Hindi">Hindi</option>
<option value="Other">Other</option>
</select>
</div>
<div id="other-language-section" style="margin-bottom: 15px; display: none;">
<label for="other-language-input" style="display: block; margin-bottom: 5px; font-weight: bold;">Please specify the language:</label>
<input type="text" id="other-language-input" style="width: 100%; padding: 8px; border: 1px solid #ddd; border-radius: 4px;">
</div>
<div id="translation-section" style="margin-bottom: 15px; display: none;">
<label for="translation-input" style="display: block; margin-bottom: 5px; font-weight: bold;">How would you translate it to English?</label>
<input type="text" id="translation-input" style="width: 100%; padding: 8px; border: 1px solid #ddd; border-radius: 4px;">
</div>
<div style="margin-bottom: 15px;">
<label for="meaning-input" style="display: block; margin-bottom: 5px; font-weight: bold;">Does it have a meaning? If so, write it here in English:</label>
<input type="text" id="meaning-input" style="width: 100%; padding: 8px; border: 1px solid #ddd; border-radius: 4px;" placeholder="Write the meaning or 'No meaning' if it doesn't have one">
</div>
<button id="submit-answers-button" style="
padding: 12px 24px;
font-size: 16px;
background-color: #007cba;
color: white;
border: none;
border-radius: 5px;
cursor: pointer;
display: block;
margin: 20px auto;
opacity: 0.5;
" disabled>Submit Answers</button>
</div>
</div>
`;
this.setupButtonEvents(display_element, trial);
}
// Attach MediaRecorder event handlers. Handler references are stored on
// `this` so endTrial*/cleanup can remove exactly the same functions.
setupRecordingEvents() {
// Buffer each non-empty chunk of recorded audio.
this.data_available_handler = (e) => {
if (e.data.size > 0) {
this.recorded_data_chunks.push(e.data);
}
};
// On stop: assemble the Blob, point the playback element at it, and try
// to read an exact duration before revealing the review controls.
this.stop_event_handler = () => {
// NOTE(review): the Blob is labeled "audio/ogg" regardless of what the
// MediaRecorder actually produced (often webm in Chromium) — confirm the
// recorder's mimeType matches or playback/transcription may rely on sniffing.
const data = new Blob(this.recorded_data_chunks, {type: "audio/ogg"});
this.current_recording = data;
const audioUrl = URL.createObjectURL(data);
const audioElement = document.getElementById("playback-audio");
audioElement.src = audioUrl;
// Force playback to load metadata and get exact duration
document.getElementById("recording-status").textContent = "Processing recording...";
document.getElementById("record-button").style.display = "none";
// Workaround for browsers that report no/Infinite duration for
// MediaRecorder blobs: listen on several media events, fall back to a
// 2s timeout, and as a last resort try muted playback to force loading.
const loadAudioMetadata = () => {
let metadataLoaded = false; // guard so showControls runs exactly once
const showControls = () => {
if (metadataLoaded) return;
metadataLoaded = true;
// Clean up
audioElement.muted = false;
audioElement.pause();
audioElement.currentTime = 0;
// Show controls
document.getElementById("recording-controls").style.display = "block";
document.getElementById("record-button").style.display = "none";
document.getElementById("recording-status").textContent = "Recording complete. Listen and choose:";
console.log('Final audio duration:', this.audio_duration);
};
// Multiple event handlers to catch metadata loading
const onMetadataEvent = () => {
if (audioElement.duration && isFinite(audioElement.duration) && audioElement.duration > 0) {
this.audio_duration = audioElement.duration;
console.log('Audio duration loaded via metadata:', this.audio_duration);
showControls();
}
};
const onCanPlayEvent = () => {
if (audioElement.duration && isFinite(audioElement.duration) && audioElement.duration > 0) {
this.audio_duration = audioElement.duration;
console.log('Audio duration loaded via canplay:', this.audio_duration);
showControls();
}
};
audioElement.addEventListener('loadedmetadata', onMetadataEvent);
audioElement.addEventListener('loadeddata', onMetadataEvent);
audioElement.addEventListener('canplay', onCanPlayEvent);
// Mute and try to load
audioElement.muted = true;
audioElement.currentTime = 0;
audioElement.load();
// Fallback: if metadata doesn't load within 2 seconds, continue anyway
setTimeout(() => {
if (!metadataLoaded) {
console.warn('Metadata loading timeout, continuing without duration');
showControls();
}
}, 2000);
// Try playing after a short delay if metadata isn't loaded
setTimeout(() => {
if (!metadataLoaded && !this.audio_duration) {
console.log('Attempting to play audio to force metadata loading');
audioElement.play().catch(e => {
console.log('Auto-play prevented:', e);
// If play fails, just continue
if (!metadataLoaded) {
showControls();
}
});
}
}, 500);
};
loadAudioMetadata();
};
// On start: clear any previous take's chunks and stamp the start time.
this.start_event_handler = (e) => {
this.recorded_data_chunks.length = 0;
this.recorder_start_time = e.timeStamp;
};
this.recorder.addEventListener("dataavailable", this.data_available_handler);
this.recorder.addEventListener("stop", this.stop_event_handler);
this.recorder.addEventListener("start", this.start_event_handler);
}
// Wire the hold-to-record button (mousedown starts, mouseup/mouseleave
// stops) and the review controls (re-record / accept).
// NOTE(review): only mouse events are handled — touch devices cannot
// press-and-hold this button; consider pointer events if mobile matters.
setupButtonEvents(display_element, trial) {
const recordButton = document.getElementById("record-button");
const reRecordButton = document.getElementById("re-record-button");
const acceptButton = document.getElementById("accept-recording-button");
const statusDiv = document.getElementById("recording-status");
recordButton.addEventListener("mousedown", () => {
if (!this.recording) {
this.startRecording();
recordButton.textContent = "Recording... (Release to stop)";
recordButton.style.backgroundColor = "#f44336";
statusDiv.textContent = "Recording in progress...";
}
});
recordButton.addEventListener("mouseup", () => {
if (this.recording) {
this.stopRecording();
recordButton.textContent = "Hold to Record";
recordButton.style.backgroundColor = "#007cba";
statusDiv.textContent = "Processing recording...";
}
});
// Dragging the cursor off the button also ends the take, so a recording
// can never be left running unattended.
recordButton.addEventListener("mouseleave", () => {
if (this.recording) {
this.stopRecording();
recordButton.textContent = "Hold to Record";
recordButton.style.backgroundColor = "#007cba";
statusDiv.textContent = "Processing recording...";
}
});
// Discard the current take and return to the record button.
reRecordButton.addEventListener("click", () => {
document.getElementById("recording-controls").style.display = "none";
document.getElementById("record-button").style.display = "inline-block";
statusDiv.textContent = "";
this.current_recording = null;
});
// Keep the take: reveal the question form with a second playback element
// sharing the same blob URL.
acceptButton.addEventListener("click", () => {
document.getElementById("recording-controls").style.display = "none";
document.getElementById("questions-section").style.display = "block";
const finalAudio = document.getElementById("final-playback-audio");
const originalAudio = document.getElementById("playback-audio");
finalAudio.src = originalAudio.src;
document.getElementById("recording-status").textContent = "Please answer all questions to continue:";
this.setupQuestionEvents(trial);
});
}
// Wire the follow-up question form: show/hide conditional fields, keep the
// submit button disabled until all required answers are present, and finish
// the trial on submit.
setupQuestionEvents(trial) {
const spellingInput = document.getElementById("spelling-input");
const languageSelect = document.getElementById("language-select");
const otherLanguageSection = document.getElementById("other-language-section");
const otherLanguageInput = document.getElementById("other-language-input");
const translationSection = document.getElementById("translation-section");
const translationInput = document.getElementById("translation-input");
const meaningInput = document.getElementById("meaning-input");
const submitButton = document.getElementById("submit-answers-button");
// Required: spelling, language, meaning; plus the free-text language when
// "Other" is selected, and a translation for any non-English language.
const validateForm = () => {
const spelling = spellingInput.value.trim();
const language = languageSelect.value;
const otherLanguage = otherLanguageInput.value.trim();
const meaning = meaningInput.value.trim();
const needsOtherLanguage = language === "Other";
const needsTranslation = language && language !== "English";
const translation = translationInput.value.trim();
const isValid = spelling && language && meaning &&
(!needsOtherLanguage || otherLanguage) &&
(!needsTranslation || translation);
submitButton.disabled = !isValid;
submitButton.style.opacity = isValid ? "1" : "0.5";
submitButton.style.cursor = isValid ? "pointer" : "not-allowed";
};
// Toggle conditional sections and clear their values when hidden, so stale
// text never reaches the trial data.
languageSelect.addEventListener("change", () => {
const selectedLanguage = languageSelect.value;
const isEnglish = selectedLanguage === "English";
const isOther = selectedLanguage === "Other";
translationSection.style.display = isEnglish ? "none" : "block";
otherLanguageSection.style.display = isOther ? "block" : "none";
if (isEnglish) {
translationInput.value = "";
}
if (!isOther) {
otherLanguageInput.value = "";
}
validateForm();
});
[spellingInput, languageSelect, otherLanguageInput, translationInput, meaningInput].forEach(element => {
element.addEventListener("input", validateForm);
element.addEventListener("change", validateForm);
});
submitButton.addEventListener("click", () => {
if (!submitButton.disabled) {
// Resolve "Other" to the free-text language the participant typed.
const finalLanguage = languageSelect.value === "Other" ?
otherLanguageInput.value.trim() :
languageSelect.value;
this.endTrialWithAnswers(trial, {
spelling: spellingInput.value.trim(),
language: finalLanguage,
translation: translationInput.value.trim() || null,
meaning: meaningInput.value.trim()
});
}
});
}
// Start the MediaRecorder and stamp the wall-clock start time used for rt.
startRecording() {
try {
this.recording = true;
this.recorder.start();
this.stimulus_start_time = Date.now();
} catch (error) {
console.error("Failed to start recording:", error);
document.getElementById("recording-status").textContent = "Recording failed. Please refresh and try again.";
this.recording = false;
}
}
// Stop the recorder slightly after release so the tail of the utterance
// isn't clipped; `recording` flips immediately to debounce re-entry.
stopRecording() {
if (this.recording) {
this.recording = false;
// Add a small buffer to prevent clipping
setTimeout(() => {
this.recorder.stop();
}, 200); // 200ms buffer
}
}
// Finish the trial with the accepted recording plus the question answers.
// Audio is read as a data URL and the base64 payload (after the comma) is
// stored as `response`; if no recording exists, finishes with response null.
endTrialWithAnswers(trial, answers) {
this.recorder.removeEventListener("dataavailable", this.data_available_handler);
this.recorder.removeEventListener("start", this.start_event_handler);
this.recorder.removeEventListener("stop", this.stop_event_handler);
this.jsPsych.pluginAPI.clearAllTimeouts();
const reader = new FileReader();
reader.addEventListener("load", () => {
// Strip the "data:<mime>;base64," prefix; keep only the base64 payload.
const response = reader.result.split(",")[1];
let trial_data = {
rt: this.stimulus_start_time ? Date.now() - this.stimulus_start_time : null,
stimulus: trial.stimulus,
response: response,
estimated_stimulus_onset: this.recorder_start_time ? Math.round(this.stimulus_start_time - this.recorder_start_time) : null,
spelling: answers.spelling,
language: answers.language,
translation: answers.translation,
meaning: answers.meaning,
audio_duration: this.audio_duration
};
this.jsPsych.finishTrial(trial_data);
});
if (this.current_recording) {
reader.readAsDataURL(this.current_recording);
} else {
this.jsPsych.finishTrial({
rt: null,
stimulus: trial.stimulus,
response: null,
estimated_stimulus_onset: null,
spelling: answers.spelling,
language: answers.language,
translation: answers.translation,
meaning: answers.meaning,
audio_duration: this.audio_duration
});
}
}
// NOTE(review): near-duplicate of endTrialWithAnswers without the answer
// fields; nothing in this class calls it — it appears to be a legacy path.
// Confirm no external caller exists before removing.
endTrial(display_element, trial) {
this.recorder.removeEventListener("dataavailable", this.data_available_handler);
this.recorder.removeEventListener("start", this.start_event_handler);
this.recorder.removeEventListener("stop", this.stop_event_handler);
this.jsPsych.pluginAPI.clearAllTimeouts();
const reader = new FileReader();
reader.addEventListener("load", () => {
const response = reader.result.split(",")[1];
let trial_data = {
rt: this.stimulus_start_time ? Date.now() - this.stimulus_start_time : null,
stimulus: trial.stimulus,
response: response,
estimated_stimulus_onset: this.recorder_start_time ? Math.round(this.stimulus_start_time - this.recorder_start_time) : null,
};
this.jsPsych.finishTrial(trial_data);
});
if (this.current_recording) {
reader.readAsDataURL(this.current_recording);
} else {
this.jsPsych.finishTrial({
rt: null,
stimulus: trial.stimulus,
response: null,
estimated_stimulus_onset: null,
});
}
}
}
export default jsPsychRecordCall;

62
scripts/text-stimuli.js Normal file
View File

@@ -0,0 +1,62 @@
import html from '../utils/html.js';
/**
 * Static HTML stimuli used by the experiment timeline.
 *
 * Each entry is an HTML string built with the project's `html` tagged
 * template. `complete` interpolates the Prolific completion URL from the
 * Vite build environment (VITE_COMPLETE_URL).
 *
 * Fixes: the `complete` stimulus previously closed its <span> with a stray
 * </a>, producing invalid markup; the consent text read "through via
 * Prolific" (doubled preposition).
 */
export const textStimuli = {
// Completion screen shown at the end of the experiment.
complete: html`Experiment complete. Please paste the following link into your browser to confirm completion on Prolific:
<span class="text-blue-500">
${import.meta.env.VITE_COMPLETE_URL}
</span>
`,
// Combined study-information + GDPR consent page with an "I agree" checkbox.
info_consent: html`
<div class="mx-auto w-10/12 text-left">
<p> The aim of this project is to create an online collection of lifting calls (working title: Lifting Call Collection) from around the world that is accessible to the public and to other researchers.</p>
<p class = "mt-2">Lifting calls are phrases, words or sounds that people say or make when they want to lift a heavy object together with someone else. To build this online collection of lifting calls, we will ask you to record one or more lifting calls that you know. We will also ask you to provide us with a written version of the recorded calls and information about the region and language that the calls are used in.</p>
<p class = "mt-2"> We process this data in accordance with the Austrian Forschungsorganisationsgesetz FOG and Consent under Art 6 (1) (a) GDPR, public interest (e) and (f) legitimate interest. You have the right to withdraw your consent at any time. To do so and to request the deletion of your data, please contact CEUs data protection officer at privacy@ceu.edu. </p>
<p class = "mt-2"> More information about your rights can be found at the controllers website https://www.ceu.edu/privacy. If you have any questions regarding data protection, please contact CEU's data protection officer at privacy@ceu.edu.
In case you are accessing this online form via Prolific, Prolific acts as a data processor and has access to personal data. You can download Prolifics full privacy notices here: https://prolific.notion.site/Privacy-and-Legal-at-Prolific-395a0b3414cd4d84a2557566256e3d58
<p class = "mt-2"> By checking the “I agree” box, you agree to participate in this study. You also confirm you are 18 years or older. To agree: Check the “I agree” box below and then click next to participate in the study. If you do not wish to participate in this study, simply close out of this browser window.</p>
</div>
<div class="mx-auto my-6">
<input type="checkbox" id="info_consent_checkbox" />
<label for="info_consent_checkbox">I agree</label>
</div>
`,
// Two separate publication-consent statements (audio recording; transcript),
// each with its own checkbox so agreement can be recorded independently.
publication_consent: html`
<div class="max-w-3xl mx-auto space-y-6 text-left">
<p class="font-semibold">
Please specify which of the following statements you agree with by checking the corresponding “I agree” box:
</p>
<div class="space-y-4">
<div>
<p class="font-semibold">Statement 1:</p>
<p>
I agree that the audio recording of my lifting call will be made available to the public as part of the
Lifting Call Collection together with the geographic location and language that this call is associated with.
I understand that, although no personal data will be attached to the recording, my voice in the audio
recording remains identifiable.
</p>
</div>
<div class="mx-auto my-6">
<input type="checkbox" id="statement_1_checkbox" />
<label for="statement_1_checkbox">I agree</label>
</div>
<div>
<p class="font-semibold">Statement 2:</p>
<p>
I agree that a written transcript of my lifting call will be made available to the public as part of the
Lifting Call Collection together with the geographic location and language that this call is associated with.
</p>
</div>
<div class="mx-auto my-6">
<input type="checkbox" id="statement_2_checkbox" />
<label for="statement_2_checkbox">I agree</label>
</div>
</div>
</div>`,
// Inter-trial screen: R records another call, Q continues the experiment.
recording_saved: html`<p class="mx-10">Your recording has been saved. If you can think of another lifting call, press R to record another, or Q to continue.</p>`,
// Shown to discourage unintelligible or low-effort submissions.
data_quality_warning: html`<p class="mx-10">Please ensure that you only save intelligible lifting calls. Submissions with unintelligible audio and low-effort responses to questions will be rejected.</p>`,
};

View File

@@ -1,9 +0,0 @@
import html from '../utils/html.js';
export const textStimuli = {
complete: html`Experiment complete. Please paste the following link into your browser to confirm completion on Prolific:
<span class="text-blue-500">
${import.meta.env.VITE_COMPLETE_URL}
</a>
`,
};

View File

@@ -1,21 +1,24 @@
import { defineConfig } from 'vite';
import tailwindcss from '@tailwindcss/vite';
import { viteStaticCopy } from 'vite-plugin-static-copy';
import basicSsl from '@vitejs/plugin-basic-ssl';
export default defineConfig({
base: './', // This makes all assets use relative paths
plugins: [
basicSsl(),
tailwindcss(),
viteStaticCopy({
targets: [
{
src: 'images/*',
dest: 'images',
},
//{
// src: 'images/*',
// dest: 'images',
//},
],
}),
],
server: {
host: true,
watch: {
usePolling: true,
},