work-calls-corpus/scripts/plugin-verbal-response.js

let jsPsychVerbalResponse = (function (jspsych) {
  'use strict';
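
  // jsPsych plugin "verbal-response" (jsPsych 7 plugin class style). It relies
  // on the jsPsych pluginAPI microphone recorder and on the compromise NLP
  // library being available globally as `nlp`.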
  const info = {
    name: "verbal-response",
    parameters: {
      // URL of the speech-to-text endpoint the recorded clip is POSTed to.
      transcription_server_url: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      // keyCode of the push-to-talk key (71 = 'G', matching the on-screen prompts).
      recording_key: {
        type: jspsych.ParameterType.INT,
        default: 71,
      },
      // Prompt text shown in the #prompts element.
      stimulus: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      start_image: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      // HTML scaffold for the trial display; must contain the #points-container,
      // #prompts, and #recording elements used below.
      video_html: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      red_points_html: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      blue_points_html: {
        type: jspsych.ParameterType.STRING,
        default: null,
      },
      // BCP 47 locale tag forwarded to the transcription server.
      locale: {
        type: jspsych.ParameterType.STRING,
        default: "en-US",
      },
    },
  };
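
  // The plugin records audio while the participant holds down the recording
  // key, sends the clip to the transcription server, and ends the trial once a
  // well-formed request for a single coloured object ("Give me the red/blue
  // object") is recognised.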
  class VerbalResponsePlugin {
    constructor(jsPsych) {
      this.jsPsych = jsPsych;
      this.rt = null;
      this.recorded_data_chunks = [];
      this.recording_key_down = false;
      // Bind the key handlers so `this` refers to the plugin instance when they
      // run as window event listeners, and so endTrial can remove the same
      // function references again.
      this.startRecording = this.startRecording.bind(this);
      this.stopRecording = this.stopRecording.bind(this);
    }
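    // Renders the trial display and wires up push-to-talk recording. The
    // microphone recorder must already have been initialized (e.g. with the
    // initialize-microphone plugin) before this trial runs.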
    trial(display_element, trial) {
      display_element.innerHTML = trial.video_html;
      const pointsContainer = document.getElementById("points-container");
      pointsContainer.innerHTML = pointsContainer.innerHTML + trial.red_points_html + trial.blue_points_html;
      document.getElementById("prompts").innerHTML = trial.stimulus;
      // Use the configured push-to-talk key rather than a hard-coded keyCode.
      this.recording_key = trial.recording_key;
      this.recorder = this.jsPsych.pluginAPI.getMicrophoneRecorder();
      this.setupRecordingEvents(display_element, trial);
      window.addEventListener("keydown", this.startRecording, false);
      window.addEventListener("keyup", this.stopRecording, false);
    }
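    // Attaches MediaRecorder event handlers: audio chunks are collected while
    // recording, and on "stop" the clip is base64-encoded and passed on for
    // transcription and checking.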
    setupRecordingEvents(display_element, trial) {
      this.data_available_handler = (e) => {
        if (e.data.size > 0) {
          this.recorded_data_chunks.push(e.data);
        }
      };
      this.stop_event_handler = () => {
        const data = new Blob(this.recorded_data_chunks, {type: "audio/ogg"});
        const reader = new FileReader();
        reader.addEventListener("load", () => {
          this.response = reader.result.split(",")[1];
          this.load_resolver();
          this.checkResponse(this.response, trial, display_element);
        });
        reader.readAsDataURL(data);
      };
      this.start_event_handler = (e) => {
        this.recorded_data_chunks.length = 0;
        this.recorder_start_time = e.timeStamp;
      };
      this.recorder.addEventListener("dataavailable", this.data_available_handler);
      this.recorder.addEventListener("stop", this.stop_event_handler);
      this.recorder.addEventListener("start", this.start_event_handler);
    }
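    // Push-to-talk key handling: recording starts on keydown of the recording
    // key and stops on keyup.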
    checkRecordingKey(e) {
      return e.keyCode === this.recording_key;
    }
    startRecording(e) {
      if (!this.checkRecordingKey(e)) {
        return;
      }
      if (this.recording_key_down) {
        return;
      }
      document.getElementById("recording").innerHTML = "Listening...";
      this.recording_key_down = true;
      this.recorder.start();
      // performance.now() is on the same timeline as the recorder start event's
      // timeStamp, so the two can be subtracted in endTrial.
      this.stimulus_start_time = performance.now();
    }
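    // Promise-based pause; recording keeps running briefly after keyup so the
    // end of the utterance is not clipped.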
    delay = ms => new Promise(res => setTimeout(res, ms));
    async stopRecording(e) {
      if (!this.checkRecordingKey(e)) {
        return;
      }
      if (!this.recording_key_down) {
        return;
      }
      await this.delay(200);
      this.recorder.stop();
      this.recording_key_down = false;
      document.getElementById("recording").innerHTML = "Processing...";
      return new Promise((resolve) => {
        this.load_resolver = resolve;
      });
    }
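    // Validates the transcript: the request must contain a "give" verb and an
    // object word, must not be negated, and must mention exactly one colour;
    // otherwise the participant is prompted to try again.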
    async checkResponse(responseAudio, trial, display_element) {
      let transcription = await this.transcribeResponse(responseAudio, trial);
      document.getElementById("recording").innerHTML = "";
      //console.log(transcription);
      if (!transcription) {
        document.getElementById("prompts").innerHTML = `Your request wasn't detected. Hold down the 'G' key and try again.`;
        return;
      }
      const processedResponse = nlp(transcription);
      const requested = processedResponse.match('~give~', null, {fuzzy: 0.75});
      const negated = processedResponse.match('(~not~|~dont~|~keep~)', null, {fuzzy: 0.75});
      const objectMentioned = processedResponse.match('~object~', null, {fuzzy: 0.75});
      if (negated.found || !requested.found || !objectMentioned.found) {
        document.getElementById("prompts").innerHTML = `Request not registered. Be sure to say 'Give me the (red/blue) object' in full. Hold down the 'G' key and try again.`;
        document.getElementById("recording").innerHTML = "Your request: " + transcription;
        return;
      }
      const redMentioned = processedResponse.match('~red~', null, {fuzzy: 0.75});
      const blueMentioned = processedResponse.match('~blue~', null, {fuzzy: 0.75});
      if (redMentioned.found && blueMentioned.found) {
        document.getElementById("prompts").innerHTML = `You mentioned both colours of objects. Please ask for only one. Hold down the 'G' key and try again.`;
        document.getElementById("recording").innerHTML = "Your request: " + transcription;
        return;
      }
      if (!redMentioned.found && !blueMentioned.found) {
        document.getElementById("prompts").innerHTML = `You mentioned neither colour of object. Hold down the 'G' key and try again.`;
        document.getElementById("recording").innerHTML = "Your request: " + transcription;
        return;
      }
      let requestedColour;
      if (redMentioned.found) {
        requestedColour = "red";
      }
      if (blueMentioned.found) {
        requestedColour = "blue";
      }
      document.getElementById("prompts").innerHTML = `You asked for the ${requestedColour} object.`;
      this.transcribedResponse = transcription;
      this.chosenObject = requestedColour;
      //await this.delay(100);
      this.endTrial(display_element, trial);
    }
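    // POSTs the base64-encoded clip and locale to the transcription server as
    // a form-encoded request and resolves with the raw response text.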
    transcribeResponse(responseAudio, trial) {
      return new Promise(function (resolve, reject) {
        const xhr = new XMLHttpRequest();
        const requestBody = "clip=" + encodeURIComponent(responseAudio) + "&locale=" + trial.locale;
        xhr.open("POST", trial.transcription_server_url);
        xhr.onload = function () {
          if (this.status >= 200 && this.status < 300) {
            resolve(xhr.response);
          } else {
            reject({
              status: this.status,
              statusText: xhr.statusText
            });
          }
        };
        xhr.onerror = function () {
          reject({
            status: this.status,
            statusText: xhr.statusText
          });
        };
        xhr.setRequestHeader("Content-type", "application/x-www-form-urlencoded");
        xhr.send(requestBody);
      });
    }
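    // Removes listeners, clears the display, and hands the collected data back
    // to jsPsych.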
    endTrial(display_element, trial) {
      this.recorder.removeEventListener("dataavailable", this.data_available_handler);
      this.recorder.removeEventListener("start", this.start_event_handler);
      this.recorder.removeEventListener("stop", this.stop_event_handler);
      window.removeEventListener("keydown", this.startRecording);
      window.removeEventListener("keyup", this.stopRecording);
      document.getElementById("redpoints").innerHTML = "";
      document.getElementById("bluepoints").innerHTML = "";
      // kill any remaining setTimeout handlers
      this.jsPsych.pluginAPI.clearAllTimeouts();
      // gather the data to store for the trial
      let trial_data = {
        rt: this.rt, // not measured by this plugin; stays null
        stimulus: trial.stimulus,
        response: this.response,
        estimated_stimulus_onset: Math.round(this.stimulus_start_time - this.recorder_start_time),
        transcribed_response: this.transcribedResponse,
        chosen_object: this.chosenObject,
      };
      this.jsPsych.finishTrial(trial_data);
    }
  }
  VerbalResponsePlugin.info = info;

  return VerbalResponsePlugin;
})(jsPsychModule);
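
// A minimal usage sketch (not part of the original file), assuming jsPsych 7.x
// with the initialize-microphone plugin loaded; the server URL and HTML below
// are placeholders providing the elements this plugin expects.
//
// const jsPsych = initJsPsych();
// jsPsych.run([
//   { type: jsPsychInitializeMicrophone },
//   {
//     type: jsPsychVerbalResponse,
//     transcription_server_url: "https://example.org/transcribe", // placeholder
//     stimulus: "Ask for one of the objects.",
//     video_html: '<div id="points-container"></div><div id="prompts"></div><div id="recording"></div>',
//     red_points_html: '<span id="redpoints"></span>',
//     blue_points_html: '<span id="bluepoints"></span>',
//     locale: "en-US",
//   },
// ]);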