
Voice Capture with Long Free Text question

davebostockgmail · Topic Author · Elite Member
2 days 6 hours ago #270633 by davebostockgmail
Voice Capture with Long Free Text question was created by davebostockgmail
Please help us help you and fill where relevant:
Your LimeSurvey version: 6.14.1
Own server or LimeSurvey hosting: Self Hosted 
Survey theme/template: Custom
==================

As always, people want the strangest things, so with a little AI help I have created a script that you can add to a Long free text question to capture voice input.

Currently this works question by question: you need to edit the element ID, and the recognition language is hard-coded to English. If anyone wants to adapt it to use the survey language and pick up the question ID automatically, feel free (a rough sketch of one way to do that is included after the code below). I just wanted to share it in case someone else has demanding clients.

Put this in your question source
Code:
<!-- Button for starting the recording -->
<button id="startBtn" style="background-color: green; color: white;" type="button">Click here to use your microphone to answer</button>
<!-- Modal HTML -->
 
<div id="countdownModal" style="display:none; position:fixed; left:0; top:0; width:100vw; height:100vh; background:rgba(0,0,0,0.5); z-index:1050; align-items:center; justify-content:center;">
<div style="background:white; padding:30px; border-radius:10px; text-align:center; min-width: 240px;">
<h2>Get Ready!</h2>
 
<p>Starting in <span id="countdownValue">3</span>...</p>
</div>
</div>
<script>
$(document).ready(function() {
    const startButton = $('#startBtn');
    // Edit this ID to match your own question's answer field (format: answerSIDXGIDXQID)
    const answerTextarea = $('#answer947655X225X9377');
    const modal = $('#countdownModal');
    const countdownEl = $('#countdownValue');
 
    let startedOnce = false;
    let isRecording = false;
    let lastManualValue = "";   // Keeps manual edits and previous result
    let recognitionActiveSessionTranscript = ""; // Keeps what is added in the present session
 
    if ("webkitSpeechRecognition" in window) {
        const recognition = new webkitSpeechRecognition();
        recognition.continuous = true;
        recognition.interimResults = true;
        recognition.lang = "en-US"; // Recognition language is hard-coded to English; change as needed
 
        let interimTranscript = "";
        let pauseTimeout;
 
        // Helper: Capitalize after punctuation
        const capitalizeAfterPunctuation = (text) => {
            return text.replace(/(?:^|\. )(\w)/g, (match, p1) => match.replace(p1, p1.toUpperCase()));
        };
 
        // Handle user edits
        answerTextarea.on('input propertychange', function(){
            if (!isRecording) {
                lastManualValue = answerTextarea.val();
            }
        });
 
        // Start speech recognition and sync with edits
        const startRecognition = () => {
            // Always take the CURRENT textarea contents as base
            lastManualValue = answerTextarea.val();
            recognitionActiveSessionTranscript = "";
            recognition.start();
        };
 
        const stopRecognition = () => {
            recognition.stop();
        };
 
        recognition.onstart = () => {
            isRecording = true;
            startButton.text("Stop and edit my answer").css({ "background-color": "red", "color": "white" });
        };
 
        recognition.onend = () => {
            isRecording = false;
            // Update manual value in case user typed something after last pause
            lastManualValue = answerTextarea.val();
            startButton.text("Resume recording").css({ "background-color": "green", "color": "white" });
            clearTimeout(pauseTimeout);
        };
 
        recognition.onresult = (event) => {
            interimTranscript = "";
            clearTimeout(pauseTimeout);
            for (let i = event.resultIndex; i < event.results.length; i++) {
                const transcript = event.results[i][0].transcript;
                if (event.results[i].isFinal) {
                    recognitionActiveSessionTranscript += transcript;
                } else {
                    interimTranscript += transcript;
                }
            }
 
            // Compose up-to-date value: always prepend latest manual edits!
            let finalText = lastManualValue
                + (recognitionActiveSessionTranscript ? " " + capitalizeAfterPunctuation(recognitionActiveSessionTranscript.trim()) : "")
                + (interimTranscript ? " " + interimTranscript : "");
 
            answerTextarea.val(finalText.trim());
 
            // After a pause: finalize what's been heard
            pauseTimeout = setTimeout(() => {
                recognitionActiveSessionTranscript = recognitionActiveSessionTranscript.trim() + ". ";
                recognitionActiveSessionTranscript = capitalizeAfterPunctuation(recognitionActiveSessionTranscript);
                let finalVal = lastManualValue
                    + (recognitionActiveSessionTranscript ? " " + recognitionActiveSessionTranscript.trim() : "");
                answerTextarea.val(finalVal.trim());
            }, 1000);
        };
 
        // Modal logic
        function showCountdownModal(seconds, callback) {
            countdownEl.text(seconds);
            modal.css('display', 'flex');
            let left = seconds;
            const countdownInterval = setInterval(() => {
                left--;
                countdownEl.text(left);
                if (left <= 0) {
                    clearInterval(countdownInterval);
                    modal.hide();
                    callback();
                }
            }, 1000);
        }
 
        // Button click logic
        startButton.on('click', function() {
            if (isRecording) {
                stopRecognition();
            } else if (!startedOnce) {
                startedOnce = true;
                showCountdownModal(3, function() {
                    startRecognition();
                });
            } else {
                startRecognition();
            }
        });
    } else {
        answerTextarea.val("Web Speech API not supported in this browser.");
    }
});
</script>
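If you want to adapt this as mentioned above, here is a rough, untested sketch of one way to avoid hard-coding the element ID and the language. It assumes your theme expands the {SID}/{GID}/{QID} ExpressionScript placeholders inside the question source and sets the page's <html lang="..."> attribute to the survey language; please verify both on your own install.
Code:
<script>
$(document).ready(function() {
    // Assumption: {SID}, {GID} and {QID} are expanded by ExpressionScript when the question
    // is rendered, so this selector resolves to the current question's answer field.
    const answerTextarea = $('#answer{SID}X{GID}X{QID}');
    // Alternative if the placeholders are not expanded in your setup:
    // const answerTextarea = $('#question{QID} textarea');

    if ("webkitSpeechRecognition" in window) {
        const recognition = new webkitSpeechRecognition();
        recognition.continuous = true;
        recognition.interimResults = true;
        // Assumption: LimeSurvey sets the page language to the survey language;
        // the Web Speech API accepts it as a BCP-47 tag (falls back to en-US).
        recognition.lang = document.documentElement.lang || "en-US";

        // ...the rest of the recording/modal logic stays the same as in the script above...
    }
});
</script>
The Web Speech API accepts plain BCP-47 tags, so a short code like "en" or "de" taken from the page attribute should work, but you may get better results by mapping it to a regional variant such as "en-US".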
The following user(s) said Thank You: holch
