// Chirovox "random recording" widget:
//  - picks a random, publicly accessible recording via the ajax API,
//  - renders its metadata, spectrogram and (zc or wav) player widgets,
//  - fetchAndPlayAudio() streams the WAV, rewrites its header sample rate
//    to play it time-expanded (slowed) through a native <audio> element.

// Get the PHP value from the data attribute in the HTML element
// const phpDataElement = document.getElementById('php-data');
// const id_randtext = phpDataElement.getAttribute('data-id');
// Split the string into an array using "_" as the delimiter
// const values = id_randtext.split("_");
// const id = values[0];
// const randtext = values[1];

let url_png;                    // spectrogram PNG URL of the current record (read by the click handler)
let url_wavplay;                // streaming URL of the current WAV (read by fetchAndPlayAudio)
let whole_filename;             // not in the database???? (currently unused)
let currentAudioElement = null; // <audio> element of the playback in progress, if any
let taxonName;                  // NOTE(review): these three are shadowed by locals inside
let randtext;                   // fetchRandomUID and never assigned at module level —
let id;                         // kept for backward compatibility, confirm before removing.

// Safety cap so getValidUID() cannot recurse forever when the random draws
// keep hitting restricted / Zero-crossing records.
const MAX_UID_TRIES = 25;

/**
 * Fetch the UID list, then draw random UIDs until one resolves to a record
 * that is neither access-restricted nor Zero-crossing, and render it.
 */
function fetchRandomUID() {
    // Fetch the list of UIDs
    fetch(`ajax?m=chirovox&getlist=UID`)
        .then(response => response.json())
        .then(dataUID => {
            let tries = 0;

            // Fetch one random record; retry (recursively) while it is unusable.
            function getValidUID() {
                if (++tries > MAX_UID_TRIES) {
                    throw new Error(`No accessible recording found after ${MAX_UID_TRIES} tries`);
                }
                const randomItem = dataUID[Math.floor(Math.random() * dataUID.length)];
                const UID = randomItem.name;

                // Fetch the record details by UID
                return fetch(`ajax?m=chirovox&getrecordbyUID&value=${UID}`)
                    .then(response => response.json())
                    .then(data => {
                        const record = data[0];
                        const recordingType = record.recording_type;
                        const access = record.access;
                        // Intentionally shadow the module-level bindings.
                        const taxonName = record.taxonname;
                        const randtext = record.randtext;
                        const recorded_by = record.recorded_by;
                        const country = record.country;

                        // If the record is restricted or Zero-crossing, draw again.
                        if (access === "restricted" || recordingType === "Zero-crossing") {
                            return getValidUID();
                        }

                        // Link target for the record sheet (used by the rendered markup).
                        const sheetURL = "recordsheet.php?id=" + UID + "_" + randtext;

                        // Process the valid record
                        document.getElementById('recordingtype').innerHTML = "Recording type: " + recordingType;
                        document.getElementById('randomrecordID').innerHTML = `

${UID}: ${taxonName}
recorded by ${recorded_by} in ${country}

`;
                        // NOTE(review): Zero-crossing records were filtered out above,
                        // so this branch is currently unreachable; kept as in the original.
                        if (recordingType === "Zero-crossing") {
                            handleZeroCrossing(access, UID, randtext);
                        } else {
                            handleWav(access, UID, randtext);
                        }
                        return record;
                    });
            }

            // Start fetching until a valid UID is found
            return getValidUID();
        })
        .catch(error => {
            // FIX: the original chain had no rejection handler at all.
            console.error('Error fetching random recording:', error);
        });
}

/**
 * Render the Zero-crossing (.zc) view: clear the wav area and either show a
 * "restricted" notice or wire up the download link and the ZCJS plot.
 */
function handleZeroCrossing(access, id, randtext) {
    const wavDiv = document.querySelector('.wav');
    wavDiv.innerHTML = `

`;
    if (access === "restricted") {
        const zcdownloadDiv = document.querySelector('.div-zc-download');
        zcdownloadDiv.innerHTML = `

Access to the zc file is restricted.

`;
    } else {
        const url_zc = `../chirovox/getphoto?ref=${id}_${randtext}.zc`;
        document.getElementById('zc-download').href = url_zc;
        const p = new ZCJS("div-zc-plot");
        p.x_range = "auto";
        // ZCJS needs the absolute URL here — confirm whether the relative
        // getphoto path above could be reused instead.
        const url_zc_show = `https://obm.ecolres.hu/projects/chirovox/getphoto?ref=${id}_${randtext}.zc`;
        p.setURL(url_zc_show);
    }
}

/**
 * Render the WAV view: show the pre-rendered spectrogram PNG and either a
 * "restricted" notice or remember the streaming URL for fetchAndPlayAudio().
 */
function handleWav(access, id, randtext) {
    // FIX: in the original, this commented-out assignment left an orphan
    // template-literal backtick that broke the syntax of the function.
    // const zcDiv = document.querySelector('.zc');
    // zcDiv.innerHTML = `...`;
    url_png = `../chirovox/local/spectrograms/${id}_${randtext}.WAV.png`;
    document.getElementById('spectrogram-png').src = url_png;
    if (access === "restricted") {
        const recordSoundDiv = document.querySelector('.div-playback');
        recordSoundDiv.innerHTML = `

Access to the sound is restricted.

`;
    } else {
        // url_wav = `../chirovox/getphoto?ref=${id}_${randtext}.WAV`;
        // document.getElementById('spectrogram-wav').href = url_wav;
        url_wavplay = `https://obm.ecolres.hu/projects/chirovox/getphoto?playaudio=${id}_${randtext}.WAV`;
    }
}

// Trigger the initial load.
fetchRandomUID();

// Delegated click handling: open the spectrogram popup, close it on overlay click.
document.addEventListener('click', function (event) {
    if (event.target && event.target.id === 'spectrogram-png') {
        event.preventDefault();
        openPopup(url_png);
    }
    if (event.target && event.target.id === 'spectrogram-popup') {
        event.preventDefault();
        closePopup();
    }
});

/** Show the image popup with the given image URL. */
function openPopup(imageUrl) {
    const popup = document.getElementById('imagePopup');
    const popupImage = popup.querySelector('img');
    popupImage.src = imageUrl;       // Set the large image URL
    popup.style.display = 'flex';    // Show the popup
}

/** Hide the image popup. */
function closePopup() {
    const popup = document.getElementById('imagePopup');
    popup.style.display = 'none';    // Hide the popup
}

/**
 * Stream the current WAV (url_wavplay), rewrite its header so the sample
 * rate is divided by the selected slow-down factor, and play the result as
 * a time-expanded recording in a fresh <audio> element.
 *
 * Assumes a canonical 44-byte RIFF/WAVE header (fmt at 12, data at 36) —
 * files with extra chunks (LIST, fact, …) would be misread; TODO confirm
 * the server always emits canonical headers.
 */
async function fetchAndPlayAudio() {
    const audioUrl = url_wavplay;
    const statusElement = document.getElementById('status');
    const slowRateSelect = document.getElementById('slowRateSelect');
    const slowRate = parseInt(slowRateSelect.value, 10);
    const progressContainer = document.getElementById('progressContainer');
    const progressBar = document.getElementById('progressBar');
    const audioContainer = document.querySelector('#content-box-playback');
    const sampleRatesElement = document.getElementById('sampleRates');

    statusElement.textContent = 'Fetching and processing the audio file...';
    progressContainer.style.display = 'block';
    progressBar.style.width = '0%';

    try {
        // Stop and remove the previous audio element if it exists.
        if (currentAudioElement) {
            currentAudioElement.pause();
            // FIX: release the previous blob URL — the original leaked one
            // object URL per replay.
            URL.revokeObjectURL(currentAudioElement.src);
            currentAudioElement.remove();
            currentAudioElement = null;
        }

        // Fetch the WAV file with progress tracking.
        const response = await fetch(audioUrl);
        if (!response.ok) {
            throw new Error(`Failed to fetch audio file: ${response.statusText}`);
        }

        const reader = response.body.getReader();
        const total = parseInt(response.headers.get('Content-Length'), 10);
        let received = 0;
        const chunks = [];
        while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            chunks.push(value);
            received += value.length;
            // FIX: guard against a missing Content-Length header (total = NaN).
            if (Number.isFinite(total) && total > 0) {
                progressBar.style.width = `${(received / total) * 100}%`;
            }
        }

        // Combine chunks into a single contiguous buffer.
        const arrayBuffer = new Uint8Array(received);
        let position = 0;
        for (const chunk of chunks) {
            arrayBuffer.set(chunk, position);
            position += chunk.length;
        }

        // ---- WAV header fields (little-endian, canonical layout) ----
        const dataView = new DataView(arrayBuffer.buffer);
        const fileSize = dataView.getUint32(4, true) + 8;   // RIFF chunk size + 8-byte header
        const fileSize_ = fileSize / 1024 / 1024;           // MB, for display
        const bitsPerSample = dataView.getUint16(34, true); // fmt: bits per sample
        const byteRate_ = dataView.getUint32(28, true);     // fmt: average bytes/second
        const dataSize = dataView.getUint32(40, true);      // data chunk size (audio payload bytes)
        // FIX: duration from the audio payload only; the original divided the
        // whole file size (header included) by the byte rate.
        const duration = dataSize / byteRate_;

        const originalSampleRate = dataView.getUint32(24, true); // fmt: samples/second
        const newSampleRate = originalSampleRate / slowRate;
        const roundedNewSampleRate = newSampleRate.toFixed(0);
        const newduration = duration * slowRate;

        // Display the sample rates on the webpage.
        sampleRatesElement.innerHTML = ` Original Sample Rate: ${originalSampleRate/1000} kHz, original duration: ${duration.toFixed(1)} s
New Sample Rate: ${roundedNewSampleRate/1000} kHz, time-expanded duration: ${newduration.toFixed(1)} s
Bits per Sample: ${bitsPerSample}, File Size: ${fileSize_.toFixed(1)} MB `;

        // Rewrite the header so the browser decodes at the slowed rate.
        // FIX: round explicitly so the header matches the displayed value
        // (setUint32 would otherwise silently truncate a fractional rate).
        const headerRate = Math.round(newSampleRate);
        dataView.setUint32(24, headerRate, true);                     // sample rate
        const byteRate = headerRate * dataView.getUint16(22, true) * (dataView.getUint16(34, true) / 8);
        dataView.setUint32(28, byteRate, true);                       // byte rate

        // Wrap the modified buffer in a Blob and play it through <audio>.
        const modifiedBlob = new Blob([arrayBuffer.buffer], { type: 'audio/wav' });
        const modifiedUrl = URL.createObjectURL(modifiedBlob);
        const audioElement = new Audio(modifiedUrl);
        audioElement.controls = true;       // keep the native controls
        audioElement.style.width = '100%';
        audioContainer.appendChild(audioElement);
        audioElement.play();

        currentAudioElement = audioElement;
        statusElement.textContent = `Playing audio at ${slowRate}x slow rate.`;
    } catch (error) {
        console.error('Error processing audio file:', error);
        statusElement.textContent = `Error: ${error.message}`;
    } finally {
        progressContainer.style.display = 'none';
    }
}