move web audio to worklet

Emerald 2023-10-20 02:46:20 -04:00
parent 35782cf3a4
commit 1867b44fc0
Signed by: emerald
GPG Key ID: 420C9E1863CCB30F
4 changed files with 115 additions and 65 deletions


@@ -7,7 +7,7 @@ export const micThreshold = writable(0);
export const initAudio = async (): Promise<NodeJS.Timeout | undefined> => {
	if (mode == 'tauri') {
		const { invoke } = await import('@tauri-apps/api');
		micThreshold.subscribe(async (threshold) => {
			try {
@@ -16,57 +16,65 @@ export const initAudio = async (): Promise<NodeJS.Timeout | undefined> => {
				await invoke('log', { msg: `Error setting mic threshold: ${e}` });
			}
		});
-		return setInterval(async () => {
+		return setInterval(async () => {
			const newLevel = (await invoke('get_audio_level')) as number;
			micLevel.set(newLevel * 100);
		}, 40);
	} else {
-		return await webAudio();
-	}
+		await webAudio();
+	}
};
let audioCtx: AudioContext | undefined = undefined;
const webAudio = async () => {
-	if(audioCtx) audioCtx.close();
-	audioCtx = new AudioContext();
-	if (navigator.mediaDevices) {
-		try {
-			const device = await navigator.mediaDevices.getUserMedia({ audio: true });
-			const mic = audioCtx.createMediaStreamSource(device);
-			const gain = audioCtx.createGain();
-			mic.connect(gain);
-			const analyzer = audioCtx.createAnalyser();
-			analyzer.fftSize = 2048;
-			gain.connect(analyzer);
-			gain.gain.setValueAtTime(1, audioCtx.currentTime);
+	if (audioCtx) audioCtx.close();
+	audioCtx = new AudioContext();
+	if (navigator.mediaDevices) {
+		try {
+			const device = await navigator.mediaDevices.getUserMedia({ audio: true });
+			const mic = audioCtx.createMediaStreamSource(device);
-			const buffer = new Uint8Array(analyzer.frequencyBinCount);
-			const monitor = () => {
-				analyzer.getByteFrequencyData(buffer);
+			await audioCtx.audioWorklet.addModule("audio.worker.js")
+			const worklet = new AudioWorkletNode(audioCtx, "audio-trigger");
-				const max = (buffer.reduce((prev, cur) => prev + cur) / buffer.length);
-				micLevel.set(max);
-				micThreshold.subscribe((threshold) => {
-					console.log(max, threshold * 100 - 1);
-					if(max > (threshold * 100) - 1) {
-						dispatchEvent(new Event('mouth-open'))
-					} else {
-						dispatchEvent(new Event('mouth-close'));
-					}
-				})();
-			}
+			mic.connect(worklet);
-			return setInterval(monitor);
+			micThreshold.subscribe((val) => {
+				worklet.port.postMessage(val);
+			})
-		} catch (e) {
-			console.error("Failed to get microphone: ", e);
-			//TODO: toast here
-		}
-	} else {
-		console.error("This browser doesn't support audio access?");
-		//TODO: toast here
-	}
+			worklet.port.onmessage = (e) => {
+				switch (typeof e.data) {
+					case "number":
+						micLevel.set(e.data);
+						break;
+					case "string":
+						window?.dispatchEvent(new Event(e.data));
+						break;
+				}
+			}
+			await audioCtx.resume();
+			// const buffer = new Uint8Array(analyzer.frequencyBinCount);
+			// const monitor = () => {
+			// 	analyzer.getByteFrequencyData(buffer);
+			// }
+			// return setInterval(monitor);
+		} catch (e) {
+			console.error("Failed to get microphone: ", e);
+			//TODO: toast here
+		}
+	} else {
+		console.error("This browser doesn't support audio access?");
+		//TODO: toast here
+	}
}
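With this change, the worklet reports back over its MessagePort: a number is the current mic level, and a string ('mouth-open' or 'mouth-close') is a threshold trigger that the handler above re-dispatches as a window event. As a sketch only (not part of this commit; the handler bodies are hypothetical), UI code elsewhere can react to those events like this:

// sketch — listeners for the events dispatched by the onmessage handler above
window.addEventListener('mouth-open', () => {
	// e.g. swap the avatar to its open-mouth frame
});
window.addEventListener('mouth-close', () => {
	// e.g. swap back to the closed-mouth frame
});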


@@ -5,7 +5,6 @@
		'Eyes closed | Mouth closed',
		'Eyes closed | Mouth open'
	];
</script>
<script lang="ts">
@@ -23,6 +22,8 @@
+	$: loading = false;
+	$: $keepFocused = loading;
	$: {
		const frame = $frames[index];
		if (!frame) src = null;
@@ -38,6 +39,7 @@
	const openImage = async () => {
		if (mode === 'web') {
			fileInput?.click();
+			loading = true;
		}
		// loading[index] = true;
		// const path = (await invoke('open_image')) as {
@ -66,28 +68,30 @@
};
const finishLoad = async () => {
info("Loading image in web mode");
info('Loading image in web mode');
loading = true;
const file = files.item(0);
info("Got file:", file);
info('Got file:', file);
if (file) {
switch(file.type) {
case "image/gif":
switch (file.type) {
case 'image/gif':
const gif = await loadGif(file);
$frames[index] = gif ? {
kind: "GIF",
value: gif,
data: new Uint8Array(await file.arrayBuffer())
} :
null;
$frames[index] = gif
? {
kind: 'GIF',
value: gif,
data: new Uint8Array(await file.arrayBuffer())
}
: null;
break;
default:
const img = await loadImage(file);
$frames[index] = img ?
{
kind:"still",
value: img
} : null
$frames[index] = img
? {
kind: 'still',
value: img
}
: null;
}
}
@@ -100,14 +104,15 @@
</script>
{#if mode === 'web'}
-	<input
-		type="file"
-		accept="image/png, image/jpeg, image/gif, image/tiff"
-		bind:this={fileInput}
-		class="hidden w-0 h-0"
-		bind:files
-		on:change={finishLoad}
-	/>
+	<div class="w-0 h-0 overflow-hidden">
+		<input
+			type="file"
+			accept="image/png, image/jpeg, image/gif, image/tiff"
+			bind:this={fileInput}
+			bind:files
+			on:change={finishLoad}
+		/>
+	</div>
{/if}
<button
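The loadGif and loadImage helpers called from finishLoad are not part of this diff. Purely as an illustration of what the web-mode still-image path could look like (an assumption, not the project's actual helper), loadImage might just wrap createImageBitmap:

// hypothetical sketch — the real loadImage in this project may differ
const loadImage = async (file: File): Promise<ImageBitmap | null> => {
	try {
		// decode the picked file into a bitmap usable for drawing
		return await createImageBitmap(file);
	} catch (e) {
		console.error('Failed to decode image:', e);
		return null;
	}
};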

static/audio.worker.js (new file, +37 lines)

@@ -0,0 +1,37 @@
// @ts-nocheck
export class AudioTrigger extends AudioWorkletProcessor {
	threshold
	constructor() {
		super();
		this.port.onmessage = (e) => {
			this.threshold = e.data;
		}
	}
	process(inputs) {
		const channels = inputs[0];
		let count = 0;
		let avg = 0;
		for (const chan of channels) {
			avg += (chan.map(v => Math.abs(v)).reduce((prev, cur) => prev + cur) / chan.length);
			count += chan.length
		}
		avg /= count;
		avg *= 100 * 100;
		if (avg > (this.threshold * 100) - 1) {
			this.port.postMessage('mouth-open')
		} else {
			this.port.postMessage('mouth-close');
		}
		this.port.postMessage(avg);
	}
}
registerProcessor("audio-trigger", AudioTrigger);
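For reference, the main-thread half of this message protocol lives in the first changed file above; condensed (names and the "audio.worker.js" path exactly as used in this commit):

// condensed from the first changed file in this commit
await audioCtx.audioWorklet.addModule('audio.worker.js'); // serves this file from static/
const worklet = new AudioWorkletNode(audioCtx, 'audio-trigger');
mic.connect(worklet); // feeds mic samples into process()
micThreshold.subscribe((val) => worklet.port.postMessage(val)); // main thread -> worklet: threshold
worklet.port.onmessage = (e) => {
	// worklet -> main thread: number = level, string = 'mouth-open' / 'mouth-close'
};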