add support for high quality opus audio messages

This commit is contained in:
liamcottle 2024-10-12 11:00:19 +13:00
commit a6556c1580
3 changed files with 163 additions and 34 deletions

View file

@@ -27,10 +27,11 @@
leave-active-class="transition ease-in duration-75"
leave-from-class="transform opacity-100 scale-100"
leave-to-class="transform opacity-0 scale-95">
<div v-if="isShowingMenu" v-click-outside="hideMenu" class="absolute bottom-0 -ml-11 sm:right-0 sm:ml-0 z-10 mb-10 w-56 rounded-md bg-white shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none">
<div v-if="isShowingMenu" v-click-outside="hideMenu" class="absolute bottom-0 -ml-11 sm:right-0 sm:ml-0 z-10 mb-10 rounded-md bg-white shadow-lg ring-1 ring-black ring-opacity-5 focus:outline-none">
<div class="py-1">
<button @click="startRecordingCodec2('1200')" type="button" class="w-full block text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100">Codec2 (Low Quality)</button>
<button @click="startRecordingCodec2('3200')" type="button" class="w-full block text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100">Codec2 (Medium Quality)</button>
<button @click="startRecordingCodec2('1200')" type="button" class="w-full block text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100 whitespace-nowrap">Low Quality - Codec2 (1200)</button>
<button @click="startRecordingCodec2('3200')" type="button" class="w-full block text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100 whitespace-nowrap">Medium Quality - Codec2 (3200)</button>
<button @click="startRecordingOpus()" type="button" class="w-full block text-left px-4 py-2 text-sm text-gray-700 hover:bg-gray-100 whitespace-nowrap">High Quality - OPUS</button>
</div>
</div>
</Transition>
@@ -67,6 +68,11 @@ export default {
mode: mode,
});
},
startRecordingOpus() {
this.startRecordingAudioAttachment({
codec: "opus",
});
},
stopRecordingAudioAttachment() {
this.isShowingMenu = false;
this.$emit("stop-recording");

View file

@@ -251,7 +251,7 @@
<!-- audio preview -->
<div>
<audio controls class="h-10">
<source :src="newMessageAudio.audio_wav_url" type="audio/wav"/>
<source :src="newMessageAudio.audio_preview_url" type="audio/wav"/>
</audio>
</div>
@@ -366,6 +366,7 @@
<script>
import Utils from "../../js/Utils";
import DialogUtils from "../../js/DialogUtils";
import MicrophoneRecorder from "../../js/MicrophoneRecorder";
import NotificationUtils from "../../js/NotificationUtils";
import WebSocketConnection from "../../js/WebSocketConnection";
import AddAudioButton from "./AddAudioButton.vue";
@@ -403,6 +404,7 @@ export default {
isRecordingAudioAttachment: false,
audioAttachmentMicrophoneRecorder: null,
audioAttachmentMicrophoneRecorderCodec: null,
audioAttachmentRecordingStartedAt: null,
audioAttachmentRecordingDuration: null,
audioAttachmentRecordingTimer: null,
@@ -1156,6 +1158,7 @@ export default {
case "codec2": {
// start recording microphone
this.audioAttachmentMicrophoneRecorderCodec = "codec2";
this.audioAttachmentMicrophoneRecorder = new Codec2MicrophoneRecorder();
this.audioAttachmentMicrophoneRecorder.codec2Mode = args.mode;
this.audioAttachmentRecordingStartedAt = Date.now();
@@ -1176,6 +1179,30 @@
break;
}
case "opus": {
// start recording microphone
this.audioAttachmentMicrophoneRecorderCodec = "opus";
this.audioAttachmentMicrophoneRecorder = new MicrophoneRecorder();
this.audioAttachmentRecordingStartedAt = Date.now();
this.isRecordingAudioAttachment = await this.audioAttachmentMicrophoneRecorder.start();
// update recording time in ui every second
this.audioAttachmentRecordingDuration = Utils.formatMinutesSeconds(0);
this.audioAttachmentRecordingTimer = setInterval(() => {
const recordingDurationMillis = Date.now() - this.audioAttachmentRecordingStartedAt;
const recordingDurationSeconds = recordingDurationMillis / 1000;
this.audioAttachmentRecordingDuration = Utils.formatMinutesSeconds(recordingDurationSeconds);
}, 1000);
// alert if failed to start recording
if(!this.isRecordingAudioAttachment){
DialogUtils.alert("failed to start recording");
}
break;
}
default: {
DialogUtils.alert(`Unhandled microphone recorder codec: ${args.codec}`);
@@ -1198,45 +1225,71 @@ export default {
this.isRecordingAudioAttachment = false;
const audio = await this.audioAttachmentMicrophoneRecorder.stop();
// do nothing if no audio was provided
if(audio.length === 0){
return;
}
// handle audio based on codec
switch(this.audioAttachmentMicrophoneRecorderCodec){
case "codec2": {
// decode codec2 audio back to wav so we can show a preview audio player before user sends it
const codec2Mode = this.audioAttachmentMicrophoneRecorder.codec2Mode;
const decoded = await Codec2Lib.runDecode(codec2Mode, new Uint8Array(audio));
// do nothing if no audio was provided
if(audio.length === 0){
return;
}
// convert decoded codec2 to wav audio and create a blob
const wavAudio = await Codec2Lib.rawToWav(decoded);
const wavBlob = new Blob([wavAudio], {
type: "audio/wav",
});
// decode codec2 audio back to wav so we can show a preview audio player before user sends it
const codec2Mode = this.audioAttachmentMicrophoneRecorder.codec2Mode;
const decoded = await Codec2Lib.runDecode(codec2Mode, new Uint8Array(audio));
// convert decoded codec2 to wav audio and create a blob
const wavAudio = await Codec2Lib.rawToWav(decoded);
const wavBlob = new Blob([wavAudio], {
type: "audio/wav",
});
// determine audio mode
var audioMode = null;
switch(codec2Mode){
case "1200": {
audioMode = 0x04; // LXMF.AM_CODEC2_1200
break;
}
case "3200": {
audioMode = 0x09; // LXMF.AM_CODEC2_3200
break;
}
default: {
DialogUtils.alert(`Unhandled microphone recorder codec2Mode: ${codec2Mode}`);
return;
}
}
// update message audio attachment
this.newMessageAudio = {
audio_mode: audioMode,
audio_blob: new Blob([audio]),
audio_preview_url: URL.createObjectURL(wavBlob),
};
// determine audio mode
var audioMode = null;
switch(codec2Mode){
case "1200": {
audioMode = 0x04; // LXMF.AM_CODEC2_1200
break;
}
case "3200": {
audioMode = 0x09; // LXMF.AM_CODEC2_3200
case "opus": {
// do nothing if no audio was provided
if(audio.size === 0){
return;
}
// update message audio attachment
this.newMessageAudio = {
audio_mode: 0x10, // LXMF.AM_OPUS_OGG
audio_blob: audio, // opus microphone recorder returns a blob
audio_preview_url: URL.createObjectURL(audio),
};
break;
}
default: {
DialogUtils.alert(`Unhandled microphone recorder codec2Mode: ${codec2Mode}`);
return;
}
}
// update message audio attachment
this.newMessageAudio = {
audio_mode: audioMode,
audio_blob: new Blob([audio]),
audio_wav_url: URL.createObjectURL(wavBlob),
};
},
removeAudioAttachment: function() {

View file

@@ -0,0 +1,70 @@
/**
 * A simple class for recording microphone input and returning the audio.
 *
 * Usage: `await start()` to begin recording (returns false on failure, e.g.
 * when microphone permission is denied), then `await stop()` to receive the
 * recorded audio as an ogg/opus Blob.
 */
class MicrophoneRecorder {

    constructor() {
        // audio chunks delivered by the MediaRecorder while recording
        this.audioChunks = [];
        this.microphoneMediaStream = null;
        this.mediaRecorder = null;
    }

    /**
     * Requests microphone access and starts recording.
     * @returns {Promise<boolean>} true if recording started, false otherwise.
     */
    async start() {
        try {

            // discard audio from any previous recording session, otherwise a
            // reused recorder would prepend stale chunks to the new recording
            this.audioChunks = [];

            // request access to the microphone
            this.microphoneMediaStream = await navigator.mediaDevices.getUserMedia({
                audio: true,
            });

            // create media recorder
            this.mediaRecorder = new MediaRecorder(this.microphoneMediaStream);

            // handle received audio from media recorder
            this.mediaRecorder.ondataavailable = (event) => {
                this.audioChunks.push(event.data);
            };

            // start recording
            this.mediaRecorder.start();

            // successfully started recording
            return true;

        } catch(e) {

            // if we obtained the microphone before failing (e.g. MediaRecorder
            // construction threw), release it, otherwise the browser keeps the
            // mic indicator on with no active recorder
            if(this.microphoneMediaStream){
                this.microphoneMediaStream.getTracks().forEach(track => track.stop());
                this.microphoneMediaStream = null;
            }

            // failed to start recording (caller is expected to alert the user)
            return false;

        }
    }

    /**
     * Stops recording, releases the microphone and returns the recorded audio.
     * @returns {Promise<Blob>} the recorded audio as an "audio/ogg; codecs=opus" blob.
     */
    async stop() {
        return new Promise((resolve, reject) => {
            try {

                // handle media recording stopped
                this.mediaRecorder.onstop = () => {

                    // stop using microphone
                    if(this.microphoneMediaStream){
                        this.microphoneMediaStream.getTracks().forEach(track => track.stop());
                    }

                    // create blob from audio chunks
                    const blob = new Blob(this.audioChunks, { type: "audio/ogg; codecs=opus" });

                    // resolve promise
                    resolve(blob);

                };

                // stop recording (fires onstop above after final dataavailable)
                this.mediaRecorder.stop();

            } catch(e) {
                reject(e);
            }
        });
    }

}

export default MicrophoneRecorder;