disable UI recording bars so we can encode audio MP3

pull/1575/head
Audric Ackermann 4 years ago
parent ecf133ad5d
commit 0ce1059a96

@@ -87,6 +87,7 @@
     "linkify-it": "2.0.3",
     "lodash": "4.17.11",
     "long": "^4.0.0",
+    "mic-recorder-to-mp3": "^2.2.2",
     "mkdirp": "0.5.1",
     "moment": "2.21.0",
     "mustache": "2.3.0",

@@ -901,7 +901,7 @@ export class SessionCompositionBox extends React.Component<Props, State> {
     const audioAttachment: Attachment = {
       data: fileBuffer,
       flags: SignalService.AttachmentPointer.Flags.VOICE_MESSAGE,
-      contentType: MIME.AUDIO_OPUS,
+      contentType: MIME.AUDIO_MP3,
       size: audioBlob.size,
     };

@@ -14,6 +14,7 @@ import { Constants } from '../../../session';
 import { ToastUtils } from '../../../session/utils';
 import { DefaultTheme, withTheme } from 'styled-components';
 import autoBind from 'auto-bind';
+import MicRecorder from 'mic-recorder-to-mp3';

 interface Props {
   onExitVoiceNoteView: any;
@@ -29,28 +30,6 @@ interface State {
   isPaused: boolean;
   actionHover: boolean;
-  mediaSetting?: boolean;
-
-  // Stream information and data
-  mediaBlob?: any;
-  audioElement?: HTMLAudioElement;
-  streamParams?: {
-    stream: any;
-    media: any;
-    input: any;
-    processor: any;
-  };
-
-  canvasParams: {
-    width: number;
-    height: number;
-    barRadius: number;
-    barWidth: number;
-    barPadding: number;
-    maxBarHeight: number;
-    minBarHeight: number;
-  };

   startTimestamp: number;
   nowTimestamp: number;
@@ -61,6 +40,9 @@ class SessionRecordingInner extends React.Component<Props, State> {
   private readonly visualisationRef: React.RefObject<HTMLDivElement>;
   private readonly visualisationCanvas: React.RefObject<HTMLCanvasElement>;
   private readonly playbackCanvas: React.RefObject<HTMLCanvasElement>;
+  private recorder: any;
+  private audioBlobMp3?: Blob;
+  private audioElement?: HTMLAudioElement | null;

   constructor(props: Props) {
     super(props);
@@ -81,37 +63,18 @@ class SessionRecordingInner extends React.Component<Props, State> {
       isPlaying: false,
       isPaused: false,
       actionHover: false,
-      mediaSetting: undefined,
-      mediaBlob: undefined,
-      audioElement: undefined,
-      streamParams: undefined,

       startTimestamp: now,
       nowTimestamp: now,
       updateTimerInterval,
-
-      // Initial width of 0 until bounds are located
-      canvasParams: {
-        width: 0,
-        height: 35,
-        barRadius: 15,
-        barWidth: 4,
-        barPadding: 3,
-        maxBarHeight: 30,
-        minBarHeight: 3,
-      },
     };
   }

   public componentWillMount() {
     // This turns on the microphone on the system. Later we need to turn it off.
-    this.initiateRecordingStream();
+    void this.initiateRecordingStream();
   }

   public componentDidMount() {
-    window.addEventListener('resize', this.updateCanvasDimensions);
-    this.updateCanvasDimensions();
-
     // Callback to parent on load complete
     if (this.props.onLoadVoiceNoteView) {
       this.props.onLoadVoiceNoteView();
@@ -120,19 +83,6 @@ class SessionRecordingInner extends React.Component<Props, State> {
   public componentWillUnmount() {
     clearInterval(this.state.updateTimerInterval);
-    window.removeEventListener('resize', this.updateCanvasDimensions);
-  }
-
-  public async componentDidUpdate() {
-    const { audioElement, isPlaying } = this.state;
-
-    if (audioElement) {
-      if (isPlaying) {
-        await audioElement.play();
-      } else {
-        audioElement.pause();
-      }
-    }
   }
   // tslint:disable-next-line: cyclomatic-complexity
@@ -144,7 +94,6 @@ class SessionRecordingInner extends React.Component<Props, State> {
       isRecording,
       startTimestamp,
       nowTimestamp,
-      audioElement,
     } = this.state;

     const actionStopRecording = actionHover && isRecording;
@@ -153,9 +102,16 @@ class SessionRecordingInner extends React.Component<Props, State> {
     const actionDefault =
       !actionStopRecording && !actionPlayAudio && !actionPauseAudio;

+    // If we are recording, base the displayed time on our state values.
+    // If we are playing (audioElement?.currentTime !== 0), use that instead.
+    // If we are not playing but have an audioElement, display its duration.
+    // Otherwise display 0.
     const displayTimeMs = isRecording
       ? (nowTimestamp - startTimestamp) * 1000
-      : (audioElement && audioElement?.currentTime * 1000) || 0;
+      : (this.audioElement &&
+          (this.audioElement?.currentTime * 1000 ||
+            this.audioElement?.duration)) ||
+        0;

     const displayTimeString = moment.utc(displayTimeMs).format('m:ss');
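
For reference, the new fallback chain behaves like this standalone sketch (function name hypothetical; currentTime is in seconds, hence the * 1000, while the duration fallback stays in seconds exactly as the diff has it):

    function getDisplayTimeMs(
      isRecording: boolean,
      startTimestamp: number,
      nowTimestamp: number,
      audioElement?: HTMLAudioElement | null
    ): number {
      if (isRecording) {
        // timestamps are tracked in seconds, so scale to milliseconds
        return (nowTimestamp - startTimestamp) * 1000;
      }
      // while playing, currentTime drives the display; once it sits at 0,
      // fall back to the clip duration, and finally to 0
      return (
        (audioElement &&
          (audioElement.currentTime * 1000 || audioElement.duration)) ||
        0
      );
    }
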
@@ -290,7 +246,7 @@ class SessionRecordingInner extends React.Component<Props, State> {
     }
   }

-  private stopRecording() {
+  private stopRecordingState() {
     this.setState({
       isRecording: false,
       isPaused: true,
@@ -299,95 +255,49 @@
   private async playAudio() {
-    // Generate audio element if it doesn't exist
-    const generateAudioElement = () => {
-      const { mediaBlob, recordDuration } = this.state;
-
-      if (!mediaBlob) {
-        return undefined;
-      }
-
-      const audioURL = window.URL.createObjectURL(mediaBlob.data);
-      const audioElementN = new Audio(audioURL);
-
-      audioElementN.loop = false;
-
-      audioElementN.oncanplaythrough = async () => {
-        const duration = recordDuration;
-        if (duration && audioElementN.currentTime < duration) {
-          await audioElementN.play();
-        }
-      };
-
-      return audioElementN;
-    };
-
-    const audioElement = this.state.audioElement || generateAudioElement();
-
-    if (!audioElement) {
+    const { recordDuration } = this.state;
+
+    if (!this.audioBlobMp3) {
       return;
     }

-    // Draw sweeping timeline
-    const drawSweepingTimeline = () => {
-      const { isPaused } = this.state;
-      const { textColor } = this.props.theme.colors;
-      const { width, height } = this.state.canvasParams;
-      const canvas = this.playbackCanvas.current;
-      if (!canvas || isPaused) {
-        return;
-      }
-
-      // Once audioElement is fully buffered, we get the true duration
-      let audioDuration = this.state.recordDuration;
-      if (audioElement.duration !== Infinity) {
-        audioDuration = audioElement.duration;
-      }
-      const progress = width * (audioElement.currentTime / audioDuration);
-
-      const canvasContext = canvas.getContext('2d');
-      if (!canvasContext) {
-        return;
-      }
-
-      canvasContext.beginPath();
-      canvasContext.fillStyle = textColor;
-      canvasContext.globalCompositeOperation = 'source-atop';
-      canvasContext.fillRect(0, 0, progress, height);
-
-      // Pause audio when it reaches the end of the blob
-      if (
-        audioElement.duration &&
-        audioElement.currentTime === audioElement.duration
-      ) {
-        this.pauseAudio();
-        return;
-      }
-
-      requestAnimationFrame(drawSweepingTimeline);
-    };
+    if (this.audioElement) {
+      window.log.info('Audio element already init');
+    } else {
+      const audioURL = window.URL.createObjectURL(this.audioBlobMp3);
+      this.audioElement = new Audio(audioURL);
+      this.audioElement.loop = false;
+      this.audioElement.onended = () => {
+        this.pauseAudio();
+      };
+
+      this.audioElement.oncanplaythrough = async () => {
+        const duration = recordDuration;
+        if (
+          duration &&
+          this.audioElement &&
+          this.audioElement.currentTime < duration
+        ) {
+          await this.audioElement?.play();
+        }
+      };
+    }

     this.setState({
-      audioElement,
       isRecording: false,
       isPaused: false,
       isPlaying: true,
     });

-    // If end of audio reached, reset the position of the sweeping timeline
-    if (
-      audioElement.duration &&
-      audioElement.currentTime === audioElement.duration
-    ) {
-      await this.initPlaybackView();
-    }
-
-    await audioElement.play();
-    requestAnimationFrame(drawSweepingTimeline);
+    await this.audioElement.play();
   }

   private pauseAudio() {
-    this.state.audioElement?.pause();
+    if (this.audioElement) {
+      this.audioElement.pause();
+    }
     this.setState({
       isPlaying: false,
       isPaused: true,
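
Playback now goes through a plain HTMLAudioElement fed by an object URL instead of the old canvas-driven sweeping timeline. The pattern in isolation, with the revokeObjectURL cleanup a caller may want to add (the commit itself keeps the URL alive so the note can be replayed):

    // Minimal sketch, assuming an MP3 Blob from the recorder:
    function playBlob(blob: Blob): HTMLAudioElement {
      const url = window.URL.createObjectURL(blob);
      const audio = new Audio(url);
      audio.loop = false;
      audio.onended = () => {
        // release the object URL once playback finishes
        window.URL.revokeObjectURL(url);
      };
      void audio.play();
      return audio;
    }
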
@@ -397,302 +307,60 @@
   private async onDeleteVoiceMessage() {
     this.pauseAudio();
     await this.stopRecordingStream();
+    this.audioBlobMp3 = undefined;
+    this.audioElement = null;
     this.props.onExitVoiceNoteView();
   }

-  private onSendVoiceMessage() {
-    const audioBlob = this.state.mediaBlob.data;
-
-    if (!audioBlob) {
+  private async onSendVoiceMessage() {
+    if (!this.audioBlobMp3 || !this.audioBlobMp3.size) {
+      window.log.info('Empty audio blob');
       return;
     }

     // Is the audio file > attachment filesize limit
-    if (audioBlob.size > Constants.CONVERSATION.MAX_ATTACHMENT_FILESIZE_BYTES) {
+    if (
+      this.audioBlobMp3.size >
+      Constants.CONVERSATION.MAX_ATTACHMENT_FILESIZE_BYTES
+    ) {
       ToastUtils.pushFileSizeErrorAsByte(
         Constants.CONVERSATION.MAX_ATTACHMENT_FILESIZE_BYTES
       );
       return;
     }

-    this.props.sendVoiceMessage(audioBlob);
+    this.props.sendVoiceMessage(this.audioBlobMp3);
   }
-  private initiateRecordingStream() {
-    navigator.getUserMedia(
-      { audio: true },
-      this.onRecordingStream,
-      this.onStreamError
-    );
-  }
-
-  private async stopRecordingStream() {
-    const { streamParams } = this.state;
-
-    // Exit if parameters aren't yet set
-    if (!streamParams) {
-      return;
-    }
-
-    // Stop the stream
-    if (streamParams.media.state !== 'inactive') {
-      streamParams.media.stop();
-    }
-    streamParams.input.disconnect();
-    streamParams.processor.disconnect();
-    streamParams.stream.getTracks().forEach((track: any) => track.stop);
-
-    // Stop recording
-    this.stopRecording();
-  }
-
-  private async onRecordingStream(stream: any) {
-    // If not recording, stop stream
-    if (!this.state.isRecording) {
-      await this.stopRecordingStream();
-      return;
-    }
-
-    // Start recording the stream
-    const media = new window.MediaRecorder(stream, {
-      mimeType: 'audio/webm;codecs=opus',
-    });
-
-    media.ondataavailable = (mediaBlob: any) => {
-      this.setState({ mediaBlob }, async () => {
-        // Generate PCM waveform for playback
-        await this.initPlaybackView();
-      });
-    };
-
-    media.start();
-
-    // Audio Context
-    const audioContext = new window.AudioContext();
-    const input = audioContext.createMediaStreamSource(stream);
-
-    const bufferSize = 1024;
-    const analyser = audioContext.createAnalyser();
-    analyser.smoothingTimeConstant = 0.3;
-    analyser.fftSize = 512;
-
-    const processor = audioContext.createScriptProcessor(bufferSize, 1, 1);
-
-    processor.onaudioprocess = () => {
-      const streamParams = { stream, media, input, processor };
-      this.setState({ streamParams });
-
-      const { textColorSubtleNoOpacity } = this.props.theme.colors;
-      const {
-        width,
-        height,
-        barWidth,
-        barPadding,
-        maxBarHeight,
-        minBarHeight,
-      } = this.state.canvasParams;
-
-      // Array of volumes by frequency (not in Hz, arbitrary unit)
-      const freqTypedArray = new Uint8Array(analyser.frequencyBinCount);
-      analyser.getByteFrequencyData(freqTypedArray);
-      const freqArray = Array.from(freqTypedArray);
-
-      // CANVAS CONTEXT
-      const drawRecordingCanvas = () => {
-        const canvas = this.visualisationCanvas.current;
-        const numBars = width / (barPadding + barWidth);
-
-        let volumeArray = freqArray.map(n => {
-          const maxVal = Math.max(...freqArray);
-          const initialHeight = maxBarHeight * (n / maxVal);
-          const freqBarHeight =
-            initialHeight > minBarHeight ? initialHeight : minBarHeight;
-          return freqBarHeight;
-        });
-
-        // Create initial fake bars to improve appearance.
-        // Gradually increasing wave rather than a wall at the beginning
-        const frontLoadLen = Math.ceil(volumeArray.length / 10);
-        const frontLoad = volumeArray
-          .slice(0, frontLoadLen - 1)
-          .reverse()
-          .map(n => n * 0.8);
-        volumeArray = [...frontLoad, ...volumeArray];
-
-        // Chop off values which exceed the bounds of the container
-        volumeArray = volumeArray.slice(0, numBars);
-
-        if (canvas) {
-          canvas.width = width;
-          canvas.height = height;
-        }
-        const canvasContext = canvas && canvas.getContext('2d');
-
-        for (let i = 0; i < volumeArray.length; i++) {
-          const barHeight = Math.ceil(volumeArray[i]);
-          const offsetX = Math.ceil(i * (barWidth + barPadding));
-          const offsetY = Math.ceil(height / 2 - barHeight / 2);
-
-          if (canvasContext) {
-            canvasContext.fillStyle = textColorSubtleNoOpacity;
-            this.drawRoundedRect(canvasContext, offsetX, offsetY, barHeight);
-          }
-        }
-      };
-
-      if (this.state.isRecording) {
-        requestAnimationFrame(drawRecordingCanvas);
-      }
-    };
-
-    // Init listeners for visualisation
-    input.connect(analyser);
-    processor.connect(audioContext.destination);
-  }
-
-  private onStreamError(error: any) {
-    return error;
-  }
+  private async initiateRecordingStream() {
+    // Start recording. Browser will request permission to use your microphone.
+    if (this.recorder) {
+      await this.stopRecordingStream();
+    }
+
+    this.recorder = new MicRecorder({
+      bitRate: 128,
+    });
+    this.recorder
+      .start()
+      .then(() => {
+        // something else
+      })
+      .catch((e: any) => {
+        window.log.error(e);
+      });
+  }
+
+  private async stopRecordingStream() {
+    if (!this.recorder) {
+      return;
+    }
+    const [_, blob] = await this.recorder.stop().getMp3();
+    this.recorder = undefined;
+    this.audioBlobMp3 = blob;
+
+    // Stop recording
+    this.stopRecordingState();
+  }
-  private compactPCM(array: Float32Array, numGroups: number) {
-    // Takes an array of arbitrary size and compresses it down into a smaller
-    // array, by grouping elements into bundles of groupSize and taking their
-    // average.
-    // Eg. [73, 6, 1, 9, 5, 11, 2, 19, 35] with groupSize 3 becomes
-    // = [(73 + 6 + 1) / 3, (9 + 5 + 11) / 3, (2 + 19 + 35) / 3]
-    // = [27, 8, 19]
-    // It's used to get a fixed number of freqBars or volumeBars out of
-    // a huge sample array.
-    const groupSize = Math.floor(array.length / numGroups);
-
-    let sum = 0;
-    const compacted = new Float32Array(numGroups);
-    for (let i = 0; i < array.length; i++) {
-      sum += array[i];
-
-      if ((i + 1) % groupSize === 0) {
-        const compactedIndex = (i + 1) / groupSize;
-        const average = sum / groupSize;
-        compacted[compactedIndex] = average;
-        sum = 0;
-      }
-    }
-
-    return compacted;
-  }
-
-  private async initPlaybackView() {
-    const {
-      width,
-      height,
-      barWidth,
-      barPadding,
-      maxBarHeight,
-      minBarHeight,
-    } = this.state.canvasParams;
-    const { textColorSubtleNoOpacity } = this.props.theme.colors;
-
-    const numBars = width / (barPadding + barWidth);
-
-    // Scan through audio file getting average volume per bar
-    // to display amplitude over time as a static image
-    const blob = this.state.mediaBlob.data;
-
-    const arrayBuffer = await new Response(blob).arrayBuffer();
-    const audioContext = new window.AudioContext();
-
-    await audioContext.decodeAudioData(arrayBuffer, (buffer: AudioBuffer) => {
-      this.setState({
-        recordDuration: buffer.duration,
-      });
-
-      // Get audio amplitude with PCM Data in Float32
-      // Grab single channel only to save computation
-      const channelData = buffer.getChannelData(0);
-      const pcmData = this.compactPCM(channelData, numBars);
-      const pcmDataArray = Array.from(pcmData);
-      const pcmDataArrayNormalised = pcmDataArray.map(v => Math.abs(v));
-
-      // Prepare values for drawing to canvas
-      const maxAmplitude = Math.max(...pcmDataArrayNormalised);
-
-      const barSizeArray = pcmDataArrayNormalised.map(amplitude => {
-        let barSize = maxBarHeight * (amplitude / maxAmplitude);
-
-        // Prevent values that are too small
-        if (barSize < minBarHeight) {
-          barSize = minBarHeight;
-        }
-
-        return barSize;
-      });
-
-      // CANVAS CONTEXT
-      const drawPlaybackCanvas = () => {
-        const canvas = this.playbackCanvas.current;
-        if (!canvas) {
-          return;
-        }
-        canvas.height = height;
-        canvas.width = width;
-
-        const canvasContext = canvas.getContext('2d');
-        if (!canvasContext) {
-          return;
-        }
-
-        for (let i = 0; i < barSizeArray.length; i++) {
-          const barHeight = Math.ceil(barSizeArray[i]);
-          const offsetX = Math.ceil(i * (barWidth + barPadding));
-          const offsetY = Math.ceil(height / 2 - barHeight / 2);
-
-          canvasContext.fillStyle = textColorSubtleNoOpacity;
-          this.drawRoundedRect(canvasContext, offsetX, offsetY, barHeight);
-        }
-      };
-
-      drawPlaybackCanvas();
-    });
-  }
-
-  private drawRoundedRect(
-    ctx: CanvasRenderingContext2D,
-    x: number,
-    y: number,
-    h: number
-  ) {
-    let r = this.state.canvasParams.barRadius;
-    const w = this.state.canvasParams.barWidth;
-    if (w < r * 2) {
-      r = w / 2;
-    }
-    if (h < r * 2) {
-      r = h / 2;
-    }
-    ctx.beginPath();
-    ctx.moveTo(x + r, y);
-    ctx.arcTo(x + w, y, x + w, y + h, r);
-    ctx.arcTo(x + w, y + h, x, y + h, r);
-    ctx.arcTo(x, y + h, x, y, r);
-    ctx.arcTo(x, y, x + w, y, r);
-    ctx.closePath();
-    ctx.fill();
-  }
-
-  private updateCanvasDimensions() {
-    const canvas =
-      this.visualisationCanvas.current || this.playbackCanvas.current;
-    const width = canvas?.clientWidth || 0;
-    this.setState({
-      canvasParams: { ...this.state.canvasParams, width },
-    });
-  }
   private async onKeyDown(event: any) {
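
With MicRecorder owning the microphone stream, there is no longer a ScriptProcessor tap to drive the visualisation bars, which is presumably why this commit drops them along with all the canvas plumbing above. The whole record/stop lifecycle now reduces to the library calls; a condensed sketch of what the two new methods do, standalone outside the component:

    import MicRecorder from 'mic-recorder-to-mp3';

    let recorder: any; // typed as any, matching the ambient declaration below

    async function startRecording(): Promise<void> {
      recorder = new MicRecorder({ bitRate: 128 });
      try {
        await recorder.start(); // prompts for microphone permission
      } catch (e) {
        console.error(e); // the component uses window.log.error here
      }
    }

    async function stopRecording(): Promise<Blob | undefined> {
      if (!recorder) {
        return undefined;
      }
      // stop() halts capture; getMp3() finishes the lamejs encode and
      // resolves with [mp3BufferChunks, blob]
      const [, blob] = await recorder.stop().getMp3();
      recorder = undefined;
      return blob;
    }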

@@ -0,0 +1 @@
+declare module 'mic-recorder-to-mp3';
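
The one-line ambient declaration types the whole module as any, which matches private recorder: any in the component. If stricter typing were wanted later, a hand-written declaration could look like this sketch (signatures inferred from the calls made in this commit, not taken from the package itself):

    declare module 'mic-recorder-to-mp3' {
      export default class MicRecorder {
        constructor(config?: { bitRate?: number });
        start(): Promise<unknown>;
        stop(): MicRecorder;
        getMp3(): Promise<[Int8Array[], Blob]>;
      }
    }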

@@ -1169,6 +1169,6 @@ export class MessageModel extends Backbone.Model<MessageAttributes> {
     }
   }
 }

-export class MessageCollection extends Backbone.Collection<MessageModel> { }
+export class MessageCollection extends Backbone.Collection<MessageModel> {}
 MessageCollection.prototype.model = MessageModel;

@@ -6462,6 +6462,13 @@ klaw@^1.0.0:
   optionalDependencies:
     graceful-fs "^4.1.9"

+lamejs@^1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/lamejs/-/lamejs-1.2.0.tgz#0259f83db4666141a7b671b8caa6369d95177d08"
+  integrity sha1-Aln4PbRmYUGntnG4yqY2nZUXfQg=
+  dependencies:
+    use-strict "1.0.1"
+
 latest-version@^5.0.0:
   version "5.1.0"
   resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
@@ -6998,6 +7005,13 @@ methods@~1.1.2:
   resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
   integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=

+mic-recorder-to-mp3@^2.2.2:
+  version "2.2.2"
+  resolved "https://registry.yarnpkg.com/mic-recorder-to-mp3/-/mic-recorder-to-mp3-2.2.2.tgz#32e767d1196fb81d10e279f31c304350c9501d01"
+  integrity sha512-xDkOaHbojW3bdKOGn9CI5dT+Mc0RrfczsX/Y1zGJp3FUB4zei5ZKFnNm7Nguc9v910wkd7T3csnCTq5EtCF3Zw==
+  dependencies:
+    lamejs "^1.2.0"
+
 micromatch@^2.3.11:
   version "2.3.11"
   resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-2.3.11.tgz#86677c97d1720b363431d04d0d15293bd38c1565"
@@ -11705,6 +11719,11 @@ url@^0.11.0, url@~0.11.0:
     punycode "1.3.2"
     querystring "0.2.0"

+use-strict@1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/use-strict/-/use-strict-1.0.1.tgz#0bb80d94f49a4a05192b84a8c7d34e95f1a7e3a0"
+  integrity sha1-C7gNlPSaSgUZK4Sox9NOlfGn46A=
+
 use@^3.1.0:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f"
