From 74142fab284dd93b281be9f1d98c5875ee06a8f7 Mon Sep 17 00:00:00 2001 From: Vincent Date: Tue, 3 Mar 2020 15:07:48 +1100 Subject: [PATCH] Waveform dividing across fullwidth --- _locales/en/messages.json | 10 +- js/models/conversations.js | 1 - stylesheets/_session_conversation.scss | 16 +- .../session/LeftPaneMessageSection.tsx | 2 +- .../conversation/SessionCompositionBox.tsx | 60 +++-- .../conversation/SessionConversation.tsx | 4 +- .../session/conversation/SessionRecording.tsx | 229 +++++++++++++++--- ts/global.d.ts | 10 +- 8 files changed, 272 insertions(+), 60 deletions(-) diff --git a/_locales/en/messages.json b/_locales/en/messages.json index 2045d1f5c..a440679c1 100644 --- a/_locales/en/messages.json +++ b/_locales/en/messages.json @@ -898,9 +898,15 @@ } } }, - "audioPermissionNeeded": { + "audioPermissionNeededTitle": { "message": - "To send audio messages, allow Session to access your microphone.", + "Sending audio messages requires microphone access", + "description": + "Shown if the user attempts to send an audio message without audio permssions turned on" + }, + "audioPermissionNeededDescription": { + "message": + "Give Session microphone permissions in your settings", "description": "Shown if the user attempts to send an audio message without audio permssions turned on" }, diff --git a/js/models/conversations.js b/js/models/conversations.js index d3cee3c03..50c1f757b 100644 --- a/js/models/conversations.js +++ b/js/models/conversations.js @@ -2273,7 +2273,6 @@ }, async markRead(newestUnreadDate, providedOptions) { - console.log(`[vince][unread] Marking messages as read!!`); const options = providedOptions || {}; diff --git a/stylesheets/_session_conversation.scss b/stylesheets/_session_conversation.scss index b24a59e84..fac047ea4 100644 --- a/stylesheets/_session_conversation.scss +++ b/stylesheets/_session_conversation.scss @@ -257,10 +257,13 @@ $composition-container-height: 60px; } &--visualisation { - margin-top: -900px; z-index: 1000; - height: 400px; width: 100%; + padding: 0px $session-margin-lg; + display: flex; + align-items: center; + justify-content: center; + max-width: 850px; } &--delete { @@ -281,4 +284,13 @@ $composition-container-height: 60px; } } +} + +.freq-band-item{ + width: 5px; + margin: 0px 3px; + border-radius: 15px; + display: inline-block; + background-color: #AFAFAF; + transition: height 0.05s; } \ No newline at end of file diff --git a/ts/components/session/LeftPaneMessageSection.tsx b/ts/components/session/LeftPaneMessageSection.tsx index 8fea380f9..c68c604bd 100644 --- a/ts/components/session/LeftPaneMessageSection.tsx +++ b/ts/components/session/LeftPaneMessageSection.tsx @@ -209,7 +209,7 @@ export class LeftPaneMessageSection extends React.Component { ); } - public renderConversations() {\ + public renderConversations() { return (
{this.state.shouldRenderMessageOnboarding ? ( diff --git a/ts/components/session/conversation/SessionCompositionBox.tsx b/ts/components/session/conversation/SessionCompositionBox.tsx index a1d12f2bc..45d2faae8 100644 --- a/ts/components/session/conversation/SessionCompositionBox.tsx +++ b/ts/components/session/conversation/SessionCompositionBox.tsx @@ -18,6 +18,8 @@ interface State { isRecording: boolean; mediaSetting: boolean | null; showEmojiPanel: boolean; + attachments: Array; + voiceRecording?: File; } export class SessionCompositionBox extends React.Component { @@ -29,6 +31,8 @@ export class SessionCompositionBox extends React.Component { this.state = { message: '', + attachments: [], + voiceRecording: undefined, isRecording: false, mediaSetting: null, showEmojiPanel: false, @@ -43,22 +47,22 @@ export class SessionCompositionBox extends React.Component { this.renderCompositionView = this.renderCompositionView.bind(this); this.onKeyDown = this.onKeyDown.bind(this); - this.onStartRecording = this.onStartRecording.bind(this); - this.onStopRecording = this.onStopRecording.bind(this); + this.onStartedRecording = this.onStartedRecording.bind(this); + this.onStoppedRecording = this.onStoppedRecording.bind(this); this.onSendMessage = this.onSendMessage.bind(this); this.onChooseAttachment = this.onChooseAttachment.bind(this); } - public async componentWillMount() { - const mediaSetting = await window.getMediaPermissions(); - this.setState({mediaSetting}); - } - public componentWillReceiveProps(){ console.log(`[vince][info] Here are my composition props: `, this.props); } + public async componentWillMount(){ + const mediaSetting = await window.getMediaPermissions(); + this.setState({mediaSetting}); + } + render() { const { isRecording } = this.state; @@ -82,7 +86,8 @@ export class SessionCompositionBox extends React.Component { private renderRecordingView() { return ( ); } @@ -106,13 +111,12 @@ export class SessionCompositionBox extends React.Component { type='file' /> - { this.state.mediaSetting && ( - - )} + +
{ console.log(`[vince][msg] Message:`, messagePlaintext); console.log(`[vince][msg] Attachments:`, attachments); + console.log(`[vince][msg] Voice message:`, this.state.voiceRecording); + if (false){ this.props.sendMessage(); } } - private onStartRecording(){ + private onStartedRecording(){ // Do stuff for component, then run callback to SessionConversation - this.setState({ - isRecording: true, - }); + const {mediaSetting} = this.state; - this.props.onStartedRecording(); + if (mediaSetting){ + this.setState({ isRecording: true }); + this.props.onStartedRecording(); + return; + } + + window.pushToast({ + id: window.generateID(), + title: window.i18n('audioPermissionNeededTitle'), + description: window.i18n('audioPermissionNeededDescription'), + type: 'info', + }); + } - private onStopRecording() { + private onStoppedRecording() { // Do stuff for component, then run callback to SessionConversation - + this.setState({ isRecording: false }); this.props.onStoppedRecording(); } diff --git a/ts/components/session/conversation/SessionConversation.tsx b/ts/components/session/conversation/SessionConversation.tsx index 0f9ccb499..5b6d20cd0 100644 --- a/ts/components/session/conversation/SessionConversation.tsx +++ b/ts/components/session/conversation/SessionConversation.tsx @@ -21,6 +21,7 @@ interface State { selectedMessages: Array; isScrolledToBottom: boolean; doneInitialScroll: boolean; + displayScrollToBottomButton: boolean; messageFetchTimestamp: number; isRecording: boolean; } @@ -44,6 +45,7 @@ export class SessionConversation extends React.Component { selectedMessages: [], isScrolledToBottom: !unreadCount, doneInitialScroll: false, + displayScrollToBottomButton: false, messageFetchTimestamp: 0, isRecording: false, }; @@ -622,7 +624,7 @@ export class SessionConversation extends React.Component { this.setState({ isRecording: false, }); - + console.log(`[vince] Stopped recording entirely`); } diff --git a/ts/components/session/conversation/SessionRecording.tsx b/ts/components/session/conversation/SessionRecording.tsx index af69847e1..76d36d425 100644 --- a/ts/components/session/conversation/SessionRecording.tsx +++ b/ts/components/session/conversation/SessionRecording.tsx @@ -1,12 +1,13 @@ import React from 'react'; -import {ReactMic} from 'react-mic'; import { SessionIconButton, SessionIconSize, SessionIconType } from '../icon'; import { SessionButton, SessionButtonType, SessionButtonColor } from '../SessionButton'; + interface Props { onStoppedRecording: any; + onStartedRecording: any; } interface State { @@ -14,9 +15,13 @@ interface State { isRecording: boolean; isPaused: boolean; actionHover: boolean; + mediaSetting?: boolean; + volumeArray?: Array; } export class SessionRecording extends React.Component { + private visualisationRef: React.RefObject; + private visualisationCanvas: React.RefObject; constructor(props: any) { super(props); @@ -26,24 +31,34 @@ export class SessionRecording extends React.Component { isRecording: true, isPaused: false, actionHover: false, + mediaSetting: undefined, + volumeArray: undefined, }; this.handleHoverActions = this.handleHoverActions.bind(this); this.handleUnhoverActions = this.handleUnhoverActions.bind(this); - this.onPlayRecording = this.onPlayRecording.bind(this); - this.onStopRecording = this.onStopRecording.bind(this); + this.playRecording = this.playRecording.bind(this); + this.stopRecording = this.stopRecording.bind(this); this.onSendVoiceMessage = this.onSendVoiceMessage.bind(this); this.onDeleteVoiceMessage = this.onDeleteVoiceMessage.bind(this); 
+ + this.onStream = this.onStream.bind(this); + + this.visualisationRef = React.createRef(); + this.visualisationCanvas = React.createRef(); } - public componentWillReceiveProps(){ - console.log(`[vince][mic] Here are my composition props: `, this.props); + public async componentWillMount(){ + // This turns on the microphone on the system. Later we need to turn it off. + + this.initiateStream(); - console.log(`[vince][mic] Permissions: `, navigator.getUserMedia({ audio: true }, () => null, error => alert(error))); } + + render() { const actionPause = (this.state.actionHover && this.state.isRecording); const actionPlay = (!this.state.isRecording || this.state.isPaused); @@ -62,14 +77,14 @@ export class SessionRecording extends React.Component { iconSize={SessionIconSize.Medium} // FIXME VINCE: Globalise constants for JS Session Colors iconColor={'#FF4538'} - onClick={this.onStopRecording} + onClick={this.stopRecording} /> )} {actionPlay && ( )} @@ -81,15 +96,13 @@ export class SessionRecording extends React.Component { )}
- null} - onData= {(data: any) => console.log(`[vince][mic] Data:`, data)} - strokeColor={'#00F480'} - backgroundColor={'blue'} - /> - +
+ +
+
{
- +
); } + + public blobToFile (data: any, fileName:string) { + const file = new File([data.blob], fileName); + console.log(`[vince][mic] File: `, file); + return file; + } private handleHoverActions() { if ((this.state.isRecording) && !this.state.actionHover) { @@ -119,7 +138,6 @@ export class SessionRecording extends React.Component { actionHover: true, }); } - } private handleUnhoverActions() { @@ -130,7 +148,7 @@ export class SessionRecording extends React.Component { } } - private onStopRecording() { + private stopRecording() { console.log(`[vince][mic] Stopped recording`); this.setState({ @@ -139,7 +157,7 @@ export class SessionRecording extends React.Component { }); } - private onPlayRecording() { + private playRecording() { console.log(`[vince][mic] Playing recording`); this.setState({ @@ -148,12 +166,167 @@ export class SessionRecording extends React.Component { }); } + private initSendVoiceRecording(){ + return; + } + private onDeleteVoiceMessage() { - this.onStopRecording(); - this.props.onStoppedRecording(); + //this.stopRecording(); + this.setState({ + isRecording: false, + isPaused: true, + }, () => this.props.onStoppedRecording()); } private onSendVoiceMessage() { console.log(`[vince][mic] Sending voice message`); } + + private async initiateStream() { + navigator.getUserMedia({audio:true}, this.onStream, this.onStreamError); + + //const mediaStreamSource = audioContext.createMediaStreamSource(stream); + //const meter = getMeter(audioContext); + //mediaStreamSource.connect(meter); + } + + private onStream(stream: any) { + + // AUDIO CONTEXT + const audioContext = new window.AudioContext(); + const input = audioContext.createMediaStreamSource(stream); + + const bufferSize = 8192; + const analyser = audioContext.createAnalyser(); + analyser.smoothingTimeConstant = 0.3; + analyser.fftSize = 256; + + const processor = audioContext.createScriptProcessor(bufferSize, 1, 1); + + processor.onaudioprocess = () => { + // Array of volumes by frequency (not in Hz, arbitrary unit) + const freqTypedArray = new Uint8Array(analyser.frequencyBinCount); + analyser.getByteFrequencyData(freqTypedArray); + + const freqArray = Array.from(freqTypedArray); + const VISUALISATION_WIDTH = this.visualisationRef.current?.clientWidth; + + const maxVisualisationHeight = 30; + const minVisualisationHeight = 3; + + + // CANVAS CONTEXT + const drawCanvas = () => { + const canvas = this.visualisationCanvas.current; + const CANVAS_HEIGHT = 35; + const CANVAS_WIDTH = VISUALISATION_WIDTH || 600; + + const barPadding = 3; + const barWidth = 4; + + const numBars = Math.floor(CANVAS_WIDTH / (barPadding + barWidth)); + const maxSumVal = Math.max(...freqArray) * numBars; + const sumReset = Math.floor(freqArray.length / numBars); + + // This takes the while frequency spectrum and splits it into + // the number of bars required to take up the entire width + let sum = 0; + let barHeightArray = []; + for (let i = 0; i < freqArray.length; i++) { + sum += freqArray[i]; + const initialHeight = maxVisualisationHeight * (sum / (maxSumVal)); + const freqBarHeight = initialHeight > minVisualisationHeight + ? 
initialHeight + : minVisualisationHeight; + + if (i % sumReset === 0){ + barHeightArray.push(freqBarHeight); + sum = 0; + continue; + } + + + } + + console.log(`[vince][mic] freqArray:`, freqArray); + console.log(`[vince][mic] Num bars`, numBars); + console.log(`[vince][mic] Max sum`, maxSumVal); + console.log(`[vince][mic] Barheight:`, barHeightArray); + + // let barHeightArray = freqArray.map(n => { + // const maxVal = Math.max(...freqArray); + // const initialHeight = maxVisualisationHeight * (n / maxVal); + // const freqBarHeight = initialHeight > minVisualisationHeight + // ? initialHeight + // : minVisualisationHeight; + + // return freqBarHeight; + // }); + + // // Create initial fake bars to improve appearance. + // // Gradually increasing wave rather than a wall at the beginning + // const frontLoadLen = Math.ceil(volumeArray.length / 10); + // const frontLoad = volumeArray.slice(0, frontLoadLen - 1).reverse().map(n => n * 0.80); + // volumeArray = [...frontLoad, ...volumeArray].slice(0, volumeArray.length - frontLoadLen - 1); + + + canvas && (canvas.height = CANVAS_HEIGHT); + canvas && (canvas.width = CANVAS_WIDTH); + const canvasContext = canvas && (canvas.getContext(`2d`)); + + for (var i = 0; i < barHeightArray.length; i++) { + const barHeight = Math.ceil(barHeightArray[i]); + const offset_x = Math.ceil(i * (barWidth + barPadding)); + const offset_y = Math.ceil((CANVAS_HEIGHT / 2 ) - (barHeight / 2 )); + const radius = 15; + + // FIXME VINCE - Globalise JS references to colors + canvasContext && (canvasContext.fillStyle = '#AFAFAF'); + canvasContext && this.drawRoundedRect( + canvasContext, + offset_x, + offset_y, + barWidth, + barHeight, + radius, + ); + } + } + + requestAnimationFrame(drawCanvas); + + } + + + // Get volume for visualisation + input.connect(analyser); + processor.connect(audioContext.destination); + + console.log(`[vince][mic] Freq:`, analyser.frequencyBinCount); + + //Start recording the stream + const media = new window.MediaRecorder(stream); + + } + + private onStreamError(error: any) { + return error; + } + + private drawRoundedRect (ctx: CanvasRenderingContext2D, x: number, y: number, w: number, h: number, r: number) { + if (w < 2 * r) r = w / 2; + if (h < 2 * r) r = h / 2; + ctx.beginPath(); + ctx.moveTo(x+r, y); + ctx.arcTo(x+w, y, x+w, y+h, r); + ctx.arcTo(x+w, y+h, x, y+h, r); + ctx.arcTo(x, y+h, x, y, r); + ctx.arcTo(x, y, x+w, y, r); + ctx.closePath(); + ctx.fill(); + } + + + } + diff --git a/ts/global.d.ts b/ts/global.d.ts index d9d1a117e..12961bc4f 100644 --- a/ts/global.d.ts +++ b/ts/global.d.ts @@ -1,4 +1,5 @@ interface Window { + // Low level CONSTANTS: any; versionInfo: any; @@ -10,6 +11,11 @@ interface Window { deleteAllData: any; clearLocalData: any; + // Microphone + MediaRecorder: any; + AudioContext: any; + + // Gets getAccountManager: any; getMediaPermissions: any; getConversations: any; @@ -35,9 +41,6 @@ interface Window { Whisper: any; ConversationController: any; - // Following function needs to be written in background.js - // getMemberList: any; - onLogin: any; setPassword: any; textsecure: any; @@ -57,6 +60,7 @@ interface Window { deleteAccount: any; + // Toggles toggleTheme: any; toggleMenuBar: any; toggleSpellCheck: any;
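
The waveform visualisation added in SessionRecording.tsx follows a common Web Audio pattern: route the microphone MediaStream into an AnalyserNode, read the byte frequency spectrum on each animation frame, collapse the bins into however many bars fit the container width, and clamp every bar to a minimum height so silence still draws a flat row of dots. The sketch below shows that pipeline in isolation; it averages the bins per bar rather than reproducing the running-sum bucketing in the patch's onaudioprocess handler, it uses the promise-based navigator.mediaDevices.getUserMedia instead of the callback-style navigator.getUserMedia the patch calls, and the names startVisualisation and onBars are illustrative assumptions, not part of the commit.

// Minimal sketch of the analyser-to-bars technique, under the assumptions above.
async function startVisualisation(
  onBars: (heights: Array<number>) => void,
  containerWidth: number
): Promise<void> {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  const audioContext = new AudioContext();
  const source = audioContext.createMediaStreamSource(stream);

  const analyser = audioContext.createAnalyser();
  analyser.smoothingTimeConstant = 0.3;
  analyser.fftSize = 256;
  source.connect(analyser);

  const maxBarHeight = 30;
  const minBarHeight = 3;
  const barWidth = 4;
  const barPadding = 3;

  const draw = () => {
    // Volume per frequency bin, 0-255 (arbitrary unit, not Hz).
    const freq = new Uint8Array(analyser.frequencyBinCount);
    analyser.getByteFrequencyData(freq);

    // One bar per (barWidth + barPadding) pixels of available width.
    const numBars = Math.max(1, Math.floor(containerWidth / (barWidth + barPadding)));
    const binsPerBar = Math.max(1, Math.floor(freq.length / numBars));

    const heights: Array<number> = [];
    for (let bar = 0; bar < numBars; bar++) {
      // Average the bins belonging to this bar, scale to the max height,
      // and never let the bar collapse below the minimum height.
      const bins = freq.slice(bar * binsPerBar, (bar + 1) * binsPerBar);
      const avg = bins.reduce((sum, v) => sum + v, 0) / bins.length;
      heights.push(Math.max(minBarHeight, (avg / 255) * maxBarHeight));
    }

    onBars(heights);
    // A real component would keep this id and cancel it on unmount.
    requestAnimationFrame(draw);
  };

  requestAnimationFrame(draw);
}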
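
initiateStream() ends by constructing a MediaRecorder, but at this stage of the patch nothing collects the recorder's output, and blobToFile() still expects a react-mic style object carrying a blob property. The sketch below shows one way that wiring could be completed, assuming the encoded chunks should be merged into a File for the composition box; recordVoiceMessage, the webm mime type fallback and the timeout-based stop are assumptions, and MediaRecorder is used through the standard DOM typings rather than the window declaration the patch adds to global.d.ts.

// Sketch only: capture a fixed-length voice message as a File.
function recordVoiceMessage(
  stream: MediaStream,
  durationMs: number
): Promise<File> {
  return new Promise((resolve, reject) => {
    const recorder = new MediaRecorder(stream);
    const chunks: Array<Blob> = [];

    // Each dataavailable event delivers a chunk of encoded audio.
    recorder.ondataavailable = (event: BlobEvent) => {
      if (event.data.size > 0) {
        chunks.push(event.data);
      }
    };

    // When recording stops, merge the chunks into a single File.
    recorder.onstop = () => {
      const blob = new Blob(chunks, { type: recorder.mimeType || 'audio/webm' });
      resolve(new File([blob], `voice-message-${Date.now()}.webm`));
    };

    recorder.onerror = event => reject(event);

    recorder.start();
    setTimeout(() => recorder.stop(), durationMs);
  });
}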
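
The stylesheet change also introduces a .freq-band-item class (fixed width, rounded corners, a transition on height) and SessionRecording gains a volumeArray state field, but the markup that would consume them is not visible in these hunks. A purely illustrative rendering, assuming volumeArray holds one pixel height per bar; the FreqBands component is hypothetical and not part of the commit.

import React from 'react';

// Hypothetical presentational component; not part of the commit.
const FreqBands = (props: { volumeArray: Array<number> }) => (
  <div className="session-recording--visualisation">
    {props.volumeArray.map((height, i) => (
      // The height transition declared in _session_conversation.scss
      // animates each bar as new frequency data arrives.
      <div
        className="freq-band-item"
        key={i}
        style={{ height: `${height}px` }}
      />
    ))}
  </div>
);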