From 19a557153617f6187370e16fb332fcc6cd1b4cb4 Mon Sep 17 00:00:00 2001 From: Vincent Date: Thu, 12 Mar 2020 14:11:15 +1100 Subject: [PATCH] Protobuf init for attachments --- js/models/conversations.js | 1 + package.json | 2 + preload.js | 5 +- stylesheets/_session_conversation.scss | 9 +- .../conversation/SessionCompositionBox.tsx | 118 +++++++++-- .../conversation/SessionConversation.tsx | 11 +- .../session/conversation/SessionRecording.tsx | 189 +++++++++--------- yarn.lock | 14 ++ 8 files changed, 229 insertions(+), 120 deletions(-) diff --git a/js/models/conversations.js b/js/models/conversations.js index 50c1f757b..915520c0f 100644 --- a/js/models/conversations.js +++ b/js/models/conversations.js @@ -1523,6 +1523,7 @@ groupInvitation = null, otherOptions = {} ) { + this.clearTypingTimers(); const destination = this.id; diff --git a/package.json b/package.json index 6a6f99ec7..06c1271f6 100644 --- a/package.json +++ b/package.json @@ -63,6 +63,7 @@ "@journeyapps/sqlcipher": "https://github.com/scottnonnenberg-signal/node-sqlcipher.git#2e28733b61640556b0272a3bfc78b0357daf71e6", "@sindresorhus/is": "0.8.0", "@types/dompurify": "^2.0.0", + "@types/emoji-mart": "^2.11.3", "@types/moment": "^2.13.0", "@types/rc-slider": "^8.6.5", "@types/react-mic": "^12.4.1", @@ -84,6 +85,7 @@ "emoji-datasource": "4.0.0", "emoji-datasource-apple": "4.0.0", "emoji-js": "3.4.0", + "emoji-mart": "^2.11.2", "emoji-panel": "https://github.com/scottnonnenberg-signal/emoji-panel.git#v0.5.5", "filesize": "3.6.1", "firstline": "1.2.1", diff --git a/preload.js b/preload.js index 8a3a040fa..84ec174cd 100644 --- a/preload.js +++ b/preload.js @@ -70,7 +70,7 @@ window.CONSTANTS = { MAX_GROUP_NAME_LENGTH: 64, DEFAULT_PUBLIC_CHAT_URL, MAX_CONNECTION_DURATION: 5000, - MAX_MESSAGE_BODY_LENGTH: 64 * 1024, + MAX_MESSAGE_BODY_LENGTH: 2000, // Limited due to the proof-of-work requirement SMALL_GROUP_SIZE_LIMIT: 10, DEFAULT_MEDIA_FETCH_COUNT: 50, @@ -82,7 +82,10 @@ window.CONSTANTS = { MESSAGE_CONTAINER_BUFFER_OFFSET_PX: 30, MESSAGE_FETCH_INTERVAL: 1, // Maximum voice message duraiton of 5 minutes + // which equates to 1.97 MB MAX_VOICE_MESSAGE_DURATION: 300, + // Max attachment size: 10 MB + MAX_ATTACHMENT_FILESIZE: 10000000, }; window.versionInfo = { diff --git a/stylesheets/_session_conversation.scss b/stylesheets/_session_conversation.scss index 5d5a9e931..ba5d2497c 100644 --- a/stylesheets/_session_conversation.scss +++ b/stylesheets/_session_conversation.scss @@ -140,7 +140,7 @@ $composition-container-height: 60px; position: relative; overflow-y: auto; scrollbar-width: 4px; - padding: $session-margin-lg; + padding: $session-margin-sm $session-margin-lg; &__loading { position: absolute; @@ -243,6 +243,7 @@ $composition-container-height: 60px; justify-content: space-between; align-items: center; flex-grow: 1; + outline: none; $actions-element-size: 45px; @@ -320,6 +321,10 @@ $composition-container-height: 60px; font-weight: bold; font-size: 14px; + &.playback-timer { + margin-right: $session-margin-sm; + } + &-light{ height: $session-margin-sm; width: $session-margin-sm; @@ -330,4 +335,4 @@ $composition-container-height: 60px; animation: pulseLight 4s infinite; } } -} \ No newline at end of file +} diff --git a/ts/components/session/conversation/SessionCompositionBox.tsx b/ts/components/session/conversation/SessionCompositionBox.tsx index 15ebf4a83..68e312901 100644 --- a/ts/components/session/conversation/SessionCompositionBox.tsx +++ b/ts/components/session/conversation/SessionCompositionBox.tsx @@ -1,11 +1,17 @@ import 
React from 'react'; +import { Attachment } from '../../../types/Attachment'; +import * as MIME from '../../../types/MIME'; + import TextareaAutosize from 'react-autosize-textarea'; + import { SessionIconButton, SessionIconSize, SessionIconType } from '../icon'; import { SessionEmojiPanel } from './SessionEmojiPanel'; - import { SessionRecording } from './SessionRecording'; +import { SignalService } from '../../../../ts/protobuf'; + + interface Props { placeholder?: string; sendMessage: any; @@ -20,8 +26,8 @@ interface State { mediaSetting: boolean | null; showEmojiPanel: boolean; - attachments: Array; - voiceRecording?: File; + attachments: Array; + voiceRecording?: Blob; } export class SessionCompositionBox extends React.Component { @@ -48,14 +54,17 @@ export class SessionCompositionBox extends React.Component { this.renderRecordingView = this.renderRecordingView.bind(this); this.renderCompositionView = this.renderCompositionView.bind(this); - // Recording View render and unrender + // Recording view functions + this.sendVoiceMessage = this.sendVoiceMessage.bind(this); this.onLoadVoiceNoteView = this.onLoadVoiceNoteView.bind(this); this.onExitVoiceNoteView = this.onExitVoiceNoteView.bind(this); - this.onKeyDown = this.onKeyDown.bind(this); - this.onSendMessage = this.onSendMessage.bind(this); + // Attachments + this.onChoseAttachment = this.onChoseAttachment.bind(this); this.onChooseAttachment = this.onChooseAttachment.bind(this); + this.onKeyDown = this.onKeyDown.bind(this); + } public componentWillReceiveProps(){ @@ -90,6 +99,7 @@ export class SessionCompositionBox extends React.Component { private renderRecordingView() { return ( @@ -113,9 +123,9 @@ export class SessionCompositionBox extends React.Component { multiple={true} ref={this.fileInput} type='file' + onChange={this.onChoseAttachment} /> - { } private onChoseAttachment() { + // Build attachments list + const attachmentsFileList = this.fileInput.current?.files; + if (!attachmentsFileList) return; + const attachments: Array = []; + Array.from(attachmentsFileList).forEach(async (file: File) => { + + const fileBlob = new Blob([file]); + const fileBuffer = await new Response(fileBlob).arrayBuffer(); + + const attachment = { + fileName: file.name, + flags: undefined, + // FIXME VINCE: Set appropriate type + contentType: undefined, + size: file.size, + data: fileBuffer, + } + + // Push if size is nonzero + attachment.data.byteLength && attachments.push(attachment); + }); + + this.setState({attachments}); } private onKeyDown(event: any) { @@ -169,21 +202,74 @@ export class SessionCompositionBox extends React.Component { } } + private onDrop(){ + // On drop attachments! 
+ // this.textarea.current?.ondrop; + // Look into react-dropzone + } + private onSendMessage(){ - // FIXME VINCE: Get emoiji, attachments, etc - const messagePlaintext = this.textarea.current?.value; - const attachments = this.fileInput.current?.files; + // FIXME VINCE: Get emoiji, attachments, etc + const messagePlaintext = this.textarea.current?.value; + const {attachments, voiceRecording} = this.state; + const messageInput = this.textarea.current; - console.log(`[vince][msg] Message:`, messagePlaintext); - console.log(`[vince][msg] Attachments:`, attachments); - console.log(`[vince][msg] Voice message:`, this.state.voiceRecording); + if (!messageInput) return; - - if (false){ - this.props.sendMessage(); + console.log(`[vince][msg] Message:`, messagePlaintext); + console.log(`[vince][msg] fileAttachments:`, attachments); + console.log(`[vince][msg] Voice message:`, voiceRecording); + + + // Verify message length + + + // Handle emojis + + const messageSuccess = this.props.sendMessage( + messagePlaintext, + attachments, + MIME.IMAGE_JPEG, + undefined, + null, + {}, + ); + + if (messageSuccess) { + // Empty composition box + messageInput.value = ''; } } + private async sendVoiceMessage(audioBlob: Blob) { + if (!this.state.isRecordingView) return; + + const fileBuffer = await new Response(audioBlob).arrayBuffer(); + + const audioAttachment: Attachment = { + data: fileBuffer, + flags: SignalService.AttachmentPointer.Flags.VOICE_MESSAGE, + }; + + const messageSuccess = this.props.sendMessage( + '', + [audioAttachment], + undefined, + undefined, + null, + {}, + ); + + if (messageSuccess) { + alert('MESSAGE VOICE SUCCESS'); + } + + console.log(`[compositionbox] Sending voice message:`, audioBlob); + + + this.onExitVoiceNoteView(); + } + private onLoadVoiceNoteView(){ // Do stuff for component, then run callback to SessionConversation const {mediaSetting} = this.state; diff --git a/ts/components/session/conversation/SessionConversation.tsx b/ts/components/session/conversation/SessionConversation.tsx index 394804fb0..fca9f2a43 100644 --- a/ts/components/session/conversation/SessionConversation.tsx +++ b/ts/components/session/conversation/SessionConversation.tsx @@ -111,11 +111,13 @@ export class SessionConversation extends React.Component { const conversationModel = window.getConversationByKey(conversationKey); const isRss = conversation.isRss; + const sendMessageFn = conversationModel.sendMessage.bind(conversationModel); + return (
{this.renderHeader()} @@ -145,7 +147,7 @@ export class SessionConversation extends React.Component { { !isRss && ( @@ -632,6 +634,7 @@ export class SessionConversation extends React.Component { private onKeyDown(event: any) { const selectionMode = !!this.state.selectedMessages.length; + const recordingMode = this.state.isRecordingView; const messageContainer = document.getElementsByClassName('messages-container')[0]; const pageHeight = messageContainer.clientHeight; @@ -648,9 +651,7 @@ export class SessionConversation extends React.Component { switch(event.key){ case 'Escape': - if (selectionMode){ - this.resetSelection(); - } + if (selectionMode) this.resetSelection(); break; // Scrolling diff --git a/ts/components/session/conversation/SessionRecording.tsx b/ts/components/session/conversation/SessionRecording.tsx index 02912143f..201e18623 100644 --- a/ts/components/session/conversation/SessionRecording.tsx +++ b/ts/components/session/conversation/SessionRecording.tsx @@ -1,10 +1,12 @@ import React from 'react'; +import classNames from 'classnames'; import moment from 'moment'; import { SessionIconButton, SessionIconSize, SessionIconType } from '../icon'; import { SessionButton, SessionButtonType, SessionButtonColor } from '../SessionButton'; interface Props { + sendVoiceMessage: any; onLoadVoiceNoteView: any; onExitVoiceNoteView: any; } @@ -40,8 +42,6 @@ interface State { minBarHeight: number; } - volumeArray?: Array; - startTimestamp: number; nowTimestamp: number; @@ -83,8 +83,8 @@ export class SessionRecording extends React.Component { this.onKeyDown = this.onKeyDown.bind(this); this.updateCanvasDimensions = this.updateCanvasDimensions.bind(this); - const now = Number(moment().format('x')) / 1000; - const updateTimerInterval = setInterval(this.timerUpdate, 1000); + const now = this.getTimestamp(); + const updateTimerInterval = setInterval(this.timerUpdate, 500); this.state = { recordDuration: 0, @@ -96,7 +96,6 @@ export class SessionRecording extends React.Component { mediaBlob: undefined, audioElement: undefined, streamParams: undefined, - volumeArray: undefined, startTimestamp: now, nowTimestamp: now, @@ -145,7 +144,6 @@ export class SessionRecording extends React.Component { } } - render() { const { actionHover, @@ -154,22 +152,27 @@ export class SessionRecording extends React.Component { isRecording, startTimestamp, nowTimestamp, + audioElement, } = this.state; const actionStopRecording = actionHover && isRecording; const actionPlayAudio = !isRecording && !isPlaying; const actionPauseAudio = !isRecording && !isPaused && isPlaying; const actionDefault = !actionStopRecording && !actionPlayAudio && !actionPauseAudio; - - const elapsedTimeMs = 1000 * (nowTimestamp - startTimestamp); - const displayTimeString = moment.utc(elapsedTimeMs).format('m:ss'); + + const displayTimeMs = isRecording + ? (nowTimestamp - startTimestamp) * 1000 + : audioElement && audioElement?.currentTime * 1000 || 0; + + const displayTimeString = moment.utc(displayTimeMs).format('m:ss'); + const actionPauseFn = isPlaying ? this.pauseAudio : this.stopRecordingStream; - return (
{ onMouseEnter={this.handleHoverActions} onMouseLeave={this.handleUnhoverActions} > - {actionStopRecording && ( - - )} - {actionPauseAudio && ( - - )} - {actionPlayAudio && ( - - )} - - {actionDefault && ( - - )} + {actionStopRecording && ( + + )} + {actionPauseAudio && ( + + )} + {actionPlayAudio && ( + + )} + + {actionDefault && ( + + )}
{ {isRecording && }
-        { isRecording ? (
-          <div className="session-recording--timer">
-            { displayTimeString }
-            <div className="session-recording--timer-light">
-            </div>
-          </div>
-        ) : (
-          <canvas ref={this.playbackCanvas} />
-        )}
+        <div className={classNames('session-recording--timer', !isRecording && 'playback-timer')}>
+          { displayTimeString }
+          { isRecording && (
+            <div className="session-recording--timer-light">
+            </div>
+          )}
+        </div>
+
+        { !isRecording && (
+          <canvas ref={this.playbackCanvas} />
+        )}
{ isRecording ? ( @@ -269,16 +272,16 @@ export class SessionRecording extends React.Component { } private timerUpdate(){ - const { nowTimestamp, startTimestamp, isRecording } = this.state; + const { nowTimestamp, startTimestamp } = this.state; const elapsedTime = (nowTimestamp - startTimestamp); - if (!isRecording || elapsedTime >= window.CONSTANTS.MAX_VOICE_MESSAGE_DURATION){ - clearInterval(this.state.updateTimerInterval); + // Prevent voice messages exceeding max length. + if (elapsedTime >= window.CONSTANTS.MAX_VOICE_MESSAGE_DURATION){ this.stopRecordingStream(); } this.setState({ - nowTimestamp: Number(moment().format('x')) / 1000 + nowTimestamp: this.getTimestamp() }); } @@ -346,10 +349,6 @@ export class SessionRecording extends React.Component { let audioDuration = this.state.recordDuration if (audioElement.duration !== Infinity) audioDuration = audioElement.duration; const progress = width * (audioElement.currentTime / audioDuration); - - console.log(`[details] Current Time:`, audioElement.currentTime); - console.log(`[details] Record Duration:`, audioDuration); - console.log(`[details] Audio element duration`, audioElement.duration); const canvasContext = canvas.getContext(`2d`); if (!canvasContext) return; @@ -395,12 +394,6 @@ export class SessionRecording extends React.Component { }); } - private initSendVoiceRecording(){ - // Is the audio file < 10mb? That's the attachment filesize limit - - return; - } - private onDeleteVoiceMessage() { this.pauseAudio(); this.stopRecordingStream(); @@ -408,7 +401,20 @@ export class SessionRecording extends React.Component { } private onSendVoiceMessage() { - console.log(`[vince][mic] Sending voice message`); + console.log(`[vince][mic] Sending voice message to composition box1`); + + const audioBlob = this.state.mediaBlob.data; + if (!audioBlob) return; + + // Is the audio file > attachment filesize limit + if (audioBlob.size > window.CONSTANTS.MAX_ATTACHMENT_FILESIZE) { + console.log(`[send] Voice message too large: ${audioBlob.size / 1000000} MB`); + return; + } + + this.props.sendVoiceMessage(audioBlob); + + return; } private async initiateRecordingStream() { @@ -416,8 +422,7 @@ export class SessionRecording extends React.Component { } private stopRecordingStream() { - const { streamParams, updateTimerInterval} = this.state; - updateTimerInterval && clearInterval(updateTimerInterval); + const { streamParams} = this.state; // Exit if parameters aren't yet set if (!streamParams){ @@ -429,12 +434,7 @@ export class SessionRecording extends React.Component { streamParams.input.disconnect(); streamParams.processor.disconnect(); streamParams.stream.getTracks().forEach((track: any) => track.stop); - - console.log(`[vince][stream] Stream: `, streamParams.stream); - console.log(`[vince][stream] Media: `, streamParams.media); - console.log(`[vince][stream] Input: `, streamParams.input); - console.log(`[vince][stream] Processor: `, streamParams.processor); - + // Stop recording this.stopRecording(); } @@ -585,8 +585,6 @@ export class SessionRecording extends React.Component { const numBars = width / (barPadding + barWidth); - console.log(`[] Starting playback view`); - // Scan through audio file getting average volume per bar // to display amplitude over time as a static image const blob = this.state.mediaBlob.data; @@ -595,7 +593,9 @@ export class SessionRecording extends React.Component { const audioContext = new window.AudioContext(); audioContext.decodeAudioData(arrayBuffer, (buffer: AudioBuffer) => { - this.setState({recordDuration: 
buffer.duration}); + this.setState({ + recordDuration: buffer.duration + }); // Get audio amplitude with PCM Data in Float32 // Grab single channel only to save compuation @@ -620,21 +620,14 @@ export class SessionRecording extends React.Component { // CANVAS CONTEXT const drawPlaybackCanvas = () => { - console.log(`[canvas] Drawing`); - + const canvas = this.playbackCanvas.current; - if (!canvas) { - console.log(`[canvas] Couldnt get playback canvas`); - return; - } + if (!canvas) return; canvas.height = height; canvas.width = width; const canvasContext = canvas.getContext(`2d`); - if (!canvasContext){ - console.log(`[canvas] Couldnt get cointext canvas`); - return; - } + if (!canvasContext) return; for (let i = 0; i < barSizeArray.length; i++){ const barHeight = Math.ceil(barSizeArray[i]); @@ -675,6 +668,10 @@ export class SessionRecording extends React.Component { ctx.fill(); } + private getTimestamp(){ + return Number(moment().format('x')) / 1000; + } + private updateCanvasDimensions(){ const canvas = this.visualisationCanvas.current || this.playbackCanvas.current; const width = canvas?.clientWidth || 0; diff --git a/yarn.lock b/yarn.lock index 65c437137..84a6e0a8e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -191,6 +191,13 @@ dependencies: "@types/trusted-types" "*" +"@types/emoji-mart@^2.11.3": + version "2.11.3" + resolved "https://registry.yarnpkg.com/@types/emoji-mart/-/emoji-mart-2.11.3.tgz#9949f6a8a231aea47aac1b2d4212597b41140b07" + integrity sha512-pRlU6+CFIB+9+FwjGGCVtDQq78u7N0iUijrO0Qh1j9RJ6T23DSNNfe0X6kf81N4ubVhF9jVckCI1M3kHpkwjqA== + dependencies: + "@types/react" "*" + "@types/events@*": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7" @@ -3162,6 +3169,13 @@ emoji-js@3.4.0: dependencies: emoji-datasource "4.0.0" +emoji-mart@^2.11.2: + version "2.11.2" + resolved "https://registry.yarnpkg.com/emoji-mart/-/emoji-mart-2.11.2.tgz#ed331867f7f55bb33c8421c9a493090fa4a378c7" + integrity sha512-IdHZR5hc3mipTY/r0ergtqBgQ96XxmRdQDSg7fsL+GiJQQ4akMws6+cjLSyIhGQxtvNuPVNaEQiAlU00NsyZUg== + dependencies: + prop-types "^15.6.0" + "emoji-panel@https://github.com/scottnonnenberg-signal/emoji-panel.git#v0.5.5": version "0.5.5" resolved "https://github.com/scottnonnenberg-signal/emoji-panel.git#81e236e03458a44d4a174ab5f367cb4b9b1b2f97"
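
Note, not part of the patch: the composition box above still leaves contentType as a FIXME and wraps the voice note by hand. A minimal sketch of how both could be factored into helpers, assuming the attachment shape used in SessionCompositionBox and the SignalService protobuf it already imports; the DraftAttachment name, helper names, and the 'application/octet-stream' / 'audio/webm' fallbacks are illustrative assumptions, not part of this change.

import { SignalService } from '../../../../ts/protobuf';

interface DraftAttachment {
  fileName?: string;
  contentType?: string;
  size: number;
  data: ArrayBuffer;
  flags?: number;
}

// Build a plain attachment from a File chosen via the file input.
async function fileToAttachment(file: File): Promise<DraftAttachment> {
  const data = await new Response(file).arrayBuffer();
  return {
    fileName: file.name,
    // File.type is '' when the browser cannot detect it; fall back to a generic type
    contentType: file.type || 'application/octet-stream',
    size: file.size,
    data,
  };
}

// Wrap a recorded Blob as a voice message, mirroring sendVoiceMessage above.
async function blobToVoiceAttachment(audioBlob: Blob): Promise<DraftAttachment> {
  const data = await new Response(audioBlob).arrayBuffer();
  return {
    contentType: audioBlob.type || 'audio/webm', // assumption: MediaRecorder default
    size: audioBlob.size,
    data,
    flags: SignalService.AttachmentPointer.Flags.VOICE_MESSAGE,
  };
}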