Protobuf init for attachments

pull/1102/head
Vincent 5 years ago
parent d544f73122
commit 19a5571536

@ -1523,6 +1523,7 @@
groupInvitation = null,
otherOptions = {}
) {
this.clearTypingTimers();
const destination = this.id;

@ -63,6 +63,7 @@
"@journeyapps/sqlcipher": "https://github.com/scottnonnenberg-signal/node-sqlcipher.git#2e28733b61640556b0272a3bfc78b0357daf71e6",
"@sindresorhus/is": "0.8.0",
"@types/dompurify": "^2.0.0",
"@types/emoji-mart": "^2.11.3",
"@types/moment": "^2.13.0",
"@types/rc-slider": "^8.6.5",
"@types/react-mic": "^12.4.1",
@ -84,6 +85,7 @@
"emoji-datasource": "4.0.0",
"emoji-datasource-apple": "4.0.0",
"emoji-js": "3.4.0",
"emoji-mart": "^2.11.2",
"emoji-panel": "https://github.com/scottnonnenberg-signal/emoji-panel.git#v0.5.5",
"filesize": "3.6.1",
"firstline": "1.2.1",

@ -70,7 +70,7 @@ window.CONSTANTS = {
MAX_GROUP_NAME_LENGTH: 64,
DEFAULT_PUBLIC_CHAT_URL,
MAX_CONNECTION_DURATION: 5000,
MAX_MESSAGE_BODY_LENGTH: 64 * 1024,
MAX_MESSAGE_BODY_LENGTH: 2000,
// Limited due to the proof-of-work requirement
SMALL_GROUP_SIZE_LIMIT: 10,
DEFAULT_MEDIA_FETCH_COUNT: 50,
@ -82,7 +82,10 @@ window.CONSTANTS = {
MESSAGE_CONTAINER_BUFFER_OFFSET_PX: 30,
MESSAGE_FETCH_INTERVAL: 1,
// Maximum voice message duration of 5 minutes
// which equates to 1.97 MB
MAX_VOICE_MESSAGE_DURATION: 300,
// Max attachment size: 10 MB
MAX_ATTACHMENT_FILESIZE: 10000000,
};
window.versionInfo = {

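The two limits added here interact: a recording is cut off once it reaches MAX_VOICE_MESSAGE_DURATION seconds, and the resulting blob is still checked against MAX_ATTACHMENT_FILESIZE before it is sent. A minimal sketch of that check, with an assumed recorder bitrate used only to show where a figure like 1.97 MB can come from:

// Illustrative sketch -- the constants mirror window.CONSTANTS above;
// the helper name and the bitrate are assumptions, not part of this commit.
const MAX_ATTACHMENT_FILESIZE = 10000000; // 10 MB
const MAX_VOICE_MESSAGE_DURATION = 300; // seconds

function fitsAttachmentLimit(blob: Blob): boolean {
  // Reject anything over the 10 MB attachment cap.
  return blob.size <= MAX_ATTACHMENT_FILESIZE;
}

// Back-of-envelope: at roughly 52.5 kbps of recorded audio,
// 300 s * 52500 / 8 ≈ 1.97 MB, comfortably under the 10 MB cap.
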
@ -140,7 +140,7 @@ $composition-container-height: 60px;
position: relative;
overflow-y: auto;
scrollbar-width: 4px;
padding: $session-margin-lg;
padding: $session-margin-sm $session-margin-lg;
&__loading {
position: absolute;
@ -243,6 +243,7 @@ $composition-container-height: 60px;
justify-content: space-between;
align-items: center;
flex-grow: 1;
outline: none;
$actions-element-size: 45px;
@ -320,6 +321,10 @@ $composition-container-height: 60px;
font-weight: bold;
font-size: 14px;
&.playback-timer {
margin-right: $session-margin-sm;
}
&-light{
height: $session-margin-sm;
width: $session-margin-sm;
@ -330,4 +335,4 @@ $composition-container-height: 60px;
animation: pulseLight 4s infinite;
}
}
}
}

@ -1,11 +1,17 @@
import React from 'react';
import { Attachment } from '../../../types/Attachment';
import * as MIME from '../../../types/MIME';
import TextareaAutosize from 'react-autosize-textarea';
import { SessionIconButton, SessionIconSize, SessionIconType } from '../icon';
import { SessionEmojiPanel } from './SessionEmojiPanel';
import { SessionRecording } from './SessionRecording';
import { SignalService } from '../../../../ts/protobuf';
interface Props {
placeholder?: string;
sendMessage: any;
@ -20,8 +26,8 @@ interface State {
mediaSetting: boolean | null;
showEmojiPanel: boolean;
attachments: Array<File>;
voiceRecording?: File;
attachments: Array<Attachment>;
voiceRecording?: Blob;
}
export class SessionCompositionBox extends React.Component<Props, State> {
@ -48,14 +54,17 @@ export class SessionCompositionBox extends React.Component<Props, State> {
this.renderRecordingView = this.renderRecordingView.bind(this);
this.renderCompositionView = this.renderCompositionView.bind(this);
// Recording View render and unrender
// Recording view functions
this.sendVoiceMessage = this.sendVoiceMessage.bind(this);
this.onLoadVoiceNoteView = this.onLoadVoiceNoteView.bind(this);
this.onExitVoiceNoteView = this.onExitVoiceNoteView.bind(this);
this.onKeyDown = this.onKeyDown.bind(this);
this.onSendMessage = this.onSendMessage.bind(this);
// Attachments
this.onChoseAttachment = this.onChoseAttachment.bind(this);
this.onChooseAttachment = this.onChooseAttachment.bind(this);
this.onKeyDown = this.onKeyDown.bind(this);
}
public componentWillReceiveProps(){
@ -90,6 +99,7 @@ export class SessionCompositionBox extends React.Component<Props, State> {
private renderRecordingView() {
return (
<SessionRecording
sendVoiceMessage={this.sendVoiceMessage}
onLoadVoiceNoteView={this.onLoadVoiceNoteView}
onExitVoiceNoteView={this.onExitVoiceNoteView}
/>
@ -113,9 +123,9 @@ export class SessionCompositionBox extends React.Component<Props, State> {
multiple={true}
ref={this.fileInput}
type='file'
onChange={this.onChoseAttachment}
/>
<SessionIconButton
iconType={SessionIconType.Microphone}
iconSize={SessionIconSize.Huge}
@ -158,7 +168,30 @@ export class SessionCompositionBox extends React.Component<Props, State> {
}
private onChoseAttachment() {
// Build attachments list
const attachmentsFileList = this.fileInput.current?.files;
if (!attachmentsFileList) return;
const attachments: Array<Attachment> = [];
Array.from(attachmentsFileList).forEach(async (file: File) => {
const fileBlob = new Blob([file]);
const fileBuffer = await new Response(fileBlob).arrayBuffer();
const attachment = {
fileName: file.name,
flags: undefined,
// FIXME VINCE: Set appropriate type
contentType: undefined,
size: file.size,
data: fileBuffer,
}
// Push if size is nonzero
attachment.data.byteLength && attachments.push(attachment);
});
this.setState({attachments});
}
private onKeyDown(event: any) {
@ -169,21 +202,74 @@ export class SessionCompositionBox extends React.Component<Props, State> {
}
}
private onDrop(){
// On drop attachments!
// this.textarea.current?.ondrop;
// Look into react-dropzone
}
private onSendMessage(){
// FIXME VINCE: Get emoiji, attachments, etc
const messagePlaintext = this.textarea.current?.value;
const attachments = this.fileInput.current?.files;
// FIXME VINCE: Get emoji, attachments, etc
const messagePlaintext = this.textarea.current?.value;
const {attachments, voiceRecording} = this.state;
const messageInput = this.textarea.current;
console.log(`[vince][msg] Message:`, messagePlaintext);
console.log(`[vince][msg] Attachments:`, attachments);
console.log(`[vince][msg] Voice message:`, this.state.voiceRecording);
if (!messageInput) return;
if (false){
this.props.sendMessage();
console.log(`[vince][msg] Message:`, messagePlaintext);
console.log(`[vince][msg] fileAttachments:`, attachments);
console.log(`[vince][msg] Voice message:`, voiceRecording);
// Verify message length
// Handle emojis
const messageSuccess = this.props.sendMessage(
messagePlaintext,
attachments,
MIME.IMAGE_JPEG,
undefined,
null,
{},
);
if (messageSuccess) {
// Empty composition box
messageInput.value = '';
}
}
private async sendVoiceMessage(audioBlob: Blob) {
if (!this.state.isRecordingView) return;
const fileBuffer = await new Response(audioBlob).arrayBuffer();
const audioAttachment: Attachment = {
data: fileBuffer,
flags: SignalService.AttachmentPointer.Flags.VOICE_MESSAGE,
};
const messageSuccess = this.props.sendMessage(
'',
[audioAttachment],
undefined,
undefined,
null,
{},
);
if (messageSuccess) {
alert('MESSAGE VOICE SUCCESS');
}
console.log(`[compositionbox] Sending voice message:`, audioBlob);
this.onExitVoiceNoteView();
}
private onLoadVoiceNoteView(){
// Do stuff for component, then run callback to SessionConversation
const {mediaSetting} = this.state;

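onChoseAttachment above starts one async read per chosen file but calls setState synchronously, so the stored list can be missing files whose reads have not finished yet. A sketch of a variant that waits for every read first, assuming the same Attachment shape this diff already uses (fileName, flags, contentType, size, data):

import { Attachment } from '../../../types/Attachment';

// Sketch only, not part of the commit: build the full attachment list
// before any setState call, so the async ordering cannot drop files.
async function filesToAttachments(files: FileList): Promise<Array<Attachment>> {
  const attachments = await Promise.all(
    Array.from(files).map(async (file: File) => ({
      fileName: file.name,
      flags: undefined,
      contentType: undefined, // FIXME: derive from file.type
      size: file.size,
      data: await new Response(file).arrayBuffer(),
    }))
  );
  // Keep only non-empty files, mirroring the byteLength check above.
  return attachments.filter(a => a.data.byteLength > 0);
}

The component could then call this.setState({ attachments: await filesToAttachments(fileList) }) knowing every file has been read.
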
@ -111,11 +111,13 @@ export class SessionConversation extends React.Component<any, State> {
const conversationModel = window.getConversationByKey(conversationKey);
const isRss = conversation.isRss;
const sendMessageFn = conversationModel.sendMessage.bind(conversationModel);
return (
<div
className={classNames('conversation-item', selectionMode && 'selection-mode')}
tabIndex={0}
onKeyDown={this.onKeyDown}
>
<div className="conversation-header">
{this.renderHeader()}
@ -145,7 +147,7 @@ export class SessionConversation extends React.Component<any, State> {
{ !isRss && (
<SessionCompositionBox
sendMessage={conversationModel.sendMessage}
sendMessage={sendMessageFn}
onLoadVoiceNoteView={this.onLoadVoiceNoteView}
onExitVoiceNoteView={this.onExitVoiceNoteView}
/>
@ -632,6 +634,7 @@ export class SessionConversation extends React.Component<any, State> {
private onKeyDown(event: any) {
const selectionMode = !!this.state.selectedMessages.length;
const recordingMode = this.state.isRecordingView;
const messageContainer = document.getElementsByClassName('messages-container')[0];
const pageHeight = messageContainer.clientHeight;
@ -648,9 +651,7 @@ export class SessionConversation extends React.Component<any, State> {
switch(event.key){
case 'Escape':
if (selectionMode){
this.resetSelection();
}
if (selectionMode) this.resetSelection();
break;
// Scrolling

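The sendMessage change above passes a bound method instead of a bare reference; without the bind, `this` inside sendMessage is no longer the conversation model when the composition box calls the prop. A small self-contained illustration (class and names are made up, not from the codebase):

class ConversationModel {
  public id = 'abc';
  public sendMessage(body: string) {
    // Relies on `this`, so it breaks when called through a detached reference.
    console.log(`sending "${body}" to ${this.id}`);
  }
}

const model = new ConversationModel();
const bound = model.sendMessage.bind(model); // what the diff now passes as a prop
bound('hi'); // ok: `this` is still the model

const unbound = model.sendMessage;
// unbound('hi'); // TypeError in strict mode: `this` is undefined
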
@ -1,10 +1,12 @@
import React from 'react';
import classNames from 'classnames';
import moment from 'moment';
import { SessionIconButton, SessionIconSize, SessionIconType } from '../icon';
import { SessionButton, SessionButtonType, SessionButtonColor } from '../SessionButton';
interface Props {
sendVoiceMessage: any;
onLoadVoiceNoteView: any;
onExitVoiceNoteView: any;
}
@ -40,8 +42,6 @@ interface State {
minBarHeight: number;
}
volumeArray?: Array<number>;
startTimestamp: number;
nowTimestamp: number;
@ -83,8 +83,8 @@ export class SessionRecording extends React.Component<Props, State> {
this.onKeyDown = this.onKeyDown.bind(this);
this.updateCanvasDimensions = this.updateCanvasDimensions.bind(this);
const now = Number(moment().format('x')) / 1000;
const updateTimerInterval = setInterval(this.timerUpdate, 1000);
const now = this.getTimestamp();
const updateTimerInterval = setInterval(this.timerUpdate, 500);
this.state = {
recordDuration: 0,
@ -96,7 +96,6 @@ export class SessionRecording extends React.Component<Props, State> {
mediaBlob: undefined,
audioElement: undefined,
streamParams: undefined,
volumeArray: undefined,
startTimestamp: now,
nowTimestamp: now,
@ -145,7 +144,6 @@ export class SessionRecording extends React.Component<Props, State> {
}
}
render() {
const {
actionHover,
@ -154,22 +152,27 @@ export class SessionRecording extends React.Component<Props, State> {
isRecording,
startTimestamp,
nowTimestamp,
audioElement,
} = this.state;
const actionStopRecording = actionHover && isRecording;
const actionPlayAudio = !isRecording && !isPlaying;
const actionPauseAudio = !isRecording && !isPaused && isPlaying;
const actionDefault = !actionStopRecording && !actionPlayAudio && !actionPauseAudio;
const elapsedTimeMs = 1000 * (nowTimestamp - startTimestamp);
const displayTimeString = moment.utc(elapsedTimeMs).format('m:ss');
const displayTimeMs = isRecording
? (nowTimestamp - startTimestamp) * 1000
: audioElement && audioElement?.currentTime * 1000 || 0;
const displayTimeString = moment.utc(displayTimeMs).format('m:ss');
const actionPauseFn = isPlaying ? this.pauseAudio : this.stopRecordingStream;
return (
<div
className="session-recording"
tabIndex={0}
onKeyDown={this.onKeyDown}
>
<div
@ -177,38 +180,38 @@ export class SessionRecording extends React.Component<Props, State> {
onMouseEnter={this.handleHoverActions}
onMouseLeave={this.handleUnhoverActions}
>
{actionStopRecording && (
<SessionIconButton
iconType={SessionIconType.Pause}
iconSize={SessionIconSize.Medium}
// FIXME VINCE: Globalise constants for JS Session Colors
iconColor={'#FF4538'}
onClick={actionPauseFn}
/>
)}
{actionPauseAudio && (
<SessionIconButton
iconType={SessionIconType.Pause}
iconSize={SessionIconSize.Medium}
// FIXME VINCE: Globalise constants for JS Session Colors
iconColor={'#FFFFFF'}
onClick={actionPauseFn}
/>
)}
{actionPlayAudio && (
<SessionIconButton
iconType={SessionIconType.Play}
iconSize={SessionIconSize.Medium}
onClick={this.playAudio}
/>
)}
{actionDefault && (
<SessionIconButton
iconType={SessionIconType.Microphone}
iconSize={SessionIconSize.Huge}
/>
)}
</div>
<div
@ -219,25 +222,25 @@ export class SessionRecording extends React.Component<Props, State> {
{isRecording && <canvas ref={this.visualisationCanvas}></canvas>}
</div>
{ isRecording ? (
<div className="session-recording--timer">
{ displayTimeString }
<div className="session-recording--timer-light">
</div>
</div>
) : (
<div className="send-message-button">
<SessionIconButton
iconType={SessionIconType.Send}
iconSize={SessionIconSize.Large}
iconColor={'#FFFFFF'}
iconRotation={90}
onClick={this.onSendVoiceMessage}
/>
</div>
<div className={classNames('session-recording--timer', !isRecording && 'playback-timer')}>
{ displayTimeString }
{ isRecording && (
<div className="session-recording--timer-light"></div>
)}
</div>
{ !isRecording && (
<div className="send-message-button">
<SessionIconButton
iconType={SessionIconType.Send}
iconSize={SessionIconSize.Large}
iconColor={'#FFFFFF'}
iconRotation={90}
onClick={this.onSendVoiceMessage}
/>
</div>
)}
<div className="session-recording--status">
{ isRecording ? (
@ -269,16 +272,16 @@ export class SessionRecording extends React.Component<Props, State> {
}
private timerUpdate(){
const { nowTimestamp, startTimestamp, isRecording } = this.state;
const { nowTimestamp, startTimestamp } = this.state;
const elapsedTime = (nowTimestamp - startTimestamp);
if (!isRecording || elapsedTime >= window.CONSTANTS.MAX_VOICE_MESSAGE_DURATION){
clearInterval(this.state.updateTimerInterval);
// Prevent voice messages exceeding max length.
if (elapsedTime >= window.CONSTANTS.MAX_VOICE_MESSAGE_DURATION){
this.stopRecordingStream();
}
this.setState({
nowTimestamp: Number(moment().format('x')) / 1000
nowTimestamp: this.getTimestamp()
});
}
@ -346,10 +349,6 @@ export class SessionRecording extends React.Component<Props, State> {
let audioDuration = this.state.recordDuration
if (audioElement.duration !== Infinity) audioDuration = audioElement.duration;
const progress = width * (audioElement.currentTime / audioDuration);
console.log(`[details] Current Time:`, audioElement.currentTime);
console.log(`[details] Record Duration:`, audioDuration);
console.log(`[details] Audio element duration`, audioElement.duration);
const canvasContext = canvas.getContext(`2d`);
if (!canvasContext) return;
@ -395,12 +394,6 @@ export class SessionRecording extends React.Component<Props, State> {
});
}
private initSendVoiceRecording(){
// Is the audio file < 10mb? That's the attachment filesize limit
return;
}
private onDeleteVoiceMessage() {
this.pauseAudio();
this.stopRecordingStream();
@ -408,7 +401,20 @@ export class SessionRecording extends React.Component<Props, State> {
}
private onSendVoiceMessage() {
console.log(`[vince][mic] Sending voice message`);
console.log(`[vince][mic] Sending voice message to composition box`);
const audioBlob = this.state.mediaBlob.data;
if (!audioBlob) return;
// Is the audio file over the attachment filesize limit?
if (audioBlob.size > window.CONSTANTS.MAX_ATTACHMENT_FILESIZE) {
console.log(`[send] Voice message too large: ${audioBlob.size / 1000000} MB`);
return;
}
this.props.sendVoiceMessage(audioBlob);
return;
}
private async initiateRecordingStream() {
@ -416,8 +422,7 @@ export class SessionRecording extends React.Component<Props, State> {
}
private stopRecordingStream() {
const { streamParams, updateTimerInterval} = this.state;
updateTimerInterval && clearInterval(updateTimerInterval);
const { streamParams} = this.state;
// Exit if parameters aren't yet set
if (!streamParams){
@ -429,12 +434,7 @@ export class SessionRecording extends React.Component<Props, State> {
streamParams.input.disconnect();
streamParams.processor.disconnect();
streamParams.stream.getTracks().forEach((track: any) => track.stop());
console.log(`[vince][stream] Stream: `, streamParams.stream);
console.log(`[vince][stream] Media: `, streamParams.media);
console.log(`[vince][stream] Input: `, streamParams.input);
console.log(`[vince][stream] Processor: `, streamParams.processor);
// Stop recording
this.stopRecording();
}
@ -585,8 +585,6 @@ export class SessionRecording extends React.Component<Props, State> {
const numBars = width / (barPadding + barWidth);
console.log(`[] Starting playback view`);
// Scan through audio file getting average volume per bar
// to display amplitude over time as a static image
const blob = this.state.mediaBlob.data;
@ -595,7 +593,9 @@ export class SessionRecording extends React.Component<Props, State> {
const audioContext = new window.AudioContext();
audioContext.decodeAudioData(arrayBuffer, (buffer: AudioBuffer) => {
this.setState({recordDuration: buffer.duration});
this.setState({
recordDuration: buffer.duration
});
// Get audio amplitude with PCM Data in Float32
// Grab a single channel only to save computation
@ -620,21 +620,14 @@ export class SessionRecording extends React.Component<Props, State> {
// CANVAS CONTEXT
const drawPlaybackCanvas = () => {
console.log(`[canvas] Drawing`);
const canvas = this.playbackCanvas.current;
if (!canvas) {
console.log(`[canvas] Couldnt get playback canvas`);
return;
}
if (!canvas) return;
canvas.height = height;
canvas.width = width;
const canvasContext = canvas.getContext(`2d`);
if (!canvasContext){
console.log(`[canvas] Couldnt get cointext canvas`);
return;
}
if (!canvasContext) return;
for (let i = 0; i < barSizeArray.length; i++){
const barHeight = Math.ceil(barSizeArray[i]);
@ -675,6 +668,10 @@ export class SessionRecording extends React.Component<Props, State> {
ctx.fill();
}
private getTimestamp(){
return Number(moment().format('x')) / 1000;
}
private updateCanvasDimensions(){
const canvas = this.visualisationCanvas.current || this.playbackCanvas.current;
const width = canvas?.clientWidth || 0;

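The playback view comment above ("scan through audio file getting average volume per bar") describes reducing one channel of Float32 PCM samples to a fixed number of bar heights. A sketch of that reduction, with names that are illustrative rather than taken from the commit:

// Split one channel of PCM data into numBars buckets and take the mean
// absolute amplitude of each bucket; the result drives the bar heights.
function averageVolumePerBar(channelData: Float32Array, numBars: number): Array<number> {
  const samplesPerBar = Math.max(1, Math.floor(channelData.length / numBars));
  const bars: Array<number> = [];

  for (let bar = 0; bar * samplesPerBar < channelData.length && bar < numBars; bar++) {
    let sum = 0;
    for (let i = 0; i < samplesPerBar; i++) {
      sum += Math.abs(channelData[bar * samplesPerBar + i] || 0);
    }
    bars.push(sum / samplesPerBar);
  }
  return bars;
}

// channelData would come from decodeAudioData: buffer.getChannelData(0).
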
@ -191,6 +191,13 @@
dependencies:
"@types/trusted-types" "*"
"@types/emoji-mart@^2.11.3":
version "2.11.3"
resolved "https://registry.yarnpkg.com/@types/emoji-mart/-/emoji-mart-2.11.3.tgz#9949f6a8a231aea47aac1b2d4212597b41140b07"
integrity sha512-pRlU6+CFIB+9+FwjGGCVtDQq78u7N0iUijrO0Qh1j9RJ6T23DSNNfe0X6kf81N4ubVhF9jVckCI1M3kHpkwjqA==
dependencies:
"@types/react" "*"
"@types/events@*":
version "3.0.0"
resolved "https://registry.yarnpkg.com/@types/events/-/events-3.0.0.tgz#2862f3f58a9a7f7c3e78d79f130dd4d71c25c2a7"
@ -3162,6 +3169,13 @@ emoji-js@3.4.0:
dependencies:
emoji-datasource "4.0.0"
emoji-mart@^2.11.2:
version "2.11.2"
resolved "https://registry.yarnpkg.com/emoji-mart/-/emoji-mart-2.11.2.tgz#ed331867f7f55bb33c8421c9a493090fa4a378c7"
integrity sha512-IdHZR5hc3mipTY/r0ergtqBgQ96XxmRdQDSg7fsL+GiJQQ4akMws6+cjLSyIhGQxtvNuPVNaEQiAlU00NsyZUg==
dependencies:
prop-types "^15.6.0"
"emoji-panel@https://github.com/scottnonnenberg-signal/emoji-panel.git#v0.5.5":
version "0.5.5"
resolved "https://github.com/scottnonnenberg-signal/emoji-panel.git#81e236e03458a44d4a174ab5f367cb4b9b1b2f97"
