diff --git a/.changeset/improved_voice_message_recording_ui_it_should_now_feel_a_lot_more_integrated_.md b/.changeset/improved_voice_message_recording_ui_it_should_now_feel_a_lot_more_integrated_.md
new file mode 100644
index 000000000..eaeb76ec6
--- /dev/null
+++ b/.changeset/improved_voice_message_recording_ui_it_should_now_feel_a_lot_more_integrated_.md
@@ -0,0 +1,5 @@
+---
+default: patch
+---
+
+# Improved voice message recording UI, it should now feel a lot more integrated.
diff --git a/package.json b/package.json
index af125cf18..9739e961c 100644
--- a/package.json
+++ b/package.json
@@ -36,6 +36,7 @@
"@fontsource-variable/nunito": "5.2.7",
"@sentry/react": "^10.43.0",
"@fontsource/space-mono": "5.2.9",
+ "@phosphor-icons/react": "^2.1.10",
"@tanstack/react-query": "^5.90.21",
"@tanstack/react-query-devtools": "^5.91.3",
"@tanstack/react-virtual": "^3.13.19",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 24937c80e..be96be6a4 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -31,6 +31,9 @@ importers:
'@fontsource/space-mono':
specifier: 5.2.9
version: 5.2.9
+ '@phosphor-icons/react':
+ specifier: ^2.1.10
+ version: 2.1.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
'@sentry/react':
specifier: ^10.43.0
version: 10.43.0(react@18.3.1)
@@ -1640,6 +1643,13 @@ packages:
cpu: [x64]
os: [win32]
+ '@phosphor-icons/react@2.1.10':
+ resolution: {integrity: sha512-vt8Tvq8GLjheAZZYa+YG/pW7HDbov8El/MANW8pOAz4eGxrwhnbfrQZq0Cp4q8zBEu8NIhHdnr+r8thnfRSNYA==}
+ engines: {node: '>=10'}
+ peerDependencies:
+ react: '>= 16.8'
+ react-dom: '>= 16.8'
+
'@pkgr/core@0.2.9':
resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==}
engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
@@ -6978,6 +6988,11 @@ snapshots:
'@oxc-resolver/binding-win32-x64-msvc@11.19.1':
optional: true
+ '@phosphor-icons/react@2.1.10(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
+ dependencies:
+ react: 18.3.1
+ react-dom: 18.3.1(react@18.3.1)
+
'@pkgr/core@0.2.9': {}
'@polka/url@1.0.0-next.29': {}
diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml
index 392ed8052..b3fbcdfa3 100644
--- a/pnpm-workspace.yaml
+++ b/pnpm-workspace.yaml
@@ -1,4 +1,5 @@
allowBuilds:
+ '@sentry/cli': true
'@swc/core': true
esbuild: true
sharp: true
diff --git a/src/app/components/message/content/AudioContent.tsx b/src/app/components/message/content/AudioContent.tsx
index 26d74db88..fb0f2bef9 100644
--- a/src/app/components/message/content/AudioContent.tsx
+++ b/src/app/components/message/content/AudioContent.tsx
@@ -104,34 +104,41 @@ export function AudioContent({
max={duration || 1}
values={[currentTime]}
onChange={(values) => seek(values[0])}
- renderTrack={(params) => (
-
- {params.children}
-
{
+ const { key, ...restProps } = params.props as any;
+ return (
+
+ );
+ }}
+ renderThumb={(params) => {
+ const { key, style, ...restProps } = params.props as any;
+ return (
+
-
- )}
- renderThumb={(params) => (
-
- )}
+ );
+ }}
/>
),
leftControl: (
@@ -174,34 +181,41 @@ export function AudioContent({
max={1}
values={[volume]}
onChange={(values) => setVolume(values[0])}
- renderTrack={(params) => (
-
- {params.children}
-
{
+ const { key, ...restProps } = params.props as any;
+ return (
+
+ );
+ }}
+ renderThumb={(params) => {
+ const { key, style, ...restProps } = params.props as any;
+ return (
+
-
- )}
- renderThumb={(params) => (
-
- )}
+ );
+ }}
/>
>
),
diff --git a/src/app/components/upload-card/UploadCard.css.ts b/src/app/components/upload-card/UploadCard.css.ts
index ad3caf10e..d02cbe3f5 100644
--- a/src/app/components/upload-card/UploadCard.css.ts
+++ b/src/app/components/upload-card/UploadCard.css.ts
@@ -1,6 +1,6 @@
import { style } from '@vanilla-extract/css';
import { RecipeVariants, recipe } from '@vanilla-extract/recipes';
-import { RadiiVariant, color, config } from 'folds';
+import { DefaultReset, RadiiVariant, color, config, toRem } from 'folds';
export const UploadCard = recipe({
base: {
@@ -34,3 +34,65 @@ export const UploadCardError = style({
padding: `0 ${config.space.S100}`,
color: color.Critical.Main,
});
+
+export const AudioPreviewContainer = style([
+ DefaultReset,
+ {
+ backgroundColor: color.SurfaceVariant.Container,
+ border: `${config.borderWidth.B300} solid ${color.SurfaceVariant.ContainerLine}`,
+ borderRadius: config.radii.R400,
+ padding: config.space.S300,
+ width: '100%',
+ maxWidth: toRem(400),
+ boxSizing: 'border-box',
+ },
+]);
+
+export const AudioWaveformContainer = style([
+ DefaultReset,
+ {
+ minHeight: 44,
+ cursor: 'pointer',
+ userSelect: 'none',
+ overflow: 'hidden',
+ },
+]);
+
+export const AudioWaveformBar = style([
+ DefaultReset,
+ {
+ width: 2,
+ height: 3,
+ borderRadius: 1,
+ flexShrink: 0,
+ transition: 'background-color 40ms, opacity 40ms',
+ pointerEvents: 'none',
+ },
+]);
+
+export const AudioWaveformBarPlayed = style([
+ DefaultReset,
+ {
+ backgroundColor: color.Secondary.Main,
+ opacity: 1,
+ },
+]);
+
+export const AudioWaveformBarUnplayed = style([
+ DefaultReset,
+ {
+ backgroundColor: color.SurfaceVariant.OnContainer,
+ opacity: 0.5,
+ },
+]);
+
+export const AudioTimeDisplay = style([
+ DefaultReset,
+ {
+ fontVariantNumeric: 'tabular-nums',
+ color: color.SurfaceVariant.OnContainer,
+ minWidth: toRem(30),
+ textAlign: 'right',
+ flexShrink: 0,
+ },
+]);
diff --git a/src/app/components/upload-card/UploadCardRenderer.tsx b/src/app/components/upload-card/UploadCardRenderer.tsx
index e5e894398..931215b63 100644
--- a/src/app/components/upload-card/UploadCardRenderer.tsx
+++ b/src/app/components/upload-card/UploadCardRenderer.tsx
@@ -1,4 +1,4 @@
-import { ReactNode, useEffect, useMemo, useState } from 'react';
+import { ReactNode, useEffect, useMemo, useRef, useState } from 'react';
import {
Box,
Chip,
@@ -14,6 +14,7 @@ import {
toRem,
} from 'folds';
import { HTMLReactParserOptions } from 'html-react-parser';
+import { Play, Pause } from '@phosphor-icons/react';
import { useMediaAuthentication } from '$hooks/useMediaAuthentication';
import { Opts as LinkifyOpts } from 'linkifyjs';
import { getReactCustomHtmlParser, LINKIFY_OPTS } from '$plugins/react-custom-html-parser';
@@ -27,6 +28,7 @@ import { roomUploadAtomFamily, TUploadItem, TUploadMetadata } from '$state/room/
import { useObjectURL } from '$hooks/useObjectURL';
import { useMediaConfig } from '$hooks/useMediaConfig';
import { UploadCard, UploadCardError, UploadCardProgress } from './UploadCard';
+import * as css from './UploadCard.css';
import { DescriptionEditor } from './UploadDescriptionEditor';
type PreviewImageProps = {
@@ -71,6 +73,196 @@ function PreviewVideo({ fileItem }: Readonly) {
);
}
+const BAR_COUNT = 44;
+
+function formatAudioTime(s: number): string {
+ const m = Math.floor(s / 60);
+ const sec = Math.floor(s % 60);
+ return `${m}:${sec.toString().padStart(2, '0')}`;
+}
+
+type PreviewAudioProps = {
+ fileItem: TUploadItem;
+};
+function PreviewAudio({ fileItem }: PreviewAudioProps) {
+ const { originalFile, metadata } = fileItem;
+ const audioUrl = useObjectURL(originalFile);
+ const { waveform, audioDuration } = metadata;
+ const duration = audioDuration ?? 0;
+
+ const [isPlaying, setIsPlaying] = useState(false);
+ const [currentTime, setCurrentTime] = useState(0);
+ const audioRef = useRef(null);
+ const rafRef = useRef(null);
+
+ const bars = useMemo(() => {
+ if (!waveform || waveform.length === 0) {
+ return Array(BAR_COUNT).fill(0.3);
+ }
+ if (waveform.length <= BAR_COUNT) {
+ const step = (waveform.length - 1) / (BAR_COUNT - 1);
+ return Array.from({ length: BAR_COUNT }, (_, i) => {
+ const position = i * step;
+ const lower = Math.floor(position);
+ const upper = Math.min(Math.ceil(position), waveform.length - 1);
+ const fraction = position - lower;
+ if (lower === upper) {
+ return waveform[lower] ?? 0.3;
+ }
+ return (waveform[lower] ?? 0.3) * (1 - fraction) + (waveform[upper] ?? 0.3) * fraction;
+ });
+ }
+ const step = waveform.length / BAR_COUNT;
+ return Array.from({ length: BAR_COUNT }, (_, i) => {
+ const start = Math.floor(i * step);
+ const end = Math.floor((i + 1) * step);
+ const slice = waveform.slice(start, end);
+ return slice.length > 0 ? Math.max(...slice) : 0.3;
+ });
+ }, [waveform]);
+
+ const progress = duration > 0 ? Math.min(currentTime / duration, 1) : 0;
+
+ useEffect(() => {
+ if (!audioUrl) {
+ return undefined;
+ }
+ const audio = new Audio(audioUrl);
+ audioRef.current = audio;
+
+ audio.onended = () => {
+ setIsPlaying(false);
+ setCurrentTime(0);
+ if (rafRef.current !== null) {
+ cancelAnimationFrame(rafRef.current);
+ rafRef.current = null;
+ }
+ };
+
+ return () => {
+ audio.pause();
+ if (rafRef.current !== null) {
+ cancelAnimationFrame(rafRef.current);
+ }
+ };
+ }, [audioUrl]);
+
+ const startRaf = (audio: HTMLAudioElement) => {
+ const tick = () => {
+ setCurrentTime(audio.currentTime);
+ rafRef.current = requestAnimationFrame(tick);
+ };
+ rafRef.current = requestAnimationFrame(tick);
+ };
+
+ const stopRaf = () => {
+ if (rafRef.current !== null) {
+ cancelAnimationFrame(rafRef.current);
+ rafRef.current = null;
+ }
+ };
+
+ const handlePlayPause = () => {
+ const audio = audioRef.current;
+ if (!audio) return;
+
+ if (isPlaying) {
+ audio.pause();
+ setIsPlaying(false);
+ stopRaf();
+ } else {
+ audio.play().catch(() => {});
+ setIsPlaying(true);
+ startRaf(audio);
+ }
+ };
+
+ const handleScrubClick = (e: React.MouseEvent) => {
+ const audio = audioRef.current;
+ if (!audio || !duration) return;
+ const rect = e.currentTarget.getBoundingClientRect();
+ const ratio = Math.max(0, Math.min(1, (e.clientX - rect.left) / rect.width));
+ audio.currentTime = ratio * duration;
+ setCurrentTime(audio.currentTime);
+ };
+
+ const handleKeyDown = (e: React.KeyboardEvent) => {
+ const audio = audioRef.current;
+ if (!audio || !duration) return;
+
+ const SEEK_STEP = 5;
+ let newTime = currentTime;
+
+ if (e.key === 'ArrowLeft' || e.key === 'ArrowDown') {
+ e.preventDefault();
+ newTime = Math.max(0, currentTime - SEEK_STEP);
+ } else if (e.key === 'ArrowRight' || e.key === 'ArrowUp') {
+ e.preventDefault();
+ newTime = Math.min(duration, currentTime + SEEK_STEP);
+ } else if (e.key === 'Home') {
+ e.preventDefault();
+ newTime = 0;
+ } else if (e.key === 'End') {
+ e.preventDefault();
+ newTime = duration;
+ } else {
+ return;
+ }
+
+ audio.currentTime = newTime;
+ setCurrentTime(newTime);
+ };
+
+ return (
+
+
+ {isPlaying ? : }
+
+
+
+ {bars.map((level, i) => {
+ const barRatio = i / BAR_COUNT;
+ const played = progress > 0 && barRatio <= progress;
+ return (
+
+ );
+ })}
+
+
+
+ {formatAudioTime(isPlaying ? currentTime : duration)}
+
+
+ );
+}
+
type MediaPreviewProps = {
fileItem: TUploadItem;
onSpoiler: (marked: boolean) => void;
@@ -247,6 +439,7 @@ export function UploadCardRenderer({
)}
+ {fileItem.metadata.waveform && }
{upload.status === UploadStatus.Idle && !fileSizeExceeded && (
)}
diff --git a/src/app/features/room/AudioMessageRecorder.css.ts b/src/app/features/room/AudioMessageRecorder.css.ts
new file mode 100644
index 000000000..47b165841
--- /dev/null
+++ b/src/app/features/room/AudioMessageRecorder.css.ts
@@ -0,0 +1,120 @@
+import { keyframes, style } from '@vanilla-extract/css';
+import { DefaultReset, color, config, toRem } from 'folds';
+
+const RecDotPulse = keyframes({
+ '0%, 100%': { opacity: 1 },
+ '50%': { opacity: 0.25 },
+});
+
+const SlideOutLeft = keyframes({
+ '0%': { transform: 'translateX(0)', opacity: 1 },
+ '100%': { transform: 'translateX(-100%)', opacity: 0 },
+});
+
+const Shake = keyframes({
+ '0%, 100%': { transform: 'translateX(0)' },
+ '20%': { transform: 'translateX(-4px)' },
+ '40%': { transform: 'translateX(4px)' },
+ '60%': { transform: 'translateX(-4px)' },
+ '80%': { transform: 'translateX(4px)' },
+});
+
+export const Container = style([
+ DefaultReset,
+ {
+ flexGrow: 1,
+ minWidth: 0,
+ overflow: 'hidden',
+ touchAction: 'pan-y',
+ userSelect: 'none',
+ },
+]);
+
+export const ContainerCanceling = style({
+ animation: `${SlideOutLeft} 200ms ease-out forwards`,
+});
+
+export const ContainerShake = style({
+ animation: `${Shake} 300ms ease-out`,
+});
+
+export const RecDot = style([
+ DefaultReset,
+ {
+ width: 7,
+ height: 7,
+ borderRadius: '50%',
+ backgroundColor: color.Critical.Main,
+ flexShrink: 0,
+ animation: `${RecDotPulse} 1.4s ease-in-out infinite`,
+ },
+]);
+
+export const WaveformContainer = style([
+ DefaultReset,
+ {
+ height: 22,
+ overflow: 'hidden',
+ minWidth: 0,
+ },
+]);
+
+export const WaveformBar = style([
+ DefaultReset,
+ {
+ width: 2,
+ height: 3,
+ borderRadius: 1,
+ backgroundColor: color.Primary.Main,
+ transition: 'height 70ms ease-out',
+ flexShrink: 0,
+ },
+]);
+
+export const Timer = style([
+ DefaultReset,
+ {
+ fontVariantNumeric: 'tabular-nums',
+ color: color.Critical.Main,
+ minWidth: config.space.S300,
+ flexShrink: 0,
+ fontWeight: 600,
+ },
+]);
+
+export const CancelHint = style([
+ DefaultReset,
+ {
+ position: 'absolute',
+ left: config.space.S200,
+ top: 0,
+ bottom: 0,
+ display: 'flex',
+ alignItems: 'center',
+ color: color.Critical.Main,
+ fontSize: toRem(12),
+ fontWeight: 600,
+ opacity: 0,
+ transition: 'opacity 100ms ease-out',
+ pointerEvents: 'none',
+ },
+]);
+
+export const CancelHintVisible = style({
+ opacity: 1,
+});
+
+export const SrOnly = style([
+ DefaultReset,
+ {
+ position: 'absolute',
+ width: 1,
+ height: 1,
+ padding: 0,
+ margin: -1,
+ overflow: 'hidden',
+ clip: 'rect(0, 0, 0, 0)',
+ whiteSpace: 'nowrap',
+ borderWidth: 0,
+ },
+]);
diff --git a/src/app/features/room/AudioMessageRecorder.tsx b/src/app/features/room/AudioMessageRecorder.tsx
index 073a8cb54..5ec592bc8 100644
--- a/src/app/features/room/AudioMessageRecorder.tsx
+++ b/src/app/features/room/AudioMessageRecorder.tsx
@@ -1,98 +1,168 @@
-import { VoiceRecorder } from '$plugins/voice-recorder-kit';
-import FocusTrap from 'focus-trap-react';
-import { Box, Icon, Icons, Text, color, config } from 'folds';
-import { useRef } from 'react';
+import {
+ forwardRef,
+ useCallback,
+ useEffect,
+ useImperativeHandle,
+ useMemo,
+ useRef,
+ useState,
+} from 'react';
+import { useVoiceRecorder } from '$plugins/voice-recorder-kit';
+import type { VoiceRecorderStopPayload } from '$plugins/voice-recorder-kit';
+import { Box, Text } from 'folds';
+import * as css from './AudioMessageRecorder.css';
+
+export type AudioRecordingCompletePayload = {
+ audioBlob: Blob;
+ waveform: number[];
+ audioLength: number;
+ audioCodec: string;
+};
+
+export type AudioMessageRecorderHandle = {
+ stop: () => void;
+ cancel: () => void;
+};
type AudioMessageRecorderProps = {
- onRecordingComplete: (audioBlob: Blob) => void;
+ onRecordingComplete: (payload: AudioRecordingCompletePayload) => void;
onRequestClose: () => void;
onWaveformUpdate: (waveform: number[]) => void;
onAudioLengthUpdate: (length: number) => void;
- onAudioCodecUpdate?: (codec: string) => void;
};
-// We use a react voice recorder library to handle the recording of audio messages, as it provides a simple API and handles the complexities of recording audio in the browser.
-// The component is wrapped in a focus trap to ensure that keyboard users can easily navigate and interact with the recorder without accidentally losing focus or interacting with other parts of the UI.
-// The styling is kept simple and consistent with the rest of the app, using Folds' design tokens for colors, spacing, and typography.
-// we use a modified version of https://www.npmjs.com/package/react-voice-recorder-kit for the recording
-export function AudioMessageRecorder({
- onRecordingComplete,
- onRequestClose,
- onWaveformUpdate,
- onAudioLengthUpdate,
- onAudioCodecUpdate,
-}: AudioMessageRecorderProps) {
- const containerRef = useRef(null);
+function formatTime(seconds: number): string {
+ const m = Math.floor(seconds / 60);
+ const s = seconds % 60;
+ return `${m}:${s.toString().padStart(2, '0')}`;
+}
+
+export const AudioMessageRecorder = forwardRef<
+ AudioMessageRecorderHandle,
+ AudioMessageRecorderProps
+>(({ onRecordingComplete, onRequestClose, onWaveformUpdate, onAudioLengthUpdate }, ref) => {
const isDismissedRef = useRef(false);
+ const userRequestedStopRef = useRef(false);
+ const [isCanceling, setIsCanceling] = useState(false);
+ const [announcedTime, setAnnouncedTime] = useState(0);
+
+ const onRecordingCompleteRef = useRef(onRecordingComplete);
+ onRecordingCompleteRef.current = onRecordingComplete;
+ const onRequestCloseRef = useRef(onRequestClose);
+ onRequestCloseRef.current = onRequestClose;
+ const onWaveformUpdateRef = useRef(onWaveformUpdate);
+ onWaveformUpdateRef.current = onWaveformUpdate;
+ const onAudioLengthUpdateRef = useRef(onAudioLengthUpdate);
+ onAudioLengthUpdateRef.current = onAudioLengthUpdate;
+
+ const stableOnStop = useCallback((payload: VoiceRecorderStopPayload) => {
+ if (!userRequestedStopRef.current) return;
+ if (isDismissedRef.current) return;
+ onRecordingCompleteRef.current({
+ audioBlob: payload.audioFile,
+ waveform: payload.waveform,
+ audioLength: payload.audioLength,
+ audioCodec: payload.audioCodec,
+ });
+ onWaveformUpdateRef.current(payload.waveform);
+ onAudioLengthUpdateRef.current(payload.audioLength);
+ }, []);
+
+ const stableOnDelete = useCallback(() => {
+ isDismissedRef.current = true;
+ onRequestCloseRef.current();
+ }, []);
+
+ const { levels, seconds, error, handleStop, handleDelete } = useVoiceRecorder({
+ autoStart: true,
+ onStop: stableOnStop,
+ onDelete: stableOnDelete,
+ });
+
+ const doStop = useCallback(() => {
+ if (isDismissedRef.current) return;
+ userRequestedStopRef.current = true;
+ handleStop();
+ }, [handleStop]);
+
+ const doCancel = useCallback(() => {
+ if (isDismissedRef.current) return;
+ setIsCanceling(true);
+ setTimeout(() => {
+ isDismissedRef.current = true;
+ handleDelete();
+ }, 180);
+ }, [handleDelete]);
+
+ useImperativeHandle(ref, () => ({ stop: doStop, cancel: doCancel }), [doStop, doCancel]);
+
+ useEffect(() => {
+ if (seconds > 0 && seconds % 30 === 0 && seconds !== announcedTime) {
+ setAnnouncedTime(seconds);
+ }
+ }, [seconds, announcedTime]);
+
+ const BAR_COUNT = 28;
+ const bars = useMemo(() => {
+ if (levels.length === 0) {
+ return Array(BAR_COUNT).fill(0.15);
+ }
+ if (levels.length <= BAR_COUNT) {
+ const step = (levels.length - 1) / (BAR_COUNT - 1);
+ return Array.from({ length: BAR_COUNT }, (_, i) => {
+ const position = i * step;
+ const lower = Math.floor(position);
+ const upper = Math.min(Math.ceil(position), levels.length - 1);
+ const fraction = position - lower;
+ if (lower === upper) {
+ return levels[lower] ?? 0.15;
+ }
+ return (levels[lower] ?? 0.15) * (1 - fraction) + (levels[upper] ?? 0.15) * fraction;
+ });
+ }
+ const step = levels.length / BAR_COUNT;
+ return Array.from({ length: BAR_COUNT }, (_, i) => {
+ const start = Math.floor(i * step);
+ const end = Math.floor((i + 1) * step);
+ const slice = levels.slice(start, end);
+ return slice.length > 0 ? Math.max(...slice) : 0.15;
+ });
+ }, [levels]);
+
+ const containerClassName = [css.Container, isCanceling ? css.ContainerCanceling : null]
+ .filter(Boolean)
+ .join(' ');
- // uses default styling, we use at other places
return (
- {
- isDismissedRef.current = true;
- onRequestClose();
- },
- clickOutsideDeactivates: true,
- allowOutsideClick: true,
- fallbackFocus: () => containerRef.current!,
- }}
- >
-
-
- Audio Message Recorder
- {
- if (isDismissedRef.current) return;
- // closes the recorder and sends the audio file back to the parent component to be uploaded and sent as a message
- onRecordingComplete(audioFile);
- onWaveformUpdate(waveform);
- onAudioLengthUpdate(audioLength);
- // Pass the audio codec to the parent component
- if (onAudioCodecUpdate) onAudioCodecUpdate(audioCodec);
- }}
- buttonBackgroundColor={color.SurfaceVariant.Container}
- buttonHoverBackgroundColor={color.SurfaceVariant.ContainerHover}
- iconColor={color.Primary.Main}
- // icons for the recorder, we use Folds' icon library to keep the styling consistent with the rest of the app
- customPauseIcon={ }
- customPlayIcon={ }
- customDeleteIcon={ }
- customStopIcon={ }
- customRepeatIcon={ }
- customResumeIcon={ }
- style={{
- backgroundColor: color.Surface.ContainerActive,
- }}
- />
+ <>
+ {error && (
+
+ {error}
+
+ )}
+
+
+
+
+ {bars.map((level, i) => (
+
+ ))}
-
-
+
+
+ {formatTime(seconds)}
+
+ {announcedTime > 0 && announcedTime === seconds && (
+
+ Recording duration: {formatTime(announcedTime)}
+
+ )}
+
+ >
);
-}
+});
diff --git a/src/app/features/room/RoomInput.tsx b/src/app/features/room/RoomInput.tsx
index 60ee8d20b..099cfe5cc 100644
--- a/src/app/features/room/RoomInput.tsx
+++ b/src/app/features/room/RoomInput.tsx
@@ -25,6 +25,7 @@ import { ReactEditor } from 'slate-react';
import { Editor, Point, Range, Transforms } from 'slate';
import {
Box,
+ color,
config,
Dialog,
Icon,
@@ -152,6 +153,7 @@ import { usePowerLevelsContext } from '$hooks/usePowerLevels';
import { useRoomCreators } from '$hooks/useRoomCreators';
import { useRoomPermissions } from '$hooks/useRoomPermissions';
import { AutocompleteNotice } from '$components/editor/autocomplete/AutocompleteNotice';
+import { Microphone, Stop } from '@phosphor-icons/react';
import { getSupportedAudioExtension } from '$plugins/voice-recorder-kit/supportedCodec';
import { SchedulePickerDialog } from './schedule-send';
import * as css from './schedule-send/SchedulePickerDialog.css';
@@ -162,7 +164,7 @@ import {
getVideoMsgContent,
} from './msgContent';
import { CommandAutocomplete } from './CommandAutocomplete';
-import { AudioMessageRecorder } from './AudioMessageRecorder';
+import { AudioMessageRecorder, AudioMessageRecorderHandle } from './AudioMessageRecorder';
// Returns the event ID of the most recent non-reaction/non-edit event in a thread,
// falling back to the thread root if no replies exist yet.
@@ -285,8 +287,9 @@ export const RoomInput = forwardRef(
const [toolbar, setToolbar] = useSetting(settingsAtom, 'editorToolbar');
const [showAudioRecorder, setShowAudioRecorder] = useState(false);
- const [audioMsgWaveform, setAudioMsgWaveform] = useState(undefined);
- const [audioMsgLength, setAudioMsgLength] = useState(undefined);
+ const audioRecorderRef = useRef(null);
+ const micHoldStartRef = useRef(0);
+ const HOLD_THRESHOLD_MS = 400;
const [autocompleteQuery, setAutocompleteQuery] =
useState>();
const [isQuickTextReact, setQuickTextReact] = useState(false);
@@ -296,7 +299,7 @@ export const RoomInput = forwardRef(
const [inputKey, setInputKey] = useState(0);
const handleFiles = useCallback(
- async (files: File[]) => {
+ async (files: File[], audioMeta?: { waveform: number[]; audioDuration: number }) => {
setUploadBoard(true);
const safeFiles = files.map(safeFile);
const fileItems: TUploadItem[] = [];
@@ -310,6 +313,8 @@ export const RoomInput = forwardRef(
...ef,
metadata: {
markedAsSpoiler: false,
+ waveform: audioMeta?.waveform,
+ audioDuration: audioMeta?.audioDuration,
},
})
);
@@ -321,6 +326,8 @@ export const RoomInput = forwardRef(
encInfo: undefined,
metadata: {
markedAsSpoiler: false,
+ waveform: audioMeta?.waveform,
+ audioDuration: audioMeta?.audioDuration,
},
})
);
@@ -489,7 +496,7 @@ export const RoomInput = forwardRef(
return getVideoMsgContent(mx, fileItem, upload.mxc);
}
if (fileItem.file.type.startsWith('audio')) {
- return getAudioMsgContent(fileItem, upload.mxc, audioMsgWaveform, audioMsgLength);
+ return getAudioMsgContent(fileItem, upload.mxc);
}
return getFileMsgContent(fileItem, upload.mxc);
});
@@ -561,7 +568,7 @@ export const RoomInput = forwardRef(
contents.map((content) =>
mx
.sendMessage(roomId, threadRootId ?? null, content as any)
- .then((res) => {
+ .then((res: { event_id: string }) => {
debugLog.info('message', 'Uploaded file message sent', {
roomId,
eventId: res.event_id,
@@ -759,7 +766,7 @@ export const RoomInput = forwardRef(
},
() => mx.sendMessage(roomId, threadRootId ?? null, content as any)
)
- .then((res) => {
+ .then((res: { event_id: string }) => {
debugLog.info('message', 'Message sent successfully', {
roomId,
eventId: res.event_id,
@@ -840,6 +847,10 @@ export const RoomInput = forwardRef(
}
if (isKeyHotkey('escape', evt)) {
evt.preventDefault();
+ if (showAudioRecorder) {
+ audioRecorderRef.current?.cancel();
+ return;
+ }
if (autocompleteQuery) {
setAutocompleteQuery(undefined);
return;
@@ -847,7 +858,15 @@ export const RoomInput = forwardRef(
setReplyDraft(undefined);
}
},
- [submit, roomId, setReplyDraft, enterForNewline, autocompleteQuery, isComposing]
+ [
+ submit,
+ roomId,
+ setReplyDraft,
+ enterForNewline,
+ autocompleteQuery,
+ isComposing,
+ showAudioRecorder,
+ ]
);
const handleKeyUp: KeyboardEventHandler = useCallback(
@@ -1049,7 +1068,7 @@ export const RoomInput = forwardRef(
editableName="RoomInput"
editor={editor}
key={inputKey}
- placeholder="Send a message..."
+ placeholder={showAudioRecorder && mobileOrTablet() ? '' : 'Send a message...'}
onKeyDown={handleKeyDown}
onKeyUp={handleKeyUp}
onPaste={handlePaste}
@@ -1169,71 +1188,108 @@ export const RoomInput = forwardRef(
>
}
before={
- pickFile('*')}
- variant="SurfaceVariant"
- size="300"
- radii="300"
- title="Upload File"
- aria-label="Upload and attach a File"
- >
-
-
+ !(showAudioRecorder && mobileOrTablet()) && (
+ pickFile('*')}
+ variant="SurfaceVariant"
+ size="300"
+ radii="300"
+ title="Upload File"
+ aria-label="Upload and attach a File"
+ >
+
+
+ )
}
after={
<>
+ {showAudioRecorder && (
+ setShowAudioRecorder(false)}
+ onRecordingComplete={(payload) => {
+ const extension = getSupportedAudioExtension(payload.audioCodec);
+ const file = new File(
+ [payload.audioBlob],
+ `sable-audio-message-${Date.now()}.${extension}`,
+ {
+ type: payload.audioCodec,
+ }
+ );
+ handleFiles([file], {
+ waveform: payload.waveform,
+ audioDuration: payload.audioLength,
+ });
+ setShowAudioRecorder(false);
+ }}
+ onAudioLengthUpdate={() => {}}
+ onWaveformUpdate={() => {}}
+ />
+ )}
+
+ {/* ── Mic button — always present; icon swaps to Stop while recording ── */}
setToolbar(!toolbar)}
+ title={showAudioRecorder ? 'Stop recording' : 'Record audio message'}
+ aria-label={showAudioRecorder ? 'Stop recording' : 'Record audio message'}
+ aria-pressed={showAudioRecorder}
+ onClick={() => {
+ if (mobileOrTablet()) return;
+ if (showAudioRecorder) {
+ audioRecorderRef.current?.stop();
+ } else {
+ setShowAudioRecorder(true);
+ }
+ }}
+ onPointerDown={() => {
+ if (!mobileOrTablet()) return;
+ if (showAudioRecorder) return;
+ micHoldStartRef.current = Date.now();
+ setShowAudioRecorder(true);
+
+ let cleanup: () => void;
+ const onUp = () => {
+ cleanup();
+ const held = Date.now() - micHoldStartRef.current;
+ if (held >= HOLD_THRESHOLD_MS) {
+ setTimeout(() => {
+ audioRecorderRef.current?.stop();
+ }, 50);
+ } else {
+ setTimeout(() => {
+ audioRecorderRef.current?.cancel();
+ }, 50);
+ }
+ };
+ cleanup = () => {
+ window.removeEventListener('pointerup', onUp);
+ window.removeEventListener('pointercancel', cleanup);
+ };
+ window.addEventListener('pointerup', onUp);
+ window.addEventListener('pointercancel', cleanup);
+ }}
>
-
+ {showAudioRecorder ? (
+
+ ) : (
+
+ )}
+
setShowAudioRecorder(!showAudioRecorder)}
+ title={toolbar ? 'Hide Toolbar' : 'Show Toolbar'}
+ aria-pressed={toolbar}
+ aria-label={toolbar ? 'Hide Toolbar' : 'Show Toolbar'}
+ onClick={() => setToolbar(!toolbar)}
>
-
+
- {showAudioRecorder && (
- {
- setShowAudioRecorder(false);
- }}
- onRecordingComplete={(audioBlob) => {
- const file = new File(
- [audioBlob],
- `sable-audio-message-${Date.now()}.${getSupportedAudioExtension(audioBlob.type)}`,
- {
- type: audioBlob.type,
- }
- );
- handleFiles([file]);
- // Close the recorder after handling the file, to give some feedback that the recording was successful
- setShowAudioRecorder(false);
- }}
- onAudioLengthUpdate={(len) => setAudioMsgLength(len)}
- onWaveformUpdate={(w) => setAudioMsgWaveform(w)}
- />
- }
- />
- )}
{(emojiBoardTab: EmojiBoardTab | undefined, setEmojiBoardTab) => (
{
- const { file, encInfo } = item;
+export const getAudioMsgContent = (item: TUploadItem, mxc: string): AudioMsgContent => {
+ const { file, encInfo, metadata } = item;
+ const { waveform, audioDuration, markedAsSpoiler } = metadata;
let content: IContent = {
msgtype: MsgType.Audio,
filename: file.name,
@@ -164,17 +160,17 @@ export const getAudioMsgContent = (
info: {
mimetype: file.type,
size: file.size,
- duration: item.metadata.markedAsSpoiler || !audioLength ? 0 : audioLength * 1000,
+ duration: markedAsSpoiler || !audioDuration ? 0 : audioDuration * 1000,
},
// Element-compatible unstable extensible-event keys
'org.matrix.msc1767.audio': {
- waveform: waveform?.map((v) => Math.round(v * 1024)), // scale waveform values to fit in 10 bits (0-1024) for more efficient storage, as per MSC1767 spec
- duration: item.metadata.markedAsSpoiler || !audioLength ? 0 : audioLength * 1000, // if marked as spoiler, set duration to 0 to hide it in clients that support msc1767
+ waveform: waveform?.map((v) => Math.round(v * 1024)),
+ duration: markedAsSpoiler || !audioDuration ? 0 : audioDuration * 1000,
},
'org.matrix.msc1767.text': item.body && item.body.length > 0 ? item.body : 'a voice message',
'org.matrix.msc3245.voice.v2': {
- duration: !audioLength ? 0 : audioLength,
+ duration: markedAsSpoiler || !audioDuration ? 0 : audioDuration,
waveform: waveform?.map((v) => Math.round(v * 1024)),
},
// for element compat
diff --git a/src/app/plugins/voice-recorder-kit/README.md b/src/app/plugins/voice-recorder-kit/README.md
deleted file mode 100644
index 49b9edd3b..000000000
--- a/src/app/plugins/voice-recorder-kit/README.md
+++ /dev/null
@@ -1,492 +0,0 @@
-# react-voice-recorder-kit
-
-A lightweight React library for voice recording with audio waveform visualization and no UI framework dependencies
-
-* No UI framework dependencies (Pure React + Inline CSS)
-* Animated audio waveform visualization (40 bars)
-* Ready-to-use component
-* Fully customizable hook
-* TypeScript support
-* Compatible with Next.js, Vite, CRA, and more
-
----
-
-## Screenshots
-
-### Initial State (Ready to Record)
-
-
-
-
-### Recording in Progress
-
-
-### Paused State
-
-
-### Recorded Audio Ready to Play (Custom Styled)
-
-
----
-
-## Installation
-
-```bash
-npm install react-voice-recorder-kit
-# or
-pnpm add react-voice-recorder-kit
-# or
-yarn add react-voice-recorder-kit
-```
-
-Requires **React 18+**
-
----
-
-## Quick Start (Using Component)
-
-```tsx
-'use client'
-
-import { useState } from 'react'
-import { VoiceRecorder } from 'react-voice-recorder-kit'
-
-export default function Page() {
- const [file, setFile] = useState(null)
- const [url, setUrl] = useState(null)
- const [waveform, setWaveform] = useState([])
- const [audioLength, setAudioLength] = useState(0)
-
- return (
-
-
React Voice Recorder Kit
-
-
{
- setFile(audioFile)
- setUrl(audioUrl)
- setWaveform(waveform)
- setAudioLength(audioLength)
- }}
- onDelete={() => {
- setFile(null)
- setUrl(null)
- setWaveform([])
- setAudioLength(0)
- }}
- />
-
- {url && (
-
-
- {file instanceof File && (
-
- File name: {file.name} | Size: {file.size} bytes
-
- )}
-
- Length: {audioLength}s | Waveform points: {waveform.length}
-
-
- )}
-
- )
-}
-```
-
----
-
-## Usage in Next.js (App Router)
-
-```tsx
-'use client'
-
-import { VoiceRecorder } from 'react-voice-recorder-kit'
-
-export default function VoicePage() {
- return (
-
-
-
- )
-}
-```
-
----
-
-## Component API
-
-### Main Props
-
-| Prop | Type | Default | Description |
-| --------- | --------------------------------- | --------- | ---------------------------------------------- |
-| autoStart | boolean | true | Auto-start recording on mount |
-| onStop | (payload: { audioFile: Blob; audioUrl: string; waveform: number[]; audioLength: number }) => void | undefined | Callback after recording stops (all values batched) |
-| onDelete | () => void | undefined | Callback after recording is deleted |
-| width | string \| number | '100%' | Component width |
-| height | string \| number | undefined | Component height |
-| style | CSSProperties | undefined | Additional styles for container |
-
-### Styling Props
-
-| Prop | Type | Default | Description |
-| --------------------------- | --------------------------------------- | ------------------------------------------------------------ | ------------------------------------ |
-| backgroundColor | string | '#ffffff' | Background color |
-| borderColor | string | '#e5e7eb' | Border color |
-| borderRadius | string \| number | 4 | Border radius |
-| padding | string \| number | '6px 10px' | Internal padding |
-| gap | string \| number | 8 | Gap between elements |
-| recordingIndicatorColor | string | '#ef4444' | Recording indicator color |
-| idleIndicatorColor | string | '#9ca3af' | Idle indicator color |
-| timeTextColor | string | undefined | Time text color |
-| timeFontSize | string \| number | 12 | Time font size |
-| timeFontWeight | string \| number | 500 | Time font weight |
-| timeFontFamily | string | 'system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif' | Time font family |
-| visualizerBarColor | string \| (level: number, index: number) => string | '#4b5563' | Waveform bar color |
-| visualizerBarWidth | number | 3 | Waveform bar width |
-| visualizerBarGap | number | 4 | Gap between bars |
-| visualizerBarHeight | number | 40 | Waveform bar height |
-| visualizerHeight | number | 40 | Total waveform height |
-| buttonSize | number | 28 | Button size |
-| buttonBackgroundColor | string | '#ffffff' | Button background color |
-| buttonBorderColor | string | '#e5e7eb' | Button border color |
-| buttonBorderRadius | string \| number | 999 | Button border radius |
-| buttonHoverBackgroundColor | string | undefined | Button hover background color |
-| buttonGap | number | 4 | Gap between buttons |
-| errorTextColor | string | '#dc2626' | Error text color |
-| errorFontSize | string \| number | 10 | Error font size |
-| errorFontFamily | string | 'system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif' | Error font family |
-| iconSize | number | 18 | Icon size |
-| iconColor | string | undefined | Icon color |
-
-### Custom Icon Props
-
-| Prop | Type | Default | Description |
-| --------------- | --------- | --------- | ------------------------ |
-| customPlayIcon | ReactNode | undefined | Custom play icon |
-| customPauseIcon | ReactNode | undefined | Custom pause icon |
-| customStopIcon | ReactNode | undefined | Custom stop icon |
-| customResumeIcon| ReactNode | undefined | Custom resume icon |
-| customDeleteIcon| ReactNode | undefined | Custom delete icon |
-| customRepeatIcon| ReactNode | undefined | Custom repeat icon |
-
----
-
-## Component Usage Examples
-
-### Example 1: Simple Usage
-
-```tsx
-import { VoiceRecorder } from 'react-voice-recorder-kit'
-
-function SimpleRecorder() {
- return
-}
-```
-
-### Example 2: Custom Styling
-
-```tsx
-import { VoiceRecorder } from 'react-voice-recorder-kit'
-
-function CustomStyledRecorder() {
- return (
-
- )
-}
-```
-
-### Example 3: Using with Callbacks
-
-```tsx
-import { useState } from 'react'
-import { VoiceRecorder } from 'react-voice-recorder-kit'
-
-function RecorderWithCallbacks() {
- const [audioFile, setAudioFile] = useState(null)
-
- return (
- {
- if (audioFile instanceof File) {
- console.log('Recording stopped:', audioFile.name)
- }
- console.log('Audio length (s):', audioLength)
- console.log('Waveform points:', waveform.length)
- setAudioFile(audioFile)
- }}
- onDelete={() => {
- console.log('Recording deleted')
- setAudioFile(null)
- }}
- />
- )
-}
-```
-
-### Example 4: Dynamic Color Waveform
-
-```tsx
-import { VoiceRecorder } from 'react-voice-recorder-kit'
-
-function DynamicColorRecorder() {
- return (
- {
- const hue = (level * 120).toString()
- return `hsl(${hue}, 70%, 50%)`
- }}
- />
- )
-}
-```
-
----
-
-## Using the Hook (useVoiceRecorder)
-
-For full control over the UI, you can use the hook directly.
-
-### Import
-
-```ts
-import { useVoiceRecorder } from 'react-voice-recorder-kit'
-```
-
-### Options
-
-```ts
-type UseVoiceRecorderOptions = {
- autoStart?: boolean
- onStop?: (payload: { audioFile: Blob; audioUrl: string; waveform: number[]; audioLength: number }) => void
- onDelete?: () => void
-}
-```
-
-### Return Values
-
-```ts
-type UseVoiceRecorderReturn = {
- state: RecorderState
- isRecording: boolean
- isStopped: boolean
- isTemporaryStopped: boolean
- isPlaying: boolean
- isPaused: boolean
- seconds: number
- levels: number[]
- error: string | null
- audioUrl: string | null
- audioFile: File | null
- waveform: number[] | null
- start: () => void
- handlePause: () => void
- handleStopTemporary: () => void
- handleStop: () => void
- handleResume: () => void
- handlePreviewPlay: () => void
- handlePlay: () => void
- handleRestart: () => void
- handleDelete: () => void
- handleRecordAgain: () => void
-}
-```
-
-| Property | Type | Description |
-| ----------------- | -------------- | ---------------------------------------------- |
-| state | RecorderState | Current state: 'idle' \| 'recording' \| 'paused' \| 'reviewing' \| 'playing' |
-| isRecording | boolean | Is currently recording |
-| isStopped | boolean | Is recording stopped |
-| isTemporaryStopped| boolean | Is recording temporarily stopped |
-| isPlaying | boolean | Is currently playing |
-| isPaused | boolean | Is recording paused |
-| seconds | number | Time in seconds |
-| levels | number[] | Array of 40 audio levels (0 to 1) |
-| error | string \| null | Error message if any |
-| audioUrl | string \| null | URL of recorded audio file |
-| audioFile | File \| null | Recorded audio file |
-| waveform | number[] \| null | Downsampled waveform points for the recording |
-| start | () => void | Start recording |
-| handlePause | () => void | Pause recording |
-| handleStopTemporary| () => void | Temporary stop and review |
-| handleStop | () => void | Stop and save recording |
-| handleResume | () => void | Resume recording after pause |
-| handlePreviewPlay | () => void | Play preview (in paused state) |
-| handlePlay | () => void | Play recorded file |
-| handleRestart | () => void | Restart recording |
-| handleDelete | () => void | Delete recording and return to initial state |
-| handleRecordAgain | () => void | Record again (same as handleRestart) |
-
----
-
-## Complete Hook Usage Example
-
-```tsx
-'use client'
-
-import { useVoiceRecorder } from 'react-voice-recorder-kit'
-
-export default function CustomRecorder() {
- const {
- state,
- isRecording,
- isPaused,
- isStopped,
- isPlaying,
- seconds,
- levels,
- audioUrl,
- audioFile,
- error,
- start,
- handlePause,
- handleResume,
- handleStop,
- handlePlay,
- handleDelete,
- handleRestart
- } = useVoiceRecorder({ autoStart: false })
-
- const formatTime = (secs: number) => {
- const minutes = Math.floor(secs / 60)
- const sec = secs % 60
- return `${minutes}:${sec.toString().padStart(2, '0')}`
- }
-
- return (
-
-
Custom Voice Recorder
-
-
- Status: {state} | Time: {formatTime(seconds)}
-
-
-
- {!isRecording && !isStopped && (
- Start
- )}
-
- {isRecording && !isPaused && (
- <>
- Pause
- Stop & Save
- Restart
- >
- )}
-
- {isPaused && (
- <>
- Resume
- Restart
- Stop & Save
- >
- )}
-
- {isStopped && audioUrl && (
- <>
-
- {isPlaying ? 'Stop Playback' : 'Play'}
-
- Delete
- Record Again
- >
- )}
-
-
- {error && (
-
- {error}
-
- )}
-
-
- {levels.map((level, index) => {
- const height = 5 + level * 35
- return (
-
- )
- })}
-
-
- {audioUrl && (
-
- )}
-
- )
-}
-```
-
----
-
-## Recording States (RecorderState)
-
-The component and hook have 5 different states:
-
-- **idle**: Initial state, ready to start
-- **recording**: Currently recording
-- **paused**: Recording paused (can be resumed)
-- **reviewing**: Recording completed and under review
-- **playing**: Playing recorded file
-
----
-
-## Features
-
-* Voice recording using MediaRecorder API
-* Animated audio waveform visualization during recording and playback
-* Support for pause and resume
-* Support for playing recorded files
-* Time display in MM:SS format
-* Error handling and error message display
-* Ready-to-use UI with control buttons
-* Fully customizable styling and sizing
-* No external dependencies
-* Support for custom icons
-* Dynamic color waveforms
-
----
-
-## Important Notes
-
-1. Requires microphone access in the browser
-2. Recorded files are saved in WebM format
-3. In paused state, you can play a preview of the recording
-4. You can dynamically set bar colors using `visualizerBarColor`
-5. All created URLs are automatically cleaned up
-
----
-
-## License
-
-MIT
-
-(orignal by Mohammadreza Fallahfaal: https://github.com/mohamad-fallah/react-voice-recorder-kit)
diff --git a/src/app/plugins/voice-recorder-kit/VoiceRecorder.tsx b/src/app/plugins/voice-recorder-kit/VoiceRecorder.tsx
deleted file mode 100644
index a51f634a9..000000000
--- a/src/app/plugins/voice-recorder-kit/VoiceRecorder.tsx
+++ /dev/null
@@ -1,620 +0,0 @@
-import type { CSSProperties } from 'react';
-import { useMemo, useRef, useEffect, useState } from 'react';
-import { useVoiceRecorder } from './useVoiceRecorder';
-import type { VoiceRecorderProps } from './types';
-import { PlayIcon, PauseIcon, StopIcon, RepeatIcon, DeleteIcon, ResumeIcon } from './icons';
-
-function VoiceRecorder(props: VoiceRecorderProps) {
- const {
- width,
- height,
- style,
- backgroundColor = '#ffffff',
- borderColor = '#e5e7eb',
- borderRadius = 4,
- padding = '6px 10px',
- gap = 8,
- recordingIndicatorColor = '#ef4444',
- idleIndicatorColor = '#9ca3af',
- timeTextColor,
- timeFontSize = 12,
- timeFontWeight = 500,
- timeFontFamily = 'system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif',
- visualizerBarColor = '#4b5563',
- visualizerBarWidth = 3,
- visualizerBarGap = 4,
- visualizerBarHeight = 40,
- visualizerHeight = 40,
- buttonSize = 28,
- buttonBackgroundColor = '#ffffff',
- buttonBorderColor = '#e5e7eb',
- buttonBorderRadius = 999,
- buttonHoverBackgroundColor,
- buttonGap = 4,
- errorTextColor = '#dc2626',
- errorFontSize = 10,
- errorFontFamily = 'system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", sans-serif',
- customPlayIcon,
- customPauseIcon,
- customStopIcon,
- customResumeIcon,
- customDeleteIcon,
- customRepeatIcon,
- iconSize = 18,
- iconColor,
- ...recorderOptions
- } = props;
-
- const {
- state,
- isRecording,
- isStopped,
- isPlaying,
- seconds,
- levels,
- error,
- handlePause,
- handleStopTemporary,
- handleStop,
- handleResume,
- handlePreviewPlay,
- handlePlay,
- handleRestart,
- handleDelete,
- } = useVoiceRecorder(recorderOptions);
-
- const containerRef = useRef(null);
- const [visualizerWidth, setVisualizerWidth] = useState(0);
- const visualizerRef = useRef(null);
-
- useEffect(() => {
- const updateWidth = () => {
- if (visualizerRef.current) {
- const availableWidth = visualizerRef.current.offsetWidth;
- setVisualizerWidth(Math.max(0, availableWidth));
- }
- };
-
- updateWidth();
- const resizeObserver = new ResizeObserver(updateWidth);
- if (visualizerRef.current) {
- resizeObserver.observe(visualizerRef.current);
- }
-
- return () => {
- resizeObserver.disconnect();
- };
- }, [width, isStopped, error]);
-
- const formattedTime = useMemo(() => {
- const minutes = Math.floor(seconds / 60);
- const secs = seconds % 60;
- return `${minutes}:${secs.toString().padStart(2, '0')}`;
- }, [seconds]);
-
- const barWidth = visualizerBarWidth;
- const barGap = visualizerBarGap;
-
- const maxBars = useMemo(() => {
- if (visualizerWidth <= 0) {
- return Math.max(levels.length, 40);
- }
- const calculatedBars = Math.floor(visualizerWidth / (barWidth + barGap));
- return Math.max(calculatedBars, 1);
- }, [visualizerWidth, levels.length, barWidth, barGap]);
-
- const displayedLevels = useMemo(() => {
- if (maxBars <= 0 || levels.length === 0) {
- return Array.from({ length: Math.max(maxBars, 40) }, () => 0.15);
- }
-
- if (maxBars <= levels.length) {
- const step = levels.length / maxBars;
- return Array.from({ length: maxBars }, (_, i) => {
- const start = Math.floor(i * step);
- const end = Math.floor((i + 1) * step);
- const slice = levels.slice(start, end);
- return slice.length > 0 ? Math.max(...slice) : 0.15;
- });
- }
-
- const step = (levels.length - 1) / (maxBars - 1);
- return Array.from({ length: maxBars }, (_, i) => {
- const position = i * step;
- const lowerIndex = Math.floor(position);
- const upperIndex = Math.min(Math.ceil(position), levels.length - 1);
- const fraction = position - lowerIndex;
-
- if (lowerIndex === upperIndex) {
- return levels[lowerIndex] || 0.15;
- }
-
- return (
- (levels[lowerIndex] || 0.15) * (1 - fraction) + (levels[upperIndex] || 0.15) * fraction
- );
- });
- }, [levels, maxBars]);
-
- const containerStyle: CSSProperties = useMemo(
- () => ({
- display: 'flex',
- alignItems: 'center',
- gap: typeof gap === 'number' ? `${gap}px` : gap,
- backgroundColor,
- borderRadius: typeof borderRadius === 'number' ? `${borderRadius}px` : borderRadius,
- border: `1px solid ${borderColor}`,
- padding: typeof padding === 'number' ? `${padding}px` : padding,
- width: width ?? '100%',
- height,
- boxSizing: 'border-box',
- ...style,
- }),
- [width, height, style, backgroundColor, borderColor, borderRadius, padding, gap]
- );
-
- return (
-
-
-
- {formattedTime}
-
-
- {displayedLevels.map((level) => (
-
- ))}
-
-
- {state === 'recording' && (
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customPauseIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customStopIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customRepeatIcon || }
-
-
- )}
-
- {state === 'paused' && (
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customResumeIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {isPlaying
- ? customPauseIcon ||
- : customPlayIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customRepeatIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customStopIcon || }
-
-
- )}
-
- {state === 'reviewing' && (
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {isPlaying
- ? customPauseIcon ||
- : customPlayIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customDeleteIcon || }
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customRepeatIcon || }
-
-
- )}
-
- {state === 'playing' && (
-
-
{
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonHoverBackgroundColor;
- }
- }}
- onMouseLeave={(e) => {
- if (buttonHoverBackgroundColor) {
- e.currentTarget.style.backgroundColor = buttonBackgroundColor;
- }
- }}
- >
- {customPauseIcon || }
-
-
- )}
-
- {error && (
-
- {error}
-
- )}
-
- );
-}
-
-export default VoiceRecorder;
diff --git a/src/app/plugins/voice-recorder-kit/icons.tsx b/src/app/plugins/voice-recorder-kit/icons.tsx
deleted file mode 100644
index 29248a00a..000000000
--- a/src/app/plugins/voice-recorder-kit/icons.tsx
+++ /dev/null
@@ -1,76 +0,0 @@
-import type { FC } from 'react';
-
-export const PlayIcon: FC<{ size?: number }> = function ({ size = 18 }) {
- return (
-
-
-
-
- );
-};
-
-export const ResumeIcon: FC<{ size?: number }> = function ({ size = 18 }) {
- return (
-
-
-
- );
-};
-
-export const PauseIcon: FC<{ size?: number }> = function ({ size = 18 }) {
- return (
-
-
-
-
- );
-};
-
-export const StopIcon: FC<{ size?: number }> = function ({ size = 18 }) {
- return (
-
-
-
- );
-};
-
-export const DeleteIcon: FC<{ size?: number }> = function ({ size = 18 }) {
- return (
-
-
-
-
-
-
-
- );
-};
-
-export const RepeatIcon: FC<{ size?: number }> = function ({ size = 18 }) {
- return (
-
-
-
- );
-};
diff --git a/src/app/plugins/voice-recorder-kit/index.ts b/src/app/plugins/voice-recorder-kit/index.ts
index 00564f2bf..00e2e4d7e 100644
--- a/src/app/plugins/voice-recorder-kit/index.ts
+++ b/src/app/plugins/voice-recorder-kit/index.ts
@@ -1,9 +1,7 @@
export { useVoiceRecorder } from './useVoiceRecorder';
-export { default as VoiceRecorder } from './VoiceRecorder';
export type {
UseVoiceRecorderOptions,
UseVoiceRecorderReturn,
RecorderState,
- VoiceRecorderProps,
VoiceRecorderStopPayload,
} from './types';
diff --git a/src/app/plugins/voice-recorder-kit/types.ts b/src/app/plugins/voice-recorder-kit/types.ts
index 9834303c2..609ebe9c1 100644
--- a/src/app/plugins/voice-recorder-kit/types.ts
+++ b/src/app/plugins/voice-recorder-kit/types.ts
@@ -1,5 +1,3 @@
-import type { CSSProperties, ReactNode } from 'react';
-
export type RecorderState = 'idle' | 'recording' | 'paused' | 'reviewing' | 'playing';
export type VoiceRecorderStopPayload = {
@@ -40,42 +38,3 @@ export type UseVoiceRecorderReturn = {
handleDelete: () => void;
handleRecordAgain: () => void;
};
-
-export type VoiceRecorderProps = UseVoiceRecorderOptions & {
- width?: string | number;
- height?: string | number;
- style?: CSSProperties;
- backgroundColor?: string;
- borderColor?: string;
- borderRadius?: string | number;
- padding?: string | number;
- gap?: string | number;
- recordingIndicatorColor?: string;
- idleIndicatorColor?: string;
- timeTextColor?: string;
- timeFontSize?: string | number;
- timeFontWeight?: string | number;
- timeFontFamily?: string;
- visualizerBarColor?: string | ((level: number, index: number) => string);
- visualizerBarWidth?: number;
- visualizerBarGap?: number;
- visualizerBarHeight?: number;
- visualizerHeight?: number;
- buttonSize?: number;
- buttonBackgroundColor?: string;
- buttonBorderColor?: string;
- buttonBorderRadius?: string | number;
- buttonHoverBackgroundColor?: string;
- buttonGap?: number;
- errorTextColor?: string;
- errorFontSize?: string | number;
- errorFontFamily?: string;
- customPlayIcon?: ReactNode;
- customPauseIcon?: ReactNode;
- customStopIcon?: ReactNode;
- customResumeIcon?: ReactNode;
- customDeleteIcon?: ReactNode;
- customRepeatIcon?: ReactNode;
- iconSize?: number;
- iconColor?: string;
-};
diff --git a/src/app/plugins/voice-recorder-kit/useVoiceRecorder.ts b/src/app/plugins/voice-recorder-kit/useVoiceRecorder.ts
index a1eab6ddf..f73f7daf0 100644
--- a/src/app/plugins/voice-recorder-kit/useVoiceRecorder.ts
+++ b/src/app/plugins/voice-recorder-kit/useVoiceRecorder.ts
@@ -1,3 +1,4 @@
+// Based on https://github.com/mohamad-fallah/react-voice-recorder-kit by mohamad-fallah
import { useCallback, useEffect, useRef, useState } from 'react';
import type {
UseVoiceRecorderOptions,
@@ -10,34 +11,40 @@ import { getSupportedAudioCodec, getSupportedAudioExtension } from './supportedC
const BAR_COUNT = 40;
const WAVEFORM_POINT_COUNT = 100;
+let sharedAudioContext: AudioContext | null = null;
+
+function getSharedAudioContext(): AudioContext {
+ if (!sharedAudioContext || sharedAudioContext.state === 'closed') {
+ sharedAudioContext = new AudioContext();
+ }
+ return sharedAudioContext;
+}
+
// downsample an array of samples to a target count by averaging blocks of samples together
function downsampleWaveform(samples: number[], targetCount: number): number[] {
- if (samples.length === 0) return Array.from({ length: targetCount }, () => 0);
+ if (samples.length === 0) return Array.from({ length: targetCount }, () => 0.15);
if (samples.length <= targetCount) {
- const padded = [...samples];
- while (padded.length < targetCount) padded.push(0);
- return padded;
- }
- const result: number[] = [];
- const blockSize = samples.length / targetCount;
- for (let i = 0; i < targetCount; i += 1) {
- const start = Math.floor(i * blockSize);
- const end = Math.floor((i + 1) * blockSize);
- let sum = 0;
- for (let j = start; j < end; j += 1) {
- sum += samples[j];
- }
- result.push(sum / (end - start));
+ const step = (samples.length - 1) / (targetCount - 1);
+ return Array.from({ length: targetCount }, (_, i) => {
+ const position = i * step;
+ const lower = Math.floor(position);
+ const upper = Math.min(Math.ceil(position), samples.length - 1);
+ const fraction = position - lower;
+ if (lower === upper) {
+ return samples[lower] ?? 0.15;
+ }
+ return (samples[lower] ?? 0.15) * (1 - fraction) + (samples[upper] ?? 0.15) * fraction;
+ });
}
- return result;
+ const step = samples.length / targetCount;
+ return Array.from({ length: targetCount }, (_, i) => {
+ const start = Math.floor(i * step);
+ const end = Math.floor((i + 1) * step);
+ const slice = samples.slice(start, end);
+ return slice.length > 0 ? Math.max(...slice) : 0.15;
+ });
}
-/**
- * Custom React hook for recording voice messages using the MediaRecorder API.
- * It manages the recording state, audio data, and provides functions to control the recording process (start, pause, stop, resume, play, etc.).
- * It also handles audio visualization by analyzing the audio stream and generating levels for a visualizer.
- * The hook supports multiple audio codecs and generates appropriate file extensions based on the supported codec.
- */
export function useVoiceRecorder(options: UseVoiceRecorderOptions = {}): UseVoiceRecorderReturn {
const { autoStart = true, onStop, onDelete } = options;
@@ -107,7 +114,9 @@ export function useVoiceRecorder(options: UseVoiceRecorderOptions = {}): UseVoic
}
frameCountRef.current = 0;
if (audioContextRef.current) {
- audioContextRef.current.close().catch(() => {});
+ if (audioContextRef.current.state !== 'closed') {
+ audioContextRef.current.suspend().catch(() => {});
+ }
audioContextRef.current = null;
}
analyserRef.current = null;
@@ -210,7 +219,7 @@ export function useVoiceRecorder(options: UseVoiceRecorderOptions = {}): UseVoic
const setupAudioGraph = useCallback(
(stream: MediaStream): MediaStream => {
- const audioContext = new AudioContext();
+ const audioContext = getSharedAudioContext();
audioContextRef.current = audioContext;
const source = audioContext.createMediaStreamSource(stream);
const analyser = audioContext.createAnalyser();
@@ -227,7 +236,9 @@ export function useVoiceRecorder(options: UseVoiceRecorderOptions = {}): UseVoic
source.connect(analyser);
analyser.connect(destination);
- audioContext.resume().catch(() => {});
+ if (audioContext.state === 'suspended') {
+ audioContext.resume().catch(() => {});
+ }
animateLevels();
return destination.stream;
@@ -237,7 +248,7 @@ export function useVoiceRecorder(options: UseVoiceRecorderOptions = {}): UseVoic
const setupPlaybackGraph = useCallback(
(audio: HTMLAudioElement) => {
- const audioContext = new AudioContext();
+ const audioContext = getSharedAudioContext();
audioContextRef.current = audioContext;
const source = audioContext.createMediaElementSource(audio);
const analyser = audioContext.createAnalyser();
@@ -249,7 +260,9 @@ export function useVoiceRecorder(options: UseVoiceRecorderOptions = {}): UseVoic
dataArrayRef.current = dataArray;
source.connect(analyser);
analyser.connect(audioContext.destination);
- audioContext.resume().catch(() => {});
+ if (audioContext.state === 'suspended') {
+ audioContext.resume().catch(() => {});
+ }
animateLevels();
},
[animateLevels]
diff --git a/src/app/state/room/roomInputDrafts.ts b/src/app/state/room/roomInputDrafts.ts
index 51c3cfaf5..4b167f220 100644
--- a/src/app/state/room/roomInputDrafts.ts
+++ b/src/app/state/room/roomInputDrafts.ts
@@ -9,6 +9,8 @@ import { createListAtom } from '$state/list';
export type TUploadMetadata = {
markedAsSpoiler: boolean;
+ waveform?: number[];
+ audioDuration?: number;
};
export type TUploadItem = {
diff --git a/src/app/utils/debug.ts b/src/app/utils/debug.ts
index 6f7f2b367..916cc1e7f 100644
--- a/src/app/utils/debug.ts
+++ b/src/app/utils/debug.ts
@@ -8,7 +8,8 @@
* localStorage.removeItem('sable_debug'); location.reload();
*/
-const isDebug = (): boolean => localStorage.getItem('sable_debug') === '1';
+export const isDebug = (): boolean =>
+ import.meta.env.DEV || localStorage.getItem('sable_debug') === '1';
type LogLevel = 'log' | 'warn' | 'error';