parent e9f197740d
commit 6637ecb460

3 changed files with 117 additions and 71 deletions
app/javascript/mastodon/features/audio/index.tsx

@@ -17,6 +17,7 @@ import { Blurhash } from 'mastodon/components/blurhash';
 import { Icon } from 'mastodon/components/icon';
 import { SpoilerButton } from 'mastodon/components/spoiler_button';
 import { formatTime, getPointerPosition } from 'mastodon/features/video';
+import { useAudioContext } from 'mastodon/hooks/useAudioContext';
 import { useAudioVisualizer } from 'mastodon/hooks/useAudioVisualizer';
 import {
   displayMedia,
@@ -119,12 +120,17 @@ export const Audio: React.FC<{
   const seekRef = useRef<HTMLDivElement>(null);
   const volumeRef = useRef<HTMLDivElement>(null);
   const hoverTimeoutRef = useRef<ReturnType<typeof setTimeout> | null>();
-  const [resumeAudio, suspendAudio, frequencyBands] = useAudioVisualizer(
-    audioRef,
-    3,
-  );
   const accessibilityId = useId();
 
+  const { audioContextRef, sourceRef, gainNodeRef, playAudio, pauseAudio } =
+    useAudioContext({ audioElementRef: audioRef });
+
+  const frequencyBands = useAudioVisualizer({
+    audioContextRef,
+    sourceRef,
+    numBands: 3,
+  });
+
   const [style, spring] = useSpring(() => ({
     progress: '0%',
     buffer: '0%',
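Reviewer note: taken together, the hunk above replaces the old tuple-returning visualizer call with two cooperating hooks. A minimal sketch of the new wiring, assuming nothing beyond the signatures introduced in this commit — the `MiniPlayer` component, its `src` prop, and the JSX are illustrative only:

```tsx
import React, { useRef } from 'react';

import { useAudioContext } from 'mastodon/hooks/useAudioContext';
import { useAudioVisualizer } from 'mastodon/hooks/useAudioVisualizer';

// Illustrative component, not part of the commit.
const MiniPlayer: React.FC<{ src: string }> = ({ src }) => {
  const audioRef = useRef<HTMLAudioElement | null>(null);

  // One hook owns the Web Audio graph and the play/pause actions...
  const { audioContextRef, sourceRef, playAudio, pauseAudio } =
    useAudioContext({ audioElementRef: audioRef });

  // ...the other only consumes the shared refs to compute visualizer bands.
  const frequencyBands = useAudioVisualizer({
    audioContextRef,
    sourceRef,
    numBands: 3,
  });

  return (
    <div>
      <audio ref={audioRef} src={src} crossOrigin='anonymous' />
      <button onClick={playAudio}>Play</button>
      <button onClick={pauseAudio}>Pause</button>
      <span>{frequencyBands.map((band) => band.toFixed(2)).join(' ')}</span>
    </div>
  );
};
```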
@@ -152,6 +158,9 @@ export const Audio: React.FC<{
       restoreVolume(audioRef.current);
       setVolume(audioRef.current.volume);
       setMuted(audioRef.current.muted);
+      if (gainNodeRef.current) {
+        gainNodeRef.current.gain.value = audioRef.current.volume;
+      }
       void spring.start({
         volume: `${audioRef.current.volume * 100}%`,
         immediate: reduceMotion,
@@ -159,15 +168,14 @@ export const Audio: React.FC<{
       }
     },
     [
-      spring,
-      setVolume,
-      setMuted,
+      deployPictureInPicture,
       src,
       poster,
       backgroundColor,
-      accentColor,
       foregroundColor,
-      deployPictureInPicture,
+      accentColor,
+      gainNodeRef,
+      spring,
     ],
   );
 
@@ -178,7 +186,11 @@ export const Audio: React.FC<{
 
     audioRef.current.volume = volume;
     audioRef.current.muted = muted;
-  }, [volume, muted]);
+
+    if (gainNodeRef.current) {
+      gainNodeRef.current.gain.value = muted ? 0 : volume;
+    }
+  }, [volume, muted, gainNodeRef]);
 
   useEffect(() => {
     if (typeof visible !== 'undefined') {
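Reviewer note: the effect above mirrors the element's volume/muted state onto the `GainNode` that now sits between the media-element source and the destination. A plausible motivation (an assumption — the commit does not state it) is that on some platforms, e.g. iOS, `HTMLMediaElement.volume` is effectively read-only, so the gain node is the reliable control point. A hedged sketch of the pattern, with a hypothetical helper name:

```ts
// Illustrative helper (not part of the commit): apply a volume/mute state
// both to the media element and to the gain node feeding the destination.
const applyVolume = (
  audio: HTMLAudioElement,
  gainNode: GainNode | undefined,
  volume: number,
  muted: boolean,
): void => {
  audio.volume = volume;
  audio.muted = muted;

  // Mirrors the effect above: gain drops to 0 when muted,
  // otherwise tracks the element volume.
  if (gainNode) {
    gainNode.gain.value = muted ? 0 : volume;
  }
};
```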
@@ -192,11 +204,10 @@ export const Audio: React.FC<{
   }, [visible, sensitive]);
 
   useEffect(() => {
-    if (!revealed && audioRef.current) {
-      audioRef.current.pause();
-      suspendAudio();
+    if (!revealed) {
+      pauseAudio();
     }
-  }, [suspendAudio, revealed]);
+  }, [pauseAudio, revealed]);
 
   useEffect(() => {
     let nextFrame: ReturnType<typeof requestAnimationFrame>;
@@ -228,13 +239,11 @@ export const Audio: React.FC<{
     }
 
     if (audioRef.current.paused) {
-      resumeAudio();
-      void audioRef.current.play();
+      playAudio();
     } else {
-      audioRef.current.pause();
-      suspendAudio();
+      pauseAudio();
     }
-  }, [resumeAudio, suspendAudio]);
+  }, [playAudio, pauseAudio]);
 
   const handlePlay = useCallback(() => {
     setPaused(false);
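Reviewer note: collapsing `audioRef.current.play()` + `resumeAudio()` into a single `playAudio()` matters because browser autoplay policies can leave an `AudioContext` in the `suspended` state until a user gesture; resuming the context together with `play()` is what actually lets sound through the Web Audio graph. A minimal sketch of the pattern the hook encapsulates (the function name is illustrative):

```ts
// Illustrative sketch of the play-through-context pattern used by playAudio().
const playThroughContext = async (
  audio: HTMLAudioElement,
  context: AudioContext,
): Promise<void> => {
  // Autoplay policies may leave the context suspended until a user gesture.
  if (context.state === 'suspended') {
    await context.resume();
  }

  await audio.play();
};
```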
@@ -349,8 +358,7 @@ export const Audio: React.FC<{
       document.removeEventListener('mouseup', handleSeekMouseUp, true);
 
       setDragging(false);
-      resumeAudio();
-      void audioRef.current?.play();
+      playAudio();
     };
 
     const handleSeekMouseMove = (e: MouseEvent) => {
@@ -377,7 +385,7 @@ export const Audio: React.FC<{
       e.preventDefault();
       e.stopPropagation();
     },
-    [setDragging, spring, resumeAudio],
+    [playAudio, spring],
   );
 
   const handleMouseEnter = useCallback(() => {
@@ -446,10 +454,9 @@ export const Audio: React.FC<{
 
   const handleCanPlayThrough = useCallback(() => {
     if (startPlaying) {
-      resumeAudio();
-      void audioRef.current?.play();
+      playAudio();
    }
-  }, [startPlaying, resumeAudio]);
+  }, [startPlaying, playAudio]);
 
   const seekBy = (time: number) => {
     if (!audioRef.current) {
@@ -492,7 +499,7 @@ export const Audio: React.FC<{
       return;
     }
 
-    const newVolume = audioRef.current.volume + step;
+    const newVolume = Math.max(0, audioRef.current.volume + step);
 
     if (!isNaN(newVolume)) {
       audioRef.current.volume = newVolume;
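Reviewer note: the `Math.max(0, …)` clamp above is not cosmetic — assigning `HTMLMediaElement.volume` a value outside the [0, 1] range throws an `IndexSizeError`, so stepping the volume down past zero would raise an exception rather than just bottoming out. A quick illustration:

```ts
const audio = new Audio();

audio.volume = Math.max(0, audio.volume - 0.15); // clamps safely near zero
// audio.volume = -0.05; // would throw a DOMException (IndexSizeError)
```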
app/javascript/mastodon/hooks/useAudioContext.ts (new file, 62 additions)
@@ -0,0 +1,62 @@
+import { useCallback, useEffect, useRef } from 'react';
+
+interface AudioContextOptions {
+  audioElementRef: React.MutableRefObject<HTMLAudioElement | null>;
+}
+
+/**
+ * Create and return an audio context instance for a given audio element [0].
+ * Also returns an associated audio source, a gain node, and play and pause actions
+ * which should be used instead of `audioElementRef.current.play/pause()`.
+ *
+ * [0] https://developer.mozilla.org/en-US/docs/Web/API/AudioContext
+ */
+
+export const useAudioContext = ({ audioElementRef }: AudioContextOptions) => {
+  const audioContextRef = useRef<AudioContext>();
+  const sourceRef = useRef<MediaElementAudioSourceNode>();
+  const gainNodeRef = useRef<GainNode>();
+
+  useEffect(() => {
+    if (!audioElementRef.current) {
+      return;
+    }
+
+    const context = audioContextRef.current ?? new AudioContext();
+    const source =
+      sourceRef.current ??
+      context.createMediaElementSource(audioElementRef.current);
+
+    const gainNode = context.createGain();
+    gainNode.connect(context.destination);
+    source.connect(gainNode);
+
+    audioContextRef.current = context;
+    gainNodeRef.current = gainNode;
+    sourceRef.current = source;
+
+    return () => {
+      if (context.state !== 'closed') {
+        void context.close();
+      }
+    };
+  }, [audioElementRef]);
+
+  const playAudio = useCallback(() => {
+    void audioElementRef.current?.play();
+    void audioContextRef.current?.resume();
+  }, [audioElementRef]);
+
+  const pauseAudio = useCallback(() => {
+    audioElementRef.current?.pause();
+    void audioContextRef.current?.suspend();
+  }, [audioElementRef]);
+
+  return {
+    audioContextRef,
+    sourceRef,
+    gainNodeRef,
+    playAudio,
+    pauseAudio,
+  };
+};
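Reviewer note on the new hook: `createMediaElementSource()` may only be called once per media element — a second call throws an `InvalidStateError` — which is why the effect reuses `sourceRef.current` instead of recreating it, and why the cleanup guards on `context.state` (calling `close()` on an already-closed context rejects with an `InvalidStateError` as well). A small standalone illustration; the element and URL are hypothetical:

```ts
const context = new AudioContext();
const element = new Audio('/sounds/sample.mp3'); // hypothetical URL

// The first call permanently associates the element with this context...
const source = context.createMediaElementSource(element);
source.connect(context.destination);

// ...a second call on the same element would throw an InvalidStateError:
// context.createMediaElementSource(element);

// Closing an already-closed context rejects, hence the state guard.
if (context.state !== 'closed') {
  void context.close();
}
```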
app/javascript/mastodon/hooks/useAudioVisualizer.ts

@@ -1,4 +1,4 @@
-import { useState, useEffect, useRef, useCallback } from 'react';
+import { useState, useEffect, useRef } from 'react';
 
 const normalizeFrequencies = (arr: Float32Array): number[] => {
   return new Array(...arr).map((value: number) => {
@@ -10,12 +10,17 @@ const normalizeFrequencies = (arr: Float32Array): number[] => {
   });
 };
 
-export const useAudioVisualizer = (
-  ref: React.MutableRefObject<HTMLAudioElement | null>,
-  numBands: number,
-) => {
-  const audioContextRef = useRef<AudioContext>();
-  const sourceRef = useRef<MediaElementAudioSourceNode>();
+interface AudioVisualiserOptions {
+  audioContextRef: React.MutableRefObject<AudioContext | undefined>;
+  sourceRef: React.MutableRefObject<MediaElementAudioSourceNode | undefined>;
+  numBands: number;
+}
+
+export const useAudioVisualizer = ({
+  audioContextRef,
+  sourceRef,
+  numBands,
+}: AudioVisualiserOptions) => {
   const analyzerRef = useRef<AnalyserNode>();
 
   const [frequencyBands, setFrequencyBands] = useState<number[]>(
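Reviewer note: the signature change above turns positional arguments into an options object, and the hook no longer owns the `AudioContext`. An illustrative wrapper showing the new call shape (the wrapper hook name is hypothetical; the old call was `useAudioVisualizer(audioRef, 3)` and returned a `[resumeAudio, suspendAudio, frequencyBands]` tuple):

```ts
import type React from 'react';

import { useAudioContext } from 'mastodon/hooks/useAudioContext';
import { useAudioVisualizer } from 'mastodon/hooks/useAudioVisualizer';

// Hypothetical wrapper: wire the shared refs from useAudioContext into the
// visualizer and return only the computed bands.
const useFrequencyBands = (
  audioElementRef: React.MutableRefObject<HTMLAudioElement | null>,
): number[] => {
  const { audioContextRef, sourceRef } = useAudioContext({ audioElementRef });

  return useAudioVisualizer({ audioContextRef, sourceRef, numBands: 3 });
};
```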
@@ -23,47 +28,31 @@
   );
 
   useEffect(() => {
-    if (!audioContextRef.current) {
-      audioContextRef.current = new AudioContext();
+    if (audioContextRef.current) {
       analyzerRef.current = audioContextRef.current.createAnalyser();
       analyzerRef.current.smoothingTimeConstant = 0.6;
       analyzerRef.current.fftSize = 2048;
     }
-
-    return () => {
-      if (audioContextRef.current) {
-        void audioContextRef.current.close();
-      }
-    };
-  }, []);
+  }, [audioContextRef]);
 
   useEffect(() => {
-    if (
-      audioContextRef.current &&
-      analyzerRef.current &&
-      !sourceRef.current &&
-      ref.current
-    ) {
-      sourceRef.current = audioContextRef.current.createMediaElementSource(
-        ref.current,
-      );
+    if (analyzerRef.current && sourceRef.current) {
       sourceRef.current.connect(analyzerRef.current);
-      sourceRef.current.connect(audioContextRef.current.destination);
     }
+    const currentSource = sourceRef.current;
 
     return () => {
-      if (sourceRef.current) {
-        sourceRef.current.disconnect();
+      if (currentSource && analyzerRef.current) {
+        currentSource.disconnect(analyzerRef.current);
       }
     };
-  }, [ref]);
+  }, [audioContextRef, sourceRef]);
 
   useEffect(() => {
-    const source = sourceRef.current;
     const analyzer = analyzerRef.current;
     const context = audioContextRef.current;
 
-    if (!source || !analyzer || !context) {
+    if (!analyzer || !context) {
       return;
     }
 
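Reviewer note: the effect that follows (mostly unchanged, so not fully shown in this diff) polls the analyser on an interval and normalizes the readings into `numBands` values via `normalizeFrequencies`. A self-contained sketch of that kind of computation, assuming the same `AnalyserNode` setup as above — the function name and the exact band averaging are illustrative, not the hook's code:

```ts
// Illustrative only: average dB readings per band from an AnalyserNode.
const readBands = (analyzer: AnalyserNode, numBands: number): number[] => {
  const data = new Float32Array(analyzer.frequencyBinCount);
  analyzer.getFloatFrequencyData(data); // dB values, typically -100..0

  const bandSize = Math.floor(data.length / numBands);

  return Array.from({ length: numBands }, (_, band) => {
    let sum = 0;
    for (let i = band * bandSize; i < (band + 1) * bandSize; i++) {
      sum += data[i] ?? 0;
    }
    return sum / bandSize;
  });
};
```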
@@ -94,19 +83,7 @@
     return () => {
       clearInterval(updateInterval);
     };
-  }, [numBands]);
+  }, [numBands, audioContextRef]);
 
-  const resume = useCallback(() => {
-    if (audioContextRef.current) {
-      void audioContextRef.current.resume();
-    }
-  }, []);
-
-  const suspend = useCallback(() => {
-    if (audioContextRef.current) {
-      void audioContextRef.current.suspend();
-    }
-  }, []);
-
-  return [resume, suspend, frequencyBands] as const;
+  return frequencyBands;
 };