add web visualizer (#314)

* add web visualizer

* fallback to simple model

* fewer samples, hopefully more efficient

* Use audiomotion analyzer

- Note: fixed to 4.1.1 because 4.2.0 uses ESM, which breaks in the current workflow...

* revert publish changes

* r2

* don't massively change package.json

* lazy
This commit is contained in:
Kendall Garner 2024-09-09 01:25:01 +00:00 committed by GitHub
parent fbac33ceba
commit 74aa88e082
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
9 changed files with 172 additions and 33 deletions

16
package-lock.json generated
View file

@ -28,6 +28,7 @@
"@tanstack/react-query-persist-client": "^4.32.1",
"@ts-rest/core": "^3.23.0",
"@xhayper/discord-rpc": "^1.0.24",
"audiomotion-analyzer": "^4.5.0",
"auto-text-size": "^0.2.3",
"axios": "^1.6.0",
"clsx": "^2.0.0",
@ -6740,6 +6741,16 @@
"node": ">=10.12.0"
}
},
"node_modules/audiomotion-analyzer": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/audiomotion-analyzer/-/audiomotion-analyzer-4.5.0.tgz",
"integrity": "sha512-qnmB8TSbrxYkTbFgsQeeym0Z/suQx4c0jFg9Yh5+gaPw6J4AFLdfFpagdnDbtNEsj6K7BntgsC3bkdut5rxozg==",
"license": "AGPL-3.0-or-later",
"funding": {
"type": "Ko-fi",
"url": "https://ko-fi.com/hvianna"
}
},
"node_modules/auto-text-size": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/auto-text-size/-/auto-text-size-0.2.3.tgz",
@ -28672,6 +28683,11 @@
"resolved": "https://registry.npmjs.org/atomically/-/atomically-1.7.0.tgz",
"integrity": "sha512-Xcz9l0z7y9yQ9rdDaxlmaI4uJHf/T8g9hOEzJcsEqX2SjCj4J20uK7+ldkDHMbpJDK76wF7xEIgxc/vSlsfw5w=="
},
"audiomotion-analyzer": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/audiomotion-analyzer/-/audiomotion-analyzer-4.5.0.tgz",
"integrity": "sha512-qnmB8TSbrxYkTbFgsQeeym0Z/suQx4c0jFg9Yh5+gaPw6J4AFLdfFpagdnDbtNEsj6K7BntgsC3bkdut5rxozg=="
},
"auto-text-size": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/auto-text-size/-/auto-text-size-0.2.3.tgz",

View file

@ -310,6 +310,7 @@
"@ts-rest/core": "^3.23.0",
"@xhayper/discord-rpc": "^1.0.24",
"auto-text-size": "^0.2.3",
"audiomotion-analyzer": "^4.5.0",
"axios": "^1.6.0",
"clsx": "^2.0.0",
"cmdk": "^0.2.0",

View file

@ -1,4 +1,4 @@
import { useEffect, useMemo, useRef } from 'react';
import { useEffect, useMemo, useState, useRef } from 'react';
import { ClientSideRowModelModule } from '@ag-grid-community/client-side-row-model';
import { ModuleRegistry } from '@ag-grid-community/core';
import { InfiniteRowModelModule } from '@ag-grid-community/infinite-row-model';
@ -21,8 +21,9 @@ import { useHandlePlayQueueAdd } from '/@/renderer/features/player/hooks/use-han
import { PlayQueueHandlerContext } from '/@/renderer/features/player';
import { getMpvProperties } from '/@/renderer/features/settings/components/playback/mpv-settings';
import { PlayerState, useCssSettings, usePlayerStore, useQueueControls } from '/@/renderer/store';
import { FontType, PlaybackType, PlayerStatus } from '/@/renderer/types';
import { FontType, PlaybackType, PlayerStatus, WebAudio } from '/@/renderer/types';
import '@ag-grid-community/styles/ag-grid.css';
import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
import { useDiscordRpc } from '/@/renderer/features/discord-rpc/use-discord-rpc';
import i18n from '/@/i18n/i18n';
import { useServerVersion } from '/@/renderer/hooks/use-server-version';
@ -91,6 +92,8 @@ export const App = () => {
}
}, [builtIn, custom, system, type]);
const [webAudio, setWebAudio] = useState<WebAudio>();
useEffect(() => {
if (enabled && content) {
// Yes, CSS is sanitized here as well. Prevent a user from changing the
@ -125,6 +128,10 @@ export const App = () => {
return { handlePlayQueueAdd };
}, [handlePlayQueueAdd]);
const webAudioProvider = useMemo(() => {
return { setWebAudio, webAudio };
}, [webAudio]);
// Start the mpv instance on startup
useEffect(() => {
const initializeMpv = async () => {
@ -278,7 +285,9 @@ export const App = () => {
>
<PlayQueueHandlerContext.Provider value={providerValue}>
<ContextMenuProvider>
<AppRouter />
<WebAudioContext.Provider value={webAudioProvider}>
<AppRouter />
</WebAudioContext.Provider>{' '}
</ContextMenuProvider>
</PlayQueueHandlerContext.Provider>
<IsUpdatedDialog />

View file

@ -18,6 +18,7 @@ import {
import { useSettingsStore, useSettingsStoreActions } from '/@/renderer/store/settings.store';
import type { CrossfadeStyle } from '/@/renderer/types';
import { PlaybackStyle, PlayerStatus } from '/@/renderer/types';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { getServerById, TranscodingConfig, usePlaybackSettings, useSpeed } from '/@/renderer/store';
import { toast } from '/@/renderer/components/toast';
import { api } from '/@/renderer/api';
@ -44,11 +45,6 @@ const getDuration = (ref: any) => {
return ref.current?.player?.player?.player?.duration;
};
type WebAudio = {
context: AudioContext;
gain: GainNode;
};
// Credits: https://gist.github.com/novwhisky/8a1a0168b94f3b6abfaa?permalink_comment_id=1551393#gistcomment-1551393
// This is used so that the player will always have an <audio> element. This means that
// player1Source and player2Source are connected BEFORE the user presses play for
@ -116,7 +112,7 @@ export const AudioPlayer = forwardRef(
const [isTransitioning, setIsTransitioning] = useState(false);
const audioDeviceId = useSettingsStore((state) => state.playback.audioDeviceId);
const playback = useSettingsStore((state) => state.playback.mpvProperties);
const useWebAudio = useSettingsStore((state) => state.playback.webAudio);
const shouldUseWebAudio = useSettingsStore((state) => state.playback.webAudio);
const { resetSampleRate } = useSettingsStoreActions();
const playbackSpeed = useSpeed();
const { transcode } = usePlaybackSettings();
@ -124,7 +120,7 @@ export const AudioPlayer = forwardRef(
const stream1 = useSongUrl(transcode, currentPlayer === 1, player1);
const stream2 = useSongUrl(transcode, currentPlayer === 2, player2);
const [webAudio, setWebAudio] = useState<WebAudio | null>(null);
const { webAudio, setWebAudio } = useWebAudio();
const [player1Source, setPlayer1Source] = useState<MediaElementAudioSourceNode | null>(
null,
);
@ -181,7 +177,7 @@ export const AudioPlayer = forwardRef(
);
useEffect(() => {
if (useWebAudio && 'AudioContext' in window) {
if (shouldUseWebAudio && 'AudioContext' in window) {
let context: AudioContext;
try {
@ -200,7 +196,7 @@ export const AudioPlayer = forwardRef(
const gain = context.createGain();
gain.connect(context.destination);
setWebAudio({ context, gain });
setWebAudio!({ context, gain });
return () => {
return context.close();

View file

@ -11,7 +11,16 @@ import {
useFullScreenPlayerStoreActions,
} from '/@/renderer/store/full-screen-player.store';
import { Lyrics } from '/@/renderer/features/lyrics/lyrics';
import { FullScreenSimilarSongs } from '/@/renderer/features/player/components/full-screen-similar-songs';
import { Visualizer } from '/@/renderer/features/player/components/visualizer';
import { lazy, useMemo } from 'react';
import { usePlaybackSettings } from '/@/renderer/store';
import { PlaybackType } from '/@/renderer/types';
const FullScreenSimilarSongs = lazy(() =>
import('/@/renderer/features/player/components/full-screen-similar-songs').then((module) => ({
default: module.FullScreenSimilarSongs,
})),
);
const QueueContainer = styled.div`
position: relative;
@ -61,27 +70,41 @@ export const FullScreenPlayerQueue = () => {
const { t } = useTranslation();
const { activeTab, opacity } = useFullScreenPlayerStore();
const { setStore } = useFullScreenPlayerStoreActions();
const { type, webAudio } = usePlaybackSettings();
const headerItems = [
{
active: activeTab === 'queue',
icon: <RiFileMusicLine size="1.5rem" />,
label: t('page.fullscreenPlayer.upNext'),
onClick: () => setStore({ activeTab: 'queue' }),
},
{
active: activeTab === 'related',
icon: <HiOutlineQueueList size="1.5rem" />,
label: t('page.fullscreenPlayer.related'),
onClick: () => setStore({ activeTab: 'related' }),
},
{
active: activeTab === 'lyrics',
icon: <RiFileTextLine size="1.5rem" />,
label: t('page.fullscreenPlayer.lyrics'),
onClick: () => setStore({ activeTab: 'lyrics' }),
},
];
const headerItems = useMemo(() => {
const items = [
{
active: activeTab === 'queue',
icon: <RiFileMusicLine size="1.5rem" />,
label: t('page.fullscreenPlayer.upNext'),
onClick: () => setStore({ activeTab: 'queue' }),
},
{
active: activeTab === 'related',
icon: <HiOutlineQueueList size="1.5rem" />,
label: t('page.fullscreenPlayer.related'),
onClick: () => setStore({ activeTab: 'related' }),
},
{
active: activeTab === 'lyrics',
icon: <RiFileTextLine size="1.5rem" />,
label: t('page.fullscreenPlayer.lyrics'),
onClick: () => setStore({ activeTab: 'lyrics' }),
},
];
if (type === PlaybackType.WEB && webAudio) {
items.push({
active: activeTab === 'visualizer',
icon: <RiFileTextLine size="1.5rem" />,
label: 'Visualizer',
onClick: () => setStore({ activeTab: 'visualizer' }),
});
}
return items;
}, [activeTab, setStore, t, type, webAudio]);
return (
<GridContainer
@ -91,6 +114,7 @@ export const FullScreenPlayerQueue = () => {
<Group
grow
align="center"
className="full-screen-player-queue-header"
position="center"
>
{headerItems.map((item) => (
@ -127,6 +151,8 @@ export const FullScreenPlayerQueue = () => {
</QueueContainer>
) : activeTab === 'lyrics' ? (
<Lyrics />
) : activeTab === 'visualizer' && type === PlaybackType.WEB && webAudio ? (
<Visualizer />
) : null}
</GridContainer>
);

View file

@ -0,0 +1,72 @@
import { createRef, useCallback, useEffect, useRef, useState } from 'react';
import AudioMotionAnalyzer from 'audiomotion-analyzer';
import styled from 'styled-components';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { useSettingsStore } from '/@/renderer/store';
// Square wrapper for the visualizer canvas. Centered in the fullscreen
// queue area; the inner canvas (created by audiomotion-analyzer) scales
// to fill the container's width.
const StyledContainer = styled.div`
margin: auto;
max-width: 100%;
canvas {
margin: auto;
width: 100%;
}
`;
/**
 * Audio visualizer rendered in the fullscreen player. Taps the shared
 * Web Audio gain node (published by the audio player via WebAudioContext)
 * and feeds it into an AudioMotionAnalyzer instance that draws into this
 * component's container div.
 */
export const Visualizer = () => {
    const { webAudio } = useWebAudio();
    // useRef keeps one stable ref object for the component's lifetime.
    // The original used createRef(), which allocates a fresh ref on every
    // render and (being listed in the effect deps) forced the analyzer
    // effect to re-run on each render.
    const canvasRef = useRef<HTMLDivElement | null>(null);
    const [motion, setMotion] = useState<AudioMotionAnalyzer>();
    // Edge length (px) of the square canvas container; 500 until first resize.
    const [length, setLength] = useState(500);

    // Create the analyzer once the audio graph and the container div exist.
    // The `!motion` guard ensures a single instance per mount.
    useEffect(() => {
        const { context, gain } = webAudio || {};

        if (gain && context && canvasRef.current && !motion) {
            const audioMotion = new AudioMotionAnalyzer(canvasRef.current, {
                ansiBands: true,
                audioCtx: context,
                // Audio output is handled by the player; the analyzer only
                // reads the signal and must not route it to the speakers.
                connectSpeakers: false,
                gradient: 'prism',
                mode: 4,
                showPeaks: false,
                smoothing: 0.8,
            });

            setMotion(audioMotion);
            // Tap the player's master gain so the analyzer sees the mix.
            audioMotion.connectInput(gain);
        }

        // NOTE(review): the analyzer is never disconnected or destroyed, so
        // each mount leaves an extra input attached to the gain node —
        // consider audioMotion.destroy() on unmount (needs restructuring so
        // cleanup does not fire on the setMotion-triggered re-run).
        return () => {};
    }, [motion, webAudio]);

    // Fit the visualizer to the largest square that fits inside the
    // fullscreen queue body below its header (30px padding allowance).
    const resize = useCallback(() => {
        const body = document.querySelector('.full-screen-player-queue-container');
        const header = document.querySelector('.full-screen-player-queue-header');

        if (body && header) {
            const width = body.clientWidth - 30;
            const height = body.clientHeight - header.clientHeight - 30;
            setLength(Math.min(width, height));
        }
    }, []);

    useEffect(() => {
        resize();
        window.addEventListener('resize', resize);

        return () => {
            window.removeEventListener('resize', resize);
        };
    }, [resize]);

    return (
        <StyledContainer
            ref={canvasRef}
            style={{ height: length, width: length }}
        />
    );
};

View file

@ -0,0 +1,7 @@
import { createContext } from 'react';
import { WebAudio } from '/@/renderer/types';
// Shape of the shared web-audio state: the current handles plus the
// setter the audio player uses to publish them.
type WebAudioContextValue = {
    setWebAudio?: (audio: WebAudio) => void;
    webAudio?: WebAudio;
};

// Defaults to {} — both members stay undefined until the audio player
// publishes a live AudioContext/GainNode pair via setWebAudio.
export const WebAudioContext = createContext<WebAudioContextValue>({});

View file

@ -0,0 +1,7 @@
import { useContext } from 'react';
import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
export const useWebAudio = () => {
const { webAudio, setWebAudio } = useContext(WebAudioContext);
return { setWebAudio, webAudio };
};

View file

@ -235,3 +235,8 @@ export enum AuthState {
LOADING = 'loading',
VALID = 'valid',
}
// Handles to the player's Web Audio graph, shared via WebAudioContext so
// features such as the visualizer can tap the output signal.
export type WebAudio = {
    // The AudioContext that owns the audio graph.
    context: AudioContext;
    // Master gain node, connected to context.destination by the audio player.
    gain: GainNode;
};