add web visualizer (#314)
* add web visualizer
* fallback to simple model
* less samples, hopefully more efficient
* Use audiomotion analyzer - Note: fixed to 4.1.1 because 4.2.0 uses esm which breaks in the current workflow...
* revert publish changes
* r2
* don't massively change package.json
* lazy
parent fbac33ceba
commit 74aa88e082
9 changed files with 172 additions and 33 deletions
package-lock.json (generated, 16 lines changed)

@@ -28,6 +28,7 @@
 "@tanstack/react-query-persist-client": "^4.32.1",
 "@ts-rest/core": "^3.23.0",
 "@xhayper/discord-rpc": "^1.0.24",
+"audiomotion-analyzer": "^4.5.0",
 "auto-text-size": "^0.2.3",
 "axios": "^1.6.0",
 "clsx": "^2.0.0",

@@ -6740,6 +6741,16 @@
 "node": ">=10.12.0"
 }
 },
+"node_modules/audiomotion-analyzer": {
+"version": "4.5.0",
+"resolved": "https://registry.npmjs.org/audiomotion-analyzer/-/audiomotion-analyzer-4.5.0.tgz",
+"integrity": "sha512-qnmB8TSbrxYkTbFgsQeeym0Z/suQx4c0jFg9Yh5+gaPw6J4AFLdfFpagdnDbtNEsj6K7BntgsC3bkdut5rxozg==",
+"license": "AGPL-3.0-or-later",
+"funding": {
+"type": "Ko-fi",
+"url": "https://ko-fi.com/hvianna"
+}
+},
 "node_modules/auto-text-size": {
 "version": "0.2.3",
 "resolved": "https://registry.npmjs.org/auto-text-size/-/auto-text-size-0.2.3.tgz",

@@ -28672,6 +28683,11 @@
 "resolved": "https://registry.npmjs.org/atomically/-/atomically-1.7.0.tgz",
 "integrity": "sha512-Xcz9l0z7y9yQ9rdDaxlmaI4uJHf/T8g9hOEzJcsEqX2SjCj4J20uK7+ldkDHMbpJDK76wF7xEIgxc/vSlsfw5w=="
 },
+"audiomotion-analyzer": {
+"version": "4.5.0",
+"resolved": "https://registry.npmjs.org/audiomotion-analyzer/-/audiomotion-analyzer-4.5.0.tgz",
+"integrity": "sha512-qnmB8TSbrxYkTbFgsQeeym0Z/suQx4c0jFg9Yh5+gaPw6J4AFLdfFpagdnDbtNEsj6K7BntgsC3bkdut5rxozg=="
+},
 "auto-text-size": {
 "version": "0.2.3",
 "resolved": "https://registry.npmjs.org/auto-text-size/-/auto-text-size-0.2.3.tgz",

@@ -310,6 +310,7 @@
 "@ts-rest/core": "^3.23.0",
 "@xhayper/discord-rpc": "^1.0.24",
 "auto-text-size": "^0.2.3",
+"audiomotion-analyzer": "^4.5.0",
 "axios": "^1.6.0",
 "clsx": "^2.0.0",
 "cmdk": "^0.2.0",
@@ -1,4 +1,4 @@
-import { useEffect, useMemo, useRef } from 'react';
+import { useEffect, useMemo, useState, useRef } from 'react';
 import { ClientSideRowModelModule } from '@ag-grid-community/client-side-row-model';
 import { ModuleRegistry } from '@ag-grid-community/core';
 import { InfiniteRowModelModule } from '@ag-grid-community/infinite-row-model';

@@ -21,8 +21,9 @@ import { useHandlePlayQueueAdd } from '/@/renderer/features/player/hooks/use-han
 import { PlayQueueHandlerContext } from '/@/renderer/features/player';
 import { getMpvProperties } from '/@/renderer/features/settings/components/playback/mpv-settings';
 import { PlayerState, useCssSettings, usePlayerStore, useQueueControls } from '/@/renderer/store';
-import { FontType, PlaybackType, PlayerStatus } from '/@/renderer/types';
+import { FontType, PlaybackType, PlayerStatus, WebAudio } from '/@/renderer/types';
 import '@ag-grid-community/styles/ag-grid.css';
+import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
 import { useDiscordRpc } from '/@/renderer/features/discord-rpc/use-discord-rpc';
 import i18n from '/@/i18n/i18n';
 import { useServerVersion } from '/@/renderer/hooks/use-server-version';

@@ -91,6 +92,8 @@ export const App = () => {
 }
 }, [builtIn, custom, system, type]);

+const [webAudio, setWebAudio] = useState<WebAudio>();
+
 useEffect(() => {
 if (enabled && content) {
 // Yes, CSS is sanitized here as well. Prevent a suer from changing the

@@ -125,6 +128,10 @@ export const App = () => {
 return { handlePlayQueueAdd };
 }, [handlePlayQueueAdd]);

+const webAudioProvider = useMemo(() => {
+return { setWebAudio, webAudio };
+}, [webAudio]);
+
 // Start the mpv instance on startup
 useEffect(() => {
 const initializeMpv = async () => {

@@ -278,7 +285,9 @@ export const App = () => {
 >
 <PlayQueueHandlerContext.Provider value={providerValue}>
 <ContextMenuProvider>
+<WebAudioContext.Provider value={webAudioProvider}>
 <AppRouter />
+</WebAudioContext.Provider>{' '}
 </ContextMenuProvider>
 </PlayQueueHandlerContext.Provider>
 <IsUpdatedDialog />
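The App changes above hold the shared web-audio nodes in state and memoize the context value so consumers only re-render when webAudio actually changes. A stripped-down sketch of that provider wiring, assuming nothing beyond what the diff shows (the component name below is hypothetical):

import { ReactNode, useMemo, useState } from 'react';
import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
import { WebAudio } from '/@/renderer/types';

// Sketch of the provider pattern added in App: memoizing the value object keeps
// context consumers from re-rendering on every App render.
export const WebAudioProviderSketch = ({ children }: { children: ReactNode }) => {
    const [webAudio, setWebAudio] = useState<WebAudio>();

    const value = useMemo(() => ({ setWebAudio, webAudio }), [webAudio]);

    return <WebAudioContext.Provider value={value}>{children}</WebAudioContext.Provider>;
};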
@@ -18,6 +18,7 @@ import {
 import { useSettingsStore, useSettingsStoreActions } from '/@/renderer/store/settings.store';
 import type { CrossfadeStyle } from '/@/renderer/types';
 import { PlaybackStyle, PlayerStatus } from '/@/renderer/types';
+import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
 import { getServerById, TranscodingConfig, usePlaybackSettings, useSpeed } from '/@/renderer/store';
 import { toast } from '/@/renderer/components/toast';
 import { api } from '/@/renderer/api';

@@ -44,11 +45,6 @@ const getDuration = (ref: any) => {
 return ref.current?.player?.player?.player?.duration;
 };

-type WebAudio = {
-context: AudioContext;
-gain: GainNode;
-};
-
 // Credits: https://gist.github.com/novwhisky/8a1a0168b94f3b6abfaa?permalink_comment_id=1551393#gistcomment-1551393
 // This is used so that the player will always have an <audio> element. This means that
 // player1Source and player2Source are connected BEFORE the user presses play for

@@ -116,7 +112,7 @@ export const AudioPlayer = forwardRef(
 const [isTransitioning, setIsTransitioning] = useState(false);
 const audioDeviceId = useSettingsStore((state) => state.playback.audioDeviceId);
 const playback = useSettingsStore((state) => state.playback.mpvProperties);
-const useWebAudio = useSettingsStore((state) => state.playback.webAudio);
+const shouldUseWebAudio = useSettingsStore((state) => state.playback.webAudio);
 const { resetSampleRate } = useSettingsStoreActions();
 const playbackSpeed = useSpeed();
 const { transcode } = usePlaybackSettings();

@@ -124,7 +120,7 @@ export const AudioPlayer = forwardRef(
 const stream1 = useSongUrl(transcode, currentPlayer === 1, player1);
 const stream2 = useSongUrl(transcode, currentPlayer === 2, player2);

-const [webAudio, setWebAudio] = useState<WebAudio | null>(null);
+const { webAudio, setWebAudio } = useWebAudio();
 const [player1Source, setPlayer1Source] = useState<MediaElementAudioSourceNode | null>(
 null,
 );

@@ -181,7 +177,7 @@ export const AudioPlayer = forwardRef(
 );

 useEffect(() => {
-if (useWebAudio && 'AudioContext' in window) {
+if (shouldUseWebAudio && 'AudioContext' in window) {
 let context: AudioContext;

 try {

@@ -200,7 +196,7 @@ export const AudioPlayer = forwardRef(
 const gain = context.createGain();
 gain.connect(context.destination);

-setWebAudio({ context, gain });
+setWebAudio!({ context, gain });

 return () => {
 return context.close();
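For orientation, the pattern the audio player relies on here is plain Web Audio: one AudioContext and one GainNode routed to the speakers, with the media elements connected through that gain node so other features can tap the same signal. A minimal sketch under those assumptions (audioEl is an illustrative stand-in for player1/player2, not code from the diff):

// Minimal Web Audio wiring sketch; everything except the standard API names is illustrative.
const context = new AudioContext();
const gain = context.createGain();
gain.connect(context.destination); // speakers

const audioEl = new Audio(); // hypothetical <audio> element standing in for player1/player2
const source = context.createMediaElementSource(audioEl);
source.connect(gain); // route playback through the shared gain node

// The player then publishes the pair via setWebAudio({ context, gain }) so a visualizer
// can attach to `gain` without touching the media elements directly.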
@@ -11,7 +11,16 @@ import {
 useFullScreenPlayerStoreActions,
 } from '/@/renderer/store/full-screen-player.store';
 import { Lyrics } from '/@/renderer/features/lyrics/lyrics';
-import { FullScreenSimilarSongs } from '/@/renderer/features/player/components/full-screen-similar-songs';
+import { Visualizer } from '/@/renderer/features/player/components/visualizer';
+import { lazy, useMemo } from 'react';
+import { usePlaybackSettings } from '/@/renderer/store';
+import { PlaybackType } from '/@/renderer/types';
+
+const FullScreenSimilarSongs = lazy(() =>
+import('/@/renderer/features/player/components/full-screen-similar-songs').then((module) => ({
+default: module.FullScreenSimilarSongs,
+})),
+);

 const QueueContainer = styled.div`
 position: relative;

@@ -61,8 +70,10 @@ export const FullScreenPlayerQueue = () => {
 const { t } = useTranslation();
 const { activeTab, opacity } = useFullScreenPlayerStore();
 const { setStore } = useFullScreenPlayerStoreActions();
+const { type, webAudio } = usePlaybackSettings();

-const headerItems = [
+const headerItems = useMemo(() => {
+const items = [
 {
 active: activeTab === 'queue',
 icon: <RiFileMusicLine size="1.5rem" />,

@@ -83,6 +94,18 @@ export const FullScreenPlayerQueue = () => {
 },
 ];
+
+if (type === PlaybackType.WEB && webAudio) {
+items.push({
+active: activeTab === 'visualizer',
+icon: <RiFileTextLine size="1.5rem" />,
+label: 'Visualizer',
+onClick: () => setStore({ activeTab: 'visualizer' }),
+});
+}
+
+return items;
+}, [activeTab, setStore, t, type, webAudio]);

 return (
 <GridContainer
 className="full-screen-player-queue-container"

@@ -91,6 +114,7 @@ export const FullScreenPlayerQueue = () => {
 <Group
 grow
 align="center"
+className="full-screen-player-queue-header"
 position="center"
 >
 {headerItems.map((item) => (

@@ -127,6 +151,8 @@ export const FullScreenPlayerQueue = () => {
 </QueueContainer>
 ) : activeTab === 'lyrics' ? (
 <Lyrics />
+) : activeTab === 'visualizer' && type === PlaybackType.WEB && webAudio ? (
+<Visualizer />
 ) : null}
 </GridContainer>
 );
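This file also switches FullScreenSimilarSongs to React.lazy; because lazy expects a default export, the named export is re-mapped inside the import promise, exactly as the diff shows. A generic sketch of that pattern (the module path and component names below are illustrative, not from the diff):

import { Suspense, lazy } from 'react';

// React.lazy wants a module shaped like { default: Component }, so a named export is re-wrapped.
const HeavyPanel = lazy(() =>
    import('./heavy-panel').then((module) => ({ default: module.HeavyPanel })),
);

export const PanelHost = () => (
    <Suspense fallback={null}>
        <HeavyPanel />
    </Suspense>
);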
src/renderer/features/player/components/visualizer.tsx (new file, 72 lines)

@@ -0,0 +1,72 @@
+import { createRef, useCallback, useEffect, useState } from 'react';
+import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
+import AudioMotionAnalyzer from 'audiomotion-analyzer';
+import styled from 'styled-components';
+import { useSettingsStore } from '/@/renderer/store';
+
+const StyledContainer = styled.div`
+margin: auto;
+max-width: 100%;
+
+canvas {
+margin: auto;
+width: 100%;
+}
+`;
+
+export const Visualizer = () => {
+const { webAudio } = useWebAudio();
+const canvasRef = createRef<HTMLDivElement>();
+const accent = useSettingsStore((store) => store.general.accent);
+const [motion, setMotion] = useState<AudioMotionAnalyzer>();
+
+const [length, setLength] = useState(500);
+
+useEffect(() => {
+const { context, gain } = webAudio || {};
+if (gain && context && canvasRef.current && !motion) {
+const audioMotion = new AudioMotionAnalyzer(canvasRef.current, {
+ansiBands: true,
+audioCtx: context,
+connectSpeakers: false,
+gradient: 'prism',
+mode: 4,
+showPeaks: false,
+smoothing: 0.8,
+});
+setMotion(audioMotion);
+audioMotion.connectInput(gain);
+}
+
+return () => {};
+}, [accent, canvasRef, motion, webAudio]);
+
+const resize = useCallback(() => {
+const body = document.querySelector('.full-screen-player-queue-container');
+const header = document.querySelector('.full-screen-player-queue-header');
+
+if (body && header) {
+const width = body.clientWidth - 30;
+const height = body.clientHeight - header.clientHeight - 30;
+
+setLength(Math.min(width, height));
+}
+}, []);
+
+useEffect(() => {
+resize();
+
+window.addEventListener('resize', resize);
+
+return () => {
+window.removeEventListener('resize', resize);
+};
+}, [resize]);
+
+return (
+<StyledContainer
+ref={canvasRef}
+style={{ height: length, width: length }}
+/>
+);
+};
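audiomotion-analyzer builds on the Web Audio AnalyserNode: connectInput(gain) taps the shared gain node, and connectSpeakers: false keeps the analyzer from routing audio to the destination a second time. A rough vanilla-API sketch of the same idea, for comparison only (an assumption-laden illustration, not how the library is implemented line for line):

// `context` and `gain` are the nodes shared through WebAudioContext (declared here for the sketch).
declare const context: AudioContext;
declare const gain: GainNode;

const analyser = context.createAnalyser();
analyser.fftSize = 2048;
analyser.smoothingTimeConstant = 0.8; // comparable to the `smoothing: 0.8` option above

// Tap the signal; the analyser is never connected to context.destination,
// so sound still reaches the speakers only once, through `gain`.
gain.connect(analyser);

const bins = new Uint8Array(analyser.frequencyBinCount);
const draw = () => {
    analyser.getByteFrequencyData(bins); // spectrum data to paint each frame
    requestAnimationFrame(draw);
};
requestAnimationFrame(draw);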
src/renderer/features/player/context/webaudio-context.ts (new file, 7 lines)

@@ -0,0 +1,7 @@
+import { createContext } from 'react';
+import { WebAudio } from '/@/renderer/types';
+
+export const WebAudioContext = createContext<{
+setWebAudio?: (audio: WebAudio) => void;
+webAudio?: WebAudio;
+}>({});
src/renderer/features/player/hooks/use-webaudio.ts (new file, 7 lines)

@@ -0,0 +1,7 @@
+import { useContext } from 'react';
+import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
+
+export const useWebAudio = () => {
+const { webAudio, setWebAudio } = useContext(WebAudioContext);
+return { setWebAudio, webAudio };
+};
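Together, the context and this hook give any renderer component read access to the shared nodes once the player has published them. A minimal consumer sketch (the component itself is hypothetical):

import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';

// Hypothetical consumer: renders nothing until the player has published { context, gain }.
export const WebAudioDebug = () => {
    const { webAudio } = useWebAudio();

    if (!webAudio) return null;

    return <span>{`sample rate: ${webAudio.context.sampleRate} Hz`}</span>;
};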

@@ -235,3 +235,8 @@
 LOADING = 'loading',
 VALID = 'valid',
 }
+
+export type WebAudio = {
+context: AudioContext;
+gain: GainNode;
+};