diff --git a/src/components/api/visualization/loungeVisual.tsx b/src/components/api/visualization/loungeVisual.tsx
index b5aa437..8302d27 100644
--- a/src/components/api/visualization/loungeVisual.tsx
+++ b/src/components/api/visualization/loungeVisual.tsx
@@ -1,12 +1,12 @@
-import React, { useEffect, useRef, useState, } from 'react'
+import React, { useEffect, useRef, useState } from 'react';
 import { RootState } from '../../../store';
 import { DisplayStatus, ControlStatus } from '../../../react-redux&middleware/redux/typesImports';
 import { useSelector } from 'react-redux';
 import './canvasFonts.css';
 
-let audioContext; // reusable; only initialize it once
-let analyser; // an AnylyserNode : provide real-time frequency and time-domain analysis information
+let audioContext;
+let analyser;
 let dataArray;
 let source;
 let rafId;
@@ -14,239 +14,142 @@ let canvas;
 let canvasCtx;
 let color;
-let showLabels : boolean;
+let showLabels: boolean;
 
 const font = 'Orbitron';
 
 export const LoungeVisual = (props) => {
-  const [sLabel, setSLabel] = useState(false);
-  const canvasRef = useRef(null);
-
-  const theme = useSelector((state: RootState) => {
-    return state.DisplayReducer as DisplayStatus;
-  });
-
-  // const control = useSelector((state: RootState) => {
-  //   return state.ControlReducer as ControlStatus;
-  // });
-
-  color = theme.textColor;
-  // showLabels = control.showLabels;
-  showLabels = sLabel;
-
-  const setSource = async () => {
-    const newMediaStream = await navigator.mediaDevices.getUserMedia({
-      audio: true,
-      video: false
-    })
-
-    await (source = audioContext.createMediaStreamSource(newMediaStream))
-    await (source.connect(analyser))
-  };
+  const [sLabel, setSLabel] = useState(false);
+  const canvasRef = useRef(null);
+  const mediaStreamRef = useRef(null);
 
-  /**
-   * @description
-   * Render audio author and title.
-   */
-  // const renderText = () => {
-  //   let cx = canvas.width / 2;
-  //   let cy = canvas.height / 2;
-  //   let correction = 10;
-
-  //   let title = ""
-  //   let author = "Unamed"
-  //   let font = ['12px', 'Helvetica']
-
-  //   canvasCtx.fillStyle = color;
-
-  //   canvasCtx.textBaseline = 'top';
-  //   canvasCtx.fillText(author, cx + correction, cy);
-  //   canvasCtx.font = parseInt(font[0], 10) + 8 + 'px ' + font[1];
-  //   canvasCtx.textBaseline = 'bottom';
-  //   canvasCtx.fillText(title, cx + correction, cy);
-  //   canvasCtx.font = font.join(' ');
-  // };
-
-  /**
-   * @description
-   * Render audio time.
-   */
-  // const renderTime = () => {
-  //   // let time = this.minutes + ':' + this.seconds;
-  //   // canvasCtx.fillText(time, canvas.width / 2 + 10, canvas.height / 2 + 40);
-  // };
-
-  /**
-   * @description
-   * Render frame by style type.
-   *
-   * @return {Function}
-   */
-  const renderByStyleType = () => {
-
-    // return this[TYPE[this.style]]();
-
-    renderLounge();
-  };
-
-  /**
-   * @description
-   * Render lounge style type.
-   */
-  const renderLounge = () => {
-    // let barWidth = 2;
-    // let barHeight = 2;
-    // let barSpacing = 4;
-
-    let cx = canvas.width / 2;
-    let cy = canvas.height / 2;
-    let radius = Math.min(canvas.width, canvas.height) / 2.5; // determined by the smaller of width or height
-    // let maxBarNum = Math.floor((2 * Math.PI * radius) / (barWidth + barSpacing)); // control max (total possible) number of bars: circumference / (width + spacing)
-    // let barNum = maxBarNum * 0.75; // controls how much frequency bars are shown
-    // const freqArrIdxJump = (dataArray.length / maxBarNum); // gap of index (of frequency array) for each bar
-    // let eachDataFreq = audioContext.sampleRate / 2 / dataArray.length; // Nyquist Rate Theroem: 2x the range of sampling rate to capture the range.
-    // // console.log('sampleRate: ', audioContext.sampleRate, '; dataArray.length: ', dataArray.length, '; eachDataFreq: ', eachDataFreq);
-    // // console.log([barNum, barNum * eachDataFreq * freqArrIdxJump], [maxBarNum, maxBarNum * eachDataFreq * freqArrIdxJump], freqArrIdxJump, audioContext.sampleRate / 2);
-
-    // changing frequency range: 0Hz ~ 15kHz; fixed bar barSpacing;
-    // fixed barNum, freqArrIdxJump...
-    const barNum = 128;
-    const maxBarNum = 128 / 0.75;
-
-    // const barSpacing = (2 * Math.PI * radius) - barWidth;
-    let barHeight = 2;
-    let barSpacing = 4;
-    const barWidth = (1 / maxBarNum) * (2 * Math.PI * radius) / 2;
-    const freqArrIdxJump = 2;
-    const eachDataFreq = audioContext.sampleRate / 2 / dataArray.length; // Nyquist Rate Theroem: 2x the range of sampling rate to capture the range.
-    // console.log('sampleRate: ', audioContext.sampleRate, '; dataArray.length: ', dataArray.length, '; eachDataFreq: ', eachDataFreq);
-    // console.log([barNum, barNum * eachDataFreq * freqArrIdxJump], [maxBarNum, maxBarNum * eachDataFreq * freqArrIdxJump], freqArrIdxJump, audioContext.sampleRate / 2);
-
-    const hypotenuseLength = (canvas.width / 4);
-
-    canvasCtx.font = `${canvas.width / 10}px ${font}`;
-    canvasCtx.textAlign = 'center';
-    canvasCtx.textBaseline = 'middle';
-    if (showLabels) {
-      canvasCtx.fillText('kHz', cx, cy + hypotenuseLength)
-      // canvasCtx.fillText(`${fonts[fontsIdx]}`, cx, cy + 1.5 * hypotenuseLength)
-    }
+  const theme = useSelector((state: RootState) => state.DisplayReducer as DisplayStatus);
+  const listening = useSelector((state: RootState) => (state.ControlReducer as ControlStatus).listening);
 
-    canvasCtx.fillStyle = color;
-    for (let i = 0; i <= barNum; i++) {
-      let amplitude = dataArray[Math.floor(i * freqArrIdxJump)]; // Db data for each frequency
-      let alfa = (2 * Math.PI * i) / maxBarNum; // (2 pi i) / (2 pi r / width) => (i * width) / r
-      let beta = (3 * 45 - barWidth) * Math.PI / 180; // pi * 0.75
-      // let beta = Math.PI * 0.75;
-      let x = 0;
-      // let y = 1 - radius - (amplitude / 6 - barHeight); // flipped
-      // let y = (amplitude / 6 - barHeight) - radius; // inverted
-      let y = 1 - radius - (amplitude / 6 - barHeight);
-      let w = barWidth;
-      let h = amplitude / 3 + barHeight;
-      // Possible Relationship: 2 * 6 = 12
-
-      canvasCtx.save();
-      canvasCtx.translate(cx, cy); // doesn't need to + barSpacing
-
-      // canvasCtx.fillRect(-hypotenuseLength, 0, 2 * hypotenuseLength, 1);
-      // canvasCtx.fillRect(0, -hypotenuseLength, 1, 2 * hypotenuseLength);
-
-      canvasCtx.save(); // right before rotation
-      const rotate = (alfa - beta) * 1;
-      canvasCtx.rotate(rotate); // controls starting bar (how much to rotate)
-      canvasCtx.fillRect(x, y, w, h);
-      // console.log(showLabels);
-
-      if (showLabels) {
-        // canvasCtx.fillRect(0, 0, w, hypotenuseLength);
-        if (i % 16 == 0) {
-          canvasCtx.fillRect(0, -1.2 * hypotenuseLength, w, -0.2 * hypotenuseLength); // 1.3 comes from (0.25 + (0.4 - 0.25)/2) / 0.25 = (0.25 + 0.075) / 0.25 = 0.325 / 0.25 = 1.3
-          // canvasCtx.fillRect(0, -radius, w, -0.1 * hypotenuseLength); // 1.3 comes from (0.25 + (0.4 - 0.25)/2) / 0.25 = (0.25 + 0.075) / 0.25 = 0.325 / 0.25 = 1.3
-          // canvasCtx.fillRect(0, 0, w, -1 * hypotenuseLength);
-
-          canvasCtx.restore(); // rotate back so that text can be displayed normally
-
-          const roundFreq = Math.round(eachDataFreq * Math.floor(i * freqArrIdxJump) / 100) * 100; // round to nearest 100
-          const freqText = `${roundFreq >= 1000 ? `${roundFreq/1000}` : roundFreq}`;
-          const canvasRotateAngle = rotate;
-          let angle = -1 * canvasRotateAngle;
-          const textX = Math.sin(angle) * hypotenuseLength;
-          const textY = Math.cos(angle) * hypotenuseLength;
-
-          canvasCtx.font = `${canvas.width / 18}px ${font}`;
-          // Maybe: left side should be align left; vice versa
-          canvasCtx.fillText(freqText, -textX, -textY); // the x and y might should be determined by cavnas width and height
-        } else {
-          canvasCtx.restore();
-        }
-      } else {
-        canvasCtx.restore();
-      }
-      canvasCtx.restore();
-    }
-  };
+  color = theme.textColor;
+  showLabels = sLabel;
+
+  const setupAudio = async () => {
+    audioContext = new (window.AudioContext || window.webkitAudioContext)({ latencyHint: 'interactive' });
+
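+    // AnalyserNode configuration (Web Audio API): fftSize 512 yields
+    // frequencyBinCount = 256 bins, minDecibels sets the quietest level that
+    // registers, and smoothingTimeConstant (0..1) averages successive frames.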
+    const analyserOptions: AnalyserOptions = {
+      fftSize: 512,
+      maxDecibels: -30,
+      minDecibels: -70,
+      smoothingTimeConstant: 0.8,
+    };
+    analyser = new AnalyserNode(audioContext, analyserOptions);
+    dataArray = new Uint8Array(analyser.frequencyBinCount);
+
+    mediaStreamRef.current = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
+    source = audioContext.createMediaStreamSource(mediaStreamRef.current);
+    source.connect(analyser);
+
+    canvas = canvasRef.current;
+    if (!canvas) return;
+    canvasCtx = canvas.getContext('2d');
+
+    rafId = requestAnimationFrame(draw);
+  };
+
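+  // Build the audio graph when `listening` turns on; tear down the animation
+  // frame, audio nodes, and microphone tracks when it turns off.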
+  useEffect(() => {
+    if (listening) {
+      setupAudio();
+    } else {
+      cancelAnimationFrame(rafId);
+      if (analyser) analyser.disconnect();
+      if (source) source.disconnect();
+      if (mediaStreamRef.current) {
+        mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+      }
+    }
 
-  useEffect(() => {
-    // audioContext = new (window.AudioContext || window.webkitAudioContext)();
-    audioContext = new (window.AudioContext || window.webkitAudioContext)({
-      latencyHint: 'interactive',
-      // sampleRate: 51200 // change to 51200 so that eachDataFreq will be 50
-      // sampleRate: 38400 // 19200 max
-      // sampleRate: 40960 // 20480 max
-      // sampleRate: 32000 // 16000 max
-    });
-    let analyserOptions : AnalyserOptions = { // visual largely affected by fftSize and minDecibels. Roughly direct relationship
-      "fftSize": 512, // fftSize / 2 is the length of the dataArray. Less: Data are Crunched: Large: the Opposite
-      "maxDecibels": -30,
-      "minDecibels": -70, // lowest volume to pick up
-      "smoothingTimeConstant": 0.8, // lower: less smooth
-    };
-    // analyser = audioContext.createAnalyser();
-    analyser = new AnalyserNode(audioContext, analyserOptions); // for AnalyserOptions
-
-    dataArray = new Uint8Array(analyser.frequencyBinCount); // get data for visualization. frequencyBinCount = fftSize / 2
-
-    // connect the source to be analysed
-    setSource();
-    rafId = requestAnimationFrame(draw); // draw is called before each repaint; // rafId : unique id for the callback funciton draw()
-
-    // setup canvas
-    canvas = canvasRef.current;
-    if (!canvas) {
-      return;
-    }
-    canvasCtx = canvas.getContext('2d')
+
+    return () => {
+      cancelAnimationFrame(rafId);
+      if (analyser) analyser.disconnect();
+      if (source) source.disconnect();
+      if (mediaStreamRef.current) {
+        mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+      }
+    };
+  }, [listening]);
+
+  const draw = () => {
+    rafId = requestAnimationFrame(draw);
+    analyser.getByteFrequencyData(dataArray);
+    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
+    renderByStyleType();
+  };
+
+  const renderByStyleType = () => {
+    renderLounge();
+  };
+
+  const renderLounge = () => {
+    const cx = canvas.width / 2;
+    const cy = canvas.height / 2;
+    const radius = Math.min(canvas.width, canvas.height) / 2.5;
+    const barNum = 128;
+    const maxBarNum = 128 / 0.75;
+    const barHeight = 2;
+    const barSpacing = 4;
+    const barWidth = (1 / maxBarNum) * (2 * Math.PI * radius) / 2;
+    const freqArrIdxJump = 2;
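+    // Each bin spans (sampleRate / 2) / frequencyBinCount Hz: the analyser
+    // only reports up to the Nyquist frequency, half the sampling rate.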
+    const eachDataFreq = audioContext.sampleRate / 2 / dataArray.length;
+    const hypotenuseLength = canvas.width / 4;
+
+    canvasCtx.font = `${canvas.width / 10}px ${font}`;
+    canvasCtx.textAlign = 'center';
+    canvasCtx.textBaseline = 'middle';
+    if (showLabels) {
+      canvasCtx.fillText('kHz', cx, cy + hypotenuseLength);
+    }
 
-    return () => { // clean up funciton
-      cancelAnimationFrame(rafId);
-      analyser.disconnect();
-      source.disconnect();
+    canvasCtx.fillStyle = color;
+    for (let i = 0; i <= barNum; i++) {
+      const amplitude = dataArray[Math.floor(i * freqArrIdxJump)];
+      const alfa = (2 * Math.PI * i) / maxBarNum;
+      const beta = (3 * 45 - barWidth) * Math.PI / 180;
+      const y = 1 - radius - (amplitude / 6 - barHeight);
+      const w = barWidth;
+      const h = amplitude / 3 + barHeight;
+
+      canvasCtx.save();
+      canvasCtx.translate(cx, cy);
+      canvasCtx.save();
+      const rotate = (alfa - beta);
+      canvasCtx.rotate(rotate);
+      canvasCtx.fillRect(0, y, w, h);
+
+      if (showLabels) {
+        if (i % 16 === 0) {
+          canvasCtx.fillRect(0, -1.2 * hypotenuseLength, w, -0.2 * hypotenuseLength);
+          canvasCtx.restore();
+
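+          // Label every 16th bar with its bin frequency, rounded to the
+          // nearest 100 Hz and divided by 1000 once it reaches 1 kHz.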
+          const roundFreq = Math.round(eachDataFreq * Math.floor(i * freqArrIdxJump) / 100) * 100;
+          const freqText = `${roundFreq >= 1000 ? `${roundFreq / 1000}` : roundFreq}`;
+          const angle = -1 * rotate;
+          const textX = Math.sin(angle) * hypotenuseLength;
+          const textY = Math.cos(angle) * hypotenuseLength;
+
+          canvasCtx.font = `${canvas.width / 18}px ${font}`;
+          canvasCtx.fillText(freqText, -textX, -textY);
+        } else {
+          canvasCtx.restore();
+        }
-    }
-  }, [])
-
-  const draw = () => { // the draw function
-    rafId = requestAnimationFrame(draw);
-
-    // get data into dataArray
-    analyser.getByteFrequencyData(dataArray);
-    // if (flag == true) {
-    //   dispatch(volume)
-    //   flag false
-    //   restart timer
-    // }
-
-    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
-    // renderTime();
-    // renderText();
-    renderByStyleType();
+      } else {
+        canvasCtx.restore();
+      }
+      canvasCtx.restore();
+    }
-  }
-
-  return <canvas onClick={() => {setSLabel(!sLabel);}} width={props.visualWidth} height={props.visualHeight} ref={canvasRef} />
-  // return <canvas onClick={() => {fontsIdx = (fontsIdx + 1) % fonts.length}} width={props.visualWidth} height={props.visualHeight} ref={canvasRef} />
-}
+  };
+
+  return (
+    <canvas
+      onClick={() => setSLabel(!sLabel)}
+      width={props.visualWidth}
+      height={props.visualHeight}
+      ref={canvasRef}
+    />
+  );
+};
\ No newline at end of file
diff --git a/src/components/api/visualization/mfccVisual.tsx b/src/components/api/visualization/mfccVisual.tsx
index b16d810..862b9ab 100644
--- a/src/components/api/visualization/mfccVisual.tsx
+++ b/src/components/api/visualization/mfccVisual.tsx
@@ -1,6 +1,6 @@
 import React, { useEffect, useRef } from 'react';
 import { useSelector } from 'react-redux';
-import { DisplayStatus } from '../../../react-redux&middleware/redux/typesImports';
+import { DisplayStatus, ControlStatus } from '../../../react-redux&middleware/redux/typesImports';
 import { RootState } from '../../../store';
 import Meyda from 'meyda';
 import { MeydaAnalyzer } from 'meyda/dist/esm/meyda-wa';
@@ -31,71 +31,88 @@ let history_write_index = 0;
 export function MFCCVisual(props: any) {
   const canvasRef = useRef(null);
   const theme = useSelector((state: RootState) => state.DisplayReducer as DisplayStatus);
+  const listening = useSelector((state: RootState) => (state.ControlReducer as ControlStatus).listening);
+  const mediaStreamRef = useRef(null);
 
   useEffect(() => {
-    audioContext = new (window.AudioContext || window.webkitAudioContext)();
-
-    navigator.mediaDevices
-      .getUserMedia({ audio: true, video: false })
-      .then((mediaStream) => {
-        source = audioContext.createMediaStreamSource(mediaStream);
-
-        analyser = Meyda.createMeydaAnalyzer({
-          audioContext,
-          source,
-          bufferSize: FFT_SIZE,
-          numberOfMFCCCoefficients: MFCC_COEFFICIENTS,
-          featureExtractors: ['loudness', 'spectralCentroid', 'mfcc'],
-          callback: (features) => {
-            if (features.loudness.total >= LOUDNESS_THRESHOLD) {
-              history[history_write_index].mfcc.set(features.mfcc);
-              history[history_write_index].centroid = features.spectralCentroid;
-              history_write_index = (history_write_index + 1) % HISTORY_LENGTH;
-            }
-          },
-        });
+    let mounted = true;
 
-        analyser.start();
+    const setupAudio = async () => {
+      audioContext = new (window.AudioContext || window.webkitAudioContext)();
+
+      mediaStreamRef.current = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
+      source = audioContext.createMediaStreamSource(mediaStreamRef.current);
+
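+      // Meyda analyser: extracts loudness, spectral centroid, and MFCCs per
+      // buffer; frames quieter than LOUDNESS_THRESHOLD are dropped so silence
+      // does not scroll the history.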
+      analyser = Meyda.createMeydaAnalyzer({
+        audioContext,
+        source,
+        bufferSize: FFT_SIZE,
+        numberOfMFCCCoefficients: MFCC_COEFFICIENTS,
+        featureExtractors: ['loudness', 'spectralCentroid', 'mfcc'],
+        callback: (features) => {
+          if (features.loudness.total >= LOUDNESS_THRESHOLD) {
+            history[history_write_index].mfcc.set(features.mfcc);
+            history[history_write_index].centroid = features.spectralCentroid;
+            history_write_index = (history_write_index + 1) % HISTORY_LENGTH;
+          }
+        },
       });
 
-    rafId = requestAnimationFrame(draw);
+      if (listening && mounted) {
+        analyser.start();
+        rafId = requestAnimationFrame(draw);
+      }
+    };
+
+    setupAudio();
 
     return () => {
+      mounted = false;
       cancelAnimationFrame(rafId);
-      analyser.stop();
-      source.disconnect();
+      if (analyser) analyser.stop();
+      if (source) source.disconnect();
+      if (mediaStreamRef.current) {
+        mediaStreamRef.current.getTracks().forEach((track) => track.stop());
+      }
     };
   }, []);
 
+  // Toggle visualizer on/off based on Redux `listening` state
+  useEffect(() => {
+    if (!analyser) return;
+
+    if (listening) {
+      analyser.start();
+      rafId = requestAnimationFrame(draw);
+    } else {
+      analyser.stop();
+      cancelAnimationFrame(rafId);
+    }
+  }, [listening]);
+
   const draw = () => {
-    requestAnimationFrame(draw);
-
+    rafId = requestAnimationFrame(draw);
+
     const canvas = canvasRef.current!;
     const canvasCtx = canvas.getContext('2d')!;
-
+
     // Set dull white background
     canvasCtx.fillStyle = '#D3D3D3';
     canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
-
+
     const sliceWidth = canvas.width / HISTORY_LENGTH;
     const sliceHeight = canvas.height / MFCC_COEFFICIENTS;
-
+
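+    // `history` is a ring buffer: walk backwards from the newest entry so the
+    // most recent frame lands at the right edge of the canvas.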
     for (let i = 0; i < HISTORY_LENGTH; i++) {
-      const historyIndex = (history_write_index - i + HISTORY_LENGTH) % HISTORY_LENGTH; // Map newest to right
-      const x = canvas.width - sliceWidth * (i + 1); // Calculate position from right to left
+      const historyIndex = (history_write_index - i + HISTORY_LENGTH) % HISTORY_LENGTH;
+      const x = canvas.width - sliceWidth * (i + 1);
       const moment = history[historyIndex];
-
+
       moment.mfcc.forEach((data, row) => {
-        //const intensity = Math.min(data / 20, 1); // Normalize MFCC value
-        //const brightness = intensity * 255; // Map intensity to brightness
-
-        // Grayscale color mapping
-        ///canvasCtx.fillStyle = `rgb(${brightness}, ${brightness}, ${brightness})`;
         const intensity = data / 255;
         const centroid = Math.round((720 * moment.centroid) / FFT_SIZE);
         canvasCtx.fillStyle = `hsl(${centroid}deg 100% 60% / ${Math.min(intensity * 10, 1)})`;
-
-        // Draw rectangle for each coefficient
+
         canvasCtx.fillRect(
           x,
           row * sliceHeight,
@@ -105,8 +122,6 @@ export function MFCCVisual(props: any) {
       });
     }
   };
-
-
   return <canvas ref={canvasRef} />;
 }
diff --git a/src/components/api/visualization/timeDataVisual.tsx b/src/components/api/visualization/timeDataVisual.tsx
index f2d6a47..5a6eb36 100644
--- a/src/components/api/visualization/timeDataVisual.tsx
+++ b/src/components/api/visualization/timeDataVisual.tsx
@@ -1,142 +1,97 @@
-import React, { useEffect, useRef } from 'react'
-import { DisplayStatus } from '../../../react-redux&middleware/redux/typesImports';
+import React, { useEffect, useRef } from 'react';
+import { DisplayStatus, ControlStatus } from '../../../react-redux&middleware/redux/typesImports';
 import { useSelector } from 'react-redux';
 import { RootState } from '../../../store';
 
-var audioContext; // reusable; only initialize it once
-var analyser; // an AnylyserNode : provide real-time frequency and time-domain analysis information
-var dataArray;
-var source;
-var rafId;
-var canvas;
-var canvasCtx;
+let audioContext: AudioContext;
+let analyser: AnalyserNode;
+let dataArray: Uint8Array;
+let source: MediaStreamAudioSourceNode;
+let rafId: number;
+
+let canvas: HTMLCanvasElement;
+let canvasCtx: CanvasRenderingContext2D | null;
+let color: string;
+let mediaStream: MediaStream;
+
+const setupSource = async () => {
+  mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true, video: false });
+  source = audioContext.createMediaStreamSource(mediaStream);
+  source.connect(analyser);
+};
 
-var color;
+export function TimeDataVisual(props: any) {
+  const canvasRef = useRef(null);
 
-const setSource = async () => {
-  const newMediaStream = await navigator.mediaDevices.getUserMedia({
-    audio: true,
-    video: false
-  })
+  const theme = useSelector((state: RootState) => state.DisplayReducer as DisplayStatus);
+  const listening = useSelector((state: RootState) => (state.ControlReducer as ControlStatus).listening);
 
-  await (source = audioContext.createMediaStreamSource(newMediaStream))
-  await (source.connect(analyser))
-};
+  color = theme.textColor;
+
+  useEffect(() => {
+    audioContext = new (window.AudioContext || window.webkitAudioContext)();
+    analyser = audioContext.createAnalyser();
+    analyser.fftSize = 256;
+    dataArray = new Uint8Array(analyser.frequencyBinCount);
 
-export function TimeDataVisual(props) {
-  const canvasRef = useRef(null);
+    canvas = canvasRef.current!;
+    canvasCtx = canvas.getContext('2d');
 
-  const theme = useSelector((state: RootState) => {
-    return state.DisplayReducer as DisplayStatus;
+    setupSource().then(() => {
+      if (listening) {
+        rafId = requestAnimationFrame(draw);
+      }
     });
 
-  // const control = useSelector((state: RootState) => {
-  //   return state.ControlReducer as ControlStatus;
-  // })
-
-  color = theme.textColor;
-
-  useEffect(() => {
-    audioContext = new (window.AudioContext || window.webkitAudioContext)();
-    analyser = audioContext.createAnalyser();
-    dataArray = new Uint8Array(analyser.frequencyBinCount); // data for visualization
-
-    // connect the source to be analysed
-    setSource();
-    rafId = requestAnimationFrame(draw); // draw is called before each repaint; // rafId : unique id for the callback funciton draw()
-
-    // setup canvas
-    canvas = canvasRef.current;
-    if (!canvas) {
-      return;
-    }
-    canvasCtx = canvas.getContext('2d')
-
-    return () => { // clean up funciton
-      cancelAnimationFrame(rafId);
-      analyser.disconnect();
-      source.disconnect();
-    }
-  }, [])
-
-  const draw = () => { // the draw function
-    requestAnimationFrame(draw);
-
-    // get data into dataArray
-    analyser.getByteTimeDomainData(dataArray);
-
-    // How much data should we collect
-    analyser.fftSize = 2 ** 8; // 512
-
-    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
-
-    // now that we have the data, lets turn it into something visual
-    // 1. Clear the canvas TODO
-    // 2. setup some canvas drawing
-    canvasCtx.lineWidth = 2;
-    // canvasCtx.strokeStyle = "#ffc600";
-    canvasCtx.strokeStyle = color;
-    canvasCtx.beginPath();
-    const sliceWidth = canvas.width / analyser.frequencyBinCount;
-    let x = 0;
-    dataArray.forEach((data, i) => {
-      const v = data / 128;
-      const y = (v * canvas.height) / 2;
-      // draw our lines
-      if (i === 0) {
-        canvasCtx.moveTo(x, y);
-      } else {
-        canvasCtx.lineTo(x, y);
-      }
-      x += sliceWidth;
-    });
-    canvasCtx.stroke();
-
-    // var x = 0;
-    // const width = canvas.width;
-    // const height = canvas.height;
-    // // console.log("drawing line")
-
-    // const sliceWidth = (width * 1.0) / dataArray.length;
-    // // console.log(sliceWidth)
-    // canvasCtx.lineWidth = 2;
-    // canvasCtx.beginPath();
-    // // canvasCtx.moveTo(0, height / 2);
-    // dataArray.forEach((data, i) => {
-    //   // const y = (data / 255.0) * height;
-    //   // canvasCtx.lineTo(x, y);
-    //   // x += sliceWidth;
-
-    //   const v = data / 128;
-    //   const y = (v * canvas.height) / 2;
-    //   // draw our lines
-    //   if (i === 0) {
-    //     canvasCtx.moveTo(x, y);
-    //   } else {
-    //     canvasCtx.lineTo(x, y);
-    //   }
-    //   x += sliceWidth;
-    // });
-    // // for (const item of dataArray) {
-    // //   const y = (item / 255.0) * height * (1 / 1); // positive value
-    // //   canvasCtx.lineTo(x, y);
-    // //   x += sliceWidth;
-    // // }
-    // canvasCtx.lineTo(x, height / 2);
-    // canvasCtx.strokeStyle = color;
-    // canvasCtx.stroke();
-  }
+    return () => {
+      cancelAnimationFrame(rafId);
+      analyser.disconnect();
+      source.disconnect();
+      mediaStream?.getTracks().forEach((track) => track.stop());
+    };
+  }, []);
+
+  useEffect(() => {
+    if (!analyser || !source) return;
+
+    if (listening) {
+      rafId = requestAnimationFrame(draw);
+    } else {
+      cancelAnimationFrame(rafId);
+    }
+  }, [listening]);
+
+  const draw = () => {
+    rafId = requestAnimationFrame(draw);
+
+    analyser.getByteTimeDomainData(dataArray);
+
+    if (!canvasCtx) return;
+
+    canvasCtx.clearRect(0, 0, canvas.width, canvas.height);
+    canvasCtx.lineWidth = 2;
+    canvasCtx.strokeStyle = color;
+    canvasCtx.beginPath();
+
+    const sliceWidth = canvas.width / dataArray.length;
+    let x = 0;
+
+    dataArray.forEach((data, i) => {
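+      // Time-domain bytes are centred on 128 (silence), so v spans roughly
+      // 0..2 and scales to half the canvas height.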
+      const v = data / 128;
+      const y = (v * canvas.height) / 2;
+
+      if (i === 0) {
+        canvasCtx!.moveTo(x, y);
+      } else {
+        canvasCtx!.lineTo(x, y);
+      }
+
+      x += sliceWidth;
+    });
+
+    canvasCtx.stroke();
+  };
 
-  return <canvas ref={canvasRef} />
+  return <canvas ref={canvasRef} />;
 }