// Manually created for the GAVT project on 2024-06-03, based on commit 10df1d9 of the original project.
// Updated according to 79c427c in start-react on 2024-12-02; also manually removed the Vonage and Firebase-related logic.
import { useRef, useState, useEffect } from 'react'
import useUserMedia from './useUserMedia'

import { getSnapData, initAudio, updateCoeffsOffline, updateData, FRAME_SIZE } from './waveAudio'
import { Button } from 'antd'
import { CameraOutlined, ClearOutlined } from '@ant-design/icons'

import './Wave.css'


// =====================================================
const WaveCanvas = props => {  

  const DEFAULT_FILTER_ORDER = 40;

  const {draw, // this is the drawLoop sketch
    isPaused, // req'd for renderer
    isTracking,
    strokeStyle = "#50D3D6",
    ...rest} = props;

  // Audio Vars ------
  const SAMPLE_RATE = 44100;
  const refAudioCtx = useRef(null);       
  const analyser = useRef();
  let microphone = useRef(null);
  let audioData = [];                   // Holds the time domain data.
  let lastReceivedCoeffs = useRef([1, 2, 3]);  // self-explanatory
  const BROADCASTS_PER_SECOND = 6; // used in audioData Interval, affects drawing smoothness, and will eventually be used with Vonage

  // Canvas Vars -------
  const refCanvas = useRef();   // html holds canvasEle
  const refCtx = useRef();      // holds canvas drawing ctx
  const refAniId = useRef();    // holds window.requestAnimationFrame
  let count = 0                 // just a var to count the draw loops
  let drawData = { magnitudes: [], peaks: [], } // set each audio interval
  let receiveEnabled = useRef(false);
  let [sensitivity, setSensitivity] = useState(50);

  // Snapshot Feature --------------
  const snapCtx = useRef();
  const refSnapCanvas = useRef();             // ------------------ #snap
  let snapshotCoeffs = useRef(null);          // ------------------ #snap

  let getMediaInitiated = useRef(false);

  // Services ---------
  const { stream, error, getMedia } = useUserMedia();

  // ------------------------------------------------
  const renderer = () => {
    let ctx = refCtx.current;
    count++
    drawData = updateData(drawData, lastReceivedCoeffs.current, isTracking)

    if(!isPaused) { // drawLoop. Drawing is passed in thru props.
      draw(ctx, count, drawData.magnitudes, drawData.peaks, strokeStyle) // FOR WAVE CUSTOMIZATION!
    } else {
      return;
    }
    if (snapshotCoeffs.current !== null) {
      displaySnapshot();
    }
    refAniId.current = window.requestAnimationFrame(renderer)
  }

  const displaySnapshot = () => {
    let snapData = getSnapData(snapshotCoeffs.current, sensitivity);
    // console.log(snapData)
    snapData.magnitudes = snapData.magnitudes.map(value => value * 0.45); // Just alter the magnitudes of snapdata
    draw(snapCtx.current, 0, snapData.magnitudes, snapData.peaks, 'blue');
  }

  const onClickClearSnapshot = () => {
    clearSnapshot();
  }

  const onClickTakeSnapshot = () => {
      takeSnapshot();
      displaySnapshot();
  };

  const copyArray = (arr) => {
    const newArr = [];
    for (let i = 0; i < arr.length; i++) {
      newArr[i] = arr[i];
    }
    return newArr;
  }

  const takeSnapshot = () => {
    snapshotCoeffs.current = copyArray(lastReceivedCoeffs.current);
    // console.log("snapshot", snapshotCoeffs.current);
  }

  const clearSnapshot = () => {
    snapshotCoeffs.current = null;
    let canvas = refSnapCanvas.current
    const context = refSnapCanvas.current.getContext('2d');
    context.clearRect(0, 0, canvas.width, canvas.height);
  }

  // onMount & onWillUnmount ------------------------------------------------
  useEffect(() => { 
    refCtx.current = refCanvas.current.getContext('2d');
    snapCtx.current = refSnapCanvas.current.getContext('2d');
    let interval;

    if(!stream) { getMedia(); }

    if(stream) {
      // console.log('canvas has stream')
      // console.log('stream.active = ' + stream.active)

      // Anything that attaches to a DOM ele should be CREATED here with a ref.
      // Once the refs are init'd/created, they can be passed to external scripts for maipulation.
      refAudioCtx.current = new AudioContext({sampleRate: SAMPLE_RATE,});
      analyser.current = refAudioCtx.current.createAnalyser();

      // I assume this will be needed later...but I don't really know
      microphone.current = refAudioCtx.current.createMediaStreamSource(stream);

      // configs the analyser to our mic input
      analyser.current = initAudio(microphone.current, refAudioCtx.current, analyser.current)

      // ready to send to audioData to LPC script, which we will do at an interval
      interval = setInterval(() => {
        // console.log('audioData interval');
        // creates a 1D array
        audioData= new Float32Array(analyser.current.fftSize);
        // updates that array with analyser data
        analyser.current.getFloatTimeDomainData(audioData);

        // call to audio script
        lastReceivedCoeffs.current = updateCoeffsOffline(audioData, DEFAULT_FILTER_ORDER);
      }, Math.round(1000 * (FRAME_SIZE / SAMPLE_RATE)));
      console.log(Math.round(1000 * (FRAME_SIZE/SAMPLE_RATE)));


      console.log('audioReady. Starting render')
      renderer()
    }

    // onUnmount -----------------
    return () => {
      clearInterval(interval);
      console.log('cancel render / cancelAnimationFrame')
      window.cancelAnimationFrame(refAniId.current)
    } 
  }, [stream, isPaused, isTracking])


  // _________________________________________
  return (
    <div className="canvas-container" style={{position: "relative"}}>
      <canvas 
        ref={refCanvas} {...rest}
        style={{
          position: 'absolute',
        }}
      >
        Please use an HTML5 browser.
      </canvas>
      <canvas className="snapCanvas" style={{opacity: .4}} ref={refSnapCanvas} {...rest} >
        Please use an HTML5 browser.
      </canvas>
      <div style={ {position: "absolute", left: "85%", top: "5%", zIndex: 999} }>
              <div style={{ marginBottom: "10px", display: "flex", gap: "10px" }}>
                <Button color="primary" onClick={onClickTakeSnapshot} icon={<CameraOutlined />}></Button>
                <Button color="primary" onClick={onClickClearSnapshot} icon={<ClearOutlined />}></Button>
              </div>
      </div>
    </div>
  )
}

export default WaveCanvas