// observe.jsx — WebRTC live-call listen-in HUD.
//
// Open via `window.openObserve(call)` from anywhere. The HUD docks
// bottom-right, REGISTERs against the workspace's FreeSWITCH WSS
// endpoint via sip.js, INVITEs `*7<callId>`, and pipes the remote audio
// stream to a hidden <audio>. Listen-only — the HUD never sends audio
// into the call; the offer carries no local tracks (recvonly SDP).
//
// Lifecycle:
//   open → /observe/start → REGISTER → INVITE → audio plays
//   close → BYE → unREGISTER → /observe/stop
//   call-ended event → auto-close
//
// Dependencies: sip.js loaded from CDN by index.html. We expose the
// hook + a global opener on window so any screen (calls list, call
// detail, ⌘K palette) can trigger the HUD without prop-drilling.

(function () {
  const { useState, useEffect, useRef, useCallback } = React;

  // ── Hook + Mounted element ───────────────────────────────────────
  function useObservePanel({ workspace }) {
    const [state, setState] = useState(null); // { call } when open

    // Expose an imperative opener so anyone can do
    //   window.openObserve(call)
    // without threading props down to the calls table.
    useEffect(() => {
      window.openObserve = (call) => setState({ call });
      window.closeObserve = () => setState(null);
      return () => { window.openObserve = null; window.closeObserve = null; };
    }, []);

    const Mounted = state ? (
      <ObservePanel
        workspace={workspace}
        call={state.call}
        onClose={() => setState(null)}
      />
    ) : null;

    return { Mounted };
  }

  // ── The HUD itself ────────────────────────────────────────────────
  function ObservePanel({ workspace, call, onClose }) {
    const wid = workspace.id;
    // Status: connecting | registering | dialing | live | ended | error
    const [status, setStatus] = useState("connecting");
    const [errorMsg, setErrorMsg] = useState(null);
    const [elapsed, setElapsed] = useState(0);
    const [muted, setMuted] = useState(false);
    const audioRef = useRef(null);
    const uaRef = useRef(null);
    const inviterRef = useRef(null);
    const startedAtRef = useRef(null);

    // Tick the elapsed counter once we're live.
    useEffect(() => {
      if (status !== "live") return;
      startedAtRef.current = Date.now();
      const id = setInterval(() => setElapsed(Math.floor((Date.now() - startedAtRef.current) / 1000)), 500);
      return () => clearInterval(id);
    }, [status]);

    // Main connect effect — runs once per opened call.
    useEffect(() => {
      let cancelled = false;
      let ua = null;
      let inviter = null;

      (async () => {
        try {
          if (!window.SIP) {
            throw new Error("sip.js failed to load. Check the script tag in index.html.");
          }

          setStatus("connecting");
          const r = await API.observe.start(wid, call.id);
          const cfg = r?.data ?? r;
          if (cancelled) return;

          // sip.js v0.21 — the "modern" UserAgent API. We construct
          // a UserAgent, REGISTER, then place an Inviter.
          const SIP = window.SIP;
          const uri = SIP.UserAgent.makeURI(`sip:${cfg.sipUser}@${new URL(cfg.wssUrl).hostname}`);
          if (!uri) throw new Error("Could not parse SIP URI");

          ua = new SIP.UserAgent({
            uri,
            transportOptions: { server: cfg.wssUrl },
            authorizationUsername: cfg.sipUser,
            authorizationPassword: cfg.sipPassword,
            // Listen-only — we don't send any audio. SDP will include
            // a=recvonly via mediaStreamFactory below.
            sessionDescriptionHandlerFactoryOptions: {
              constraints: { audio: false, video: false },
              peerConnectionConfiguration: { iceServers: [] },
            },
          });
          uaRef.current = ua;

          ua.delegate = {
            onDisconnect: (err) => {
              if (cancelled) return;
              if (err) setErrorMsg(err.message);
              setStatus("ended");
            },
          };

          await ua.start();
          if (cancelled) { ua.stop(); return; }

          setStatus("registering");
          const registerer = new SIP.Registerer(ua);
          await registerer.register();
          if (cancelled) { ua.stop(); return; }

          setStatus("dialing");
          const targetHost = new URL(cfg.wssUrl).hostname;
          const target = SIP.UserAgent.makeURI(`sip:${cfg.eavesdropDialString}@${targetHost}`);
          if (!target) throw new Error("Could not parse eavesdrop URI");

          inviter = new SIP.Inviter(ua, target, {
            sessionDescriptionHandlerOptions: {
              constraints: { audio: false, video: false },
            },
          });
          inviterRef.current = inviter;

          // Wire the remote audio track to our <audio> element as soon
          // as the SDP handler emits its peerConnection.
          inviter.stateChange.addListener((newState) => {
            if (cancelled) return;
            if (newState === SIP.SessionState.Established) {
              attachRemoteAudio(inviter, audioRef.current);
              setStatus("live");
            } else if (newState === SIP.SessionState.Terminated) {
              setStatus("ended");
            }
          });

          await inviter.invite();
        } catch (err) {
          if (cancelled) return;
          setErrorMsg(err.message || String(err));
          setStatus("error");
        }
      })();

      return () => {
        cancelled = true;
        try { inviter?.bye(); } catch (_) {}
        try { ua?.stop(); } catch (_) {}
        // Best-effort telemetry — don't await.
        API.observe.stop(wid, call.id).catch(() => {});
      };
      // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [wid, call.id]);

    // Toggle mute on the remote audio element (we're not sending; this
    // just silences playback locally).
    function toggleMute() {
      const a = audioRef.current;
      if (!a) return;
      a.muted = !a.muted;
      setMuted(a.muted);
    }

    return (
      <div className="observe-panel">
        <div className="observe-panel__head">
          <div className="row g8" style={{minWidth:0, flex:1, alignItems:"center"}}>
            <span className={"observe-dot observe-dot--" + status}/>
            <div style={{minWidth:0, flex:1}}>
              <div style={{fontSize:13, fontWeight:500, color:"var(--ink)", overflow:"hidden", textOverflow:"ellipsis", whiteSpace:"nowrap"}}>
                {call.callerName || (window.fmtNumber ? window.fmtNumber(call.callerNumber) : call.callerNumber)}
              </div>
              <div className="muted" style={{fontSize:11}}>
                {statusLabel(status)}{status === "live" ? ` · ${fmtElapsed(elapsed)}` : ""}
              </div>
            </div>
          </div>
          <button className="icon-btn" onClick={onClose} title="End listen-in">
            {window.I?.x ? <window.I.x size={14}/> : "×"}
          </button>
        </div>
        {errorMsg && (
          <div style={{padding:"8px 12px", background:"color-mix(in oklab, var(--bad) 8%, transparent)", color:"var(--bad)", fontSize:12, lineHeight:1.4}}>
            {errorMsg}
          </div>
        )}
        <div className="observe-panel__body">
          <button className={"btn btn-secondary btn-sm" + (muted ? " on" : "")} onClick={toggleMute} disabled={status !== "live"}>
            {muted ? "Unmute" : "Mute"}
          </button>
          <div className="muted" style={{fontSize:11.5, marginLeft:"auto"}}>
            Listen only · agent is notified
          </div>
        </div>
        <audio ref={audioRef} autoPlay playsInline/>
      </div>
    );
  }

  // Human-readable label for each connection phase; unrecognized states
  // pass through unchanged.
  function statusLabel(s) {
    const LABELS = {
      connecting: "Connecting…",
      registering: "Authenticating…",
      dialing: "Joining call…",
      live: "Listening",
      ended: "Ended",
      error: "Error",
    };
    return LABELS[s] ?? s;
  }
  // Format a second count as m:ss. Minutes are not capped at 59, so
  // 3661s renders as "61:01" — fine for a listen-in timer.
  function fmtElapsed(s) {
    const secs = String(s % 60).padStart(2, "0");
    return `${Math.floor(s / 60)}:${secs}`;
  }

  // sip.js wires the remote MediaStreamTrack onto the underlying
  // RTCPeerConnection at session establishment. We pull it off and pipe
  // into our <audio> element.
  // Pull the remote audio tracks off the session's RTCPeerConnection and
  // pipe them into the given <audio> element. No-op when the element or
  // the peer connection is not available yet.
  function attachRemoteAudio(session, audioEl) {
    if (!audioEl) return;
    const pc = session.sessionDescriptionHandler?.peerConnection;
    if (!pc) return;
    const remote = new MediaStream();
    for (const receiver of pc.getReceivers()) {
      const track = receiver.track;
      if (track?.kind === "audio") remote.addTrack(track);
    }
    audioEl.srcObject = remote;
    audioEl.play().catch((err) => {
      // Autoplay can fail if no user gesture preceded — but the user
      // clicking "Listen" IS a gesture, so this is unlikely.
      console.warn("observe: audio play failed:", err);
    });
  }

  // Export the hook (for App to mount the HUD) and the raw component on
  // window — this file is loaded as a plain script, not an ES module.
  window.useObservePanel = useObservePanel;
  window.ObservePanel = ObservePanel;
})();
