"use client";

import { useState, useRef, useEffect } from "react";
import { GoogleGenAI, LiveServerMessage, Modality, Type } from "@google/genai";
import { Mic, MicOff, Settings, AlertCircle } from "lucide-react";

// ----------------------------------------------------
// PCM Audio Decoder & Player
// ----------------------------------------------------
class AudioStreamPlayer {
  // AudioContext-clock time at which the next chunk should begin playing.
  private nextTime: number = 0;
  // Sources still scheduled or playing, kept so clear() can cancel them.
  private sources: AudioBufferSourceNode[] = [];

  constructor(
    private audioContext: AudioContext,
    private destinationNode?: MediaStreamAudioDestinationNode,
  ) {}

  /**
   * Decode one base64 chunk of 16-bit little-endian mono PCM (24 kHz) and
   * schedule it to play gaplessly right after the previously queued chunk.
   * Also mirrors the audio into `destinationNode` when one was supplied
   * (used for recording the conversation).
   */
  playChunk(base64: string) {
    if (this.audioContext.state === "suspended") {
      this.audioContext.resume();
    }

    // base64 -> raw byte string -> normalized float samples in one pass.
    const raw = atob(base64);
    const sampleCount = raw.length / 2;
    const samples = new Float32Array(sampleCount);
    for (let s = 0; s < sampleCount; s++) {
      const lo = raw.charCodeAt(2 * s);
      const hi = raw.charCodeAt(2 * s + 1);
      const unsigned = lo | (hi << 8);
      // Reinterpret the 16-bit value as signed, then scale to [-1, 1).
      const signed = unsigned >= 0x8000 ? unsigned - 0x10000 : unsigned;
      samples[s] = signed / 32768.0;
    }

    const buffer = this.audioContext.createBuffer(1, samples.length, 24000);
    buffer.getChannelData(0).set(samples);

    const source = this.audioContext.createBufferSource();
    source.buffer = buffer;
    source.connect(this.audioContext.destination);
    if (this.destinationNode) {
      source.connect(this.destinationNode);
    }

    // Schedule back-to-back, but never earlier than "now".
    const now = this.audioContext.currentTime;
    if (this.nextTime < now) {
      this.nextTime = now;
    }
    source.start(this.nextTime);
    this.nextTime += buffer.duration;

    this.sources.push(source);
    source.onended = () => {
      this.sources = this.sources.filter((queued) => queued !== source);
    };
  }

  /** Stop and detach every queued source and reset the schedule clock. */
  clear() {
    for (const queued of this.sources) {
      try {
        queued.stop();
      } catch (e) {}
      try {
        queued.disconnect();
      } catch (e) {}
    }
    this.sources = [];
    this.nextTime = 0;
  }

  stop() {
    // Only close if we are not sharing the context completely, but we rely on LiveSimulator to close.
  }
}

// ----------------------------------------------------
// PCM Mic Capturer
// ----------------------------------------------------
class AudioCapturer {
  private stream: MediaStream | null = null;
  private scriptProcessor: ScriptProcessorNode | null = null;
  private source: MediaStreamAudioSourceNode | null = null;

  constructor(
    private audioContext: AudioContext,
    private onChunk: (base64: string) => void,
  ) {}

  /**
   * Request microphone access and begin delivering base64-encoded
   * 16-bit little-endian mono PCM chunks to the onChunk callback.
   * Rejects if getUserMedia is denied.
   */
  async start() {
    this.stream = await navigator.mediaDevices.getUserMedia({
      audio: { channelCount: 1, sampleRate: 16000 },
    });

    if (this.audioContext.state === "suspended") {
      await this.audioContext.resume();
    }

    this.source = this.audioContext.createMediaStreamSource(this.stream);
    // NOTE: ScriptProcessorNode is deprecated in favor of AudioWorklet,
    // but remains the simplest widely-supported way to tap raw PCM.
    this.scriptProcessor = this.audioContext.createScriptProcessor(4096, 1, 1);

    this.scriptProcessor.onaudioprocess = (event) => {
      const samples = event.inputBuffer.getChannelData(0);

      // Clamp each float sample to [-1, 1] and convert to signed 16-bit.
      const pcm = new Int16Array(samples.length);
      for (let idx = 0; idx < samples.length; idx++) {
        const clamped = Math.max(-1, Math.min(1, samples[idx]));
        pcm[idx] = clamped < 0 ? clamped * 0x8000 : clamped * 0x7fff;
      }

      // Serialize the raw PCM bytes to base64 for transport.
      const byteView = new Uint8Array(pcm.buffer);
      let raw = "";
      for (let idx = 0; idx < byteView.byteLength; idx++) {
        raw += String.fromCharCode(byteView[idx]);
      }
      this.onChunk(btoa(raw));
    };

    this.source.connect(this.scriptProcessor);
    // The processor must be wired to a destination or onaudioprocess
    // never fires in some browsers.
    this.scriptProcessor.connect(this.audioContext.destination);
  }

  /** The live microphone MediaStream, or null before start() resolves. */
  getStream() {
    return this.stream;
  }

  /** Tear down the capture graph and release the microphone tracks. */
  stop() {
    this.scriptProcessor?.disconnect();
    this.source?.disconnect();
    this.stream?.getTracks().forEach((track) => track.stop());
  }
}

export const DEFAULT_LIVE_MODEL = "gemini-3.1-flash-live-preview";

/**
 * Voice-conversation simulator backed by the Gemini Live API.
 *
 * Streams microphone audio (16 kHz PCM) to the model and plays back the
 * model's audio replies (24 kHz PCM), while maintaining a transcript log
 * and an optional MediaRecorder capture of both sides of the conversation.
 *
 * Props:
 * - voice: prebuilt voice name; "Luna"/"Nova" are remapped to "Kore".
 * - systemInstruction: system prompt for the live session (required).
 * - apiKey: explicit key; falls back to localStorage, then NEXT_PUBLIC env.
 * - model: model override; defaults to DEFAULT_LIVE_MODEL.
 * - onChapterComplete: fired when the model calls markChapterComplete.
 * - onSessionStart: awaited before connecting; failures are logged only.
 * - onSessionEnd: receives the transcript logs and, when recording
 *   succeeded, the captured audio Blob.
 */
export function LiveSimulator({
  voice = "Puck",
  systemInstruction,
  apiKey,
  model,
  onChapterComplete,
  onSessionStart,
  onSessionEnd,
}: {
  voice?: string;
  systemInstruction: string;
  apiKey?: string;
  model?: string;
  onChapterComplete?: (id: string) => void;
  onSessionStart?: () => any;
  onSessionEnd?: (logs: any[], audioBlob?: Blob) => void;
}) {
  const [isActive, setIsActive] = useState(false);
  const [logs, setLogs] = useState<
    { role: "user" | "agent" | "system"; text: string; timestamp: string }[]
  >([]);
  const [error, setError] = useState<string | null>(null);

  // Audio references
  const playerRef = useRef<AudioStreamPlayer | null>(null);
  const capturerRef = useRef<AudioCapturer | null>(null);
  const sessionRef = useRef<any>(null);
  const recorderRef = useRef<MediaRecorder | null>(null);
  const audioChunksRef = useRef<BlobPart[]>([]);
  const logsRef = useRef<any[]>([]);
  const recognitionRef = useRef<any>(null);
  const userTransRef = useRef<string>("");
  const agentTransRef = useRef<string>("");
  // AudioContexts created by the current session, so stopSession can close
  // them (browsers cap the number of concurrent AudioContexts).
  const audioCtxsRef = useRef<AudioContext[]>([]);
  // True while/after stopSession has run for the current session; makes
  // teardown idempotent so onSessionEnd fires exactly once.
  const stoppingRef = useRef(false);

  // Wrap setLogs to also update logsRef, so callbacks that fire outside
  // React's render cycle (SDK events, recorder.onstop) see current logs.
  const addLog = (logObj: {
    role: "user" | "agent" | "system";
    text: string;
  }) => {
    const stamped = { ...logObj, timestamp: new Date().toISOString() };
    setLogs((prev) => {
      const next = [...prev, stamped];
      logsRef.current = next;
      return next;
    });
  };

  // Streaming transcript helpers: maintain an open bubble per role and append
  // chunks as Gemini sends them. Commit on `finished` / turnComplete.
  const openBubbleIndexRef = useRef<{ user: number | null; agent: number | null }>({
    user: null,
    agent: null,
  });

  const appendStreaming = (role: "user" | "agent", chunk: string) => {
    if (!chunk) return;
    setLogs((prev) => {
      const next = [...prev];
      const openIdx = openBubbleIndexRef.current[role];
      if (openIdx != null && next[openIdx] && next[openIdx].role === role) {
        // Append into the currently open bubble, normalizing whitespace.
        next[openIdx] = {
          ...next[openIdx],
          text: (next[openIdx].text + chunk).replace(/\s+/g, " ").trim(),
        };
      } else {
        // No open bubble for this role: start a new one and remember it.
        next.push({
          role,
          text: chunk.trim(),
          timestamp: new Date().toISOString(),
        });
        openBubbleIndexRef.current = {
          ...openBubbleIndexRef.current,
          [role]: next.length - 1,
        };
      }
      logsRef.current = next;
      return next;
    });
  };

  // Close the open bubble for a role; the next chunk starts a new one.
  const commitStreaming = (role: "user" | "agent") => {
    openBubbleIndexRef.current = {
      ...openBubbleIndexRef.current,
      [role]: null,
    };
  };

  // Start the Live SDK connection: resolve the API key, set up playback,
  // capture and recording graphs, then connect and kick off the agent.
  const startSession = async () => {
    setError(null);
    setIsActive(true);
    stoppingRef.current = false;
    logsRef.current = [];
    userTransRef.current = "";
    agentTransRef.current = "";
    setLogs([]);
    addLog({ role: "system", text: "Connecting to Agent Double-O-Seven..." });

    audioChunksRef.current = [];
    if (onSessionStart) {
      try {
        await onSessionStart();
      } catch (e) {
        // Best-effort hook: a failing onSessionStart must not block the call.
        console.warn("onSessionStart failed", e);
      }
    }

    // Optional browser SpeechRecognition as a local transcript of user speech.
    // NOTE(review): when the Live API also emits inputTranscription, user
    // speech may be logged twice (here and via appendStreaming) — confirm
    // whether this fallback should be disabled when transcription is on.
    try {
      const SpeechRecognition =
        (window as any).SpeechRecognition ||
        (window as any).webkitSpeechRecognition;
      if (SpeechRecognition) {
        recognitionRef.current = new SpeechRecognition();
        recognitionRef.current.continuous = true;
        recognitionRef.current.interimResults = false;
        recognitionRef.current.onresult = (event: any) => {
          let transcript = "";
          for (let i = event.resultIndex; i < event.results.length; ++i) {
            if (event.results[i].isFinal) {
              transcript += event.results[i][0].transcript;
            }
          }
          if (transcript && transcript.trim().length > 0) {
            addLog({ role: "user", text: transcript.trim() });
          }
        };
        // Browsers stop recognition after silence; restart until torn down.
        recognitionRef.current.onend = () => {
          if (recognitionRef.current) {
            try {
              recognitionRef.current.start();
            } catch (e) {}
          }
        };
        try {
          recognitionRef.current.start();
        } catch (e) {}
      }
    } catch (e) {
      console.warn("Speech recognition setup failed", e);
    }

    try {
      // Key precedence: explicit prop > localStorage > build-time env var.
      const localApiKey =
        typeof window !== "undefined"
          ? localStorage.getItem("rms_gemini_api_key")
          : null;
      const resolvedApiKey =
        apiKey || localApiKey || process.env.NEXT_PUBLIC_GEMINI_API_KEY;
      if (!resolvedApiKey) {
        throw new Error(
          "Gemini API key is not configured. Add it in Admin → Settings.",
        );
      }

      // Two contexts because playback (24 kHz) and capture (16 kHz) need
      // different sample rates.
      const playCtx = new (
        window.AudioContext || (window as any).webkitAudioContext
      )({ sampleRate: 24000 });
      const recCtx = new (
        window.AudioContext || (window as any).webkitAudioContext
      )({ sampleRate: 16000 });
      playCtx.resume();
      recCtx.resume();
      // Track them so stopSession can close them and free audio hardware.
      audioCtxsRef.current = [playCtx, recCtx];

      // Record the mixed conversation (agent playback + mic) to a Blob.
      const recordingDest = playCtx.createMediaStreamDestination();
      try {
        const mimeType = (window as any).MediaRecorder?.isTypeSupported?.(
          "audio/webm",
        )
          ? "audio/webm"
          : "audio/mp4";
        const recorder = new MediaRecorder(recordingDest.stream, { mimeType });
        recorder.ondataavailable = (e) => {
          if (e.data.size > 0) audioChunksRef.current.push(e.data);
        };
        recorderRef.current = recorder;
        recorder.start(1000);
      } catch (e) {
        // Retry with the browser's default container before giving up;
        // recording is optional, so a total failure is swallowed.
        try {
          const recorder = new MediaRecorder(recordingDest.stream);
          recorder.ondataavailable = (e) => {
            if (e.data.size > 0) audioChunksRef.current.push(e.data);
          };
          recorderRef.current = recorder;
          recorder.start(1000);
        } catch (e2) {}
      }

      const ai = new GoogleGenAI({ apiKey: resolvedApiKey });
      const newPlayer = new AudioStreamPlayer(playCtx, recordingDest);
      playerRef.current = newPlayer;

      // Forward each captured mic chunk to the live session as 16 kHz PCM.
      const newCapturer = new AudioCapturer(recCtx, (b64) => {
        if (sessionRef.current) {
          sessionRef.current.sendRealtimeInput({
            audio: { data: b64, mimeType: "audio/pcm;rate=16000" },
          });
        }
      });
      capturerRef.current = newCapturer;

      // Connect to Live API
      const liveModel = (model && model.trim()) || DEFAULT_LIVE_MODEL;
      addLog({ role: "system", text: `Using model: ${liveModel}` });
      const sessionPromise = ai.live.connect({
        model: liveModel,
        callbacks: {
          onopen: () => {
            addLog({
              role: "system",
              text: "Connection established. Audio channel active.",
            });
            newCapturer
              .start()
              .then(() => {
                // Also mix the mic into the recording destination so the
                // saved audio contains both sides of the conversation.
                const micStream = newCapturer.getStream();
                if (micStream && recordingDest) {
                  try {
                    const micSource =
                      playCtx.createMediaStreamSource(micStream);
                    micSource.connect(recordingDest);
                  } catch (e) {}
                }
              })
              .catch((e) => {
                setError(
                  "Failed to access microphone. Please ensure mic permissions are granted.",
                );
                stopSession();
              });
          },
          onmessage: async (message: LiveServerMessage) => {
            // The user barged in: drop any queued agent audio immediately.
            if (message.serverContent?.interrupted) {
              newPlayer.clear();
            }

            const base64Audio =
              message.serverContent?.modelTurn?.parts?.[0]?.inlineData?.data;
            if (base64Audio) {
              newPlayer.playChunk(base64Audio);
            }

            // NOTE(review): tool calls normally arrive on message.toolCall
            // and responses go back via sendToolResponse — confirm the SDK
            // honors this clientContent shape via sendRealtimeInput.
            const functionCalls =
              message.serverContent?.modelTurn?.parts?.filter(
                (p) => p.functionCall,
              );
            if (functionCalls && functionCalls.length > 0) {
              functionCalls.forEach((part) => {
                if (part.functionCall?.name === "markChapterComplete") {
                  const args = part.functionCall.args as any;
                  if (args && args.chapterId && onChapterComplete) {
                    onChapterComplete(args.chapterId);
                  }
                  if (sessionRef.current) {
                    sessionRef.current.sendRealtimeInput({
                      clientContent: {
                        turnComplete: true,
                        turns: [
                          {
                            role: "user",
                            parts: [
                              {
                                functionResponse: {
                                  name: "markChapterComplete",
                                  response: { success: true },
                                },
                              },
                            ],
                          },
                        ],
                      },
                    });
                  }
                }
              });
            }

            // Stream server-side transcriptions into the open bubbles.
            const inputTrans = message.serverContent?.inputTranscription;
            if (inputTrans?.text) appendStreaming("user", inputTrans.text);
            if (inputTrans?.finished) commitStreaming("user");

            const outputTrans = message.serverContent?.outputTranscription;
            if (outputTrans?.text) appendStreaming("agent", outputTrans.text);
            if (outputTrans?.finished) commitStreaming("agent");

            // Some turns carry plain text parts as well.
            const partText = message.serverContent?.modelTurn?.parts?.find(
              (p) => (p as any).text,
            )?.text;
            if (partText) appendStreaming("agent", partText);

            if (message.serverContent?.turnComplete) {
              commitStreaming("user");
              commitStreaming("agent");
            }
            if (message.serverContent?.interrupted) {
              commitStreaming("agent");
            }
          },
          onerror: (err: any) => {
            console.error("Live API Error:", err);
            const reason =
              err?.message || err?.reason || (typeof err === "string" ? err : "");
            setError(
              reason
                ? `Live API error: ${reason}`
                : "Live API connection failed. Check API key and model access.",
            );
            stopSession();
          },
          onclose: (evt: any) => {
            console.warn("Live API close:", evt);
            const code = evt?.code;
            const reason = evt?.reason;
            // 1000 = normal closure, 1005 = no status; anything else is
            // surfaced to the user as an error.
            if (code && code !== 1000 && code !== 1005) {
              setError(
                `Live API closed (${code}${reason ? `: ${reason}` : ""}).`,
              );
            }
            addLog({ role: "system", text: "Session closed." });
            stopSession();
          },
        },
        config: {
          tools: [
            {
              functionDeclarations: [
                {
                  name: "markChapterComplete",
                  description:
                    "Call this immediately after the user successfully completes a chapter's drills and theory checks. This automatically updates their progress in the system. YOU MUST explicitly congratulate the user out loud immediately before or after calling this tool.",
                  parameters: {
                    type: Type.OBJECT,
                    properties: {
                      chapterId: {
                        type: Type.STRING,
                        description:
                          "The ID of the completed chapter (e.g. C1, C2, etc.)",
                      },
                    },
                    required: ["chapterId"],
                  },
                },
              ],
            },
          ],
          responseModalities: [Modality.AUDIO],
          inputAudioTranscription: {},
          outputAudioTranscription: {},
          speechConfig: {
            voiceConfig: {
              prebuiltVoiceConfig: {
                voiceName:
                  voice === "Luna" ? "Kore" : voice === "Nova" ? "Kore" : voice,
              },
            },
          },
          systemInstruction: systemInstruction,
        },
      });

      sessionRef.current = await sessionPromise;

      // Nudge the agent to speak first once the channel settles.
      // NOTE(review): text is usually sent via sendClientContent — confirm
      // sendRealtimeInput({ text }) is supported by this SDK version.
      setTimeout(() => {
        if (sessionRef.current) {
          sessionRef.current.sendRealtimeInput({
            text: "System initialized. Please begin your initial introduction as Agent Double-O-Seven.",
          });
        }
      }, 500);
    } catch (err: any) {
      console.error(err);
      setError(err.message || "Failed to start session.");
      stopSession();
    }
  };

  // Tear everything down and deliver logs (and the recording, when present)
  // to onSessionEnd. Idempotent: the UI button, onerror and onclose may all
  // call this, but teardown and onSessionEnd run only once per session.
  const stopSession = () => {
    if (stoppingRef.current) return;
    stoppingRef.current = true;

    setIsActive(false);

    if (recognitionRef.current) {
      // Null the ref first so the onend auto-restart handler goes quiet.
      const rec = recognitionRef.current;
      recognitionRef.current = null;
      try { rec.stop(); } catch (e) {}
    }

    if (sessionRef.current) {
      try { sessionRef.current.close(); } catch (e) {}
      sessionRef.current = null;
    }

    if (capturerRef.current) {
      capturerRef.current.stop();
      capturerRef.current = null;
    }

    if (playerRef.current) {
      try { playerRef.current.clear(); } catch (e) {}
      playerRef.current = null;
    }

    if (recorderRef.current) {
      try {
        // Build the Blob only after the recorder has flushed its last chunk.
        recorderRef.current.onstop = () => {
          let blob: Blob | undefined;
          if (audioChunksRef.current.length > 0) {
            const firstChunk = audioChunksRef.current[0] as any;
            const type = firstChunk.type || "audio/webm";
            blob = new Blob(audioChunksRef.current, { type });
          }
          if (onSessionEnd) {
            onSessionEnd(
              [...logsRef.current, { role: "system", text: "Session ended." }],
              blob,
            );
          }
        };
        recorderRef.current.stop();
      } catch (e) {
        if (onSessionEnd) onSessionEnd([...logsRef.current, { role: "system", text: "Session ended." }]);
      }
      recorderRef.current = null;
    } else {
      if (onSessionEnd) onSessionEnd([...logsRef.current, { role: "system", text: "Session ended." }]);
    }

    // Release the Web Audio contexts created in startSession. Without this
    // every session leaked two AudioContexts, and browsers cap how many can
    // exist at once.
    audioCtxsRef.current.forEach((ctx) => {
      if (ctx.state !== "closed") {
        ctx.close().catch(() => {});
      }
    });
    audioCtxsRef.current = [];
  };

  return (
    <div className="flex flex-col min-h-full py-12 items-center justify-center p-4 w-full">
      <div className="flex flex-col items-center justify-center space-y-6 md:space-y-10 w-full max-w-2xl mx-auto py-8">
        <button
          onClick={isActive ? stopSession : startSession}
          className={`relative flex items-center justify-center w-24 h-24 sm:w-32 sm:h-32 rounded-full transition-all duration-500 shadow-xl focus:outline-none focus:ring-4 focus:ring-white/50 ${
            isActive
              ? "bg-[#044f54] shadow-[0_0_80px_rgba(4,79,84,0.6)] hover:bg-[#044f54]/90"
              : "bg-slate-50 border border-[#044f54]/40 hover:border-[#044f54] hover:bg-slate-50"
          }`}
        >
          {!isActive && (
            <div className="absolute inset-2 sm:inset-4 rounded-full border border-[#044f54]/20"></div>
          )}
          {isActive && (
            <span
              className="absolute w-full h-full rounded-full bg-[#044f54]/90 opacity-20 animate-ping"
              style={{ animationDuration: "2s" }}
            />
          )}
          {isActive ? (
            <Mic className="w-10 h-10 sm:w-12 sm:h-12 text-white animate-pulse" />
          ) : (
            <MicOff className="w-8 h-8 sm:w-10 sm:h-10 text-[#044f54]/60" />
          )}
        </button>

        <div className="text-center font-mono text-sm tracking-wider h-6">
          {isActive ? (
            <div className="inline-flex flex-wrap justify-center items-center gap-2 px-3 py-1 rounded-full border border-[#044f54]/30 bg-[#044f54]/10 text-[#044f54]/90 text-[10px] font-mono tracking-widest uppercase">
              <span className="w-2 h-2 rounded-full bg-[#044f54] animate-pulse shrink-0"></span>{" "}
              <span className="text-[#044f54]">
                Recording & Transmitting...
              </span>
            </div>
          ) : (
            <span className="text-[#044f54]/40 uppercase text-[10px] tracking-widest">
              System Standby
            </span>
          )}
        </div>

        {error && (
          <div className="bg-red-500/10 border border-red-500/50 text-red-400 px-4 py-3 rounded-md flex items-center gap-3 w-full max-w-md">
            <AlertCircle className="w-5 h-5 shrink-0" />
            <span className="text-sm">{error}</span>
          </div>
        )}

        {/* System Log (currently hidden via the `hidden` class) */}
        <div className="hidden w-full max-w-md border border-[#044f54]/10 rounded-xl bg-[#044f54]/[0.02] backdrop-blur-md overflow-hidden shadow-sm">
          <div className="border-b border-[#044f54]/5 px-4 py-3 flex items-center justify-between">
            <span className="text-[10px] font-mono text-[#044f54]/40 uppercase tracking-widest">
              System Log
            </span>
            <Settings className="w-3 h-3 text-[#044f54]/40" />
          </div>
          <div className="p-4 h-48 overflow-y-auto space-y-2 font-mono text-xs flex flex-col-reverse">
            {[...logs]
              .reverse()
              .filter((log) => log.role !== "system")
              .map((log, i) => (
                <div
                  key={i}
                  className={`py-1 ${
                    log.role === "agent"
                      ? "text-[#044f54]/80"
                      : "text-[#044f54]/80 font-semibold"
                  }`}
                >
                  <span className="opacity-60 select-none mr-2">
                    [{log.role.toUpperCase()}]
                  </span>
                  <span
                    className={log.role === "agent" ? "text-[#044f54]" : ""}
                  >
                    {log.text}
                  </span>
                </div>
              ))}
            {logs.length === 0 && (
              <div className="text-[#044f54]/30 italic py-2">
                Initialize session to view logs.
              </div>
            )}
          </div>
        </div>
      </div>
    </div>
  );
}
