DAFC CLI Output Example

Avi Santoso

Okay, let's implement the voice interaction architecture and apply the new visual design system.

1. Update Dependencies

First, add the typed-emitter package.

bun add typed-emitter

2. Update Type Definitions

Modify src/types.ts to include the new interfaces and types for voice interaction and the audio recorder.


src/types.ts:

import { EventEmitter } from "events"; // Node's EventEmitter or a compatible one
import TypedEmitter from "typed-emitter";

export type VoiceControlType = "push-to-talk" | "streaming-audio";

// Type for simple transcript messages (used in Phone/Visual Novel)
/** A single transcript line without attachments (used in Phone/Visual Novel layouts). */
export interface SimpleTranscriptMessage {
  /** Unique message identifier; string allowed for UUID-style backend IDs. */
  id: number | string;
  /** Display name of the speaker — "You" or the other party's name. */
  speaker: string;
  /** The spoken/transcribed text. */
  text: string;
  /** Optional display timestamp (format decided by the producer). */
  time?: string;
}

// Type for agent information
/** Descriptive metadata shown for the agent in the UI panels. */
export interface AgentInfo {
  /** e.g. "Customer Support". */
  department: string;
  /** e.g. "Shipping & Returns". */
  specialization: string;
  /** Human-readable list of spoken languages (single string, not an array). */
  languages: string;
}

// Type for tracking data attachment
/** Shipment details rendered inside a tracking attachment card. */
export interface TrackingData {
  trackingNumber: string;
  carrier: string;
  /** Human-readable delivery estimate (display string, not a Date). */
  estimatedDelivery: string;
}

// Type for file attachment
/** A file shared in the transcript (discriminated by `type: "file"`). */
export interface AttachmentFile {
  type: "file";
  /** File extension/kind used to pick an icon, e.g. 'pdf', 'jpg'. */
  fileType: string;
  /** Display file name. */
  name: string;
  /** Pre-formatted display size, e.g. "1.2 MB" (string, not bytes). */
  size: string;
}

// Type for form attachment
/** An inline form shown in the transcript (discriminated by `type: "form"`). */
export interface AttachmentForm {
  type: "form";
  /**
   * Kind of form to render. Written as `"address" | (string & {})` instead of
   * `"address" | string`: the plain union collapses to `string` and loses the
   * `"address"` literal, so editors stop suggesting it. The `(string & {})`
   * branch keeps the type open to any string (backward-compatible) while
   * preserving autocomplete for the known literal.
   */
  formType: "address" | (string & {});
}

// Type for tracking attachment
/** A shipment-tracking card in the transcript (discriminated by `type: "tracking"`). */
export interface AttachmentTracking {
  type: "tracking";
  /** The shipment details to render. */
  data: TrackingData;
}

// Union type for possible attachments in CardLayout transcript
/**
 * Union of possible attachments in a CardLayout transcript message.
 * Discriminate on the `type` field ("file" | "form" | "tracking").
 */
export type TranscriptAttachment =
  | AttachmentFile
  | AttachmentForm
  | AttachmentTracking;

// Type for transcript messages in CardLayout (includes attachments)
/** Transcript line for CardLayout — a SimpleTranscriptMessage plus an optional attachment. */
export interface TranscriptMessage extends SimpleTranscriptMessage {
  /** Rich content rendered alongside the text, if any. */
  attachment?: TranscriptAttachment;
}

// --- Voice Interaction Interfaces ---

/**
 * Functions the UI component can call to signal user actions or requests
 * related to voice interaction. The UI holds a dispatcher and never talks
 * to the transport (WebSocket/recorder) directly.
 */
export interface VoiceDispatcher {
  /** Called when the user intends to start sending audio (e.g., unmutes in streaming, starts push-to-talk). */
  startAudioInput: () => void;

  /** Called when the user intends to stop sending audio (e.g., mutes in streaming, ends push-to-talk). */
  stopAudioInput: () => void;

  /** Signals the user wants to end the call/interaction and release resources. */
  endInteraction: () => void;

  /** Signals a change in the preferred voice control type (PTT vs Streaming). */
  setVoiceControlType: (controlType: VoiceControlType) => void;

  /** Optional: Send a specific audio chunk (e.g., after PTT release if not handled internally). */
  sendAudioChunk?: (audioBlob: Blob) => void;
}

/**
 * Events emitted by the voice processing logic to update the UI.
 * Keys are event names; values are the listener signatures (typed-emitter style).
 */
export interface VoiceEvents {
  /** Emitted when a new chunk of streaming text is available (for temporary display). */
  streamingText: (textChunk: string) => void;

  /** Emitted when a new complete transcript message is available. */
  transcriptUpdate: (message: SimpleTranscriptMessage | TranscriptMessage) => void;

  /** Emitted when a new chunk of streaming audio (base64 encoded) is available for playback. */
  streamingAudioB64: (audioB64: string) => void;

  /** Emitted to indicate if the agent/other party is currently speaking. */
  agentSpeaking: (isSpeaking: boolean) => void;

  /** Emitted when an error occurs during voice processing or communication. */
  error: (errorMessage: string) => void;

  /** Emitted to update the overall status of the call/interaction. */
  interactionStatus: (
    status: "idle" | "connecting" | "connected" | "reconnecting" | "ended"
  ) => void;

  /** Emitted when the voice control type is changed externally (not by this UI). */
  voiceControlChange: (controlType: VoiceControlType) => void;
}

/**
 * Typed EventEmitter instance for voice interaction events.
 * An `EventEmitter` whose `on`/`emit`/`off` are statically checked against {@link VoiceEvents}.
 */
export type VoiceEventEmitter = TypedEmitter<VoiceEvents>;

// --- Audio Recorder Interfaces ---

/** Construction options for an audio recorder implementation. */
export interface AudioRecorderOptions {
  /** WebSocket URL to send audio data */
  wsUrl: string;
  /** Event emitter to broadcast recorder state changes and receive backend events */
  eventEmitter: VoiceEventEmitter; // Use the specific VoiceEventEmitter
  /** Audio sample rate in Hz (Optional, browser default is often sufficient) */
  sampleRate?: number;
  /** MIME type for recording (e.g., 'audio/webm;codecs=opus') */
  mimeType?: string;
  /** Time slice in milliseconds between MediaRecorder dataavailable flushes (e.g., 250ms) */
  timeSlice?: number;
  /** Initial voice control type (PTT vs streaming) */
  initialVoiceControlType?: VoiceControlType;
}

/** Lifecycle contract for an audio recorder; the dispatcher methods call into these. */
export interface AudioRecorderInterface {
  /** Start recording audio (called internally by dispatcher methods) */
  startRecording(): Promise<void>;

  /** Stop recording audio (called internally by dispatcher methods). Resolves with no value — the captured blob/chunks are handled internally, not returned. */
  stopRecording(): Promise<void>; // Changed return type, blob handled internally

  /** Check if currently recording */
  isRecording(): boolean;

  /** Clean up resources (streams, sockets, audio contexts, listeners) */
  dispose(): void;

  /** Get the current voice control type */
  getVoiceControlType(): VoiceControlType;
}

/** Combined interface for the default browser recorder: recorder lifecycle plus UI-facing dispatch methods. */
export type BrowserAudioRecorder = AudioRecorderInterface & VoiceDispatcher;

3. Update Styling (CSS Variables and Tailwind Config)

Replace the content of base.css and tailwind.config.js with the versions reflecting the new design brief.


base.css:

/* Import base Tailwind styles */
@tailwind base;
@tailwind components;
@tailwind utilities;

/* Define CSS variables based on the visual design brief */
/* Define CSS variables based on the visual design brief.
   All color variables hold bare HSL components ("H S% L%") so Tailwind can
   wrap them as hsl(var(--x)) and hsl(var(--x) / alpha). */
@layer base {
  :root {
    /* Theme Colors (Light Mode) - HSL format */
    --dawn-coral: 6 100% 71%; /* #FF7F6B */
    --horizon-blue: 210 76% 54%; /* #4A90E2 */
    --morning-gold: 35 100% 64%; /* #FFB946 */
    --earth-brown: 25 25% 46%; /* #8B6D5C */
    --muted-sage: 98 15% 67%; /* #A8B8A0 */
    --deep-valley: 222 47% 20%; /* #1A2B49 */
    --warm-parchment: 38 33% 95%; /* #F7F3EE */
    --light-parchment: 38 33% 97%; /* Lighter parchment tint, reserved for input/backdrop surfaces */

    /* shadcn/ui compatible variables (Light Mode) */
    --background: var(--warm-parchment); /* #F7F3EE */
    --foreground: var(--deep-valley); /* #1A2B49 */

    --card: var(--warm-parchment); /* #F7F3EE */
    --card-foreground: var(--deep-valley); /* #1A2B49 */

    --popover: var(--warm-parchment); /* #F7F3EE */
    --popover-foreground: var(--deep-valley); /* #1A2B49 */

    --primary: var(--dawn-coral); /* #FF7F6B - Primary Accent */
    --primary-foreground: var(--deep-valley); /* Dark text on Coral */

    --secondary: var(--horizon-blue); /* #4A90E2 - Secondary Accent */
    --secondary-foreground: var(--warm-parchment); /* Light text on Blue */

    --muted: 25 20% 88%; /* Lighter gray derived from Parchment/Brown */
    --muted-foreground: 25 15% 55%; /* Darker gray derived from Brown/Valley */

    --accent: var(--morning-gold); /* #FFB946 - Supporting Accent */
    --accent-foreground: var(--deep-valley); /* Dark text on Gold */

    --destructive: 0 84% 60%; /* Standard red */
    --destructive-foreground: 38 33% 98%; /* Light text on red (almost white) */

    --border: 25 20% 80%; /* Subtle border derived from Brown/Parchment */
    --input: 25 20% 86%; /* Slightly darker border for inputs */
    --ring: var(--horizon-blue); /* Focus ring color */

    /* Border Radius (12px as per brief suggestion for organic feel) */
    --radius: 0.75rem; /* 12px */
  }

  .dark {
    /* Theme Colors (Dark Mode) - Adjust HSL values for dark theme aesthetics */
    /* Keeping dark mode simple for now, focusing on light mode implementation */
    --dawn-coral: 6 100% 71%; /* NOTE(review): unchanged from light mode — confirm intended */
    --horizon-blue: 210 76% 64%; /* Lighter blue */
    --morning-gold: 35 100% 74%; /* Lighter gold */
    --earth-brown: 25 15% 65%; /* Lighter brown */
    --muted-sage: 98 15% 55%; /* Darker Sage */
    --deep-valley: 210 33% 98%; /* Light text */
    --warm-parchment: 222 47% 11%; /* Dark background */
    --light-parchment: 222 47% 15%; /* Slightly lighter dark */

    /* shadcn/ui compatible variables (Dark Mode) */
    --background: 222 47% 11%; /* Dark blue-gray */
    --foreground: 210 33% 98%; /* Light text */

    --card: 222 47% 13%; /* Slightly lighter than background */
    --card-foreground: 210 33% 98%; /* Light text */

    --popover: 222 47% 13%;
    --popover-foreground: 210 33% 98%;

    --primary: var(--dawn-coral); /* Keep Coral */
    --primary-foreground: 222 47% 15%; /* Dark text on coral */

    --secondary: var(--horizon-blue); /* Lighter blue */
    --secondary-foreground: 222 47% 15%; /* Dark text on light blue */

    --muted: 222 30% 20%; /* Dark muted */
    --muted-foreground: 210 20% 70%; /* Lighter muted text */

    --accent: var(--morning-gold); /* Lighter gold */
    --accent-foreground: 222 47% 15%; /* Dark text on light gold */

    --destructive: 0 74% 70%; /* Lighter red */
    --destructive-foreground: 210 33% 98%; /* Light text on red */

    --border: 222 30% 25%; /* Darker border */
    --input: 222 30% 25%; /* Darker input */
    --ring: var(--horizon-blue); /* Lighter blue for focus */
  }
}

/* Apply base border color and default typography.
   The universal border-border rule matches shadcn/ui's stock preflight. */
@layer base {
  * {
    @apply border-border;
  }
  body {
    @apply bg-background text-foreground;
    /* Apply default sans-serif font */
    font-family: theme(fontFamily.sans);
  }

  /* Apply serif font to headings globally (brief: humanist serif for headings) */
  h1,
  h2,
  h3,
  h4,
  h5,
  h6 {
    font-family: theme(fontFamily.serif);
  }
}

/* Add smooth transitions globally */
@layer components {
  .transition-smooth {
    @apply transition-all duration-300 ease-in-out;
  }

  /* Apply base styles for better consistency.
     NOTE(review): `body` is also styled in the @layer base block above —
     consider merging the two rules so body styling lives in one place. */
  body {
    @apply min-h-screen antialiased;
  }
}

tailwind.config.js:

/** @type {import('tailwindcss').Config} */
module.exports = {
  darkMode: ["class"], // Enable dark mode using class strategy
  content: [
    "./src/**/*.{js,ts,jsx,tsx}", // Scan source files in the library for Tailwind classes
  ],
  prefix: "", // No prefix for library components
  theme: {
    container: {
      center: true,
      padding: "2rem",
      screens: {
        "2xl": "1400px",
      },
    },
    extend: {
      colors: {
        // Theme colors based on Visual Design Brief - mapped to CSS variables
        // (the variables hold bare "H S% L%" components, defined in base.css)
        "dawn-coral": "hsl(var(--dawn-coral))", // #FF7F6B
        "horizon-blue": "hsl(var(--horizon-blue))", // #4A90E2
        "morning-gold": "hsl(var(--morning-gold))", // #FFB946
        "earth-brown": "hsl(var(--earth-brown))", // #8B6D5C
        "muted-sage": "hsl(var(--muted-sage))", // #A8B8A0 (NEW)
        "deep-valley": "hsl(var(--deep-valley))", // #1A2B49
        "warm-parchment": "hsl(var(--warm-parchment))", // #F7F3EE (Replaces Mountain Mist)
        "light-parchment": "hsl(var(--light-parchment))", // Lighter version

        // shadcn/ui compatible CSS variables (values defined in base.css)
        border: "hsl(var(--border))",
        input: "hsl(var(--input))",
        ring: "hsl(var(--ring))", // Use horizon-blue for focus rings
        background: "hsl(var(--background))", // Use warm-parchment
        foreground: "hsl(var(--foreground))", // Use deep-valley
        primary: {
          DEFAULT: "hsl(var(--primary))", // Use dawn-coral
          foreground: "hsl(var(--primary-foreground))", // Use deep-valley (dark text on coral)
        },
        secondary: {
          DEFAULT: "hsl(var(--secondary))", // Use horizon-blue
          foreground: "hsl(var(--secondary-foreground))", // Use warm-parchment (light text on blue)
        },
        destructive: {
          DEFAULT: "hsl(var(--destructive))", // Standard red
          foreground: "hsl(var(--destructive-foreground))", // Light text on red
        },
        muted: {
          DEFAULT: "hsl(var(--muted))", // Lighter gray derived from Parchment/Brown
          foreground: "hsl(var(--muted-foreground))", // Darker gray derived from Brown/Valley
        },
        accent: {
          DEFAULT: "hsl(var(--accent))", // Use morning-gold
          foreground: "hsl(var(--accent-foreground))", // Use deep-valley (dark text on gold)
        },
        popover: {
          DEFAULT: "hsl(var(--popover))", // Use warm-parchment
          foreground: "hsl(var(--popover-foreground))", // Use deep-valley
        },
        card: {
          DEFAULT: "hsl(var(--card))", // Use warm-parchment
          foreground: "hsl(var(--card-foreground))", // Use deep-valley
        },
      },
      borderRadius: {
        // Use 12px as per brief suggestion for organic feel, defined via CSS variable
        lg: "var(--radius)", // 12px
        md: "calc(var(--radius) - 4px)", // 8px
        sm: "calc(var(--radius) - 6px)", // 6px
      },
      keyframes: {
        // Standard shadcn/ui keyframes
        "accordion-down": {
          from: { height: "0" },
          to: { height: "var(--radix-accordion-content-height)" },
        },
        "accordion-up": {
          from: { height: "var(--radix-accordion-content-height)" },
          to: { height: "0" },
        },
        "caret-blink": {
          // For input-otp
          "0%,70%,100%": { opacity: "1" },
          "20%,50%": { opacity: "0" },
        },
      },
      animation: {
        // Standard shadcn/ui animations
        "accordion-down": "accordion-down 0.3s ease-in-out", // Smoother transition
        "accordion-up": "accordion-up 0.3s ease-in-out", // Smoother transition
        "caret-blink": "caret-blink 1.25s ease-out infinite",
      },
      fontFamily: {
        // Match the brief
        serif: ["Spectral", "Merriweather", "Lora", "Georgia", "serif"], // Humanist serif for headings
        sans: [
          "Inter",
          "Nunito Sans",
          "Source Sans Pro",
          "system-ui",
          "-apple-system",
          "BlinkMacSystemFont",
          "Segoe UI",
          "Roboto",
          "Helvetica Neue",
          "Arial",
          "sans-serif",
        ], // Modern sans-serif for body
      },
      boxShadow: {
        // Soft shadows as per brief, using Deep Valley base color
        // Example: 0 4px 12px rgba(26, 43, 73, 0.08)
        // NOTE(review): the "H S% L% / alpha" slash syntax inside hsla()
        // requires CSS Color 4 support — fine in evergreen browsers.
        DEFAULT: "0 4px 12px hsla(var(--deep-valley) / 0.08)",
        md: "0 6px 16px hsla(var(--deep-valley) / 0.08)",
        lg: "0 10px 30px hsla(var(--deep-valley) / 0.1)",
        xl: "0 15px 40px hsla(var(--deep-valley) / 0.12)",
      },
      // Add support for backdrop blur if needed
      backdropBlur: {
        DEFAULT: "10px",
        sm: "4px",
        md: "16px",
        lg: "24px",
      },
      // Add subtle textures (use sparingly)
      backgroundImage: {
        "wood-grain": "url('/path/to/wood-grain.png')", // Replace with actual path if used
        "canvas-texture": "url('/path/to/canvas-texture.png')", // Replace with actual path if used
        "topo-pattern": "url('/path/to/topo-pattern.svg')", // Replace with actual path if used
        // Gradients from brief
        "dawn-gradient":
          "linear-gradient(to right, hsl(var(--dawn-coral)), hsl(var(--morning-gold)), hsl(var(--horizon-blue)))",
        "sky-gradient":
          "linear-gradient(to bottom, hsl(var(--horizon-blue)), hsl(var(--light-parchment)))",
      },
      fontSize: {
        // Define font sizes from brief.
        // NOTE(review): these override Tailwind's default scale, so every
        // text-3xl/text-2xl/... utility in the app changes size — confirm
        // this global override is intended rather than new named sizes.
        "3xl": ["3rem", { lineHeight: "1.2" }], // H1: 48px
        "2xl": ["2rem", { lineHeight: "1.3" }], // H2: 32px
        xl: ["1.5rem", { lineHeight: "1.4" }], // H3: 24px
        lg: ["1.125rem", { lineHeight: "1.5" }], // Slightly larger base body if needed
        base: ["1rem", { lineHeight: "1.6" }], // Body: 16px
        sm: ["0.875rem", { lineHeight: "1.5" }], // Small/Meta: 14px
        xs: ["0.75rem", { lineHeight: "1.4" }], // Extra small
      },
    },
  },
  plugins: [
    require("tailwindcss-animate"), // Standard shadcn/ui animation plugin
    // Enable backdrop filter plugin if needed
    // require('tailwindcss-filters'),
  ],
  // Optional: If using backdrop filters
  // variants: {
  //   extend: {
  //     backdropFilter: ['responsive'],
  //     backdropBlur: ['responsive'],
  //   },
  // },
};

4. Recompile Tailwind CSS

Run the compile script to update src/styles/globals.css based on the changes in base.css and tailwind.config.js.

bun run compile

(You should see the updated content in src/styles/globals.css after this step)

5. Implement Browser Audio Recorder

Create the default browser audio recorder implementation.


src/lib/BrowserAudioRecorder.ts:

import { EventEmitter } from "events";
import TypedEmitter from "typed-emitter";
import {
  AudioRecorderInterface,
  AudioRecorderOptions,
  VoiceControlType,
  VoiceDispatcher,
  VoiceEventEmitter,
  VoiceEvents,
} from "@/types";

// Default options
// Opus-in-WebM is the most broadly supported low-latency recording format.
const DEFAULT_MIME_TYPE = "audio/webm;codecs=opus";
// Interval between MediaRecorder dataavailable flushes while streaming.
const DEFAULT_TIME_SLICE = 250; // ms

/**
 * Default browser implementation of the voice transport:
 * - captures microphone audio via MediaRecorder and streams chunks over a WebSocket;
 * - receives backend events over the same socket and republishes them on the
 *   shared {@link VoiceEventEmitter};
 * - queues and plays back base64-encoded audio chunks via Web Audio.
 */
export class BrowserAudioRecorder
  implements AudioRecorderInterface, VoiceDispatcher
{
  private options: Required<AudioRecorderOptions>;
  private mediaRecorder: MediaRecorder | null = null;
  // Reserved for future input-side processing; only closed in dispose().
  private audioContext: AudioContext | null = null;
  private audioStream: MediaStream | null = null;
  private ws: WebSocket | null = null;
  private recordingState: "idle" | "recording" | "stopping" = "idle";
  private voiceControlType: VoiceControlType;
  private emitter: VoiceEventEmitter;
  // FIFO of decoded audio chunks awaiting playback.
  private audioQueue: ArrayBuffer[] = [];
  private isPlaying = false;
  private audioPlaybackContext: AudioContext | null = null;

  constructor(options: AudioRecorderOptions) {
    // Ensure emitter is provided
    if (!options.eventEmitter) {
      throw new Error("VoiceEventEmitter instance must be provided.");
    }
    this.emitter = options.eventEmitter;

    this.options = {
      wsUrl: options.wsUrl,
      eventEmitter: this.emitter,
      sampleRate: options.sampleRate || 48000, // Default to a common rate
      mimeType: options.mimeType || DEFAULT_MIME_TYPE,
      timeSlice: options.timeSlice || DEFAULT_TIME_SLICE,
      initialVoiceControlType:
        options.initialVoiceControlType || "streaming-audio",
    };
    this.voiceControlType = this.options.initialVoiceControlType;

    // Bind methods to ensure 'this' context is correct
    this.startAudioInput = this.startAudioInput.bind(this);
    this.stopAudioInput = this.stopAudioInput.bind(this);
    this.endInteraction = this.endInteraction.bind(this);
    this.setVoiceControlType = this.setVoiceControlType.bind(this);
    this.startRecording = this.startRecording.bind(this);
    this.stopRecording = this.stopRecording.bind(this);
    this.isRecording = this.isRecording.bind(this);
    this.dispose = this.dispose.bind(this);
    this.getVoiceControlType = this.getVoiceControlType.bind(this);
    this._handleWebSocketMessage = this._handleWebSocketMessage.bind(this);
    this._handleDataAvailable = this._handleDataAvailable.bind(this);
    this._playNextAudioChunk = this._playNextAudioChunk.bind(this);
    // BUGFIX: bind once and keep the bound reference on the instance.
    // Previously a fresh `.bind(this)` result was passed straight to
    // emitter.on(), so the emitter.off(this._handleStreamingAudio) call in
    // dispose() compared a *different* function object and never removed
    // the listener (listener leak / possible playback after dispose).
    this._handleStreamingAudio = this._handleStreamingAudio.bind(this);

    // Initialize WebSocket
    this._connectWebSocket();

    // Initialize AudioContext for playback
    this.audioPlaybackContext = new (window.AudioContext ||
      (window as any).webkitAudioContext)();
    // Register the pre-bound reference so dispose() can remove it.
    this.emitter.on("streamingAudioB64", this._handleStreamingAudio);
  }

  // --- VoiceDispatcher Implementation ---

  /** User wants to start sending audio (unmute / push-to-talk press). */
  startAudioInput(): void {
    if (this.recordingState === "idle") {
      console.log("BrowserAudioRecorder: Starting audio input...");
      this.startRecording().catch((err) => {
        console.error("Error starting recording:", err);
        this.emitter.emit("error", "Failed to start audio input.");
      });
    } else {
      console.warn("BrowserAudioRecorder: Already recording or stopping.");
    }
  }

  /** User wants to stop sending audio (mute / push-to-talk release). */
  stopAudioInput(): void {
    if (this.recordingState === "recording") {
      console.log("BrowserAudioRecorder: Stopping audio input...");
      this.stopRecording().catch((err) => {
        console.error("Error stopping recording:", err);
        this.emitter.emit("error", "Failed to stop audio input properly.");
      });
    } else {
      console.warn("BrowserAudioRecorder: Not currently recording.");
    }
  }

  /** End the interaction: stop recording, release everything, emit "ended". */
  endInteraction(): void {
    console.log("BrowserAudioRecorder: Ending interaction...");
    this.stopRecording() // Ensure recording stops
      .finally(() => {
        // dispose() already closes the WebSocket; the previous explicit
        // this.ws?.close() here closed it a second time redundantly.
        this.dispose(); // Clean up resources
        this.emitter.emit("interactionStatus", "ended");
      });
  }

  /** Switch PTT/streaming mode; stops any in-flight recording when entering PTT. */
  setVoiceControlType(controlType: VoiceControlType): void {
    if (this.voiceControlType !== controlType) {
      console.log(
        `BrowserAudioRecorder: Setting voice control type to ${controlType}`
      );
      this.voiceControlType = controlType;
      // If switching to PTT while recording in streaming mode, stop recording
      if (
        controlType === "push-to-talk" &&
        this.recordingState === "recording"
      ) {
        this.stopAudioInput();
      }
      this.emitter.emit("voiceControlChange", controlType); // Notify UI
    }
  }

  // --- AudioRecorderInterface Implementation ---

  /**
   * Acquire the microphone and start a time-sliced MediaRecorder whose
   * chunks are forwarded to the WebSocket by _handleDataAvailable.
   * Falls back through webm/opus then ogg/opus if the requested MIME type
   * is unsupported. Rethrows getUserMedia failures after emitting "error".
   */
  async startRecording(): Promise<void> {
    if (this.recordingState !== "idle") {
      console.warn("Recording already in progress or stopping.");
      return;
    }

    this.recordingState = "recording";
    this.emitter.emit("interactionStatus", "connecting"); // Or 'recording'

    try {
      this.audioStream = await navigator.mediaDevices.getUserMedia({
        audio: { sampleRate: this.options.sampleRate },
      });
      this.emitter.emit("interactionStatus", "connected"); // Or 'recording'

      // Check MIME type support, walking the fallback chain.
      let mimeType = this.options.mimeType;
      if (!MediaRecorder.isTypeSupported(mimeType)) {
        console.warn(
          `MIME type ${mimeType} not supported. Trying default webm/opus.`
        );
        mimeType = "audio/webm;codecs=opus";
        if (!MediaRecorder.isTypeSupported(mimeType)) {
          console.warn(
            `Default webm/opus not supported. Trying audio/ogg;codecs=opus.`
          );
          mimeType = "audio/ogg;codecs=opus";
          if (!MediaRecorder.isTypeSupported(mimeType)) {
            console.error(
              "Neither specified nor fallback MIME types are supported."
            );
            this.emitter.emit(
              "error",
              "Audio recording format not supported by this browser."
            );
            this.recordingState = "idle";
            this.dispose(); // Clean up stream
            return;
          }
        }
      }

      this.mediaRecorder = new MediaRecorder(this.audioStream, { mimeType });

      this.mediaRecorder.ondataavailable = this._handleDataAvailable;

      this.mediaRecorder.onstop = () => {
        console.log("BrowserAudioRecorder: MediaRecorder stopped.");
        this.recordingState = "idle";
        // Ensure any remaining data is processed if needed (might be handled by ondataavailable)
        this.disposeStream(); // Clean up stream after stopping
      };

      this.mediaRecorder.onerror = (event) => {
        console.error("MediaRecorder error:", event);
        // BUGFIX: `${event}` stringified to "[object Event]"; surface the
        // underlying error (or at least the event type) instead.
        const detail =
          (event as unknown as { error?: unknown }).error ?? event.type;
        this.emitter.emit("error", `MediaRecorder error: ${String(detail)}`);
        this.recordingState = "idle";
        this.dispose();
      };

      // Start recording with time slicing for streaming
      this.mediaRecorder.start(this.options.timeSlice);
      console.log(
        `BrowserAudioRecorder: MediaRecorder started with timeslice ${this.options.timeSlice}ms and mimeType ${mimeType}`
      );
    } catch (err) {
      console.error("Error accessing microphone:", err);
      this.emitter.emit("error", "Microphone access denied or unavailable.");
      this.recordingState = "idle";
      this.emitter.emit("interactionStatus", "idle");
      this.dispose(); // Clean up any partial resources
      throw err; // Re-throw for external handling if necessary
    }
  }

  /**
   * Stop the active MediaRecorder; resolves once its 'stop' event fires
   * and the input stream has been released. Safe to call when not recording.
   */
  async stopRecording(): Promise<void> {
    if (this.recordingState !== "recording" || !this.mediaRecorder) {
      console.warn("Not recording or MediaRecorder not initialized.");
      // Ensure state consistency if called unexpectedly
      if (this.recordingState === "stopping") return; // Already stopping
      this.recordingState = "idle";
      this.disposeStream(); // Clean up stream just in case
      return;
    }

    this.recordingState = "stopping";
    return new Promise((resolve) => {
      if (this.mediaRecorder) {
        // Replace the default onstop so this promise resolves on completion.
        this.mediaRecorder.onstop = () => {
          console.log("BrowserAudioRecorder: MediaRecorder stopped via stopRecording.");
          this.recordingState = "idle";
          this.disposeStream(); // Clean up stream after stopping
          resolve();
        };
        this.mediaRecorder.stop();
      } else {
        // Should not happen if state is 'recording', but handle defensively
        this.recordingState = "idle";
        this.disposeStream();
        resolve();
      }
    });
  }

  isRecording(): boolean {
    return this.recordingState === "recording";
  }

  getVoiceControlType(): VoiceControlType {
    return this.voiceControlType;
  }

  /** Release every held resource: stream, recorder, sockets, audio contexts, listeners. */
  dispose(): void {
    console.log("BrowserAudioRecorder: Disposing resources...");
    if (this.recordingState === "recording") {
      this.stopRecording().catch(console.error); // Attempt graceful stop
    } else {
      this.disposeStream();
    }
    this.ws?.close();
    this.ws = null;
    this.audioContext?.close().catch(console.error);
    this.audioContext = null;
    this.audioPlaybackContext?.close().catch(console.error);
    this.audioPlaybackContext = null;
    this.audioQueue = [];
    this.isPlaying = false;
    // Remove the playback listener. This now works because the constructor
    // stored the bound reference before registering it (see BUGFIX there).
    this.emitter.off("streamingAudioB64", this._handleStreamingAudio);
    console.log("BrowserAudioRecorder: Resources disposed.");
  }

  // --- Private Helper Methods ---

  /** Stop all microphone tracks and drop stream/recorder references. */
  private disposeStream(): void {
    this.audioStream?.getTracks().forEach((track) => track.stop());
    this.audioStream = null;
    this.mediaRecorder = null; // Ensure recorder is cleared
    console.log("BrowserAudioRecorder: Audio stream disposed.");
  }

  /** Open the backend WebSocket (idempotent while a socket is already OPEN). */
  private _connectWebSocket(): void {
    if (this.ws && this.ws.readyState === WebSocket.OPEN) {
      console.log("WebSocket already open.");
      return;
    }

    console.log(`BrowserAudioRecorder: Connecting WebSocket to ${this.options.wsUrl}...`);
    this.emitter.emit("interactionStatus", "connecting");
    this.ws = new WebSocket(this.options.wsUrl);

    this.ws.onopen = () => {
      console.log("BrowserAudioRecorder: WebSocket connected.");
      // Only emit 'connected' if not already recording (handled in startRecording)
      if (this.recordingState !== 'recording') {
          this.emitter.emit("interactionStatus", "connected");
      }
    };

    this.ws.onmessage = this._handleWebSocketMessage;

    this.ws.onerror = (event) => {
      console.error("WebSocket error:", event);
      this.emitter.emit("error", "WebSocket connection error.");
      // Consider adding reconnection logic here
      this.emitter.emit("interactionStatus", "reconnecting"); // Or 'error'
    };

    this.ws.onclose = (event) => {
      console.log(`WebSocket closed: Code=${event.code}, Reason=${event.reason}`);
      // Avoid emitting 'ended' if dispose was called intentionally
      // (dispose() nulls this.ws before the close event is delivered).
      if (this.recordingState !== "idle" || this.ws !== null) {
          this.emitter.emit("interactionStatus", "reconnecting"); // Or 'ended' based on context
      }
       // Attempt to reconnect after a delay?
       // setTimeout(() => this._connectWebSocket(), 5000);
    };
  }

  /**
   * Translate backend JSON messages into typed emitter events.
   * Message shapes are assumed from the backend protocol — confirm against
   * the actual server implementation.
   */
  private _handleWebSocketMessage(event: MessageEvent): void {
    try {
      const data = JSON.parse(event.data);
      // console.log("WebSocket message received:", data);

      // --- Event Emission based on received data ---
      // Adjust these based on your actual backend message format

      if (data.type === "streaming_text" && data.text) {
        this.emitter.emit("streamingText", data.text);
      } else if (data.type === "transcript_update" && data.message) {
         // Ensure message has an ID, generate if missing (though backend should provide)
         const messageWithId = {
            ...data.message,
            id: data.message.id || `msg-${Date.now()}-${Math.random()}`,
         };
        this.emitter.emit("transcriptUpdate", messageWithId);
      } else if (data.type === "agent_speaking" && typeof data.is_speaking === "boolean") {
        this.emitter.emit("agentSpeaking", data.is_speaking);
      } else if (data.type === "interaction_status" && data.status) {
        this.emitter.emit("interactionStatus", data.status);
      } else if (data.type === "error" && data.message) {
        this.emitter.emit("error", data.message);
      } else if (data.type === "audio_chunk" && data.audio_b64) {
        this.emitter.emit("streamingAudioB64", data.audio_b64);
      }
      // Add more handlers as needed
    } catch (error) {
      console.error("Error parsing WebSocket message:", error, "Data:", event.data);
      // Handle non-JSON messages or parsing errors if necessary
       if (typeof event.data === 'string' && event.data.startsWith("Error:")) {
            this.emitter.emit("error", event.data);
       }
    }
  }

  /** Forward each recorded chunk to the backend while the socket is open. */
  private _handleDataAvailable(event: BlobEvent): void {
    if (event.data.size > 0 && this.ws && this.ws.readyState === WebSocket.OPEN) {
      // console.log(`Sending audio chunk: ${event.data.size} bytes`);
      this.ws.send(event.data);
      // Optional: Call sendAudioChunk if defined and needed
      // this.options.sendAudioChunk?.(event.data);
    }
  }

  // --- Audio Playback Handling ---

  /** Decode a base64 audio chunk into the queue; kick playback if idle. */
  private _handleStreamingAudio(audioB64: string): void {
    try {
      // Decode base64 string to ArrayBuffer
      const binaryString = window.atob(audioB64);
      const len = binaryString.length;
      const bytes = new Uint8Array(len);
      for (let i = 0; i < len; i++) {
        bytes[i] = binaryString.charCodeAt(i);
      }
      this.audioQueue.push(bytes.buffer);
      // console.log(`Audio chunk added to queue. Queue size: ${this.audioQueue.length}`);
      if (!this.isPlaying) {
        this._playNextAudioChunk();
      }
    } catch (error) {
      console.error("Error decoding or queueing audio chunk:", error);
      this.emitter.emit("error", "Failed to process incoming audio.");
    }
  }

  /**
   * Play queued chunks back-to-back; each chunk's 'ended' callback pulls
   * the next one. Corrupt chunks are skipped rather than stalling the queue.
   */
  private async _playNextAudioChunk(): Promise<void> {
    if (this.audioQueue.length === 0 || !this.audioPlaybackContext) {
      this.isPlaying = false;
      // console.log("Audio queue empty or context missing. Stopping playback.");
      return;
    }

    this.isPlaying = true;
    const audioData = this.audioQueue.shift();

    if (!audioData) {
        this.isPlaying = false;
        return; // Should not happen due to the check above, but safety first
    }

    // console.log(`Playing next audio chunk. Remaining queue size: ${this.audioQueue.length}`);

    try {
      const audioBuffer = await this.audioPlaybackContext.decodeAudioData(audioData);
      const source = this.audioPlaybackContext.createBufferSource();
      source.buffer = audioBuffer;
      source.connect(this.audioPlaybackContext.destination);
      source.onended = () => {
        // console.log("Audio chunk finished playing.");
        // Check buffer state before playing next
        if (this.audioPlaybackContext?.state === 'running') {
             this._playNextAudioChunk();
        } else {
            console.warn("Audio context not running, pausing playback queue.");
            this.isPlaying = false;
            // Optionally try to resume context later or notify user
        }
      };
      source.start();
    } catch (error) {
      console.error("Error decoding or playing audio data:", error);
      this.emitter.emit("error", "Failed to play incoming audio chunk.");
      // Skip corrupted chunk and try next one
      this._playNextAudioChunk();
    }
  }
}

/**
 * Factory for {@link BrowserAudioRecorder}.
 *
 * Prefer this over `new BrowserAudioRecorder(...)` at call sites so consumers
 * stay decoupled from the concrete class.
 */
export function createBrowserAudioRecorder(
  options: AudioRecorderOptions
): BrowserAudioRecorder {
  const recorder = new BrowserAudioRecorder(options);
  return recorder;
}

6. Update Layout Components

Modify the layout components (CardLayout, PhoneLayout, VisualNovelLayout) to accept and use the dispatcher and eventEmitter props, and apply new styles.

  • CardLayout:

src/components/layouts/CardLayout/CardLayout.tsx:

import { useState, useEffect, useRef, useCallback } from "react";
import { CardLayoutPresenter } from "./CardLayoutPresenter";
import {
  VoiceControlType,
  TranscriptMessage,
  AgentInfo,
  VoiceDispatcher,
  VoiceEventEmitter,
  SimpleTranscriptMessage,
} from "@/types";
import { AgentPanel } from "./AgentPanel";
import { TranscriptPanel } from "./TranscriptPanel";
import { CallAction } from "./CallAction";
import { VoiceControlToggle } from "./VoiceControlToggle";
import { useToast } from "@/hooks/use-toast"; // Import useToast

// Define props for CardLayout to accept external data and interaction handlers
interface CardLayoutProps {
  agentName: string;
  agentAvatarSrc: string; // URL of the agent's avatar image
  agentAvatarFallback: string; // Text rendered when the avatar image is unavailable
  agentStatusText: string; // Initial status text
  agentInfo: AgentInfo;
  initialTranscriptMessages: TranscriptMessage[]; // Renamed for clarity
  initialVoiceControlType?: VoiceControlType; // Defaults to "push-to-talk"
  dispatcher: VoiceDispatcher; // Required: For sending actions
  eventEmitter: VoiceEventEmitter; // Required: For receiving updates
}

/**
 * Container for the card-style voice interaction layout.
 *
 * Owns all interaction state (connection status, mic / push-to-talk state,
 * call timer, transcript) and bridges it to the host application:
 * - `dispatcher` sends user actions (start/stop audio input, end interaction,
 *   change voice control mode),
 * - `eventEmitter` delivers updates (status, transcript, errors, mode change).
 * Rendering is delegated to CardLayoutPresenter and the panel sub-components.
 */
export function CardLayout({
  agentName,
  agentAvatarSrc,
  agentAvatarFallback,
  agentStatusText: initialAgentStatusText,
  agentInfo,
  initialTranscriptMessages,
  initialVoiceControlType = "push-to-talk",
  dispatcher,
  eventEmitter,
}: CardLayoutProps) {
  const [voiceControlType, setVoiceControlType] = useState<VoiceControlType>(
    initialVoiceControlType
  );
  const [isMicEffectivelyMuted, setIsMicEffectivelyMuted] = useState(
    initialVoiceControlType === "push-to-talk" // Mic is muted unless pushing in PTT
  );
  const [isPushing, setIsPushing] = useState(false); // Only relevant for PTT
  const [callDuration, setCallDuration] = useState(0); // Elapsed whole seconds
  const [interactionStatus, setInteractionStatus] = useState<
    "idle" | "connecting" | "connected" | "reconnecting" | "ended"
  >("idle");
  // NOTE(review): the setter is currently unused — status text only comes from
  // props. Kept so a future emitter event can update it.
  const [agentStatusText, setAgentStatusText] =
    useState(initialAgentStatusText);
  const [transcriptMessages, setTranscriptMessages] = useState<
    TranscriptMessage[]
  >(initialTranscriptMessages);
  const [activeForm, setActiveForm] = useState<string | number | null>(null); // Use message ID
  const scrollAreaRef = useRef<HTMLDivElement>(null);
  const [mobileTab, setMobileTab] = useState("persona");
  const [hoveredMessage, setHoveredMessage] = useState<string | number | null>(
    null
  );
  const [isDesktop, setIsDesktop] = useState(false);
  const { toast } = useToast(); // Get toast function

  // --- Effects for Event Emitter ---

  // Subscribe to emitter events; re-subscribes if the emitter instance changes.
  useEffect(() => {
    const handleStatusUpdate = (
      status: "idle" | "connecting" | "connected" | "reconnecting" | "ended"
    ) => {
      console.log("CardLayout: Interaction status update:", status);
      setInteractionStatus(status);
      if (status === "connected") {
        setCallDuration(0); // Reset duration on connect
      }
      if (status === "ended") {
        // Optionally reset other states
      }
    };

    const handleTranscriptUpdate = (
      message: SimpleTranscriptMessage | TranscriptMessage
    ) => {
      console.log("CardLayout: Transcript update:", message);
      // Ensure the message conforms to TranscriptMessage structure if needed
      const newMessage: TranscriptMessage = {
        ...message,
        // Use ?? (not ||) so valid falsy ids such as 0 are preserved; only
        // null/undefined get a generated fallback id.
        id: message.id ?? `msg-${Date.now()}`,
        // attachment handling might need adjustment based on Simple vs TranscriptMessage
        attachment: (message as TranscriptMessage).attachment || undefined,
      };
      setTranscriptMessages((prev) => [...prev, newMessage]);
    };

    const handleError = (errorMessage: string) => {
      console.error("CardLayout: Received error:", errorMessage);
      toast({
        variant: "destructive",
        title: "Interaction Error",
        description: errorMessage,
      });
    };

    const handleVoiceControlChange = (type: VoiceControlType) => {
      console.log("CardLayout: Voice control changed externally to:", type);
      setVoiceControlType(type);
      setIsMicEffectivelyMuted(type === "push-to-talk");
      setIsPushing(false);
    };

    // Subscribe
    eventEmitter.on("interactionStatus", handleStatusUpdate);
    eventEmitter.on("transcriptUpdate", handleTranscriptUpdate);
    eventEmitter.on("error", handleError);
    eventEmitter.on("voiceControlChange", handleVoiceControlChange);
    // Add listeners for streamingText, agentSpeaking, streamingAudioB64 if needed visually

    // Cleanup
    return () => {
      eventEmitter.off("interactionStatus", handleStatusUpdate);
      eventEmitter.off("transcriptUpdate", handleTranscriptUpdate);
      eventEmitter.off("error", handleError);
      eventEmitter.off("voiceControlChange", handleVoiceControlChange);
    };
  }, [eventEmitter, toast]);

  // --- Other Effects ---

  // Track the desktop/mobile breakpoint (>= 1024px == desktop).
  useEffect(() => {
    const checkIsDesktop = () => {
      setIsDesktop(window.innerWidth >= 1024);
    };
    checkIsDesktop();
    window.addEventListener("resize", checkIsDesktop);
    return () => window.removeEventListener("resize", checkIsDesktop);
  }, []);

  // Tick the call timer once per second while connected.
  useEffect(() => {
    if (interactionStatus !== "connected") return;
    const interval = setInterval(() => {
      setCallDuration((prev) => prev + 1);
    }, 1000);
    return () => clearInterval(interval);
  }, [interactionStatus]);

  // Keep the transcript scrolled to the newest message / opened form.
  useEffect(() => {
    if (!scrollAreaRef.current) return;
    // Defer until the new content has rendered so scrollHeight is final.
    const timer = setTimeout(() => {
      if (scrollAreaRef.current) {
        scrollAreaRef.current.scrollTop = scrollAreaRef.current.scrollHeight;
      }
    }, 50);
    return () => clearTimeout(timer); // don't scroll after unmount
  }, [transcriptMessages, activeForm]);

  // --- Callbacks ---

  // Format whole seconds as MM:SS.
  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, "0")}:${secs
      .toString()
      .padStart(2, "0")}`;
  };

  // Streaming mode only: toggling "mute" starts/stops the audio input stream.
  const toggleMute = useCallback(() => {
    if (voiceControlType === "streaming-audio") {
      const currentlyMuted = isMicEffectivelyMuted;
      if (currentlyMuted) {
        dispatcher.startAudioInput(); // Unmuting means starting input
      } else {
        dispatcher.stopAudioInput(); // Muting means stopping input
      }
      setIsMicEffectivelyMuted(!currentlyMuted); // Toggle local state
    }
  }, [voiceControlType, dispatcher, isMicEffectivelyMuted]);

  const handlePushStart = useCallback(() => {
    if (voiceControlType === "push-to-talk" && !isPushing) {
      setIsPushing(true);
      setIsMicEffectivelyMuted(false); // Mic is active while pushing
      dispatcher.startAudioInput();
    }
  }, [voiceControlType, dispatcher, isPushing]);

  const handlePushEnd = useCallback(() => {
    if (voiceControlType === "push-to-talk" && isPushing) {
      setIsPushing(false);
      setIsMicEffectivelyMuted(true); // Mic is muted after push
      dispatcher.stopAudioInput();
      // Optional: dispatcher.sendAudioChunk() if recorder doesn't handle it on stop
    }
  }, [voiceControlType, dispatcher, isPushing]);

  const endCall = useCallback(() => {
    dispatcher.endInteraction();
    // Status update will be handled by the event listener
  }, [dispatcher]);

  const restartCall = useCallback(() => {
    // This logic might belong in the parent application
    // For demo, we reset state and assume connection happens
    console.log("Restarting call (demo)...");
    setTranscriptMessages(initialTranscriptMessages); // Reset transcript
    setInteractionStatus("connecting");
    // Simulate connection success.
    // NOTE(review): this timeout is not cancelled on unmount — harmless for the
    // demo, but move this simulation to the parent app for production.
    setTimeout(() => {
      setInteractionStatus("connected");
      setCallDuration(0);
      setIsMicEffectivelyMuted(voiceControlType === "push-to-talk");
      setIsPushing(false);
    }, 1000);
  }, [initialTranscriptMessages, voiceControlType]);

  // Flip PTT <-> streaming; local state updates arrive via voiceControlChange.
  const toggleVoiceControlMode = useCallback(() => {
    const newMode =
      voiceControlType === "push-to-talk" ? "streaming-audio" : "push-to-talk";
    dispatcher.setVoiceControlType(newMode); // Inform the dispatcher/recorder
    // State update will be handled by the voiceControlChange event listener
  }, [voiceControlType, dispatcher]);

  const isCallActive = interactionStatus === "connected" || interactionStatus === "reconnecting";

  return (
    <CardLayoutPresenter
      isDesktop={isDesktop}
      mobileTab={mobileTab}
      setMobileTab={setMobileTab}
      subPanelSlot={
        <AgentPanel
          agentName={agentName}
          agentAvatarSrc={agentAvatarSrc}
          agentAvatarFallback={agentAvatarFallback}
          agentStatusText={
            isCallActive ? agentStatusText : interactionStatus // Show status like 'ended'
          }
          agentInfo={agentInfo}
          isCallActive={isCallActive}
          callDuration={callDuration}
          isMuted={isMicEffectivelyMuted} // Reflects actual mic state
          formatDuration={formatDuration}
          endCall={endCall}
          restartCall={restartCall} // Pass restart handler
          voiceControlType={voiceControlType}
          subActionSlot={
            isCallActive && (
              <VoiceControlToggle
                voiceControlType={voiceControlType}
                onToggleMode={toggleVoiceControlMode}
              />
            )
          }
          mainActionSlot={
            <CallAction
              voiceControlType={voiceControlType}
              isMuted={isMicEffectivelyMuted} // Pass effective mute state for button appearance/action
              isPushing={isPushing}
              onPushStart={handlePushStart}
              onPushEnd={handlePushEnd}
              onToggleMute={toggleMute} // Pass toggleMute for streaming button action
              isCallActive={isCallActive}
              onRestartCall={restartCall} // Pass restart handler
            />
          }
        />
      }
      mainPanelSlot={
        <TranscriptPanel
          isCallActive={isCallActive}
          callDuration={callDuration}
          formatDuration={formatDuration}
          messages={transcriptMessages} // Pass messages state
          activeForm={activeForm}
          hoveredMessage={hoveredMessage}
          setHoveredMessage={setHoveredMessage}
          setActiveForm={setActiveForm}
          scrollAreaRef={scrollAreaRef}
        />
      }
    />
  );
}

src/components/layouts/CardLayout/CardLayoutPresenter.tsx:

import { Button } from "@/components/ui/button";
import { ReactNode } from "react";

/** Props for CardLayoutPresenter. */
interface CardLayoutPresenterProps {
  isDesktop: boolean; // When true, both panels render side by side and the tabs are hidden
  mobileTab: string; // Active mobile tab: "persona" (agent) or "interaction" (transcript)
  setMobileTab: (tab: string) => void;
  subPanelSlot: ReactNode; // Agent panel content
  mainPanelSlot: ReactNode; // Transcript panel content
}

export function CardLayoutPresenter({
  isDesktop,
  mobileTab,
  setMobileTab,
  subPanelSlot: agentPanelSlot,
  mainPanelSlot: transcriptPanelSlot,
}: CardLayoutPresenterProps) {
  return (
    // Use Warm Parchment as the base background for the page containing this layout
    <div className="bg-warm-parchment p-4 md:p-6 lg:p-8 min-h-screen">
      {/* Mobile Tabs - Only visible on small screens */}
      {!isDesktop && (
        <div className="mb-4">
          <div className="grid grid-cols-2 gap-2">
            <Button
              variant={mobileTab === "persona" ? "secondary" : "outline"} // Use secondary (Horizon Blue) for active tab
              className={`rounded-lg transition-smooth ${
                mobileTab === "persona"
                  ? "text-secondary-foreground" // Ensure text contrast
                  : "border-earth-brown/50 text-earth-brown hover:bg-earth-brown/10"
              }`}
              onClick={() => setMobileTab("persona")}
            >
              Agent
            </Button>
            <Button
              variant={mobileTab === "interaction" ? "secondary" : "outline"}
              className={`rounded-lg transition-smooth ${
                mobileTab === "interaction"
                  ? "text-secondary-foreground"
                  : "border-earth-brown/50 text-earth-brown hover:bg-earth-brown/10"
              }`}
              onClick={() => setMobileTab("interaction")}
            >
              Conversation
            </Button>
          </div>
        </div>
      )}

      {/* Use CSS Grid for better layout control */}
      <div className="grid grid-cols-1 lg:grid-cols-3 gap-4 md:gap-6 lg:gap-8">
        {/* Agent Panel */}
        <div
          className={`lg:col-span-1 ${
            !isDesktop && mobileTab === "interaction" ? "hidden" : "block"
          }`}
        >
          {agentPanelSlot}
        </div>

        {/* Transcript Panel */}
        <div
          className={`lg:col-span-2 ${
            !isDesktop && mobileTab === "persona" ? "hidden" : "block"
          }`}
        >
          {transcriptPanelSlot}
        </div>
      </div>
    </div>
  );
}

src/components/layouts/CardLayout/AgentPanel.tsx:

import { PhoneOff, Clock } from "lucide-react";
import { Button } from "@/components/ui/button";
import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
import { ReactNode } from "react";
import { VoiceControlType } from "@/types";

// NOTE(review): this duplicates the AgentInfo interface exported from
// "@/types" — consider importing that type instead so the two cannot drift.
interface AgentInfo {
  department: string;
  specialization: string;
  languages: string;
}

/** Props for AgentPanel. */
interface AgentPanelProps {
  agentName: string;
  agentAvatarSrc: string;
  agentAvatarFallback: string;
  agentStatusText: string; // e.g., "Available", "Busy", "Connecting", "Ended"
  agentInfo: AgentInfo;
  isCallActive: boolean;
  callDuration: number; // Elapsed call time in whole seconds
  isMuted: boolean;
  voiceControlType: VoiceControlType;
  formatDuration: (seconds: number) => string;
  endCall: () => void;
  restartCall: () => void; // Added restart handler
  subActionSlot: ReactNode; // Rendered in the footer (e.g. mode toggle)
  mainActionSlot: ReactNode; // Rendered in the body (e.g. PTT / mute button)
}

/**
 * Left-hand card of the layout: agent identity and avatar, live status with
 * call timer, agent details, voice-control-mode summary, and the main/sub
 * call-control slots supplied by the parent. Stateless — all data and
 * handlers arrive via props.
 */
export function AgentPanel({
  agentName,
  agentAvatarSrc,
  agentAvatarFallback,
  agentStatusText,
  agentInfo,
  isCallActive,
  callDuration,
  isMuted,
  voiceControlType,
  formatDuration,
  endCall,
  restartCall, // Use restart handler
  subActionSlot,
  mainActionSlot,
}: AgentPanelProps) {
  // Determine status color based on text (case-insensitive match on the label)
  const getStatusColor = (status: string) => {
    switch (status.toLowerCase()) {
      case "available":
      case "connected":
        return "bg-green-500";
      case "busy":
      case "reconnecting":
        return "bg-morning-gold";
      case "connecting":
        return "bg-horizon-blue animate-pulse";
      case "ended":
      case "idle":
      default:
        return "bg-earth-brown/50";
    }
  };

  return (
    // Use Warm Parchment, more prominent Earth Brown, larger border radius
    <div className="bg-warm-parchment rounded-lg shadow-md overflow-hidden border border-earth-brown/20 flex flex-col h-[500px] lg:h-[600px] transition-smooth">
      {/* Header with agent info */}
      <div className="p-5 border-b border-earth-brown/10">
        <div className="flex items-center">
          <Avatar className="h-16 w-16 mr-4 border-2 border-horizon-blue/30">
            <AvatarImage src={agentAvatarSrc} alt={agentName} />
            <AvatarFallback className="bg-gradient-to-br from-horizon-blue to-muted-sage text-white text-xl">
              {agentAvatarFallback}
            </AvatarFallback>
          </Avatar>
          <div className="flex-1">
            <h1 className="font-serif text-xl font-medium text-deep-valley">
              {agentName}
            </h1>
            <div className="flex items-center text-earth-brown text-sm mt-1">
              <span
                className={`inline-block h-2.5 w-2.5 rounded-full ${getStatusColor(
                  agentStatusText
                )} mr-2 transition-colors duration-300`}
              ></span>
              <span className="capitalize">{agentStatusText}</span>
              {isCallActive && (
                <>
                  <span className="mx-2">•</span>
                  <Clock size={14} className="mr-1" />
                  <span>{formatDuration(callDuration)}</span>
                </>
              )}
            </div>
          </div>
          {/* Call control buttons: End while active, Restart once ended */}
          <div className="text-center ml-4">
            {isCallActive ? (
              <Button
                variant="destructive" // Use destructive variant for ending
                size="sm"
                onClick={endCall}
                className="bg-dawn-coral hover:bg-dawn-coral/80 text-deep-valley rounded-md px-3 py-1.5 transition-smooth"
                title="End Call"
              >
                <PhoneOff size={16} />
                {/* <span className="ml-1 hidden sm:inline">End</span> */}
              </Button>
            ) : (
              <Button
                variant="secondary" // Use secondary (Horizon Blue) for restarting
                size="sm"
                onClick={restartCall}
                className="rounded-md px-3 py-1.5 transition-smooth"
                title="Restart Call"
              >
                <PhoneOff size={16} /> {/* TODO: Use Phone icon */}
                {/* <span className="ml-1 hidden sm:inline">Restart</span> */}
              </Button>
            )}
          </div>
        </div>
      </div>

      {/* Agent status and information */}
      <div className="p-5 flex-grow overflow-y-auto">
        <div className="mb-6">
          <h2 className="text-deep-valley font-serif font-medium mb-2 text-base">
            Agent Information
          </h2>
          {/* Use lighter parchment for inner cards, Earth Brown text */}
          <div className="bg-light-parchment rounded-md p-3 space-y-2 border border-earth-brown/10">
            <div className="flex justify-between">
              <span className="text-earth-brown text-sm">Department:</span>
              <span className="text-deep-valley text-sm font-medium">
                {agentInfo.department}
              </span>
            </div>
            <div className="flex justify-between">
              <span className="text-earth-brown text-sm">Specialization:</span>
              <span className="text-deep-valley text-sm font-medium">
                {agentInfo.specialization}
              </span>
            </div>
            <div className="flex justify-between">
              <span className="text-earth-brown text-sm">Languages:</span>
              <span className="text-deep-valley text-sm font-medium">
                {agentInfo.languages}
              </span>
            </div>
          </div>
        </div>

        <div className="mb-6">
          <h2 className="text-deep-valley font-serif font-medium mb-2 text-base">
            Voice Control Mode
          </h2>
          <div className="bg-light-parchment rounded-md p-3 space-y-3 border border-earth-brown/10">
            <div className="flex items-center justify-between">
              <span className="text-earth-brown text-sm">Mode</span>
              <span className="text-deep-valley text-sm font-medium capitalize">
                {voiceControlType.replace("-", " ")}
              </span>
            </div>
            {/* Optional: Add status indicators using Muted Sage */}
            {/* <div className="flex items-center text-muted-sage text-xs">
              <Info size={12} className="mr-1"/>
              <span>{voiceControlType === 'push-to-talk' ? 'Hold button to speak' : 'Mic is live'}</span>
            </div> */}
          </div>
        </div>

        {/* Main action slot (PTT/Mute button) */}
        {mainActionSlot}
      </div>

      {/* Footer with sub-action (Mode Toggle) and live mic indicator */}
      <div className="p-4 border-t border-earth-brown/10 bg-light-parchment/50">
        {isCallActive && (
          <div className="flex justify-between items-center">
            <div className="flex items-center">
              <div
                className={`h-3 w-3 rounded-full ${
                  isMuted ? "bg-dawn-coral" : "bg-green-500" // Use green for active mic
                } mr-2 transition-colors duration-300`}
              ></div>
              <span className="text-sm text-earth-brown">
                {isMuted ? "Mic Muted" : "Mic Active"}
              </span>
            </div>
            {subActionSlot}
          </div>
        )}
         {!isCallActive && (
             <div className="text-center text-sm text-earth-brown">Call Ended</div>
         )}
      </div>
    </div>
  );
}

src/components/layouts/CardLayout/CallAction.tsx:

import { Button } from "@/components/ui/button";
import { VoiceControlType } from "@/types";
import { Mic, MicOff, PhoneCall } from "lucide-react"; // Added PhoneCall

/** Props for CallAction. */
interface CallActionProps {
  voiceControlType: VoiceControlType;
  isMuted: boolean;
  isPushing: boolean;
  onPushStart: () => void;
  onPushEnd: () => void;
  onToggleMute: () => void;
  isCallActive: boolean;
  onRestartCall: () => void; // Added restart handler type
}

/**
 * Primary call-control button. Renders one of three states:
 * a restart button when the call has ended, a hold-to-speak button in
 * push-to-talk mode, or a mute toggle in streaming-audio mode.
 * All behavior is delegated to the handler props.
 */
export function CallAction(props: CallActionProps) {
  const {
    voiceControlType,
    isMuted,
    isPushing,
    onPushStart,
    onPushEnd,
    onToggleMute,
    isCallActive,
    onRestartCall, // Added restart handler
  } = props;

  // Ended state: offer to restart the call.
  if (!isCallActive) {
    return (
      <div className="text-center mt-4">
        <p className="text-earth-brown mb-3 text-sm">Call has ended</p>
        <Button
          variant="secondary" // Use Horizon Blue
          className="w-full py-3 text-base rounded-md transition-smooth"
          onClick={onRestartCall}
        >
          <PhoneCall size={18} className="mr-2" />
          Restart Call
        </Button>
      </div>
    );
  }

  // Push-to-talk: mic is live only while the button is held down.
  if (voiceControlType === "push-to-talk") {
    const pttClasses = isPushing
      ? "bg-gradient-to-r from-dawn-coral to-morning-gold text-deep-valley shadow-lg scale-105" // Active PTT gradient
      : "bg-earth-brown hover:bg-earth-brown/90 text-warm-parchment border border-earth-brown/50"; // Grounded button
    return (
      <div className="text-center mt-4">
        <p className="text-earth-brown mb-3 text-sm">Push and hold to speak</p>
        <Button
          className={`w-full py-4 text-base rounded-md transition-smooth ${pttClasses}`}
          onMouseDown={onPushStart}
          onMouseUp={onPushEnd}
          onMouseLeave={onPushEnd} // Stop if mouse leaves while pressed
          onTouchStart={onPushStart} // Add touch events for mobile
          onTouchEnd={onPushEnd}
        >
          <Mic size={18} className="mr-2" />
          {isPushing ? "Speaking..." : "Push to Talk"}
        </Button>
      </div>
    );
  }

  // Streaming-audio mode: mic stays open; the button toggles mute.
  const MicIcon = isMuted ? MicOff : Mic;
  const streamClasses = isMuted
    ? "border-earth-brown/50 text-earth-brown hover:bg-earth-brown/10" // Muted outline
    : "bg-horizon-blue hover:bg-horizon-blue/90 text-secondary-foreground"; // Active blue
  return (
    <div className="text-center mt-4">
      <p className="text-earth-brown mb-3 text-sm">Click to toggle microphone</p>
      <Button
        variant={isMuted ? "outline" : "secondary"} // Outline when muted, Blue when active
        className={`w-full py-4 text-base rounded-md transition-smooth ${streamClasses}`}
        onClick={onToggleMute}
      >
        <MicIcon size={18} className="mr-2" />
        {isMuted ? "Unmute Microphone" : "Mute Microphone"}
      </Button>
    </div>
  );
}

src/components/layouts/CardLayout/TranscriptPanel.tsx:

import { Clock } from "lucide-react";
import { ScrollArea } from "@/components/ui/scroll-area";
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { MutableRefObject } from "react";
import { AddressForm } from "./AddressForm";
import { FileAttachment } from "./FileAttachment";
import { TrackingInfo } from "./TrackingInfo";
import { TranscriptMessage } from "@/types"; // Import the main type

/** Props for TranscriptPanel. */
interface TranscriptPanelProps {
  isCallActive: boolean;
  callDuration: number; // Elapsed call time in whole seconds
  formatDuration: (seconds: number) => string;
  messages: TranscriptMessage[]; // Use the imported type
  activeForm: string | number | null; // Message id whose inline form is open; allow string IDs
  hoveredMessage: string | number | null; // Allow string IDs
  setHoveredMessage: (id: string | number | null) => void;
  setActiveForm: (id: string | number | null) => void;
  scrollAreaRef: MutableRefObject<HTMLDivElement | null>; // Parent scrolls this to the bottom on updates
}

/**
 * Scrollable conversation transcript with optional per-message attachments
 * (tracking info, file cards, and an inline address form). Hover state and
 * the currently open form are controlled by the parent via props; the form
 * submit/cancel handlers here are placeholders that only close the form.
 */
export const TranscriptPanel = ({
  isCallActive,
  callDuration,
  formatDuration,
  messages,
  activeForm,
  hoveredMessage,
  setHoveredMessage,
  setActiveForm,
  scrollAreaRef,
}: TranscriptPanelProps) => {
  const handleFormSubmit = () => {
    console.log("Address form submitted (placeholder)"); // Placeholder action
    setActiveForm(null);
  };

  const handleFormCancel = () => {
    console.log("Address form cancelled"); // Placeholder action
    setActiveForm(null);
  };

  return (
    // Use Warm Parchment, Earth Brown border, larger radius
    <div
      className="bg-warm-parchment rounded-lg shadow-md overflow-hidden border border-earth-brown/20 flex flex-col h-[500px] lg:h-[600px] transition-smooth"
      style={{ flex: 2 }} // Keep flex for grid layout compatibility if needed elsewhere
    >
      {/* Header */}
      <div className="border-b border-earth-brown/10 p-4 flex items-center justify-between bg-light-parchment/50">
        <h2 className="text-deep-valley font-serif text-lg font-medium">
          Conversation Transcript
        </h2>
        {isCallActive && (
          <div className="flex items-center text-earth-brown text-sm">
            <Clock size={14} className="mr-1" />
            <span>{formatDuration(callDuration)}</span>
          </div>
        )}
        {!isCallActive && messages.length > 0 && (
           <div className="text-sm text-earth-brown">Call Ended</div>
        )}
      </div>

      {/* Transcript Area */}
      <ScrollArea
        className="flex-grow p-4 md:p-6 h-[calc(100%-64px)]" // Adjusted padding
        ref={scrollAreaRef}
      >
        <div className="space-y-5 pb-4 max-w-full"> {/* Removed max-w-3xl */}
          {messages.map((message) => (
            <div
              key={message.id}
              className="group" // Add group for potential hover effects on children
              onMouseEnter={() => setHoveredMessage(message.id)}
              onMouseLeave={() => setHoveredMessage(null)}
            >
              <div
                className={cn(
                  "transition-colors duration-200 py-2 px-3 rounded-md", // Use md radius
                  // Subtle hover background based on speaker
                  hoveredMessage === message.id
                    ? message.speaker === "You"
                      ? "bg-horizon-blue/5"
                      : "bg-dawn-coral/5"
                    : "bg-transparent"
                )}
              >
                <div className="flex items-baseline justify-between mb-1">
                  <span
                    className={`font-medium text-sm ${ // Use sm size
                      message.speaker === "You"
                        ? "text-horizon-blue" // User speaker color
                        : "text-dawn-coral" // Agent speaker color
                    }`}
                  >
                    {message.speaker}
                  </span>
                  <span className="text-xs text-earth-brown/60"> {/* Darker meta text */}
                    {message.time}
                  </span>
                </div>
                {/* Use standard body text size and color */}
                <p className="text-deep-valley/95 leading-relaxed text-base">
                  {message.text}
                </p>
              </div>

              {/* Render attachments — dispatch on the attachment's type tag */}
              {message.attachment && (
                <div className="ml-4 mt-2 max-w-md overflow-hidden pl-3"> {/* Indent attachments */}
                  {message.attachment.type === "tracking" && (
                    <TrackingInfo data={message.attachment.data} />
                  )}
                  {message.attachment.type === "file" && (
                    <FileAttachment file={message.attachment} />
                  )}
                  {message.attachment.type === "form" &&
                    message.attachment.formType === "address" &&
                    (activeForm === message.id ? (
                      <AddressForm
                        onSubmit={handleFormSubmit}
                        onCancel={handleFormCancel}
                      />
                    ) : (
                      <Button
                        variant="secondary" // Use Horizon Blue
                        size="sm"
                        className="mt-2 rounded-md transition-smooth"
                        onClick={() => setActiveForm(message.id)}
                      >
                        Fill Address Form
                      </Button>
                    ))}
                </div>
              )}
            </div>
          ))}
           {messages.length === 0 && (
                <div className="text-center text-earth-brown/80 pt-10">
                    Transcript will appear here...
                </div>
           )}
        </div>
      </ScrollArea>
    </div>
  );
};

src/components/layouts/CardLayout/AddressForm.tsx:

import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Check } from "lucide-react";

// Address form component props
interface AddressFormProps {
  onSubmit: () => void; // Called on Submit; note: receives no form data (see component note)
  onCancel: () => void; // Called on Cancel; parent closes the form
  // Add props for default values if needed later
}

/**
 * Inline address-entry form shown inside the transcript.
 *
 * NOTE(review): the inputs are uncontrolled and their values are never read —
 * `onSubmit` receives no data. Wire up state (or a form library) before the
 * entered address can actually be used.
 */
export const AddressForm = ({ onSubmit, onCancel }: AddressFormProps) => {
  return (
    // Use light parchment, earth brown border/text, md radius
    <div className="bg-light-parchment rounded-md p-3 md:p-4 border border-earth-brown/20 mt-2 overflow-hidden transition-smooth">
      <h3 className="text-deep-valley font-serif font-medium mb-2 md:mb-3 text-base">
        Update Delivery Address
      </h3>
      <div className="space-y-2 md:space-y-3">
        <div>
          <label className="text-sm text-earth-brown mb-1 block">
            Street Address
          </label>
          {/* Use earth brown border, warm parchment bg */}
          <Input
            placeholder="123 Pioneer Trail"
            className="border-earth-brown/40 bg-warm-parchment focus:border-horizon-blue focus:ring-horizon-blue"
          />
        </div>
        <div className="grid grid-cols-1 sm:grid-cols-2 gap-2 md:gap-3">
          <div>
            <label className="text-sm text-earth-brown mb-1 block">City</label>
            <Input
              placeholder="Settlement City"
              className="border-earth-brown/40 bg-warm-parchment focus:border-horizon-blue focus:ring-horizon-blue"
            />
          </div>
          <div>
            <label className="text-sm text-earth-brown mb-1 block">
              State/Province
            </label>
            <Input
              placeholder="Territory"
              className="border-earth-brown/40 bg-warm-parchment focus:border-horizon-blue focus:ring-horizon-blue"
            />
          </div>
        </div>
        <div className="grid grid-cols-1 sm:grid-cols-2 gap-2 md:gap-3">
          <div>
            <label className="text-sm text-earth-brown mb-1 block">
              Postal Code
            </label>
            <Input
              placeholder="12345"
              className="border-earth-brown/40 bg-warm-parchment focus:border-horizon-blue focus:ring-horizon-blue"
            />
          </div>
          <div>
            <label className="text-sm text-earth-brown mb-1 block">
              Country
            </label>
            <Input
              placeholder="Homestead Nation"
              className="border-earth-brown/40 bg-warm-parchment focus:border-horizon-blue focus:ring-horizon-blue"
            />
          </div>
        </div>
        <div className="flex justify-end space-x-2 mt-3">
          {/* Secondary button: Earth Brown outline */}
          <Button
            variant="outline"
            size="sm"
            onClick={onCancel}
            className="border-earth-brown/50 text-earth-brown hover:bg-earth-brown/10 rounded-md transition-smooth"
          >
            Cancel
          </Button>
          {/* Primary button: Horizon Blue */}
          <Button
            variant="secondary" // Use Horizon Blue
            size="sm"
            className="rounded-md transition-smooth"
            onClick={onSubmit}
          >
            <Check size={16} className="mr-1" />
            Submit
          </Button>
        </div>
      </div>
    </div>
  );
};

src/components/layouts/CardLayout/FileAttachment.tsx:

import { Button } from "@/components/ui/button";
import { FileText, Download } from "lucide-react";
import { AttachmentFile } from "@/types";

// Props for the file attachment card rendered inside a transcript message.
interface FileAttachmentProps {
  file: AttachmentFile; // File metadata: name, size, fileType
}

/**
 * Compact card for a downloadable file attachment: a muted-sage icon tile,
 * the (truncated) file name with its size, and a download icon button.
 */
export function FileAttachment({ file }: FileAttachmentProps) {
  const { name, size } = file;

  return (
    // Light parchment card with earth-brown accents; border darkens on hover.
    <div className="bg-light-parchment rounded-md p-3 border border-earth-brown/20 mt-2 flex items-center justify-between transition-smooth hover:border-earth-brown/40">
      <div className="flex items-center overflow-hidden mr-2">
        {/* File-type icon tile */}
        <div className="h-10 w-10 bg-muted-sage/10 rounded-md flex items-center justify-center text-muted-sage mr-3 flex-shrink-0">
          <FileText size={20} />
        </div>
        <div className="overflow-hidden">
          {/* title attribute exposes the full name when the text truncates */}
          <p className="text-deep-valley font-medium text-sm truncate" title={name}>
            {name}
          </p>
          <p className="text-earth-brown text-xs">{size}</p>
        </div>
      </div>
      {/* Horizon-blue download action */}
      <Button
        variant="ghost"
        size="icon"
        className="h-8 w-8 p-0 text-horizon-blue hover:bg-horizon-blue/10 rounded-full flex-shrink-0"
        title={`Download ${name}`}
      >
        <Download size={16} />
      </Button>
    </div>
  );
}

src/components/layouts/CardLayout/TrackingInfo.tsx:

import { Maximize2 } from "lucide-react";
import { Button } from "@/components/ui/button";
import { TrackingData } from "@/types";

// Props for the shipment tracking card.
interface TrackingInfoProps {
  data: TrackingData; // Tracking number, carrier, and delivery estimate
}

/**
 * Shipment tracking summary card: a header row with a maximize/details
 * button, followed by three label/value rows. The tracking number is set
 * in a monospace face; the other values use the standard body face.
 */
export function TrackingInfo({ data }: TrackingInfoProps) {
  const { trackingNumber, carrier, estimatedDelivery } = data;

  // Row definitions; the tracking number gets a mono/medium value style.
  const rows = [
    {
      label: "Tracking No:",
      value: trackingNumber,
      valueClass: "text-deep-valley font-medium font-mono",
    },
    { label: "Carrier:", value: carrier, valueClass: "text-deep-valley" },
    {
      label: "Est. Delivery:",
      value: estimatedDelivery,
      valueClass: "text-deep-valley",
    },
  ];

  return (
    // Light parchment card with horizon-blue accents; border darkens on hover.
    <div className="bg-light-parchment rounded-md p-3 border border-horizon-blue/20 mt-2 overflow-x-auto transition-smooth hover:border-horizon-blue/40">
      <div className="flex items-center justify-between mb-2">
        <h3 className="text-deep-valley font-serif font-medium text-base">Tracking Information</h3>
        {/* Expand-to-details action */}
        <Button
          variant="ghost"
          size="icon"
          className="h-8 w-8 p-0 text-horizon-blue hover:bg-horizon-blue/10 rounded-full"
          title="View Details"
        >
          <Maximize2 size={14} />
        </Button>
      </div>
      <div className="space-y-1 text-sm">
        {rows.map(({ label, value, valueClass }) => (
          <div key={label} className="flex justify-between flex-wrap gap-x-4">
            <span className="text-earth-brown">{label}</span>
            <span className={valueClass}>{value}</span>
          </div>
        ))}
      </div>
    </div>
  );
}

src/components/layouts/CardLayout/VoiceControlToggle.tsx:

import { Button } from "@/components/ui/button";
import { VoiceControlType } from "@/types";
import { Radio, Mic } from "lucide-react";

/**
 * Small ghost button that flips between push-to-talk and live (streaming)
 * voice modes. The icon and label always advertise the mode the user would
 * switch INTO, not the current one.
 */
export function VoiceControlToggle({
  voiceControlType,
  onToggleMode,
}: {
  voiceControlType: VoiceControlType;
  onToggleMode: () => void;
}) {
  // In PTT mode we offer "Live Voice" (Mic); in live mode we offer PTT (Radio).
  const inPushToTalkMode = voiceControlType === "push-to-talk";
  const TargetIcon = inPushToTalkMode ? Mic : Radio;
  const label = inPushToTalkMode
    ? "Switch to Live Voice"
    : "Switch to Push-to-Talk";

  return (
    <Button
      variant="ghost"
      size="sm"
      onClick={onToggleMode}
      // Earth-brown base, horizon-blue accent on hover.
      className="text-earth-brown hover:text-horizon-blue hover:bg-horizon-blue/10 rounded-md transition-smooth px-2 py-1"
      title={label}
    >
      <TargetIcon size={16} className="mr-1.5" />
      <span className="text-xs">{label}</span>
    </Button>
  );
}
  • PhoneLayout:

src/components/layouts/PhoneLayout/PhoneLayout.tsx:

import { useState, useEffect, useRef, useCallback } from "react";
import { PhoneLayoutPresenter } from "./PhoneLayoutPresenter";
import { PhoneHeader } from "./PhoneHeader";
import { PhoneMainContent } from "./PhoneMainContent";
import { PhoneTranscript } from "./PhoneTranscript";
import { PhoneControls } from "./PhoneControls";
import { PhoneCallAction } from "./PhoneCallAction";
import { PhoneControlToggle } from "./PhoneControlToggle";
import { PhoneTranscriptToggle } from "./PhoneTranscriptToggle";
import { PhoneCallEnded } from "./PhoneCallEnded";
import {
  VoiceControlType,
  SimpleTranscriptMessage,
  VoiceDispatcher,
  VoiceEventEmitter,
} from "@/types";
import { useToast } from "@/hooks/use-toast";

// Connection lifecycle states reported by the voice backend.
type InteractionStatus =
  | "idle"
  | "connecting"
  | "connected"
  | "reconnecting"
  | "ended";

// Props for PhoneLayout (the smart container for the phone-call UI).
interface PhoneLayoutProps {
  avatarSrc: string;
  avatarFallback: string;
  name: string;
  initialCurrentMessageText: string; // Initial message display
  initialTranscriptMessages: SimpleTranscriptMessage[]; // Seed transcript
  initialVoiceControlType?: VoiceControlType;
  backgroundUrl?: string; // Optional background image URL
  dispatcher: VoiceDispatcher; // Required: UI -> backend commands
  eventEmitter: VoiceEventEmitter; // Required: backend -> UI events
}

/**
 * Smart container for the phone-call layout. Owns all call state,
 * subscribes to backend voice events via `eventEmitter`, and forwards
 * user actions (mute, push-to-talk, mode switch, hang up) to `dispatcher`.
 * Rendering is delegated to PhoneLayoutPresenter and its slot components.
 */
export function PhoneLayout({
  avatarSrc,
  avatarFallback,
  name,
  initialCurrentMessageText,
  initialTranscriptMessages,
  initialVoiceControlType = "streaming-audio",
  backgroundUrl,
  dispatcher,
  eventEmitter,
}: PhoneLayoutProps) {
  const [voiceControlType, setVoiceControlType] = useState<VoiceControlType>(
    initialVoiceControlType
  );
  // "Effectively muted" = the mic is not sending audio, regardless of why
  // (explicit mute in streaming mode, or button not held in push-to-talk).
  const [isMicEffectivelyMuted, setIsMicEffectivelyMuted] = useState(
    initialVoiceControlType === "push-to-talk"
  );
  const [isPushing, setIsPushing] = useState(false);
  const [callDuration, setCallDuration] = useState(0);
  const [interactionStatus, setInteractionStatus] =
    useState<InteractionStatus>("idle");
  const [isTranscriptOpen, setIsTranscriptOpen] = useState(false);
  const [currentMessageText, setCurrentMessageText] = useState(
    initialCurrentMessageText
  );
  const [transcriptMessages, setTranscriptMessages] = useState<
    SimpleTranscriptMessage[]
  >(initialTranscriptMessages);
  const scrollAreaRef = useRef<HTMLDivElement>(null);
  const [isAgentSpeaking, setIsAgentSpeaking] = useState(false);
  // Mirror of isAgentSpeaking readable inside event handlers. Using a ref
  // keeps the subscription effect below free of this dependency, so the
  // listeners are not torn down and re-attached every time the agent
  // starts/stops speaking (the previous version did exactly that, which
  // could drop events emitted during the detach/re-attach gap).
  const isAgentSpeakingRef = useRef(false);
  const [speakingIntensity, setSpeakingIntensity] = useState(0); // 0..1 for visualization
  const { toast } = useToast();

  // --- Subscribe to backend events (attach once per emitter) ---
  useEffect(() => {
    const handleStatusUpdate = (status: InteractionStatus) => {
      console.log("PhoneLayout: Interaction status update:", status);
      setInteractionStatus(status);
      if (status === "connected") {
        setCallDuration(0); // Reset duration on (re)connect
      }
    };

    const handleTranscriptUpdate = (message: SimpleTranscriptMessage) => {
      console.log("PhoneLayout: Transcript update:", message);
      const newMessage = {
        ...message,
        // ?? rather than ||: an id of 0 is a valid key and must be kept.
        id: message.id ?? `msg-${Date.now()}-${Math.random()}`,
      };
      setTranscriptMessages((prev) => [...prev, newMessage]);
      // Mirror the other party's latest utterance in the main message area.
      if (message.speaker !== "You") {
        setCurrentMessageText(message.text);
      }
    };

    const handleAgentSpeaking = (speaking: boolean) => {
      isAgentSpeakingRef.current = speaking;
      setIsAgentSpeaking(speaking);
      // Randomized intensity (0.5..1.0) drives the visualizer rings.
      setSpeakingIntensity(speaking ? Math.random() * 0.5 + 0.5 : 0);
    };

    const handleStreamingText = (textChunk: string) => {
      // Read the ref (not state) so this closure never goes stale and the
      // effect does not need isAgentSpeaking in its dependency list.
      if (isAgentSpeakingRef.current) {
        // Basic append/replace: continue a trailing "..." or start fresh.
        setCurrentMessageText((prev) =>
          prev.endsWith("...") ? prev.slice(0, -3) + textChunk : textChunk
        );
      }
    };

    const handleError = (errorMessage: string) => {
      console.error("PhoneLayout: Received error:", errorMessage);
      toast({
        variant: "destructive",
        title: "Interaction Error",
        description: errorMessage,
      });
    };

    const handleVoiceControlChange = (type: VoiceControlType) => {
      console.log("PhoneLayout: Voice control changed externally to:", type);
      setVoiceControlType(type);
      setIsMicEffectivelyMuted(type === "push-to-talk");
      setIsPushing(false);
    };

    // Subscribe
    eventEmitter.on("interactionStatus", handleStatusUpdate);
    eventEmitter.on("transcriptUpdate", handleTranscriptUpdate);
    eventEmitter.on("agentSpeaking", handleAgentSpeaking);
    eventEmitter.on("streamingText", handleStreamingText);
    eventEmitter.on("error", handleError);
    eventEmitter.on("voiceControlChange", handleVoiceControlChange);
    // Note: streamingAudioB64 is handled by BrowserAudioRecorder internally for playback

    // Cleanup
    return () => {
      eventEmitter.off("interactionStatus", handleStatusUpdate);
      eventEmitter.off("transcriptUpdate", handleTranscriptUpdate);
      eventEmitter.off("agentSpeaking", handleAgentSpeaking);
      eventEmitter.off("streamingText", handleStreamingText);
      eventEmitter.off("error", handleError);
      eventEmitter.off("voiceControlChange", handleVoiceControlChange);
    };
  }, [eventEmitter, toast]);

  // Tick the call-duration counter once per second while connected.
  useEffect(() => {
    if (interactionStatus === "connected") {
      const interval = setInterval(() => {
        setCallDuration((prev) => prev + 1);
      }, 1000);
      return () => clearInterval(interval);
    }
  }, [interactionStatus]);

  // Keep the transcript scrolled to the newest message while it is open.
  // The short delay lets the drawer's open animation lay out first.
  useEffect(() => {
    if (isTranscriptOpen && scrollAreaRef.current) {
      setTimeout(() => {
        if (scrollAreaRef.current) {
          scrollAreaRef.current.scrollTop = scrollAreaRef.current.scrollHeight;
        }
      }, 100);
    }
  }, [isTranscriptOpen, transcriptMessages]);

  // Format elapsed seconds as zero-padded "MM:SS".
  const formatDuration = (seconds: number) => {
    const mins = Math.floor(seconds / 60);
    const secs = seconds % 60;
    return `${mins.toString().padStart(2, "0")}:${secs
      .toString()
      .padStart(2, "0")}`;
  };

  // Streaming-mode mute toggle; no-op in push-to-talk mode.
  const toggleMute = useCallback(() => {
    if (voiceControlType === "streaming-audio") {
      const currentlyMuted = isMicEffectivelyMuted;
      if (currentlyMuted) {
        dispatcher.startAudioInput();
      } else {
        dispatcher.stopAudioInput();
      }
      setIsMicEffectivelyMuted(!currentlyMuted);
    }
  }, [voiceControlType, dispatcher, isMicEffectivelyMuted]);

  // Push-to-talk press: open the mic while the button is held.
  const handlePushStart = useCallback(() => {
    if (voiceControlType === "push-to-talk" && !isPushing) {
      setIsPushing(true);
      setIsMicEffectivelyMuted(false);
      dispatcher.startAudioInput();
    }
  }, [voiceControlType, dispatcher, isPushing]);

  // Push-to-talk release: close the mic again.
  const handlePushEnd = useCallback(() => {
    if (voiceControlType === "push-to-talk" && isPushing) {
      setIsPushing(false);
      setIsMicEffectivelyMuted(true);
      dispatcher.stopAudioInput();
    }
  }, [voiceControlType, dispatcher, isPushing]);

  // Request a mode switch; local state updates arrive via voiceControlChange.
  const toggleVoiceControlMode = useCallback(() => {
    const newMode =
      voiceControlType === "push-to-talk" ? "streaming-audio" : "push-to-talk";
    dispatcher.setVoiceControlType(newMode);
  }, [voiceControlType, dispatcher]);

  const toggleTranscript = useCallback(() => {
    setIsTranscriptOpen((prev) => !prev);
  }, []);

  // Hang up; status transition arrives via the interactionStatus event.
  const endCall = useCallback(() => {
    dispatcher.endInteraction();
  }, [dispatcher]);

  // Demo-only local restart: resets UI state without touching the backend.
  const restartCall = useCallback(() => {
    console.log("Restarting call (demo)...");
    setTranscriptMessages(initialTranscriptMessages);
    setCurrentMessageText(initialCurrentMessageText);
    setInteractionStatus("connecting");
    setTimeout(() => {
      setInteractionStatus("connected");
      setCallDuration(0);
      setIsMicEffectivelyMuted(voiceControlType === "push-to-talk");
      setIsPushing(false);
      setIsTranscriptOpen(false);
    }, 1000);
  }, [initialTranscriptMessages, initialCurrentMessageText, voiceControlType]);

  const isCallActive =
    interactionStatus === "connected" || interactionStatus === "reconnecting";

  return (
    <PhoneLayoutPresenter
      isCallActive={isCallActive}
      isTranscriptOpen={isTranscriptOpen}
      backgroundUrl={backgroundUrl}
      headerSlot={
        isCallActive ? (
          <PhoneHeader
            avatarSrc={avatarSrc}
            avatarFallback={avatarFallback}
            name={name}
            callDurationFormatted={formatDuration(callDuration)}
            isSpeaking={isAgentSpeaking}
            isMuted={isMicEffectivelyMuted}
            onEndCall={endCall}
          />
        ) : null
      }
      mainContentSlot={
        isCallActive ? (
          <PhoneMainContent
            avatarSrc={avatarSrc}
            avatarFallback={avatarFallback}
            name={name}
            isSpeaking={isAgentSpeaking}
            speakingIntensity={speakingIntensity}
            currentMessageText={currentMessageText}
          />
        ) : (
          <PhoneCallEnded
            callDurationFormatted={formatDuration(callDuration)}
            onRestartCall={restartCall}
          />
        )
      }
      transcriptSlot={
        <PhoneTranscript
          messages={transcriptMessages}
          scrollAreaRef={scrollAreaRef}
          onClose={toggleTranscript}
        />
      }
      controlsSlot={
        isCallActive ? (
          <PhoneControls
            transcriptToggleSlot={
              <PhoneTranscriptToggle onToggleTranscript={toggleTranscript} />
            }
            callActionSlot={
              <PhoneCallAction
                isPushToTalk={voiceControlType === "push-to-talk"}
                isPushing={isPushing}
                isMuted={isMicEffectivelyMuted}
                onPushStart={handlePushStart}
                onPushEnd={handlePushEnd}
                onToggleMute={toggleMute}
              />
            }
            modeToggleSlot={
              <PhoneControlToggle
                isPushToTalk={voiceControlType === "push-to-talk"}
                onToggleMode={toggleVoiceControlMode}
              />
            }
          />
        ) : null
      }
    />
  );
}

src/components/layouts/PhoneLayout/PhoneLayoutPresenter.tsx:

import { ReactNode } from "react";
import { AnimatePresence, motion } from "framer-motion";
import { cn } from "@/lib/utils";

// Slots and flags consumed by the presentational phone frame.
interface PhoneLayoutPresenterProps {
  isCallActive: boolean;
  isTranscriptOpen: boolean;
  headerSlot: ReactNode;
  mainContentSlot: ReactNode;
  transcriptSlot: ReactNode;
  controlsSlot: ReactNode;
  backgroundUrl?: string; // Optional background image URL
}

/**
 * Pure layout shell for the phone UI: renders the rounded frame, the
 * gradient-or-image backdrop, and the four content slots. All state lives
 * in the parent container; this component only arranges what it is given.
 */
export function PhoneLayoutPresenter({
  isCallActive,
  isTranscriptOpen,
  headerSlot,
  mainContentSlot,
  transcriptSlot,
  controlsSlot,
  backgroundUrl,
}: PhoneLayoutPresenterProps) {
  const hasBgImage = Boolean(backgroundUrl);

  // Outer frame: warm parchment, 9:16 aspect, large radius, soft shadow.
  const frameClass = cn(
    "w-full max-w-md aspect-[9/16] mx-auto flex flex-col rounded-3xl overflow-hidden shadow-lg bg-warm-parchment/95 backdrop-blur-md relative border border-earth-brown/10 transition-smooth",
    hasBgImage && "has-bg-image"
  );

  // Backdrop: dimmed cover image when provided, default gradient otherwise.
  const backdropClass = cn(
    "absolute inset-0 -z-10 transition-opacity duration-500",
    hasBgImage
      ? "bg-cover bg-center opacity-20"
      : "bg-gradient-to-b from-morning-gold/10 via-dawn-coral/5 to-horizon-blue/10"
  );
  const backdropStyle = hasBgImage
    ? { backgroundImage: `url(${backgroundUrl})` }
    : {};

  return (
    <div className={frameClass}>
      <div className={backdropClass} style={backdropStyle}>
        {/* Parchment wash keeps foreground text readable over a photo */}
        {hasBgImage && (
          <div className="absolute inset-0 bg-gradient-to-t from-warm-parchment/50 to-transparent"></div>
        )}
      </div>

      {/* Call header */}
      {headerSlot}

      {/* Avatar and current-message area */}
      <div className="flex-grow flex flex-col items-center justify-between p-6 relative overflow-hidden">
        {mainContentSlot}
      </div>

      {/* Transcript drawer slides up over the lower 70% of the frame */}
      <AnimatePresence>
        {isTranscriptOpen && (
          <motion.div
            initial={{ opacity: 0, y: "100%" }}
            animate={{ opacity: 1, y: 0 }}
            exit={{ opacity: 0, y: "100%" }}
            transition={{ duration: 0.3, ease: "easeInOut" }}
            className="absolute bottom-0 left-0 w-full h-[70%] bg-light-parchment/95 backdrop-blur-md z-10 rounded-t-3xl border-t border-earth-brown/20 shadow-xl"
          >
            {transcriptSlot}
          </motion.div>
        )}
      </AnimatePresence>

      {/* Controls bar exists only while the call is active */}
      {isCallActive && (
        <div className="bg-light-parchment/80 backdrop-blur-sm border-t border-earth-brown/10 p-4">
          {controlsSlot}
        </div>
      )}
    </div>
  );
}

src/components/layouts/PhoneLayout/PhoneHeader.tsx:

import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
import { Button } from "@/components/ui/button";
import { Clock, PhoneOff } from "lucide-react";

// Props for the in-call header bar.
interface PhoneHeaderProps {
  avatarSrc: string;
  avatarFallback: string;
  name: string;
  callDurationFormatted: string; // Pre-formatted "MM:SS" string from the parent
  isSpeaking: boolean; // True while the remote party is speaking
  isMuted: boolean; // True while the local mic is effectively muted
  onEndCall: () => void; // Hang-up callback
}

/**
 * Header bar shown during an active call: avatar, caller name, elapsed
 * time, a "Speaking..." indicator (suppressed while muted), and the
 * end-call button.
 */
export function PhoneHeader({
  avatarSrc,
  avatarFallback,
  name,
  callDurationFormatted,
  isSpeaking,
  isMuted,
  onEndCall,
}: PhoneHeaderProps) {
  return (
    // Use light parchment, earth brown border
    <div className="bg-light-parchment/80 backdrop-blur-sm border-b border-earth-brown/10 p-4 flex items-center justify-between">
      <div className="flex items-center">
        {/* Avatar with Horizon Blue border */}
        <Avatar className="h-12 w-12 mr-3 border-2 border-horizon-blue/40">
          <AvatarImage src={avatarSrc} alt={name} />
          {/* Gradient fallback */}
          <AvatarFallback className="bg-gradient-to-br from-horizon-blue to-muted-sage text-white">
            {avatarFallback}
          </AvatarFallback>
        </Avatar>
        <div>
          <h1 className="font-serif text-xl font-medium text-deep-valley">
            {name}
          </h1>
          <div className="flex items-center justify-start space-x-2 text-earth-brown text-sm">
            <Clock size={14} />
            <span>{callDurationFormatted}</span>
            {/* Speaking indicator using Horizon Blue; hidden while muted */}
            {isSpeaking && !isMuted && (
              <span className="text-horizon-blue text-xs font-medium ml-1 animate-pulse">
                Speaking...
              </span>
            )}
          </div>
        </div>
      </div>
      {/* End call button using Dawn Coral */}
      <Button
        variant="ghost" // Use ghost for icon-like button
        size="icon"
        className="rounded-full h-10 w-10 transition-colors duration-300 bg-dawn-coral/10 text-dawn-coral border border-dawn-coral/20 hover:bg-dawn-coral/20 hover:border-dawn-coral/40"
        onClick={onEndCall}
        title="End Call"
      >
        <PhoneOff size={18} />
      </Button>
    </div>
  );
}

src/components/layouts/PhoneLayout/PhoneMainContent.tsx:

import { Avatar, AvatarFallback, AvatarImage } from "@/components/ui/avatar";
import { motion } from "framer-motion";

// Props for the central in-call visual area.
interface PhoneMainContentProps {
  avatarSrc: string;
  avatarFallback: string;
  name: string;
  isSpeaking: boolean; // True while the remote party is speaking
  speakingIntensity: number; // 0..1; scales the visualizer rings while speaking
  currentMessageText: string; // Latest utterance shown beneath the avatar
}

/**
 * Center of the call screen: a 144px avatar surrounded by four animated
 * gradient rings whose size grows with `speakingIntensity`, an extra pulse
 * overlay while speaking, and the current message in a card that replays
 * its entrance animation whenever the text changes (via the `key` prop).
 */
export function PhoneMainContent({
  avatarSrc,
  avatarFallback,
  name,
  isSpeaking,
  speakingIntensity,
  currentMessageText,
}: PhoneMainContentProps) {
  return (
    <>
      {/* Audio Visualizer */}
      <div className="flex-grow flex items-center justify-center w-full">
        <div className="relative">
          {/* Outer rings - using theme colors; each ring i gets its own gradient */}
          {[...Array(4)].map((_, i) => (
            <motion.div
              key={i}
              className={`absolute rounded-full bg-gradient-to-r ${
                // Use theme colors with varying opacity
                i === 0
                  ? "from-dawn-coral/30 to-morning-gold/30"
                  : i === 1
                  ? "from-horizon-blue/20 to-dawn-coral/20"
                  : i === 2
                  ? "from-morning-gold/15 to-horizon-blue/15"
                  : "from-horizon-blue/10 to-morning-gold/10"
              }`}
              style={{
                left: "50%",
                top: "50%",
                transform: "translate(-50%, -50%)",
              }}
              animate={{
                // Ring diameter = 144px avatar base + per-ring offset,
                // plus up to 40px extra driven by speakingIntensity.
                width: isSpeaking
                  ? `${144 + (i + 1) * 40 + speakingIntensity * 40}px` // Base size + ring offset + intensity boost
                  : `${144 + (i + 1) * 30}px`, // Base size + ring offset
                height: isSpeaking
                  ? `${144 + (i + 1) * 40 + speakingIntensity * 40}px`
                  : `${144 + (i + 1) * 30}px`,
                opacity: isSpeaking ? 0.6 - i * 0.12 : 0.3 - i * 0.06, // Outer rings fade progressively
              }}
              transition={{
                duration: 0.4,
                ease: "circOut", // Smoother easing
              }}
            />
          ))}

          {/* Avatar - Use Warm Parchment border; sits above the rings (z-10) */}
          <Avatar className="h-36 w-36 border-4 border-warm-parchment shadow-lg relative z-10">
            <AvatarImage src={avatarSrc} alt={name} />
            {/* Gradient fallback */}
            <AvatarFallback className="bg-gradient-to-br from-horizon-blue to-muted-sage text-white text-4xl font-serif">
              {avatarFallback}
            </AvatarFallback>
          </Avatar>

          {/* Pulsing effect for speaking - subtle parchment pulse over the avatar */}
          {/* NOTE(review): "z-5" is not a default Tailwind z-index utility — confirm it is defined in the Tailwind config */}
          {isSpeaking && (
            <motion.div
              className="absolute inset-0 rounded-full bg-warm-parchment/50 z-5 pointer-events-none" // Use semi-transparent parchment
              style={{
                left: "50%",
                top: "50%",
                transform: "translate(-50%, -50%)",
                width: "144px", // Match avatar size
                height: "144px",
              }}
              animate={{
                opacity: [0, 0.4, 0], // Pulse opacity
                scale: [1, 1.15, 1], // Pulse scale
              }}
              transition={{
                duration: 1.5,
                ease: "easeInOut",
                repeat: Infinity, // Loop indefinitely
                repeatType: "loop",
              }}
            />
          )}
        </div>
      </div>

      {/* Current message - Use light parchment, earth brown border */}
      <motion.div
        key={currentMessageText} // Re-trigger entrance animation on text change
        className="bg-light-parchment/90 backdrop-blur-sm p-4 rounded-lg shadow-md border border-earth-brown/10 w-full mt-6"
        initial={{ y: 20, opacity: 0 }}
        animate={{ y: 0, opacity: 1 }}
        transition={{ duration: 0.3 }}
      >
        <p className="text-deep-valley leading-relaxed text-base text-center">
          "{currentMessageText}"
        </p>
      </motion.div>
    </>
  );
}

src/components/layouts/PhoneLayout/PhoneCallAction.tsx:

import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { Mic, MicOff } from "lucide-react";

// Props for the primary call-action control (PTT button or mute toggle).
interface PhoneCallActionProps {
  isPushToTalk: boolean;
  isPushing: boolean;
  isMuted: boolean;
  onPushStart: () => void;
  onPushEnd: () => void;
  onToggleMute: () => void;
}

/**
 * Main call control. In push-to-talk mode it renders a large hold-to-speak
 * button; in streaming mode it renders a mute/unmute toggle instead.
 */
export function PhoneCallAction({
  isPushToTalk,
  isPushing,
  isMuted,
  onPushStart,
  onPushEnd,
  onToggleMute,
}: PhoneCallActionProps) {
  if (isPushToTalk) {
    // Held: warm gradient + slight scale-up. Idle: grounded earth-brown.
    const holdButtonClass = cn(
      "h-16 px-8 rounded-full transition-all duration-300 text-lg mx-2 flex-grow max-w-[220px] shadow-md",
      isPushing
        ? "bg-gradient-to-r from-dawn-coral to-morning-gold text-deep-valley scale-105 shadow-lg"
        : "bg-earth-brown hover:bg-earth-brown/90 text-warm-parchment"
    );

    return (
      // onMouseLeave also releases, so dragging off the button stops the mic.
      <Button
        className={holdButtonClass}
        onMouseDown={onPushStart}
        onMouseUp={onPushEnd}
        onTouchStart={onPushStart}
        onTouchEnd={onPushEnd}
        onMouseLeave={onPushEnd}
      >
        {isPushing ? "Speaking..." : "Hold to Speak"}
      </Button>
    );
  }

  // Streaming mode: coral accents while muted, blue while the mic is live.
  const muteToggleClass = cn(
    "rounded-full h-16 px-6 transition-colors duration-300 text-lg mx-2 flex-grow max-w-[220px] shadow-sm",
    isMuted
      ? "bg-dawn-coral/10 text-dawn-coral border-dawn-coral/30 hover:bg-dawn-coral/20"
      : "bg-horizon-blue/10 text-horizon-blue border-horizon-blue/30 hover:bg-horizon-blue/20"
  );
  const MicIcon = isMuted ? MicOff : Mic;

  return (
    <Button variant="outline" className={muteToggleClass} onClick={onToggleMute}>
      <MicIcon size={20} className="mr-2" />
      <span>{isMuted ? "Unmute" : "Mute"}</span>
    </Button>
  );
}

src/components/layouts/PhoneLayout/PhoneControlToggle.tsx:

import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { Radio, Mic } from "lucide-react";

// Props for the voice-mode toggle button.
interface PhoneControlToggleProps {
  isPushToTalk: boolean;
  onToggleMode: () => void;
}

/**
 * Round outline button that switches between push-to-talk and live voice.
 * The icon and tooltip describe the mode the user would switch INTO.
 */
export function PhoneControlToggle({
  isPushToTalk,
  onToggleMode,
}: PhoneControlToggleProps) {
  let TargetIcon = Radio;
  let tooltip = "Switch to Push-to-Talk";
  if (isPushToTalk) {
    TargetIcon = Mic;
    tooltip = "Switch to Live Voice";
  }

  const buttonClass = cn(
    "rounded-full h-12 w-12 p-0 transition-colors duration-300",
    "bg-light-parchment/60 border border-earth-brown/20 text-earth-brown/80",
    "hover:bg-earth-brown/10 hover:border-earth-brown/40 hover:text-deep-valley",
    // Focus ring hints at the destination mode's accent color.
    isPushToTalk ? "focus:ring-morning-gold" : "focus:ring-horizon-blue"
  );

  return (
    <Button
      variant="outline"
      size="icon"
      className={buttonClass}
      onClick={onToggleMode}
      title={tooltip}
    >
      <TargetIcon size={20} />
    </Button>
  );
}

src/components/layouts/PhoneLayout/PhoneTranscriptToggle.tsx:

import { Button } from "@/components/ui/button";
import { MessageSquare } from "lucide-react";
import { cn } from "@/lib/utils";

// Props for the transcript open/close button.
interface PhoneTranscriptToggleProps {
  onToggleTranscript: () => void;
}

// Static styling: parchment base, horizon-blue accent on hover.
const TOGGLE_BUTTON_CLASS = cn(
  "rounded-full h-12 w-12 transition-colors duration-300",
  "bg-light-parchment/60 border border-earth-brown/20 text-earth-brown/80",
  "hover:bg-horizon-blue/10 hover:border-horizon-blue/30 hover:text-horizon-blue"
);

/**
 * Round icon button that shows or hides the call-transcript drawer.
 */
export function PhoneTranscriptToggle({
  onToggleTranscript,
}: PhoneTranscriptToggleProps) {
  return (
    <Button
      variant="outline"
      size="icon"
      className={TOGGLE_BUTTON_CLASS}
      onClick={onToggleTranscript}
      title="Toggle Transcript"
    >
      <MessageSquare size={20} />
    </Button>
  );
}

src/components/layouts/PhoneLayout/PhoneTranscript.tsx:

import { Button } from "@/components/ui/button";
import { ScrollArea } from "@/components/ui/scroll-area";
import { MutableRefObject } from "react";
import { SimpleTranscriptMessage } from "@/types";
import { X } from "lucide-react";

// Props for the scrollable call-transcript drawer body.
interface PhoneTranscriptProps {
  messages: SimpleTranscriptMessage[];
  scrollAreaRef: MutableRefObject<HTMLDivElement | null>; // Parent scrolls this to the bottom
  onClose: () => void;
}

/**
 * Transcript drawer body: a sticky header with a close button above a
 * scrollable list of message bubbles. User messages are blue and indented
 * from the left; the other speaker's are coral and indented from the right.
 */
export function PhoneTranscript({
  messages,
  scrollAreaRef,
  onClose,
}: PhoneTranscriptProps) {
  return (
    <>
      {/* Sticky drawer header with close action */}
      <div className="flex items-center justify-between p-4 border-b border-earth-brown/10 sticky top-0 bg-light-parchment/95 backdrop-blur-md z-10">
        <h2 className="font-serif text-lg text-deep-valley">Call Transcript</h2>
        <Button
          variant="ghost"
          size="icon"
          onClick={onClose}
          className="rounded-full h-8 w-8 text-earth-brown hover:bg-earth-brown/10 hover:text-deep-valley"
          title="Close Transcript"
        >
          <X size={18} />
        </Button>
      </div>

      {/* Scrollable body; height leaves room for the 60px header above */}
      <div className="h-[calc(100%-60px)]">
        <ScrollArea className="h-full p-4" ref={scrollAreaRef}>
          <div className="space-y-4 pb-4">
            {messages.map((message) => {
              const fromUser = message.speaker === "You";
              // Bubble color/indent and speaker color keyed off the sender.
              const bubbleClass = `flex flex-col p-3 rounded-lg shadow-sm ${
                fromUser ? "bg-horizon-blue/10 ml-6" : "bg-dawn-coral/10 mr-6"
              }`;
              const speakerClass = `font-serif font-medium text-sm ${
                fromUser ? "text-horizon-blue" : "text-dawn-coral"
              }`;

              return (
                <div key={message.id} className={bubbleClass}>
                  <div className="flex items-baseline">
                    <span className={speakerClass}>{message.speaker}</span>
                    {/* Optional timestamp, separated by a dot */}
                    {message.time && (
                      <>
                        <span className="mx-1.5 text-earth-brown/40 text-xs">•</span>
                        <span className="text-xs text-earth-brown/60">
                          {message.time}
                        </span>
                      </>
                    )}
                  </div>
                  <p className="text-deep-valley/90 mt-1 text-base">{message.text}</p>
                </div>
              );
            })}
            {messages.length === 0 && (
              <div className="text-center text-earth-brown/80 pt-10">
                Transcript is empty.
              </div>
            )}
          </div>
        </ScrollArea>
      </div>
    </>
  );
}

src/components/layouts/PhoneLayout/PhoneCallEnded.tsx:

import { Button } from "@/components/ui/button";
import { PhoneCall, PhoneOff } from "lucide-react";

/**
 * Props for the end-of-call panel shown when a phone interaction finishes.
 */
interface PhoneCallEndedProps {
  callDurationFormatted: string; // Pre-formatted duration string, e.g. "02:31"
  onRestartCall: () => void; // Invoked when the user asks to call again
}

/**
 * Call-ended screen: displays the final call duration and offers a
 * "Call Again" action. Purely presentational — restarting is delegated
 * to the `onRestartCall` callback.
 */
export function PhoneCallEnded(props: PhoneCallEndedProps) {
  const { callDurationFormatted, onRestartCall } = props;

  // Restart action styled with the Horizon Blue secondary variant.
  const restartButton = (
    <Button
      variant="secondary" // Use Horizon Blue
      className="mt-5 rounded-full px-6 py-2 transition-smooth"
      onClick={onRestartCall}
    >
      <PhoneCall size={16} className="mr-2" />
      Call Again
    </Button>
  );

  return (
    <div className="flex flex-col items-center justify-center h-full p-4">
      {/* Card surface: light parchment with an earth brown border */}
      <div className="bg-light-parchment/90 backdrop-blur-md p-6 rounded-lg shadow-lg border border-earth-brown/20 text-center">
        {/* Hung-up icon in Dawn Coral */}
        <PhoneOff size={40} className="text-dawn-coral mx-auto mb-4" />
        <h2 className="font-serif text-2xl text-deep-valley mb-2">Call Ended</h2>
        <p className="text-earth-brown text-sm">Duration: {callDurationFormatted}</p>
        {restartButton}
      </div>
    </div>
  );
}
  • VisualNovelLayout:

src/components/layouts/VisualNovelLayout/VisualNovelLayout.tsx:

import { useState, useEffect, useRef, useCallback } from "react";
import { VisualNovelLayoutPresenter } from "./VisualNovelLayoutPresenter";
import { VisualNovelBackground } from "./VisualNovelBackground";
import { VisualNovelCharacter } from "./VisualNovelCharacter";
import { VisualNovelMessageArea } from "./VisualNovelMessageArea";
import { VisualNovelControls } from "./VisualNovelControls";
import { VisualNovelCallAction } from "./VisualNovelCallAction";
import { VisualNovelControlToggle } from "./VisualNovelControlToggle";
import { VisualNovelVolumeToggle } from "./VisualNovelVolumeToggle";
import {
  VoiceControlType,
  SimpleTranscriptMessage,
  VoiceDispatcher,
  VoiceEventEmitter,
} from "@/types";
import { useToast } from "@/hooks/use-toast";

/**
 * Props for the VisualNovelLayout container component.
 * The dispatcher/eventEmitter pair wires the UI to the voice backend:
 * the layout calls dispatcher methods and reacts to emitter events.
 */
interface VisualNovelLayoutProps {
  characterImageSrc: string; // Sprite image shown in the scene
  characterAlt: string; // Alt text for the sprite image
  characterName: string; // Name to display in message area
  initialCurrentMessage: SimpleTranscriptMessage; // The message initially displayed
  initialTranscriptMessages: SimpleTranscriptMessage[]; // Seed transcript history
  initialVoiceControlType?: VoiceControlType; // Defaults to "streaming-audio" in the component
  backgroundUrl?: string; // Optional background image URL (gradient fallback otherwise)
  dispatcher: VoiceDispatcher; // Required — outbound commands to the voice layer
  eventEmitter: VoiceEventEmitter; // Required — inbound events from the voice layer
}

/**
 * Container component for the visual novel layout.
 *
 * Owns all interaction state (voice mode, mute flags, transcript, current
 * message, connection status), subscribes to the voice event emitter, and
 * forwards user intent to the dispatcher. Rendering is delegated to
 * VisualNovelLayoutPresenter and its slot components.
 *
 * Fix vs. previous revision: streaming text chunks now accumulate. The old
 * handler replaced the displayed text with each chunk unless the previous
 * text happened to end with "...", so consecutive chunks were lost.
 */
export function VisualNovelLayout({
  characterImageSrc,
  characterAlt,
  characterName,
  initialCurrentMessage,
  initialTranscriptMessages,
  initialVoiceControlType = "streaming-audio",
  backgroundUrl,
  dispatcher,
  eventEmitter,
}: VisualNovelLayoutProps) {
  const [voiceControlType, setVoiceControlType] = useState<VoiceControlType>(
    initialVoiceControlType
  );
  const [isTranscriptExpanded, setIsTranscriptExpanded] = useState(false);
  // In push-to-talk mode the mic is muted unless the user is actively pushing.
  const [isMicEffectivelyMuted, setIsMicEffectivelyMuted] = useState(
    initialVoiceControlType === "push-to-talk"
  );
  const [isVolumeMuted, setIsVolumeMuted] = useState(false); // Output volume mute state
  const [isPushing, setIsPushing] = useState(false);
  const [currentMessage, setCurrentMessage] = useState(initialCurrentMessage);
  const [transcriptMessages, setTranscriptMessages] = useState<
    SimpleTranscriptMessage[]
  >(initialTranscriptMessages);
  const scrollAreaRef = useRef<HTMLDivElement>(null);
  const { toast } = useToast();
  const [interactionStatus, setInteractionStatus] = useState<
    "idle" | "connecting" | "connected" | "reconnecting" | "ended"
  >("idle"); // Track connection status reported by the emitter
  // True while streamingText chunks are building up the current message;
  // reset whenever a finalized transcript message arrives.
  const isStreamingRef = useRef(false);

  // --- Effects for Event Emitter ---
  useEffect(() => {
    const handleStatusUpdate = (
      status: "idle" | "connecting" | "connected" | "reconnecting" | "ended"
    ) => {
      console.log("VisualNovelLayout: Interaction status update:", status);
      setInteractionStatus(status);
      // Potentially disable controls when not 'connected'
    };

    const handleTranscriptUpdate = (message: SimpleTranscriptMessage) => {
      console.log("VisualNovelLayout: Transcript update:", message);
      // A finalized message ends any in-progress streaming display.
      isStreamingRef.current = false;
      const newMessage = {
        ...message,
        // `??` (not `||`) so a legitimate falsy id such as 0 is preserved.
        id: message.id ?? `msg-${Date.now()}-${Math.random()}`,
        // Ensure speaker name consistency
        speaker: message.speaker === "You" ? "You" : characterName,
      };
      setTranscriptMessages((prev) => [...prev, newMessage]);
      // Update current message display immediately
      setCurrentMessage(newMessage);
    };

    const handleStreamingText = (textChunk: string) => {
      // FIX: the first chunk of an utterance replaces the display; later
      // chunks append (stripping a trailing "..." placeholder). Previously
      // each chunk replaced the text unless it ended with "...", dropping
      // earlier chunks of the same utterance.
      const continuing = isStreamingRef.current;
      isStreamingRef.current = true;
      setCurrentMessage((prev) => {
        const base = continuing
          ? prev.text.endsWith("...")
            ? prev.text.slice(0, -3)
            : prev.text
          : "";
        return {
          ...prev,
          text: base + textChunk,
          speaker: characterName, // Assume streaming text is from the character
        };
      });
    };

    const handleError = (errorMessage: string) => {
      console.error("VisualNovelLayout: Received error:", errorMessage);
      toast({
        variant: "destructive",
        title: "Interaction Error",
        description: errorMessage,
      });
    };

    const handleVoiceControlChange = (type: VoiceControlType) => {
      console.log("VisualNovelLayout: Voice control changed externally to:", type);
      setVoiceControlType(type);
      // Entering PTT mutes the mic until pushed; entering streaming unmutes.
      setIsMicEffectivelyMuted(type === 'push-to-talk');
      setIsPushing(false);
    };

    // Subscribe
    eventEmitter.on("interactionStatus", handleStatusUpdate);
    eventEmitter.on("transcriptUpdate", handleTranscriptUpdate);
    eventEmitter.on("streamingText", handleStreamingText);
    eventEmitter.on("error", handleError);
    eventEmitter.on("voiceControlChange", handleVoiceControlChange);
    // Note: agentSpeaking and streamingAudioB64 might be less relevant here visually,
    // but audio playback is handled by the recorder.

    // Cleanup: detach every handler so a re-mount cannot double-subscribe.
    return () => {
      eventEmitter.off("interactionStatus", handleStatusUpdate);
      eventEmitter.off("transcriptUpdate", handleTranscriptUpdate);
      eventEmitter.off("streamingText", handleStreamingText);
      eventEmitter.off("error", handleError);
      eventEmitter.off("voiceControlChange", handleVoiceControlChange);
    };
  }, [eventEmitter, toast, characterName]); // characterName used when normalizing speakers

  // --- Other Effects ---
  // Keep the expanded transcript scrolled to the newest message. The short
  // delay lets the expand animation/layout settle before measuring.
  useEffect(() => {
    if (isTranscriptExpanded && scrollAreaRef.current) {
      setTimeout(() => {
        if (scrollAreaRef.current) {
          scrollAreaRef.current.scrollTop = scrollAreaRef.current.scrollHeight;
        }
      }, 100);
    }
  }, [isTranscriptExpanded, transcriptMessages]);

  // --- Callbacks ---
  const toggleTranscript = useCallback(() => {
    setIsTranscriptExpanded((prev) => !prev);
  }, []);

  // Streaming-audio mode only: flip the mic and tell the dispatcher.
  const toggleMicMute = useCallback(() => {
    if (voiceControlType === "streaming-audio") {
      const currentlyMuted = isMicEffectivelyMuted;
      if (currentlyMuted) {
        dispatcher.startAudioInput();
      } else {
        dispatcher.stopAudioInput();
      }
      setIsMicEffectivelyMuted(!currentlyMuted);
    }
  }, [voiceControlType, dispatcher, isMicEffectivelyMuted]);

  const toggleVolumeMute = useCallback(() => {
    setIsVolumeMuted((prev) => !prev);
    // TODO: Implement actual audio output muting if required globally
    console.log("Volume Mute Toggled:", !isVolumeMuted);
    toast({
        title: `Volume ${!isVolumeMuted ? 'Muted' : 'Unmuted'}`,
        duration: 2000,
    });
  }, [isVolumeMuted, toast]);

  // Push-to-talk press: open the mic for the duration of the press.
  const handlePushStart = useCallback(() => {
    if (voiceControlType === "push-to-talk" && !isPushing) {
      setIsPushing(true);
      setIsMicEffectivelyMuted(false);
      dispatcher.startAudioInput();
    }
  }, [voiceControlType, dispatcher, isPushing]);

  const handlePushEnd = useCallback(() => {
    if (voiceControlType === "push-to-talk" && isPushing) {
      setIsPushing(false);
      setIsMicEffectivelyMuted(true);
      dispatcher.stopAudioInput();
    }
  }, [voiceControlType, dispatcher, isPushing]);

  // Request a mode switch; local state updates arrive back via the
  // "voiceControlChange" event rather than being set optimistically.
  const toggleVoiceControlMode = useCallback(() => {
    const newMode =
      voiceControlType === "push-to-talk" ? "streaming-audio" : "push-to-talk";
    dispatcher.setVoiceControlType(newMode);
    // State update via event listener
  }, [voiceControlType, dispatcher]);

  const isCallActive = interactionStatus === "connected" || interactionStatus === "reconnecting";

  // Prepare transcript messages with consistent speaker names
  const displayTranscript = transcriptMessages.map((msg) => ({
    ...msg,
    speaker: msg.speaker === "You" ? "You" : characterName,
  }));

  return (
    <VisualNovelLayoutPresenter
      backgroundSlot={<VisualNovelBackground backgroundUrl={backgroundUrl} />}
      characterSlot={
        <VisualNovelCharacter
          characterImageSrc={characterImageSrc}
          characterAlt={characterAlt}
        />
      }
      messageAreaSlot={
        <VisualNovelMessageArea
          isTranscriptExpanded={isTranscriptExpanded}
          currentMessage={currentMessage} // Pass the managed current message
          transcriptMessages={displayTranscript} // Pass formatted transcript
          scrollAreaRef={scrollAreaRef}
          onToggleTranscript={toggleTranscript}
        />
      }
      controlsSlot={
        // Only show controls if connected
        isCallActive ? (
          <VisualNovelControls
            modeToggleSlot={
              <VisualNovelControlToggle
                isPushToTalk={voiceControlType === "push-to-talk"}
                onToggleMode={toggleVoiceControlMode}
              />
            }
            volumeToggleSlot={
              <VisualNovelVolumeToggle
                isVolumeMuted={isVolumeMuted}
                onToggleVolume={toggleVolumeMute}
              />
            }
            callActionSlot={
              <VisualNovelCallAction
                isPushToTalk={voiceControlType === "push-to-talk"}
                isPushing={isPushing}
                isMuted={isMicEffectivelyMuted} // Pass effective mic mute state
                onPushStart={handlePushStart}
                onPushEnd={handlePushEnd}
                onToggleMute={toggleMicMute} // Pass mic mute toggle
              />
            }
          />
        ) : (
            <div className="text-center text-earth-brown text-sm p-3">
                {interactionStatus === 'ended' ? 'Interaction Ended' : 'Connecting...'}
            </div>
        )
      }
    />
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelLayoutPresenter.tsx:

import { ReactNode } from "react";
import { cn } from "@/lib/utils";

/**
 * Pure layout shell for the visual novel view. Holds no state; the four
 * slots are rendered into fixed regions of the frame by the parent.
 */
interface VisualNovelLayoutPresenterProps {
  backgroundSlot: ReactNode;
  characterSlot: ReactNode;
  messageAreaSlot: ReactNode;
  controlsSlot: ReactNode;
}

export function VisualNovelLayoutPresenter(
  props: VisualNovelLayoutPresenterProps
) {
  const { backgroundSlot, characterSlot, messageAreaSlot, controlsSlot } =
    props;

  // Outer frame: Warm Parchment surface, larger radius, subtle shadow.
  const frameClassName = cn(
    "w-full h-[600px] md:h-[700px] lg:h-[80vh] flex flex-col rounded-lg overflow-hidden shadow-lg bg-warm-parchment/90 backdrop-blur-md border border-earth-brown/10 transition-smooth"
  );

  return (
    <div className={frameClassName}>
      {/* Scene region: background behind the character sprite */}
      <div className="relative flex-grow overflow-hidden">
        {backgroundSlot}
        {characterSlot}
        {/* Message/transcript area floats near the bottom of the scene */}
        <div className="absolute bottom-6 left-0 right-0 mx-auto w-[90%] max-w-3xl z-10">
          {messageAreaSlot}
        </div>
      </div>

      {/* Bottom control bar - light parchment, earth brown border */}
      <div className="bg-light-parchment/80 backdrop-blur-sm border-t border-earth-brown/10 p-3 z-10">
        {controlsSlot}
      </div>
    </div>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelBackground.tsx:

import { cn } from "@/lib/utils";

/**
 * Scene backdrop: renders a configurable background image, falling back
 * to a theme gradient, and layers a readability overlay on top.
 */
interface VisualNovelBackgroundProps {
  backgroundUrl?: string; // Make background image optional and configurable
}

export function VisualNovelBackground({
  backgroundUrl,
}: VisualNovelBackgroundProps) {
  // Gradient fallback when no image is supplied; cover/center when one is.
  const layerClassName = cn(
    "absolute inset-0 transition-opacity duration-500",
    !backgroundUrl &&
      "bg-gradient-to-b from-morning-gold/15 via-transparent to-horizon-blue/15",
    backgroundUrl && "bg-cover bg-center"
  );

  const layerStyle = backgroundUrl
    ? { backgroundImage: `url(${backgroundUrl})` }
    : {};

  return (
    <div className={layerClassName} style={layerStyle}>
      {/* Subtle overlay to enhance readability, especially over images */}
      <div className="absolute inset-0 bg-gradient-to-t from-deep-valley/20 via-deep-valley/5 to-transparent"></div>
    </div>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelCharacter.tsx:

import { motion } from "framer-motion";

interface VisualNovelCharacterProps {
  characterImageSrc: string; // Make image source dynamic
  characterAlt: string;
  // Add mood prop if animation depends on it
}

export function VisualNovelCharacter({
  characterImageSrc,
  characterAlt,
}: VisualNovelCharacterProps) {
  return (
    <motion.div
      className="absolute bottom-0 left-1/2 transform -translate-x-1/2 h-[85%] flex items-end justify-center pointer-events-none" // Increased height slightly
      initial={{ y: 30, opacity: 0 }} // Start slightly lower
      animate={{ y: 0, opacity: 1 }}
      transition={{ duration: 0.6, ease: "easeOut", delay: 0.1 }} // Slightly longer duration
    >
      <img
        src={characterImageSrc}
        alt={characterAlt}
        className="h-auto max-h-full w-auto object-contain" // Use max-h-full
        style={{
          // Softer drop shadow using theme color base
          filter: "drop-shadow(0 8px 20px hsla(var(--deep-valley) / 0.15))",
          // Consider removing scale or making it dynamic
          // transform: "scale(1.1)",
        }}
      />
    </motion.div>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelMessageArea.tsx:

import { AnimatePresence, motion } from "framer-motion";
import { Button } from "@/components/ui/button";
import { ScrollArea } from "@/components/ui/scroll-area";
import { ChevronDown, ChevronUp } from "lucide-react";
import { MutableRefObject } from "react";
import { SimpleTranscriptMessage } from "@/types"; // Import the type
import { cn } from "@/lib/utils";

/**
 * Props for the message/transcript panel of the visual novel layout.
 */
interface VisualNovelMessageAreaProps {
  isTranscriptExpanded: boolean; // Collapsed = current message only; expanded = full history
  currentMessage: SimpleTranscriptMessage; // Message shown in the collapsed view
  transcriptMessages: SimpleTranscriptMessage[]; // Full history for the expanded view
  scrollAreaRef: MutableRefObject<HTMLDivElement | null>; // Parent-owned ref used for auto-scrolling
  onToggleTranscript: () => void; // Expand/collapse handler
}

/**
 * Message/transcript panel. Animates between two mutually exclusive views:
 * an expanded, scrollable conversation history and a collapsed single
 * "current message" card. Expansion state is owned by the parent.
 */
export function VisualNovelMessageArea({
  isTranscriptExpanded,
  currentMessage,
  transcriptMessages,
  scrollAreaRef,
  onToggleTranscript,
}: VisualNovelMessageAreaProps) {
  return (
    // mode="wait": the outgoing view finishes its exit before the other enters
    <AnimatePresence mode="wait">
      {isTranscriptExpanded ? (
        // Expanded Transcript View
        <motion.div
          key="expanded"
          // Light parchment surface, earth brown border, lg radius
          className="bg-light-parchment/80 backdrop-blur-md rounded-lg shadow-lg border border-earth-brown/20 overflow-hidden"
          initial={{ height: 100, opacity: 0.8 }}
          animate={{ height: 350, opacity: 1 }} // Expands to a fixed height
          exit={{ height: 100, opacity: 0.8 }}
          transition={{ duration: 0.3, ease: "easeInOut" }}
        >
          {/* Transcript header (sticky so it stays visible while scrolling) */}
          <div className="flex items-center justify-between p-3 border-b border-earth-brown/10 sticky top-0 bg-light-parchment/90 z-10">
            <h3 className="font-serif text-base text-deep-valley">
              Conversation History
            </h3>
            <Button
              variant="ghost"
              size="icon" // Use icon size
              className="rounded-full h-8 w-8 p-0 text-earth-brown hover:bg-earth-brown/10"
              onClick={onToggleTranscript}
              title="Collapse Transcript"
            >
              <ChevronDown size={18} />
            </Button>
          </div>

          {/* Scrollable transcript (panel height minus the 48px header) */}
          <div className="h-[calc(350px-48px)]"> {/* Adjust height */}
            <ScrollArea className="h-full p-4" ref={scrollAreaRef}>
              <div className="space-y-3 pb-4"> {/* Reduced spacing */}
                {transcriptMessages.map((message) => (
                  <div key={message.id} className="flex flex-col">
                    <div className="flex items-baseline">
                      <span
                        className={cn(
                          "font-serif font-medium text-sm", // Use sm size
                          message.speaker === "You"
                            ? "text-horizon-blue" // User color
                            : "text-dawn-coral" // Character color
                        )}
                      >
                        {message.speaker}
                      </span>
                      {/* Optional timestamp, separated by a bullet */}
                      {message.time && (
                        <>
                          <span className="mx-1.5 text-earth-brown/40 text-xs">•</span>
                          <span className="text-xs text-earth-brown/60">
                            {message.time}
                          </span>
                        </>
                      )}
                    </div>
                    <p className="text-deep-valley/80 mt-0.5 pl-1 text-base">{message.text}</p>
                  </div>
                ))}
                 {/* Empty-state placeholder when no history exists yet */}
                 {transcriptMessages.length === 0 && (
                    <div className="text-center text-earth-brown/80 pt-10">
                        Transcript is empty.
                    </div>
                 )}
              </div>
            </ScrollArea>
          </div>
        </motion.div>
      ) : (
        // Collapsed Message View — shows only the latest/current message
        <motion.div
          key="collapsed"
          // Light parchment surface, earth brown border, lg radius
          className="bg-light-parchment/80 backdrop-blur-md p-5 rounded-lg shadow-lg border border-earth-brown/20"
          initial={{ y: 20, opacity: 0 }}
          animate={{ y: 0, opacity: 1 }}
          exit={{ y: 20, opacity: 0 }}
          transition={{ duration: 0.3, ease: "easeOut" }}
        >
          <div className="flex items-center justify-between mb-1.5">
            {/* Speaker Name - Dawn Coral accent */}
            <h3 className="font-serif text-lg text-dawn-coral font-medium">
              {currentMessage.speaker}
            </h3>
            <Button
              variant="ghost"
              size="icon" // Use icon size
              className="rounded-full h-8 w-8 p-0 text-earth-brown hover:bg-earth-brown/10"
              onClick={onToggleTranscript}
              title="Expand Transcript"
            >
              <ChevronUp size={18} />
            </Button>
          </div>
          {/* Message Text */}
          <p className="font-sans text-deep-valley/90 leading-relaxed text-base">
            {currentMessage.text}
          </p>
        </motion.div>
      )}
    </AnimatePresence>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelControls.tsx:

import { ReactNode } from "react";

interface VisualNovelControlsProps {
  modeToggleSlot: ReactNode;
  volumeToggleSlot: ReactNode;
  callActionSlot: ReactNode;
}

export function VisualNovelControls({
  modeToggleSlot,
  volumeToggleSlot,
  callActionSlot,
}: VisualNovelControlsProps) {
  return (
    <div className="flex items-center justify-between px-1"> {/* Added padding */}
      {/* Left side controls */}
      <div className="flex items-center space-x-3"> {/* Adjusted spacing */}
        {modeToggleSlot}
        {volumeToggleSlot}
      </div>

      {/* Right side controls (Mic) */}
      <div>{callActionSlot}</div>
    </div>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelCallAction.tsx:

import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { Mic, MicOff } from "lucide-react";

interface VisualNovelCallActionProps {
  isPushToTalk: boolean;
  isPushing: boolean;
  isMuted: boolean; // Microphone mute state
  onPushStart: () => void;
  onPushEnd: () => void;
  onToggleMute: () => void; // Toggles microphone mute
}

export function VisualNovelCallAction({
  isPushToTalk,
  isPushing,
  isMuted,
  onPushStart,
  onPushEnd,
  onToggleMute,
}: VisualNovelCallActionProps) {
  if (isPushToTalk) {
    return (
      <Button
        className={cn(
          // Adjusted size, rounded-full, theme colors
          "h-12 px-6 rounded-full transition-all duration-300 font-medium shadow-md",
          isPushing
            ? "bg-gradient-to-r from-dawn-coral to-morning-gold text-deep-valley scale-105 shadow-lg" // Active PTT gradient
            : "bg-earth-brown hover:bg-earth-brown/90 text-warm-parchment" // Grounded PTT button
        )}
        onMouseDown={onPushStart}
        onMouseUp={onPushEnd}
        onTouchStart={onPushStart}
        onTouchEnd={onPushEnd}
        onMouseLeave={onPushEnd}
      >
        {isPushing ? "Speaking..." : "Hold to Speak"}
      </Button>
    );
  }

  // Streaming Audio Mode
  return (
    <Button
      variant="outline"
      className={cn(
        // Adjusted size, rounded-full, theme colors
        "rounded-full transition-all duration-300 h-12 w-auto px-5 shadow-sm", // Adjusted padding
        isMuted
          ? "bg-dawn-coral/10 text-dawn-coral border-dawn-coral/30 hover:bg-dawn-coral/20" // Muted state (Coral accent)
          : "bg-horizon-blue/10 text-horizon-blue border-horizon-blue/30 hover:bg-horizon-blue/20" // Active mic state (Blue accent)
      )}
      onClick={onToggleMute}
    >
      {isMuted ? (
        <MicOff size={18} className="mr-1.5" /> // Adjusted margin
      ) : (
        <Mic size={18} className="mr-1.5" />
      )}
      {isMuted ? "Unmute" : "Mute"} {/* Simplified text */}
    </Button>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelControlToggle.tsx:

import { Switch } from "@/components/ui/switch";
import { cn } from "@/lib/utils";

/**
 * Switch between "Push to Talk" (checked) and "Live Voice" (unchecked)
 * voice-control modes, with a clickable text label.
 */
interface VisualNovelControlToggleProps {
  isPushToTalk: boolean;
  onToggleMode: () => void;
}

export function VisualNovelControlToggle({
  isPushToTalk,
  onToggleMode,
}: VisualNovelControlToggleProps) {
  const switchId = "voice-control-switch";
  const modeLabel = isPushToTalk ? "Push to Talk" : "Live Voice";
  const ariaLabel = isPushToTalk
    ? "Push to Talk mode enabled"
    : "Live Voice mode enabled";

  return (
    <div className="flex items-center space-x-2">
      {/* Checked = PTT (Morning Gold); unchecked = streaming (Horizon Blue) */}
      <Switch
        checked={isPushToTalk}
        onCheckedChange={onToggleMode}
        className={cn(
            "data-[state=checked]:bg-morning-gold data-[state=unchecked]:bg-horizon-blue/50",
            "focus-visible:ring-horizon-blue" // Ring color
        )}
        aria-label={ariaLabel}
        id={switchId}
      />
      {/* Label in Earth Brown, tied to the switch via htmlFor */}
      <label
        htmlFor={switchId}
        className="text-xs text-earth-brown font-medium cursor-pointer select-none"
      >
        {modeLabel}
      </label>
    </div>
  );
}

src/components/layouts/VisualNovelLayout/VisualNovelVolumeToggle.tsx:

import { Button } from "@/components/ui/button";
import { Volume2, VolumeX } from "lucide-react";
import { cn } from "@/lib/utils";

/**
 * Round icon button toggling the output-volume mute state.
 */
interface VisualNovelVolumeToggleProps {
  isVolumeMuted: boolean;
  onToggleVolume: () => void;
}

export function VisualNovelVolumeToggle({
  isVolumeMuted,
  onToggleVolume,
}: VisualNovelVolumeToggleProps) {
  const VolumeIcon = isVolumeMuted ? VolumeX : Volume2;
  const tooltip = isVolumeMuted ? "Unmute Volume" : "Mute Volume";

  return (
    <Button
      variant="outline" // Use outline style
      size="icon"
      className={cn(
        "rounded-full transition-all duration-300 h-10 w-10",
        "bg-light-parchment/60 border border-earth-brown/20 text-earth-brown/80", // Base style
        "hover:bg-earth-brown/10 hover:border-earth-brown/40 hover:text-deep-valley", // Hover style
        isVolumeMuted ? "text-dawn-coral" : "" // Coral tint signals muted
      )}
      onClick={onToggleVolume}
      title={tooltip}
    >
      <VolumeIcon size={18} />
    </Button>
  );
}

7. Update Storybook Stories

Update the stories to provide mock dispatcher and eventEmitter instances.


src/components/layouts/CardLayout/CardLayout.stories.tsx:

import type { Meta, StoryObj } from "@storybook/react";
import { CardLayout } from "./CardLayout"; // Adjust path if necessary
import { sampleTranscript } from "./sampleData"; // Import sample data
import { VoiceDispatcher, VoiceEventEmitter } from "@/types";
import { EventEmitter } from "events"; // Use Node's EventEmitter for mocking
import { fn } from "@storybook/test"; // Use Storybook's mock function

// Shared logger factory for parameterless dispatcher methods; produces
// the exact same console output as before.
const logCall = (method: string) => () =>
  console.log(`Dispatcher: ${method} called`);

// Mock Voice Dispatcher — every method is a Storybook `fn` spy.
const mockDispatcher: VoiceDispatcher = {
  startAudioInput: fn(logCall("startAudioInput")),
  stopAudioInput: fn(logCall("stopAudioInput")),
  endInteraction: fn(logCall("endInteraction")),
  setVoiceControlType: fn((type) =>
    console.log(`Dispatcher: setVoiceControlType called with ${type}`)
  ),
  sendAudioChunk: fn((blob) =>
    console.log(`Dispatcher: sendAudioChunk called with blob size ${blob.size}`)
  ),
};

// Mock Voice Event Emitter (Node's EventEmitter cast to the typed interface)
const mockEventEmitter = new EventEmitter() as VoiceEventEmitter;

// Drives the mock emitter on a timeline so Storybook's "Interactions"
// tab shows the layout reacting to live events.
const play = async () => {
    const wait = (ms: number) =>
        new Promise((resolve) => setTimeout(resolve, ms));

    // Simulate connection
    await wait(500);
    mockEventEmitter.emit("interactionStatus", "connecting");
    await wait(1500);
    mockEventEmitter.emit("interactionStatus", "connected");

    // Simulate transcript updates
    await wait(2000);
    mockEventEmitter.emit("transcriptUpdate", { id: 100, speaker: "Agent", text: "This is a simulated message.", time: "10:10 AM" });
    await wait(1500);
    mockEventEmitter.emit("transcriptUpdate", { id: 101, speaker: "You", text: "Okay, I see the simulation.", time: "10:11 AM" });

    // Simulate ending
    // await wait(5000);
    // mockEventEmitter.emit("interactionStatus", "ended");
};


// Storybook metadata for CardLayout. `satisfies Meta` keeps literal arg
// inference while validating the shape against the component's props.
const meta = {
  title: "Layouts/CardLayout",
  component: CardLayout,
  parameters: {
    layout: "fullscreen",
  },
  tags: ["autodocs"],
  argTypes: {
    agentName: { control: "text" },
    agentAvatarSrc: { control: "text" },
    agentAvatarFallback: { control: "text" },
    agentStatusText: { control: "text" },
    agentInfo: { control: "object" },
    initialTranscriptMessages: { control: "object" },
    initialVoiceControlType: {
      control: "radio",
      options: ["push-to-talk", "streaming-audio"],
    },
    // dispatcher/eventEmitter are args but not controllable via the UI
    dispatcher: { control: false },
    eventEmitter: { control: false },
  },
  // Provide default mocks for all stories
  args: {
    dispatcher: mockDispatcher,
    eventEmitter: mockEventEmitter,
  }
} satisfies Meta<typeof CardLayout>;

export default meta;
type Story = StoryObj<typeof meta>;

// Baseline story: push-to-talk mode with a pre-seeded transcript.
export const Default: Story = {
  args: {
    agentName: "Ariel Support",
    agentAvatarSrc: "https://storage.wagn.ai/images/ariel-support-profile.jpg",
    agentAvatarFallback: "AS",
    agentStatusText: "Available", // Initial status
    agentInfo: {
      department: "Customer Support",
      specialization: "Order Issues",
      languages: "English, Spanish",
    },
    initialTranscriptMessages: sampleTranscript,
    initialVoiceControlType: "push-to-talk",
  },
   play: play, // Add interaction simulation
};

// Variant: streaming-audio mode starting from an empty transcript.
export const StreamingAudio: Story = {
  args: {
    ...Default.args,
    initialVoiceControlType: "streaming-audio",
    agentStatusText: "Ready", // Different initial status example
    initialTranscriptMessages: [], // Start with empty transcript
  },
  play: play,
};

// Variant: the "ended" state is reached by emitting the status event
// immediately, since the component has no initial-status prop.
export const EndedState: Story = {
  args: {
    ...Default.args,
    // Simulate ended state by emitting event immediately (or setting initial state if component supports it)
    // For now, we rely on the interaction simulation
  },
  play: async () => {
     await new Promise(resolve => setTimeout(resolve, 100));
     mockEventEmitter.emit("interactionStatus", "ended");
  }
};

src/components/layouts/PhoneLayout/PhoneLayout.stories.tsx:

import type { Meta, StoryObj } from "@storybook/react";
import { PhoneLayout } from "./PhoneLayout"; // Adjust path if necessary
import { samplePhoneTranscript } from "./sampleData"; // Import sample data
import { VoiceDispatcher, VoiceEventEmitter } from "@/types";
import { EventEmitter } from "events";
import { fn } from "@storybook/test";

// Mock Voice Dispatcher — every method is a Storybook `fn` spy.
const mockDispatcher: VoiceDispatcher = {
  startAudioInput: fn(() => console.log("Dispatcher: startAudioInput called")),
  stopAudioInput: fn(() => console.log("Dispatcher: stopAudioInput called")),
  endInteraction: fn(() => console.log("Dispatcher: endInteraction called")),
  setVoiceControlType: fn((type) =>
    console.log(`Dispatcher: setVoiceControlType called with ${type}`)
  ),
  // Consistency fix: the CardLayout story mock implements sendAudioChunk
  // on the same VoiceDispatcher type, so mock it here too. Harmless if
  // the member is optional — TODO confirm against the VoiceDispatcher def.
  sendAudioChunk: fn((blob) =>
    console.log(`Dispatcher: sendAudioChunk called with blob size ${blob.size}`)
  ),
};

// Mock Voice Event Emitter (Node's EventEmitter cast to the typed interface)
const mockEventEmitter = new EventEmitter() as VoiceEventEmitter;

// Timeline of mock events: connect, a streamed agent utterance that is
// then finalized as a transcript entry, and a user reply.
const play = async () => {
    const wait = (ms: number) =>
        new Promise((resolve) => setTimeout(resolve, ms));

    await wait(500);
    mockEventEmitter.emit("interactionStatus", "connecting");
    await wait(1500);
    mockEventEmitter.emit("interactionStatus", "connected");

    // Agent speaks: streamed in two chunks, then finalized.
    await wait(2000);
    mockEventEmitter.emit("agentSpeaking", true);
    mockEventEmitter.emit("streamingText", "Thinking about the ");
    await wait(500);
    mockEventEmitter.emit("streamingText", "market trends...");
    await wait(800);
    mockEventEmitter.emit("transcriptUpdate", { id: 100, speaker: "Aria", text: "Thinking about the market trends..." });
    mockEventEmitter.emit("agentSpeaking", false);

    // User reply.
    await wait(1500);
    mockEventEmitter.emit("transcriptUpdate", { id: 101, speaker: "You", text: "Yes, I agree." });

    // Simulate ending
    // await wait(5000);
    // mockEventEmitter.emit("interactionStatus", "ended");
};

const meta = {
  title: "Layouts/PhoneLayout",
  component: PhoneLayout,
  parameters: {
    layout: "centered",
  },
  tags: ["autodocs"],
  argTypes: {
    avatarSrc: { control: "text" },
    avatarFallback: { control: "text" },
    name: { control: "text" },
    initialCurrentMessageText: { control: "text" },
    initialTranscriptMessages: { control: "object" },
    initialVoiceControlType: {
      control: "radio",
      options: ["push-to-talk", "streaming-audio"],
    },
    backgroundUrl: { control: "text" },
    dispatcher: { control: false },
    eventEmitter: { control: false },
  },
  args: {
    dispatcher: mockDispatcher,
    eventEmitter: mockEventEmitter,
  },
} satisfies Meta<typeof PhoneLayout>;

export default meta;
type Story = StoryObj<typeof meta>;

// Default phone story: streaming-audio mode with a pre-seeded transcript and
// the scripted `play` simulation driving the mock emitter.
export const Default: Story = {
  args: {
    avatarSrc: "https://storage.wagn.ai/images/aria-business-profile.jpg",
    avatarFallback: "AJ",
    name: "Aria Johnson",
    initialCurrentMessageText:
      "We believe it has significant potential, especially in the emerging markets.",
    initialTranscriptMessages: samplePhoneTranscript,
    initialVoiceControlType: "streaming-audio",
    backgroundUrl: "https://storage.wagn.ai/images/dawn-clifftop-bg.jpg", // Use new background
  },
  play: play,
};

export const PushToTalk: Story = {
  args: {
    ...Default.args,
    initialVoiceControlType: "push-to-talk",
    initialTranscriptMessages: [], // Start empty
     initialCurrentMessageText: "Ready for your input.",
  },
   play: play,
};

// Renders the phone layout in its post-call state by emitting "ended"
// shortly after mount.
export const EndedStatePhone: Story = {
  args: { ...Default.args },
  play: async () => {
    await new Promise((resolve) => setTimeout(resolve, 100));
    mockEventEmitter.emit("interactionStatus", "ended");
  },
};

src/components/layouts/VisualNovelLayout/VisualNovelLayout.stories.tsx:

import type { Meta, StoryObj } from "@storybook/react";
import { VisualNovelLayout } from "./VisualNovelLayout"; // Adjust path if necessary
import { sampleVisualNovelTranscript } from "./sampleData"; // Import sample data
import { VoiceDispatcher, VoiceEventEmitter } from "@/types";
import { EventEmitter } from "events";
import { fn } from "@storybook/test";

// Mock Voice Dispatcher
// Each handler is wrapped in fn() so Storybook's interactions panel records
// calls, while still logging to the console for manual debugging.
const mockDispatcher: VoiceDispatcher = {
  startAudioInput: fn(() => console.log("Dispatcher: startAudioInput called")),
  stopAudioInput: fn(() => console.log("Dispatcher: stopAudioInput called")),
  endInteraction: fn(() => console.log("Dispatcher: endInteraction called")),
  setVoiceControlType: fn((type) =>
    console.log(`Dispatcher: setVoiceControlType called with ${type}`)
  ),
};

// Mock Voice Event Emitter
// Plain Node EventEmitter asserted to the typed interface; stories emit
// events on it to simulate the voice backend.
const mockEventEmitter = new EventEmitter() as VoiceEventEmitter;

// Example interaction simulation
// Scripted mock conversation driving the visual novel layout through the
// connecting -> connected -> streaming-text -> transcript flow.
const play = async () => {
    // Local helper: avoids repeating the setTimeout-promise boilerplate.
    const delay = (ms: number) =>
        new Promise((resolve) => setTimeout(resolve, ms));

    await delay(500);
    mockEventEmitter.emit("interactionStatus", "connecting");
    await delay(1500);
    mockEventEmitter.emit("interactionStatus", "connected");

    // Stream partial agent text, then commit the full transcript line.
    await delay(2000);
    mockEventEmitter.emit("streamingText", "The path ahead ");
    await delay(500);
    mockEventEmitter.emit("streamingText", "looks promising...");
    await delay(800);
    mockEventEmitter.emit("transcriptUpdate", { id: 100, speaker: "Aria", text: "The path ahead looks promising..." });

    // User reply appears directly as a transcript entry.
    await delay(1500);
    mockEventEmitter.emit("transcriptUpdate", { id: 101, speaker: "You", text: "Indeed it does." });

    // Simulate ending
    // await delay(5000);
    // mockEventEmitter.emit("interactionStatus", "ended");
};


// Storybook meta for VisualNovelLayout. The mock dispatcher/emitter are
// injected as default args; their controls are disabled (not serializable).
const meta = {
  title: "Layouts/VisualNovelLayout",
  component: VisualNovelLayout,
  parameters: {
    layout: "fullscreen",
  },
  tags: ["autodocs"],
  argTypes: {
    characterImageSrc: { control: "text" },
    characterAlt: { control: "text" },
    characterName: { control: "text" },
    initialCurrentMessage: { control: "object" },
    initialTranscriptMessages: { control: "object" },
    initialVoiceControlType: {
      control: "radio",
      options: ["push-to-talk", "streaming-audio"],
    },
    backgroundUrl: { control: "text" },
    // Non-serializable mocks — hidden from the controls panel.
    dispatcher: { control: false },
    eventEmitter: { control: false },
  },
  args: {
    dispatcher: mockDispatcher,
    eventEmitter: mockEventEmitter,
  },
} satisfies Meta<typeof VisualNovelLayout>;

export default meta;
// Story type derived from meta so each story's args are fully typed.
type Story = StoryObj<typeof meta>;

export const Default: Story = {
  args: {
    characterImageSrc: "https://storage.wagn.ai/images/anime-character-dawn.png", // Use dawn version
    characterAlt: "Aria",
    characterName: "Aria",
    initialCurrentMessage:
      sampleVisualNovelTranscript[sampleVisualNovelTranscript.length - 1],
    initialTranscriptMessages: sampleVisualNovelTranscript,
    initialVoiceControlType: "streaming-audio",
    backgroundUrl: "https://storage.wagn.ai/images/dawn-clifftop-bg.jpg", // Use new background
  },
   play: play,
};

export const PushToTalk: Story = {
  args: {
    ...Default.args,
    initialVoiceControlType: "push-to-talk",
    initialTranscriptMessages: [], // Start empty
    initialCurrentMessage: { id: 0, speaker: "Aria", text: "The air is crisp this morning. What's on your mind?" },
  },
   play: play,
};

// Renders the layout in its post-interaction state by emitting "ended"
// shortly after mount.
export const EndedStateVisualNovel: Story = {
  args: { ...Default.args },
  play: async () => {
    await new Promise((resolve) => setTimeout(resolve, 100));
    mockEventEmitter.emit("interactionStatus", "ended");
  },
};

8. Update UI Component Imports (Optional but Recommended)

Since many UI components were updated, double-check that their import paths still resolve. For example, use-mobile.tsx was moved, so update the useIsMobile import in sidebar.tsx (which SidebarProvider depends on).


src/components/ui/sidebar.tsx:

// lint-ignore

import * as React from "react";
import { Slot } from "@radix-ui/react-slot";
import { VariantProps, cva } from "class-variance-authority";
import { PanelLeft } from "lucide-react";

// Correct the import path for useIsMobile
import { useIsMobile } from "@/hooks/use-mobile";
import { cn } from "@/lib/utils";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { Separator } from "@/components/ui/separator";
import { Sheet, SheetContent } from "@/components/ui/sheet";
import { Skeleton } from "@/components/ui/skeleton";
import {
  Tooltip,
  TooltipContent,
  TooltipProvider,
  TooltipTrigger,
} from "@/components/ui/tooltip";

// Cookie used to persist the sidebar open/collapsed state across reloads.
const SIDEBAR_COOKIE_NAME = "sidebar:state";
// Cookie lifetime: 7 days, expressed in seconds.
const SIDEBAR_COOKIE_MAX_AGE = 60 * 60 * 24 * 7;
// Expanded width on desktop.
const SIDEBAR_WIDTH = "16rem";
// Width of the mobile slide-over sheet.
const SIDEBAR_WIDTH_MOBILE = "18rem";
// Width when collapsed to icon-only mode.
const SIDEBAR_WIDTH_ICON = "3rem";
// Pressed together with Cmd/Ctrl to toggle the sidebar.
const SIDEBAR_KEYBOARD_SHORTCUT = "b";

// Shape of the context value shared by all sidebar sub-components.
type SidebarContext = {
  state: "expanded" | "collapsed";
  open: boolean;
  setOpen: (open: boolean) => void;
  openMobile: boolean;
  setOpenMobile: (open: boolean) => void;
  isMobile: boolean;
  toggleSidebar: () => void;
};

// The type and the context value deliberately share one name; TypeScript
// keeps type and value declarations in separate namespaces.
const SidebarContext = React.createContext<SidebarContext | null>(null);

/**
 * Accesses the sidebar context. Must be called from a component rendered
 * inside a SidebarProvider; throws otherwise.
 */
function useSidebar() {
  const ctx = React.useContext(SidebarContext);
  if (ctx) return ctx;
  throw new Error("useSidebar must be used within a SidebarProvider.");
}

/**
 * Provides sidebar state (open/collapsed, mobile flag, toggle handler) to all
 * descendant sidebar components via context and wires up the Cmd/Ctrl+B
 * keyboard shortcut. Supports controlled usage (open/onOpenChange) and
 * uncontrolled usage (defaultOpen), persisting the state in a cookie.
 */
const SidebarProvider = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div"> & {
    defaultOpen?: boolean;
    open?: boolean;
    onOpenChange?: (open: boolean) => void;
  }
>(
  (
    {
      defaultOpen = true,
      open: openProp,
      onOpenChange: setOpenProp,
      className,
      style,
      children,
      ...props
    },
    ref
  ) => {
    const isMobile = useIsMobile();
    const [openMobile, setOpenMobile] = React.useState(false);

    // This is the internal state of the sidebar.
    // We use openProp and setOpenProp for control from outside the component.
    const [_open, _setOpen] = React.useState(defaultOpen);
    const open = openProp ?? _open;
    const setOpen = React.useCallback(
      (value: boolean | ((value: boolean) => boolean)) => {
        // Accepts either a direct value or an updater fn, mirroring setState.
        const openState = typeof value === "function" ? value(open) : value;
        if (setOpenProp) {
          setOpenProp(openState);
        } else {
          _setOpen(openState);
        }

        // This sets the cookie to keep the sidebar state.
        // Note: the cookie is written in both controlled and uncontrolled modes.
        document.cookie = `${SIDEBAR_COOKIE_NAME}=${openState}; path=/; max-age=${SIDEBAR_COOKIE_MAX_AGE}`;
      },
      [setOpenProp, open]
    );

    // Helper to toggle the sidebar.
    // On mobile this toggles the sheet instead of the desktop open state.
    const toggleSidebar = React.useCallback(() => {
      return isMobile
        ? setOpenMobile((open) => !open)
        : setOpen((open) => !open);
    }, [isMobile, setOpen, setOpenMobile]);

    // Adds a keyboard shortcut to toggle the sidebar.
    React.useEffect(() => {
      const handleKeyDown = (event: KeyboardEvent) => {
        if (
          event.key === SIDEBAR_KEYBOARD_SHORTCUT &&
          (event.metaKey || event.ctrlKey)
        ) {
          event.preventDefault();
          toggleSidebar();
        }
      };

      window.addEventListener("keydown", handleKeyDown);
      return () => window.removeEventListener("keydown", handleKeyDown);
    }, [toggleSidebar]);

    // We add a state so that we can do data-state="expanded" or "collapsed".
    // This makes it easier to style the sidebar with Tailwind classes.
    const state = open ? "expanded" : "collapsed";

    // Memoized so consumers only re-render when a piece of state changes.
    const contextValue = React.useMemo<SidebarContext>(
      () => ({
        state,
        open,
        setOpen,
        isMobile,
        openMobile,
        setOpenMobile,
        toggleSidebar,
      }),
      [state, open, setOpen, isMobile, openMobile, setOpenMobile, toggleSidebar]
    );

    return (
      <SidebarContext.Provider value={contextValue}>
        <TooltipProvider delayDuration={0}>
          <div
            style={
              {
                "--sidebar-width": SIDEBAR_WIDTH,
                "--sidebar-width-icon": SIDEBAR_WIDTH_ICON,
                ...style,
              } as React.CSSProperties
            }
            className={cn(
              "group/sidebar-wrapper flex min-h-svh w-full has-[[data-variant=inset]]:bg-sidebar",
              className
            )}
            ref={ref}
            {...props}
          >
            {children}
          </div>
        </TooltipProvider>
      </SidebarContext.Provider>
    );
  }
);
SidebarProvider.displayName = "SidebarProvider";

/**
 * The sidebar shell. Renders one of three forms:
 * - collapsible="none": a plain fixed-width column,
 * - mobile: a slide-over Sheet,
 * - desktop: a fixed panel plus a "gap" element that animates layout width.
 */
const Sidebar = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div"> & {
    side?: "left" | "right";
    variant?: "sidebar" | "floating" | "inset";
    collapsible?: "offcanvas" | "icon" | "none";
  }
>(
  (
    {
      side = "left",
      variant = "sidebar",
      collapsible = "offcanvas",
      className,
      children,
      ...props
    },
    ref
  ) => {
    const { isMobile, state, openMobile, setOpenMobile } = useSidebar();

    // Non-collapsible: a simple static column, no state handling needed.
    if (collapsible === "none") {
      return (
        <div
          className={cn(
            "flex h-full w-[--sidebar-width] flex-col bg-sidebar text-sidebar-foreground",
            className
          )}
          ref={ref}
          {...props}
        >
          {children}
        </div>
      );
    }

    // Mobile: render as a slide-over sheet driven by openMobile.
    if (isMobile) {
      return (
        <Sheet open={openMobile} onOpenChange={setOpenMobile} {...props}>
          <SheetContent
            data-sidebar="sidebar"
            data-mobile="true"
            className="w-[--sidebar-width] bg-sidebar p-0 text-sidebar-foreground [&>button]:hidden"
            style={
              {
                "--sidebar-width": SIDEBAR_WIDTH_MOBILE,
              } as React.CSSProperties
            }
            side={side}
          >
            <div className="flex h-full w-full flex-col">{children}</div>
          </SheetContent>
        </Sheet>
      );
    }

    // Desktop: data-* attributes drive the group-data-[...] styling below.
    return (
      <div
        ref={ref}
        className="group peer hidden md:block text-sidebar-foreground"
        data-state={state}
        data-collapsible={state === "collapsed" ? collapsible : ""}
        data-variant={variant}
        data-side={side}
      >
        {/* This is what handles the sidebar gap on desktop */}
        <div
          className={cn(
            "duration-200 relative h-svh w-[--sidebar-width] bg-transparent transition-[width] ease-linear",
            "group-data-[collapsible=offcanvas]:w-0",
            "group-data-[side=right]:rotate-180",
            variant === "floating" || variant === "inset"
              ? "group-data-[collapsible=icon]:w-[calc(var(--sidebar-width-icon)_+_theme(spacing.4))]"
              : "group-data-[collapsible=icon]:w-[--sidebar-width-icon]"
          )}
        />
        {/* Fixed panel that actually contains the sidebar content */}
        <div
          className={cn(
            "duration-200 fixed inset-y-0 z-10 hidden h-svh w-[--sidebar-width] transition-[left,right,width] ease-linear md:flex",
            side === "left"
              ? "left-0 group-data-[collapsible=offcanvas]:left-[calc(var(--sidebar-width)*-1)]"
              : "right-0 group-data-[collapsible=offcanvas]:right-[calc(var(--sidebar-width)*-1)]",
            // Adjust the padding for floating and inset variants.
            variant === "floating" || variant === "inset"
              ? "p-2 group-data-[collapsible=icon]:w-[calc(var(--sidebar-width-icon)_+_theme(spacing.4)_+2px)]"
              : "group-data-[collapsible=icon]:w-[--sidebar-width-icon] group-data-[side=left]:border-r group-data-[side=right]:border-l",
            className
          )}
          {...props}
        >
          <div
            data-sidebar="sidebar"
            className="flex h-full w-full flex-col bg-sidebar group-data-[variant=floating]:rounded-lg group-data-[variant=floating]:border group-data-[variant=floating]:border-sidebar-border group-data-[variant=floating]:shadow"
          >
            {children}
          </div>
        </div>
      </div>
    );
  }
);
Sidebar.displayName = "Sidebar";

// Ghost icon button that toggles the sidebar. Any onClick supplied by the
// caller is invoked before the toggle.
const SidebarTrigger = React.forwardRef<
  React.ElementRef<typeof Button>,
  React.ComponentProps<typeof Button>
>(({ className, onClick, ...rest }, ref) => {
  const { toggleSidebar } = useSidebar();

  return (
    <Button
      data-sidebar="trigger"
      ref={ref}
      size="icon"
      variant="ghost"
      className={cn("h-7 w-7", className)}
      onClick={(event) => {
        onClick?.(event);
        toggleSidebar();
      }}
      {...rest}
    >
      <PanelLeft />
      <span className="sr-only">Toggle Sidebar</span>
    </Button>
  );
});
SidebarTrigger.displayName = "SidebarTrigger";

/**
 * Invisible grab strip along the sidebar edge that toggles the sidebar on
 * click. Removed from the tab order (tabIndex={-1}); the cursor direction
 * flips with side and collapsed state via the data-attribute selectors below.
 */
const SidebarRail = React.forwardRef<
  HTMLButtonElement,
  React.ComponentProps<"button">
>(({ className, ...props }, ref) => {
  const { toggleSidebar } = useSidebar();

  return (
    <button
      ref={ref}
      data-sidebar="rail"
      aria-label="Toggle Sidebar"
      tabIndex={-1}
      onClick={toggleSidebar}
      title="Toggle Sidebar"
      className={cn(
        "absolute inset-y-0 z-20 hidden w-4 -translate-x-1/2 transition-all ease-linear after:absolute after:inset-y-0 after:left-1/2 after:w-[2px] hover:after:bg-sidebar-border group-data-[side=left]:-right-4 group-data-[side=right]:left-0 sm:flex",
        "[[data-side=left]_&]:cursor-w-resize [[data-side=right]_&]:cursor-e-resize",
        "[[data-side=left][data-state=collapsed]_&]:cursor-e-resize [[data-side=right][data-state=collapsed]_&]:cursor-w-resize",
        "group-data-[collapsible=offcanvas]:translate-x-0 group-data-[collapsible=offcanvas]:after:left-full group-data-[collapsible=offcanvas]:hover:bg-sidebar",
        "[[data-side=left][data-collapsible=offcanvas]_&]:-right-2",
        "[[data-side=right][data-collapsible=offcanvas]_&]:-left-2",
        className
      )}
      {...props}
    />
  );
});
SidebarRail.displayName = "SidebarRail";

/**
 * Main content area that pairs with the sidebar. The peer-data selectors
 * adjust margins/rounding when the sibling sidebar uses the "inset" variant.
 */
const SidebarInset = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"main">
>(({ className, ...props }, ref) => {
  return (
    <main
      ref={ref}
      className={cn(
        "relative flex min-h-svh flex-1 flex-col bg-background",
        "peer-data-[variant=inset]:min-h-[calc(100svh-theme(spacing.4))] md:peer-data-[variant=inset]:m-2 md:peer-data-[state=collapsed]:peer-data-[variant=inset]:ml-2 md:peer-data-[variant=inset]:ml-0 md:peer-data-[variant=inset]:rounded-xl md:peer-data-[variant=inset]:shadow",
        className
      )}
      {...props}
    />
  );
});
SidebarInset.displayName = "SidebarInset";

const SidebarInput = React.forwardRef<
  React.ElementRef<typeof Input>,
  React.ComponentProps<typeof Input>
>(({ className, ...props }, ref) => {
  return (
    <Input
      ref={ref}
      data-sidebar="input"
      className={cn(
        "h-8 w-full bg-background shadow-none focus-visible:ring-2 focus-visible:ring-sidebar-ring",
        className
      )}
      {...props}
    />
  );
});
SidebarInput.displayName = "SidebarInput";

const SidebarHeader = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div">
>(({ className, ...props }, ref) => {
  return (
    <div
      ref={ref}
      data-sidebar="header"
      className={cn("flex flex-col gap-2 p-2", className)}
      {...props}
    />
  );
});
SidebarHeader.displayName = "SidebarHeader";

const SidebarFooter = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div">
>(({ className, ...props }, ref) => {
  return (
    <div
      ref={ref}
      data-sidebar="footer"
      className={cn("flex flex-col gap-2 p-2", className)}
      {...props}
    />
  );
});
SidebarFooter.displayName = "SidebarFooter";

const SidebarSeparator = React.forwardRef<
  React.ElementRef<typeof Separator>,
  React.ComponentProps<typeof Separator>
>(({ className, ...props }, ref) => {
  return (
    <Separator
      ref={ref}
      data-sidebar="separator"
      className={cn("mx-2 w-auto bg-sidebar-border", className)}
      {...props}
    />
  );
});
SidebarSeparator.displayName = "SidebarSeparator";

const SidebarContent = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div">
>(({ className, ...props }, ref) => {
  return (
    <div
      ref={ref}
      data-sidebar="content"
      className={cn(
        "flex min-h-0 flex-1 flex-col gap-2 overflow-auto group-data-[collapsible=icon]:overflow-hidden",
        className
      )}
      {...props}
    />
  );
});
SidebarContent.displayName = "SidebarContent";

const SidebarGroup = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div">
>(({ className, ...props }, ref) => {
  return (
    <div
      ref={ref}
      data-sidebar="group"
      className={cn("relative flex w-full min-w-0 flex-col p-2", className)}
      {...props}
    />
  );
});
SidebarGroup.displayName = "SidebarGroup";

/**
 * Heading for a sidebar group. Fades/slides out of view in icon-collapsed
 * mode; renders through Radix Slot when asChild is set.
 */
const SidebarGroupLabel = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div"> & { asChild?: boolean }
>(({ className, asChild = false, ...props }, ref) => {
  const Comp = asChild ? Slot : "div";

  return (
    <Comp
      ref={ref}
      data-sidebar="group-label"
      className={cn(
        // Fix: "transition-[margin,opa]" was a typo — "opa" is not a CSS
        // property; the collapsed state below animates margin and opacity.
        "duration-200 flex h-8 shrink-0 items-center rounded-md px-2 text-xs font-medium text-sidebar-foreground/70 outline-none ring-sidebar-ring transition-[margin,opacity] ease-linear focus-visible:ring-2 [&>svg]:size-4 [&>svg]:shrink-0",
        "group-data-[collapsible=icon]:-mt-8 group-data-[collapsible=icon]:opacity-0",
        className
      )}
      {...props}
    />
  );
});
SidebarGroupLabel.displayName = "SidebarGroupLabel";

// Small action button pinned to a group's top-right corner; hidden in
// icon-collapsed mode.
const SidebarGroupAction = React.forwardRef<
  HTMLButtonElement,
  React.ComponentProps<"button"> & { asChild?: boolean }
>(({ className, asChild = false, ...rest }, ref) => {
  // Render through Radix Slot when the caller supplies its own element.
  const Component = asChild ? Slot : "button";

  return (
    <Component
      data-sidebar="group-action"
      ref={ref}
      className={cn(
        "absolute right-3 top-3.5 flex aspect-square w-5 items-center justify-center rounded-md p-0 text-sidebar-foreground outline-none ring-sidebar-ring transition-transform hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 [&>svg]:size-4 [&>svg]:shrink-0",
        // Increases the hit area of the button on mobile.
        "after:absolute after:-inset-2 after:md:hidden",
        "group-data-[collapsible=icon]:hidden",
        className
      )}
      {...rest}
    />
  );
});
SidebarGroupAction.displayName = "SidebarGroupAction";

const SidebarGroupContent = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div">
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    data-sidebar="group-content"
    className={cn("w-full text-sm", className)}
    {...props}
  />
));
SidebarGroupContent.displayName = "SidebarGroupContent";

const SidebarMenu = React.forwardRef<
  HTMLUListElement,
  React.ComponentProps<"ul">
>(({ className, ...props }, ref) => (
  <ul
    ref={ref}
    data-sidebar="menu"
    className={cn("flex w-full min-w-0 flex-col gap-1", className)}
    {...props}
  />
));
SidebarMenu.displayName = "SidebarMenu";

const SidebarMenuItem = React.forwardRef<
  HTMLLIElement,
  React.ComponentProps<"li">
>(({ className, ...props }, ref) => (
  <li
    ref={ref}
    data-sidebar="menu-item"
    className={cn("group/menu-item relative", className)}
    {...props}
  />
));
SidebarMenuItem.displayName = "SidebarMenuItem";

// Base + variant classes for SidebarMenuButton. The "peer/menu-button" name
// lets sibling components (action, badge) react to this button's data-* state.
const sidebarMenuButtonVariants = cva(
  "peer/menu-button flex w-full items-center gap-2 overflow-hidden rounded-md p-2 text-left text-sm outline-none ring-sidebar-ring transition-[width,height,padding] hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 active:bg-sidebar-accent active:text-sidebar-accent-foreground disabled:pointer-events-none disabled:opacity-50 group-has-[[data-sidebar=menu-action]]/menu-item:pr-8 aria-disabled:pointer-events-none aria-disabled:opacity-50 data-[active=true]:bg-sidebar-accent data-[active=true]:font-medium data-[active=true]:text-sidebar-accent-foreground data-[state=open]:hover:bg-sidebar-accent data-[state=open]:hover:text-sidebar-accent-foreground group-data-[collapsible=icon]:!size-8 group-data-[collapsible=icon]:!p-2 [&>span:last-child]:truncate [&>svg]:size-4 [&>svg]:shrink-0",
  {
    variants: {
      variant: {
        default: "hover:bg-sidebar-accent hover:text-sidebar-accent-foreground",
        outline:
          "bg-background shadow-[0_0_0_1px_hsl(var(--sidebar-border))] hover:bg-sidebar-accent hover:text-sidebar-accent-foreground hover:shadow-[0_0_0_1px_hsl(var(--sidebar-accent))]",
      },
      size: {
        default: "h-8 text-sm",
        sm: "h-7 text-xs",
        lg: "h-12 text-sm group-data-[collapsible=icon]:!p-0",
      },
    },
    defaultVariants: {
      variant: "default",
      size: "default",
    },
  }
);

/**
 * Primary clickable row inside a SidebarMenuItem. Optionally shows a tooltip
 * (only when the sidebar is collapsed on desktop). The tooltip prop accepts a
 * plain string as shorthand for TooltipContent props.
 */
const SidebarMenuButton = React.forwardRef<
  HTMLButtonElement,
  React.ComponentProps<"button"> & {
    asChild?: boolean;
    isActive?: boolean;
    tooltip?: string | React.ComponentProps<typeof TooltipContent>;
  } & VariantProps<typeof sidebarMenuButtonVariants>
>(
  (
    {
      asChild = false,
      isActive = false,
      variant = "default",
      size = "default",
      tooltip,
      className,
      ...props
    },
    ref
  ) => {
    const Comp = asChild ? Slot : "button";
    const { isMobile, state } = useSidebar();

    const button = (
      <Comp
        ref={ref}
        data-sidebar="menu-button"
        data-size={size}
        data-active={isActive}
        className={cn(sidebarMenuButtonVariants({ variant, size }), className)}
        {...props}
      />
    );

    if (!tooltip) {
      return button;
    }

    // Normalize the string shorthand into TooltipContent props instead of
    // reassigning the `tooltip` parameter (avoids parameter mutation).
    const tooltipProps =
      typeof tooltip === "string" ? { children: tooltip } : tooltip;

    // The tooltip is hidden unless the sidebar is collapsed on desktop.
    return (
      <Tooltip>
        <TooltipTrigger asChild>{button}</TooltipTrigger>
        <TooltipContent
          side="right"
          align="center"
          hidden={state !== "collapsed" || isMobile}
          {...tooltipProps}
        />
      </Tooltip>
    );
  }
);
SidebarMenuButton.displayName = "SidebarMenuButton";

/**
 * Secondary action button for a menu item (e.g. a "more" menu), positioned in
 * the row's top-right. With showOnHover it stays invisible on desktop until
 * the item is hovered, focused, or its menu is open.
 */
const SidebarMenuAction = React.forwardRef<
  HTMLButtonElement,
  React.ComponentProps<"button"> & {
    asChild?: boolean;
    showOnHover?: boolean;
  }
>(({ className, asChild = false, showOnHover = false, ...props }, ref) => {
  // Render through Radix Slot when the caller supplies its own element.
  const Comp = asChild ? Slot : "button";

  return (
    <Comp
      ref={ref}
      data-sidebar="menu-action"
      className={cn(
        "absolute right-1 top-1.5 flex aspect-square w-5 items-center justify-center rounded-md p-0 text-sidebar-foreground outline-none ring-sidebar-ring transition-transform hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 peer-hover/menu-button:text-sidebar-accent-foreground [&>svg]:size-4 [&>svg]:shrink-0",
        // Increases the hit area of the button on mobile.
        "after:absolute after:-inset-2 after:md:hidden",
        "peer-data-[size=sm]/menu-button:top-1",
        "peer-data-[size=default]/menu-button:top-1.5",
        "peer-data-[size=lg]/menu-button:top-2.5",
        "group-data-[collapsible=icon]:hidden",
        showOnHover &&
          "group-focus-within/menu-item:opacity-100 group-hover/menu-item:opacity-100 data-[state=open]:opacity-100 peer-data-[active=true]/menu-button:text-sidebar-accent-foreground md:opacity-0",
        className
      )}
      {...props}
    />
  );
});
SidebarMenuAction.displayName = "SidebarMenuAction";

/**
 * Non-interactive badge (e.g. a count) pinned to the right edge of a menu
 * item; its vertical position tracks the sibling button's size, and it is
 * hidden in icon-collapsed mode.
 */
const SidebarMenuBadge = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div">
>(({ className, ...props }, ref) => (
  <div
    ref={ref}
    data-sidebar="menu-badge"
    className={cn(
      "absolute right-1 flex h-5 min-w-5 items-center justify-center rounded-md px-1 text-xs font-medium tabular-nums text-sidebar-foreground select-none pointer-events-none",
      "peer-hover/menu-button:text-sidebar-accent-foreground peer-data-[active=true]/menu-button:text-sidebar-accent-foreground",
      "peer-data-[size=sm]/menu-button:top-1",
      "peer-data-[size=default]/menu-button:top-1.5",
      "peer-data-[size=lg]/menu-button:top-2.5",
      "group-data-[collapsible=icon]:hidden",
      className
    )}
    {...props}
  />
));
SidebarMenuBadge.displayName = "SidebarMenuBadge";

// Loading placeholder row for a menu item, with an optional leading icon
// skeleton.
const SidebarMenuSkeleton = React.forwardRef<
  HTMLDivElement,
  React.ComponentProps<"div"> & {
    showIcon?: boolean;
  }
>(({ className, showIcon = false, ...rest }, ref) => {
  // Random width between 50 to 90%, fixed for the component's lifetime.
  const width = React.useMemo(
    () => `${Math.floor(Math.random() * 40) + 50}%`,
    []
  );

  return (
    <div
      ref={ref}
      data-sidebar="menu-skeleton"
      className={cn("rounded-md h-8 flex gap-2 px-2 items-center", className)}
      {...rest}
    >
      {showIcon ? (
        <Skeleton
          className="size-4 rounded-md"
          data-sidebar="menu-skeleton-icon"
        />
      ) : null}
      <Skeleton
        className="h-4 flex-1 max-w-[--skeleton-width]"
        data-sidebar="menu-skeleton-text"
        style={{ "--skeleton-width": width } as React.CSSProperties}
      />
    </div>
  );
});
SidebarMenuSkeleton.displayName = "SidebarMenuSkeleton";

/**
 * Indented list for nested menu entries, rendered with a left border as a
 * visual thread; hidden entirely in icon-collapsed mode.
 */
const SidebarMenuSub = React.forwardRef<
  HTMLUListElement,
  React.ComponentProps<"ul">
>(({ className, ...props }, ref) => (
  <ul
    ref={ref}
    data-sidebar="menu-sub"
    className={cn(
      "mx-3.5 flex min-w-0 translate-x-px flex-col gap-1 border-l border-sidebar-border px-2.5 py-0.5",
      "group-data-[collapsible=icon]:hidden",
      className
    )}
    {...props}
  />
));
SidebarMenuSub.displayName = "SidebarMenuSub";

// Bare list-item wrapper for entries inside SidebarMenuSub.
const SidebarMenuSubItem = React.forwardRef<
  HTMLLIElement,
  React.ComponentProps<"li">
>((props, ref) => <li ref={ref} {...props} />);
SidebarMenuSubItem.displayName = "SidebarMenuSubItem";

/**
 * Link-style button for entries inside SidebarMenuSub. Supports sm/md sizes
 * and an active state; hidden in icon-collapsed mode. Renders through Radix
 * Slot when asChild is set.
 */
const SidebarMenuSubButton = React.forwardRef<
  HTMLAnchorElement,
  React.ComponentProps<"a"> & {
    asChild?: boolean;
    size?: "sm" | "md";
    isActive?: boolean;
  }
>(({ asChild = false, size = "md", isActive, className, ...props }, ref) => {
  const Comp = asChild ? Slot : "a";

  return (
    <Comp
      ref={ref}
      data-sidebar="menu-sub-button"
      data-size={size}
      data-active={isActive}
      className={cn(
        "flex h-7 min-w-0 -translate-x-px items-center gap-2 overflow-hidden rounded-md px-2 text-sidebar-foreground outline-none ring-sidebar-ring hover:bg-sidebar-accent hover:text-sidebar-accent-foreground focus-visible:ring-2 active:bg-sidebar-accent active:text-sidebar-accent-foreground disabled:pointer-events-none disabled:opacity-50 aria-disabled:pointer-events-none aria-disabled:opacity-50 [&>span:last-child]:truncate [&>svg]:size-4 [&>svg]:shrink-0 [&>svg]:text-sidebar-accent-foreground",
        "data-[active=true]:bg-sidebar-accent data-[active=true]:text-sidebar-accent-foreground",
        size === "sm" && "text-xs",
        size === "md" && "text-sm",
        "group-data-[collapsible=icon]:hidden",
        className
      )}
      {...props}
    />
  );
});
SidebarMenuSubButton.displayName = "SidebarMenuSubButton";

// Public sidebar API: provider, structural containers, menu primitives, and
// the useSidebar hook for programmatic control.
export {
  Sidebar,
  SidebarContent,
  SidebarFooter,
  SidebarGroup,
  SidebarGroupAction,
  SidebarGroupContent,
  SidebarGroupLabel,
  SidebarHeader,
  SidebarInput,
  SidebarInset,
  SidebarMenu,
  SidebarMenuAction,
  SidebarMenuBadge,
  SidebarMenuButton,
  SidebarMenuItem,
  SidebarMenuSkeleton,
  SidebarMenuSub,
  SidebarMenuSubButton,
  SidebarMenuSubItem,
  SidebarProvider,
  SidebarRail,
  SidebarSeparator,
  SidebarTrigger,
  useSidebar,
};

9. Review and Test

  • Thoroughly review the changes, especially in the layout components and the BrowserAudioRecorder.
  • Run Storybook (bun run storybook) and test the different states and interactions (PTT, streaming mute/unmute, mode toggle, end call, restart call).
  • Check the console for logs from the dispatcher and recorder.
  • Verify that the new styling is applied correctly across all components.
  • Test in different browsers (Chrome, Firefox, Safari) if possible, focusing on microphone access and recording functionality.

This completes the integration of the voice interaction architecture and the application of the new visual design system based on your specifications. Remember that the audio playback in BrowserAudioRecorder is a basic implementation; more robust handling (error correction, buffering strategies) might be needed for production.

Recent Posts