'use client';

// Client-only configurator step: renders the live voice-agent UI (status
// header, amplitude-driven orb values, live transcript, captured-selection
// chips, and call controls) on top of the call state exposed by
// VoiceAgentProvider.
//
// NOTE(review): this file appears to have been mangled by an extraction or
// paste step — original newlines were collapsed and the JSX element tags in
// the render body have been stripped (only expression children such as
// {entry.text} survive, leaving empty `( )` groups). The markup must be
// restored from version control; comments below document only the logic
// that is still visible.
import { useEffect, useRef } from 'react';
import { useTranslations } from 'next-intl';
import { motion, AnimatePresence, useMotionValue, useTransform } from 'framer-motion';
import { Mic, MicOff, PhoneOff, Loader2 } from 'lucide-react';
import { cn } from '@/lib/utils';
import Chip from '@/components/ui/Chip';
import { useVoiceAgent, type TranscriptEntry } from './VoiceAgentProvider';
import type { WizardFormData } from './WizardContainer';

// ─── Types ───────────────────────────────────────────────────────────────────

interface VoiceAgentProps {
  // NOTE(review): `locale` is destructured by VoiceAgent but not referenced
  // in the visible body — confirm whether it is used in the lost JSX or can
  // be dropped.
  locale: string;
  // Invoked once the agent has produced a finished brief; receives the
  // natural-language brief plus the structured wizard form data.
  onComplete: (brief: string, formData: WizardFormData) => void;
}

// ─── Transcript Bubble ───────────────────────────────────────────────────────

// Renders one transcript entry. The wrapping element/styling was lost in
// extraction; only the text child remains.
function TranscriptBubble({ entry }: { entry: TranscriptEntry }) { return (
{entry.text}
); }

// ─── Main Component ──────────────────────────────────────────────────────────

export default function VoiceAgent({ locale, onComplete }: VoiceAgentProps) {
  const t = useTranslations('configurator');

  // All live-call state and actions come from the shared provider context.
  const { status, errorMessage, isMicActive, toggleMic, transcript, selections, isAnalyzingSite, agentAmplitude, startConversation, endConversation, completedBrief, completedFormData, } = useVoiceAgent();

  // Sentinel placed at the end of the transcript list, used for scrolling.
  // NOTE(review): presumably originally `useRef<HTMLElement>(null)` — the
  // generic may have been stripped along with the JSX; as written,
  // `el?.parentElement` below would not type-check against a plain `null` ref.
  const transcriptEndRef = useRef(null);

  // Auto-scroll transcript within its container only (scrolls the sentinel's
  // parent rather than the page, so the rest of the layout stays put).
  useEffect(() => { const el = transcriptEndRef.current; if (el?.parentElement) { el.parentElement.scrollTop = el.parentElement.scrollHeight; } }, [transcript]);

  // Handle completion: once the provider reports both a completed brief and
  // the matching form data, wait 1.5s (exit transition window) before handing
  // off to the parent via onComplete. Cleanup cancels the timer if the
  // component unmounts or dependencies change before it fires.
  useEffect(() => { if (completedBrief && completedFormData) { console.log('[VoiceAgent] Brief complete, transitioning in 1.5s...'); const timer = setTimeout(() => { console.log('[VoiceAgent] Calling onComplete'); onComplete(completedBrief, completedFormData); }, 1500); return () => clearTimeout(timer); } }, [completedBrief, completedFormData, onComplete]);

  // Orb animation driven by agent amplitude: mirror the plain-number
  // `agentAmplitude` into a framer-motion MotionValue so transforms update
  // without re-rendering.
  const amplitudeValue = useMotionValue(0);
  useEffect(() => { amplitudeValue.set(agentAmplitude); }, [agentAmplitude, amplitudeValue]);
  // Map amplitude 0..0.5 onto a 1..1.18 scale and a growing blue glow shadow.
  const orbScale = useTransform(amplitudeValue, [0, 0.5], [1, 1.18]);
  const orbGlow = useTransform( amplitudeValue, [0, 0.5], ['0px 0px 0px rgba(0,100,148,0)', '0px 0px 30px rgba(0,100,148,0.3)'], );

  // Build selection chips — use i18n for known keys, raw value otherwise.
  // Unknown values (e.g. free-form answers from the agent) fall through and
  // are displayed verbatim instead of producing a missing-translation error.
  const KNOWN_SERVICES = ['web', 'systems', 'infrastructure'];
  const KNOWN_AI_TYPES = ['teammate', 'customer-facing', 'data-intelligence', 'notsure'];
  const KNOWN_INDUSTRIES = ['maritime', 'hospitality', 'technology', 'realestate', 'finance', 'ngo', 'other'];
  const KNOWN_TIMELINES = ['asap', '1-3months', '3-6months', 'exploring'];
  const chipLabels: string[] = [];
  if (selections.services) { for (const svc of selections.services) { chipLabels.push(KNOWN_SERVICES.includes(svc) ? t(`services.${svc}.title`) : svc); } }
  // AI-type chips only appear when the user opted into AI features.
  if (selections.aiEnabled && selections.aiTypes) { for (const ai of selections.aiTypes) { chipLabels.push(KNOWN_AI_TYPES.includes(ai) ? t(`aiTypes.${ai}.title`) : ai); } }
  if (selections.industry) { const ind = selections.industry; chipLabels.push(KNOWN_INDUSTRIES.includes(ind) ? t(`industries.${ind}`) : ind); }
  if (selections.timeline) { const tl = selections.timeline; chipLabels.push(KNOWN_TIMELINES.includes(tl) ? t(`timelines.${tl}`) : tl); }

  // NOTE(review): everything below is the garbled render body — element tags
  // are missing, so the fragments are preserved byte-for-byte rather than
  // guessed at. Restore from VCS before editing.
  return (
{/* Agent card header */}
L
 
{t('voice.agentName')}
 
{status === 'active' ? 'Connected' : status === 'connecting' ? t('voice.connecting') : 'Ready'}
{/* Waveform orb */}
{status === 'idle' && ( )} {status === 'connecting' && ( )} {status === 'active' && ( )} {/* Analyzing site badge */} {isAnalyzingSite && ( {t('voice.analyzingSite')} )} {/* Error message */} {errorMessage && (
 
{errorMessage}
 
)}
{/* Live transcript */} {transcript.length > 0 && (
{transcript.map((entry, i) => ( ))}
)} {/* Selection chips */} {chipLabels.length > 0 && (
 
{t('voice.capturedSoFar')}
 
{chipLabels.map((label, i) => ( {label} ))}
)}
{/* Controls */}
{status === 'idle' && ( )} {status === 'active' && ( <> )} {status === 'connecting' && (
 
{t('voice.connecting')}
 
)}
); }