import React, { useState, useEffect, useRef } from 'react';
// Firebase imports would be needed here if chat state required persistence; for this chat simulation we keep all state local.
// --- Helper Functions for API Call (Required for Gemini Integration) ---
// Exponential backoff retry mechanism for API calls
const MAX_RETRIES = 5;
const INITIAL_DELAY_MS = 1000;
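// Delay before retry n is INITIAL_DELAY_MS * 2^n plus up to 1s of random jitter
// (roughly 1s, 2s, 4s, 8s, ... across attempts).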
/**
 * Custom fetch with exponential backoff for API robustness.
 * @param {string} url - API URL.
 * @param {object} options - Fetch options (method, headers, body).
 * @returns {Promise<Response>} The first non-429 response; rejects after all retries fail.
 */
async function fetchWithRetry(url, options) {
  for (let attempt = 0; attempt < MAX_RETRIES; attempt++) {
    try {
      const response = await fetch(url, options);
      if (response.status !== 429) { // Not a rate limit error, proceed normally
        return response;
      }
      // Rate limit hit (429): fall through to the backoff delay and retry
    } catch (error) {
      // Network error or other fetch issue: fall through to the backoff delay and retry
    }
    if (attempt < MAX_RETRIES - 1) {
      // Exponential backoff with random jitter; skipped after the final attempt so the error surfaces immediately
      const delay = INITIAL_DELAY_MS * Math.pow(2, attempt) + (Math.random() * 1000);
      await new Promise(resolve => setTimeout(resolve, delay));
    }
  }
  throw new Error('API request failed after multiple retries.');
}
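// Note: fetchWithRetry only retries on HTTP 429 or network failures; other error statuses
// (4xx/5xx) are returned to the caller, which is why callers below still check response.ok.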
// --- TTS Utility Functions (Must be outside the component) ---
// Function to convert Base64 string to ArrayBuffer
const base64ToArrayBuffer = (base64) => {
const binaryString = atob(base64);
const len = binaryString.length;
const bytes = new Uint8Array(len);
for (let i = 0; i < len; i++) {
bytes[i] = binaryString.charCodeAt(i);
}
return bytes.buffer;
};
// Function to convert PCM audio data into a WAV Blob
const pcmToWav = (pcm16, sampleRate) => {
const numChannels = 1;
const bytesPerSample = 2; // 16-bit PCM (signed)
const buffer = new ArrayBuffer(44 + pcm16.length * bytesPerSample);
const view = new DataView(buffer);
let offset = 0;
const writeString = (str) => {
for (let i = 0; i < str.length; i++) {
view.setUint8(offset++, str.charCodeAt(i));
}
};
const writeUint32 = (val) => {
view.setUint32(offset, val, true);
offset += 4;
};
const writeUint16 = (val) => {
view.setUint16(offset, val, true);
offset += 2;
};
// RIFF chunk
writeString('RIFF');
writeUint32(36 + pcm16.length * bytesPerSample); // ChunkSize
writeString('WAVE');
// FMT sub-chunk
writeString('fmt ');
writeUint32(16); // Subchunk1Size (16 for PCM)
writeUint16(1); // AudioFormat (1 for PCM)
writeUint16(numChannels);
writeUint32(sampleRate);
writeUint32(sampleRate * numChannels * bytesPerSample); // ByteRate
writeUint16(numChannels * bytesPerSample); // BlockAlign
writeUint16(bytesPerSample * 8); // BitsPerSample
// DATA sub-chunk
writeString('data');
writeUint32(pcm16.length * bytesPerSample); // Subchunk2Size
// Write PCM data
for (let i = 0; i < pcm16.length; i++) {
view.setInt16(offset, pcm16[i], true); // Write 16-bit signed PCM
offset += 2;
}
return new Blob([buffer], { type: 'audio/wav' });
};
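// Note: the TTS endpoint returns raw signed 16-bit PCM (e.g. mimeType "audio/L16;rate=24000"),
// which the browser's <audio> element cannot play directly; pcmToWav prepends the standard
// 44-byte WAV header so the samples become a playable audio/wav blob.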
// Function to play the audio blob
const playAudioBlob = (blob) => {
const url = URL.createObjectURL(blob);
const audio = new Audio(url);
// Attempt to play, catching errors if playback is blocked by browser policy
audio.play().catch(e => console.error("Error playing audio (User interaction required):", e));
audio.onended = () => {
URL.revokeObjectURL(url); // Clean up the blob URL after playback
};
};
// --- Component Definition ---
// Consolidated Sprite list with a fallback for loading/errors
const emotionSprites = {
// Primary Emotions (Must be supported by model)
Neutral: { url: 'https://cdn.pixilart.com/photos/large/d207b2651ac72cd.png', color: 'text-gray-500', description: 'Neutral' },
Happy: { url: 'https://cdn.pixilart.com/photos/large/0216143ada438f7.png', color: 'text-yellow-500', description: 'Happy' },
Sad: { url: 'https://cdn.pixilart.com/photos/large/28bcbdfa247fcf5.png', color: 'text-blue-500', description: 'Sad' },
Angry: { url: 'https://cdn.pixilart.com/photos/large/e61998fe23f011d.png', color: 'text-red-500', description: 'Angry' },
// User's Expanded Emotion List
Disbelieve: { url: 'https://cdn.pixilart.com/photos/large/1da50edc371e2a4.png', color: 'text-purple-500', description: 'Disbelieve' },
Evil: { url: 'https://cdn.pixilart.com/photos/large/2cdf85deb349c6c.png', color: 'text-purple-500', description: 'Evil' },
Jelous: { url: 'https://cdn.pixilart.com/photos/large/e656c5b6b859104.png', color: 'text-purple-500', description: 'Jelous' },
Avoidant: { url: 'https://cdn.pixilart.com/photos/large/4eb4ef654f65b2f.png', color: 'text-purple-500', description: 'Avoidant' },
Panic: { url: 'https://cdn.pixilart.com/photos/large/318a082ff49b536.png', color: 'text-purple-500', description: 'Panic' },
Contemplating: { url: 'https://cdn.pixilart.com/photos/large/653573e311e71de.png', color: 'text-purple-500', description: 'Contemplating' },
Disapointed: { url: 'https://cdn.pixilart.com/photos/large/f341a297a2700f6.png', color: 'text-purple-500', description: 'Disapointed' },
Mad: { url: 'https://cdn.pixilart.com/photos/large/fcdeddfd8c7aab5.png', color: 'text-purple-500', description: 'Mad' },
Unphased: { url: 'https://cdn.pixilart.com/photos/large/40a0ff87791b57d.png', color: 'text-purple-500', description: 'Unphased' },
Disagree: { url: 'https://cdn.pixilart.com/photos/large/3acab5e952986fe.png', color: 'text-purple-500', description: 'Disagree' },
Flirty: { url: 'https://cdn.pixilart.com/photos/large/72c4d8406f408e9.png', color: 'text-purple-500', description: 'Flirty' },
Admire: { url: 'https://cdn.pixilart.com/photos/large/d99912dc05971fc.png', color: 'text-purple-500', description: 'Admire' },
Condescending: { url: 'https://cdn.pixilart.com/photos/large/672969a4d27e0e7.png', color: 'text-purple-500', description: 'Condescending' },
Satisfied: { url: 'https://cdn.pixilart.com/photos/large/9db6054b19c544b.png', color: 'text-purple-500', description: 'Satisfied' },
Laughing: { url: 'https://cdn.pixilart.com/photos/large/9221e0acfc54838.png', color: 'text-purple-500', description: 'Laughing' },
Unhappy: { url: 'https://cdn.pixilart.com/photos/large/9fc68433cbc9a9c.png', color: 'text-purple-500', description: 'Unhappy' },
Pouting: { url: 'https://cdn.pixilart.com/photos/large/8ba49d8b46ea048.png', color: 'text-purple-500', description: 'Pouting' },
Suprised: { url: 'https://cdn.pixilart.com/photos/large/b9d2950c54dc603.png', color: 'text-purple-500', description: 'Suprised' },
Shy: { url: 'https://cdn.pixilart.com/photos/large/5354407a42d2fc4.png', color: 'text-purple-500', description: 'Shy' },
Flustered: { url: 'https://cdn.pixilart.com/photos/large/8582bd373ea9a68.png', color: 'text-purple-500', description: 'Flustered' },
Very_Happy: { url: 'https://cdn.pixilart.com/photos/large/d3dbd4a776030ee.png', color: 'text-purple-500', description: 'Very_Happy' },
Bleh: { url: 'https://cdn.pixilart.com/photos/large/10416953b96969e.png', color: 'text-purple-500', description: 'Bleh' },
Exited: { url: 'https://cdn.pixilart.com/photos/large/f389a4f799fea08.png', color: 'text-purple-500', description: 'Exited' },
Enthusiastic: { url: 'https://cdn.pixilart.com/photos/large/e963335eb0cc1d1.png', color: 'text-purple-500', description: 'Enthusiastic' },
// Special State for Loading/Thinking (uses a placeholder image URL)
Loading: { url: 'https://placehold.co/500x500/AAD5F7/333333?text=Kael+Thinking', color: 'text-blue-500', description: 'Thinking...' },
};
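// Note: these keys double as the enum values in responseSchema below (via allEmotions) and are
// repeated verbatim in the system prompt, so any spelling change (e.g. 'Jelous', 'Suprised')
// must be made in both places for the model's emotion strings to keep matching a sprite.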
// Main App Component
const App = () => {
const allEmotions = Object.keys(emotionSprites).filter(key => key !== 'Loading');
const [chatHistory, setChatHistory] = useState([
// CHANGED: Role to 'kael' and name to 'Kael'
{ role: 'kael', text: "Hello there! I'm Kael. What do you want to talk about today?", emotion: 'Neutral' }
]);
const [userInput, setUserInput] = useState('');
const [isLoading, setIsLoading] = useState(false);
const [currentEmotion, setCurrentEmotion] = useState('Neutral'); // Tracks the current state of Kael
const chatEndRef = useRef(null);
// Scroll to bottom whenever chatHistory updates
useEffect(() => {
chatEndRef.current?.scrollIntoView({ behavior: 'smooth' });
}, [chatHistory]);
const handleSend = async (e) => {
e.preventDefault();
if (!userInput.trim() || isLoading) return;
const userMessage = userInput.trim();
// --- Step 1: Update history with the user message ---
setChatHistory(prev => [...prev, { role: 'user', text: userMessage }]);
setUserInput('');
setIsLoading(true);
setCurrentEmotion('Loading'); // Set a generic loading state
let newText = '';
let newEmotion = 'Neutral';
const apiKey = ""; // Canvas will automatically provide the API key
// --- Step 2: Generate Text and Emotion (Structured Response) ---
try {
const textApiUrl = `https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash-preview-09-2025:generateContent?key=${apiKey}`;
const chatPrompt = `The user says: "${userMessage}". As Kael, the visual novel companion, respond to this in a conversational style. Your response MUST be a JSON object containing the text and the primary emotion.`;
const responseSchema = {
type: "OBJECT",
properties: {
"text": { "type": "STRING", description: "The dialogue text Kael says." },
"emotion": {
"type": "STRING",
description: "Kael's emotion associated with the text.",
// IMPORTANT: Updated enum to include ALL custom sprites
enum: allEmotions
}
},
required: ["text", "emotion"]
};
// CHANGED: System prompt to use male name 'Kael' and male pronouns
const systemPrompt = "You are 'Kael', a cheerful but slightly mischievous male companion in a visual novel. Respond conversationally to the user's input. Your responses must ONLY be in the specified JSON format, containing the generated 'text' and the primary 'emotion' associated with that text. Use any of the available emotions (Neutral, Happy, Sad, Angry, Disbelieve, Evil, Jelous, Avoidant, Panic, Contemplating, Disapointed, Mad, Unphased, Disagree, Flirty, Admire, Condescending, Satisfied, Laughing, Unhappy, Pouting, Suprised, Shy, Flustered, Very_Happy, Bleh, Exited, Enthusiastic) based on his reaction to the user's input.";
const payload = {
contents: [{ parts: [{ text: chatPrompt }] }],
systemInstruction: { parts: [{ text: systemPrompt }] },
generationConfig: {
responseMimeType: "application/json",
responseSchema: responseSchema
}
};
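// responseMimeType "application/json" together with responseSchema constrains the model to emit
// a single JSON object matching the schema, which is what the JSON.parse call below relies on.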
const response = await fetchWithRetry(textApiUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(payload)
});
if (!response.ok) {
throw new Error(`HTTP error! status: ${response.status}`);
}
const result = await response.json();
const candidate = result.candidates?.[0];
if (candidate && candidate.content?.parts?.[0]?.text) {
const jsonString = candidate.content.parts[0].text;
const parsedJson = JSON.parse(jsonString);
newText = parsedJson.text || "I apologize, I seem to have lost my voice for a moment.";
// Ensure the emotion is one of the valid keys, default to Neutral otherwise
newEmotion = emotionSprites[parsedJson.emotion] ? parsedJson.emotion : 'Neutral';
// CHANGED: Role updated to 'kael'
setChatHistory(prev => [...prev, { role: 'kael', text: newText, emotion: newEmotion }]);
setCurrentEmotion(newEmotion);
} else {
throw new Error('Invalid response structure from AI.');
}
} catch (error) {
console.error("Gemini Text API Error:", error);
newText = `[Error: Failed to generate text response. (${error.message})]`;
setChatHistory(prev => [...prev, { role: 'system', text: newText, emotion: 'Neutral' }]);
setCurrentEmotion('Neutral');
}
// --- Step 3: Generate and Play TTS Audio (Only if text was successfully generated) ---
if (newText && !newText.startsWith('[Error')) {
try {
const ttsApiUrl = `https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash-preview-tts:generateContent?key=${apiKey}`;
// TTS API payload, using the prebuilt 'Puck' voice (cheerful and versatile)
const ttsPayload = {
contents: [{ parts: [{ text: newText }] }],
generationConfig: {
responseModalities: ["AUDIO"],
speechConfig: {
voiceConfig: {
prebuiltVoiceConfig: { voiceName: "Puck" }
}
}
},
model: "gemini-2.5-flash-preview-tts"
};
const ttsResponse = await fetchWithRetry(ttsApiUrl, {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(ttsPayload)
});
if (!ttsResponse.ok) {
throw new Error(`TTS HTTP error! status: ${ttsResponse.status}`);
}
const ttsResult = await ttsResponse.json();
const part = ttsResult?.candidates?.[0]?.content?.parts?.[0];
const audioData = part?.inlineData?.data;
const mimeType = part?.inlineData?.mimeType;
if (audioData && mimeType && mimeType.startsWith("audio/")) {
// Extract sample rate from mimeType (e.g., audio/L16;rate=24000)
const rateMatch = mimeType.match(/rate=(\d+)/);
const sampleRate = rateMatch ? parseInt(rateMatch[1], 10) : 24000;
const pcmData = base64ToArrayBuffer(audioData);
const pcm16 = new Int16Array(pcmData);
const wavBlob = pcmToWav(pcm16, sampleRate);
playAudioBlob(wavBlob);
} else {
console.error("TTS: Missing or invalid audio data in response.");
}
} catch (ttsError) {
console.error("TTS API Error:", ttsError);
}
}
// --- Step 4: Final Cleanup ---
setIsLoading(false);
};
const currentSprite = emotionSprites[currentEmotion] || emotionSprites.Neutral;
// While loading, show the dedicated 'Loading' placeholder sprite with its text/color
const loadingSprite = emotionSprites['Loading'];
const displaySprite = currentEmotion === 'Loading' ? loadingSprite : currentSprite;
  return (
    // Updated main background and text color for pastel theme
    <div className="flex flex-col md:flex-row h-screen bg-blue-50 text-gray-700 font-sans">
      {/* --- Visual Novel Pane (Sprite and Dialogue) --- */}
      <div className="flex flex-col items-center justify-end md:w-1/2 p-4">
        {/* Character Sprite Area (Image/Avatar) */}
        {/* Replaced emoji span with img tag */}
        <img
          src={displaySprite.url}
          alt={displaySprite.description}
          className="w-64 h-64 object-contain"
          onError={(e) => {
            e.target.onerror = null; // Prevent infinite loop
            e.target.src = emotionSprites['Neutral'].url;
          }}
        />
        {/* Dialogue Box (Shows the latest AI text) */}
        <div className="w-full bg-white/80 rounded-xl shadow p-4 mt-4">
          {/* CHANGED: Display name to Kael */}
          <p className={`font-bold ${displaySprite.color}`}>Kael ({displaySprite.description}):</p>
          {/* CHANGED: Dialogue lookup role to 'kael' */}
          <p>{chatHistory.length > 0 && chatHistory.slice().reverse().find(msg => msg.role === 'kael' || msg.role === 'system')?.text || '...'}</p>
          {isLoading && (
            <p className="text-sm italic text-blue-400">Kael is thinking and speaking...</p>
          )}
        </div>
      </div>
      {/* --- Chat History and Input Pane --- */}
      <div className="flex flex-col md:w-1/2 p-4">
        {/* Updated accent color for title and border */}
        <h2 className="text-xl font-bold text-purple-500 border-b-2 border-purple-200 pb-2 mb-2">Chat Log</h2>
        {/* History */}
        <div className="flex-1 overflow-y-auto custom-scrollbar space-y-2">
          {chatHistory.map((msg, index) => (
            <div key={index} className="text-sm">
              {/* CHANGED: Name displayed in chat log */}
              <span className="font-semibold">{msg.role === 'user' ? 'You' : 'Kael'}</span>
              {msg.emotion && msg.role === 'kael' && (
                <span className="text-xs text-gray-400"> ({msg.emotion})</span>
              )}
              <p>{msg.text}</p>
            </div>
          ))}
          <div ref={chatEndRef} />
        </div>
        {/* Input Form */}
        <form onSubmit={handleSend} className="flex mt-2">
          <input
            type="text"
            value={userInput}
            onChange={(e) => setUserInput(e.target.value)}
            placeholder="Say something to Kael..."
            disabled={isLoading}
            className="flex-1 rounded-l-lg border border-purple-200 bg-white p-2 focus:outline-none"
          />
          <button type="submit" disabled={isLoading || !userInput.trim()} className="rounded-r-lg bg-purple-400 px-4 text-white disabled:opacity-50">
            Send
          </button>
        </form>
      </div>
      {/* Updated Tailwind CSS Custom Scrollbar for pastel theme */}
      <style>{`
        .custom-scrollbar::-webkit-scrollbar { width: 8px; }
        .custom-scrollbar::-webkit-scrollbar-thumb { background-color: #c4b5fd; border-radius: 4px; }
      `}</style>
    </div>
  );
};
export default App;