M96820
committed on
feat: Sarah makes decisions
Browse files- client/src/pages/game/App.jsx +207 -2
client/src/pages/game/App.jsx
CHANGED
|
@@ -20,6 +20,7 @@ import {
|
|
| 20 |
} from "../../layouts/utils";
|
| 21 |
import { LAYOUTS } from "../../layouts/config";
|
| 22 |
import html2canvas from "html2canvas";
|
|
|
|
| 23 |
|
| 24 |
// Get API URL from environment or default to localhost in development
|
| 25 |
const isHFSpace = window.location.hostname.includes("hf.space");
|
|
@@ -29,6 +30,10 @@ const API_URL = isHFSpace
|
|
| 29 |
|
| 30 |
// Generate a unique client ID
|
| 31 |
const CLIENT_ID = `client_${Math.random().toString(36).substring(2)}`;
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
|
| 33 |
// Create axios instance with default config
|
| 34 |
const api = axios.create({
|
|
@@ -74,16 +79,195 @@ function App() {
|
|
| 74 |
const [currentChoices, setCurrentChoices] = useState([]);
|
| 75 |
const [isLoading, setIsLoading] = useState(false);
|
| 76 |
const [isDebugMode, setIsDebugMode] = useState(false);
|
| 77 |
-
const
|
| 78 |
-
const
|
|
|
|
| 79 |
const audioRef = useRef(new Audio());
|
| 80 |
const comicContainerRef = useRef(null);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 81 |
|
| 82 |
// Start the story on first render
|
| 83 |
useEffect(() => {
|
| 84 |
handleStoryAction("restart");
|
| 85 |
}, []); // Empty dependency array for first render only
|
| 86 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 87 |
const generateImagesForStory = async (
|
| 88 |
imagePrompts,
|
| 89 |
segmentIndex,
|
|
@@ -485,8 +669,29 @@ function App() {
|
|
| 485 |
top: 16,
|
| 486 |
right: 16,
|
| 487 |
zIndex: 1000,
|
|
|
|
|
|
|
| 488 |
}}
|
| 489 |
>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 490 |
<Tooltip title="Sauvegarder en PNG">
|
| 491 |
<IconButton
|
| 492 |
onClick={handleSaveAsImage}
|
|
|
|
| 20 |
} from "../../layouts/utils";
|
| 21 |
import { LAYOUTS } from "../../layouts/config";
|
| 22 |
import html2canvas from "html2canvas";
|
| 23 |
+
import { useConversation } from "@11labs/react";
|
| 24 |
|
| 25 |
// Get API URL from environment or default to localhost in development
|
| 26 |
const isHFSpace = window.location.hostname.includes("hf.space");
|
|
|
|
| 30 |
|
| 31 |
// Generate a unique client ID
|
| 32 |
const CLIENT_ID = `client_${Math.random().toString(36).substring(2)}`;
|
| 33 |
+
// Constants
|
| 34 |
+
const AGENT_ID = "2MF9st3s1mNFbX01Y106";
|
| 35 |
+
|
| 36 |
+
const WS_URL = import.meta.env.VITE_WS_URL || "ws://localhost:8000/ws";
|
| 37 |
|
| 38 |
// Create axios instance with default config
|
| 39 |
const api = axios.create({
|
|
|
|
| 79 |
const [currentChoices, setCurrentChoices] = useState([]);
|
| 80 |
const [isLoading, setIsLoading] = useState(false);
|
| 81 |
const [isDebugMode, setIsDebugMode] = useState(false);
|
| 82 |
+
const [isRecording, setIsRecording] = useState(false);
|
| 83 |
+
const [wsConnected, setWsConnected] = useState(false);
|
| 84 |
+
|
| 85 |
const audioRef = useRef(new Audio());
|
| 86 |
const comicContainerRef = useRef(null);
|
| 87 |
+
const narrationAudioRef = useRef(new Audio()); // Separate audio ref for narration
|
| 88 |
+
const wsRef = useRef(null);
|
| 89 |
+
const mediaRecorderRef = useRef(null);
|
| 90 |
+
const audioChunksRef = useRef([]);
|
| 91 |
+
|
| 92 |
|
| 93 |
// Start the story on first render
|
| 94 |
useEffect(() => {
|
| 95 |
handleStoryAction("restart");
|
| 96 |
}, []); // Empty dependency array for first render only
|
| 97 |
|
| 98 |
+
// Only setup WebSocket connection with server
|
| 99 |
+
useEffect(() => {
|
| 100 |
+
const setupWebSocket = () => {
|
| 101 |
+
wsRef.current = new WebSocket(WS_URL);
|
| 102 |
+
|
| 103 |
+
wsRef.current.onopen = () => {
|
| 104 |
+
console.log('Server WebSocket connected');
|
| 105 |
+
setWsConnected(true);
|
| 106 |
+
};
|
| 107 |
+
|
| 108 |
+
wsRef.current.onclose = (event) => {
|
| 109 |
+
const reason = event.reason || 'No reason provided';
|
| 110 |
+
const code = event.code;
|
| 111 |
+
console.log(`Server WebSocket disconnected - Code: ${code}, Reason: ${reason}`);
|
| 112 |
+
console.log('Attempting to reconnect in 3 seconds...');
|
| 113 |
+
setWsConnected(false);
|
| 114 |
+
// Attempt to reconnect after 3 seconds
|
| 115 |
+
setTimeout(setupWebSocket, 3000);
|
| 116 |
+
};
|
| 117 |
+
|
| 118 |
+
wsRef.current.onmessage = async (event) => {
|
| 119 |
+
const data = JSON.parse(event.data);
|
| 120 |
+
|
| 121 |
+
if (data.type === 'audio') {
|
| 122 |
+
// Stop any ongoing narration
|
| 123 |
+
if (narrationAudioRef.current) {
|
| 124 |
+
narrationAudioRef.current.pause();
|
| 125 |
+
narrationAudioRef.current.currentTime = 0;
|
| 126 |
+
}
|
| 127 |
+
|
| 128 |
+
// Play the conversation audio response
|
| 129 |
+
const audioBlob = await fetch(`data:audio/mpeg;base64,${data.audio}`).then(r => r.blob());
|
| 130 |
+
const audioUrl = URL.createObjectURL(audioBlob);
|
| 131 |
+
audioRef.current.src = audioUrl;
|
| 132 |
+
await audioRef.current.play();
|
| 133 |
+
}
|
| 134 |
+
};
|
| 135 |
+
};
|
| 136 |
+
|
| 137 |
+
setupWebSocket();
|
| 138 |
+
|
| 139 |
+
return () => {
|
| 140 |
+
if (wsRef.current) {
|
| 141 |
+
wsRef.current.close();
|
| 142 |
+
}
|
| 143 |
+
};
|
| 144 |
+
}, []);
|
| 145 |
+
|
| 146 |
+
const conversation = useConversation({
|
| 147 |
+
agentId: AGENT_ID,
|
| 148 |
+
onResponse: async (response) => {
|
| 149 |
+
if (response.type === 'audio') {
|
| 150 |
+
// Play the conversation audio response
|
| 151 |
+
const audioBlob = new Blob([response.audio], { type: 'audio/mpeg' });
|
| 152 |
+
const audioUrl = URL.createObjectURL(audioBlob);
|
| 153 |
+
audioRef.current.src = audioUrl;
|
| 154 |
+
await audioRef.current.play();
|
| 155 |
+
}
|
| 156 |
+
},
|
| 157 |
+
clientTools: {
|
| 158 |
+
make_decision: async ({ decision }) => {
|
| 159 |
+
console.log('AI made decision:', decision);
|
| 160 |
+
// End the ElevenLabs conversation
|
| 161 |
+
await conversation.endSession();
|
| 162 |
+
setIsConversationMode(false);
|
| 163 |
+
// Handle the choice and generate next story part
|
| 164 |
+
await handleChoice(parseInt(decision));
|
| 165 |
+
}
|
| 166 |
+
}
|
| 167 |
+
});
|
| 168 |
+
const { isSpeaking } = conversation;
|
| 169 |
+
const [isConversationMode, setIsConversationMode] = useState(false);
|
| 170 |
+
|
| 171 |
+
  // Audio recording setup.
  // Starts microphone capture and routes the finished clip either to the
  // ElevenLabs conversation (conversation mode) or to our backend WebSocket
  // (narration mode).  First invocation flips the app into conversation mode.
  const startRecording = async () => {
    try {
      // Stop narration audio if it's playing
      if (narrationAudioRef.current) {
        narrationAudioRef.current.pause();
        narrationAudioRef.current.currentTime = 0;
      }
      // Also stop any conversation audio if playing
      if (audioRef.current) {
        audioRef.current.pause();
        audioRef.current.currentTime = 0;
      }

      if (!isConversationMode) {
        // If we're not in conversation mode, this is the first recording
        setIsConversationMode(true);
        // Initialize ElevenLabs WebSocket connection
        try {
          // Pass available choices to the conversation
          // NOTE(review): the prompt says "makeDecision" but the registered
          // client tool is named "make_decision" — confirm the agent is
          // configured with the snake_case name.
          const currentChoiceIds = currentChoices.map(choice => choice.id).join(',');
          await conversation.startSession({
            agentId: AGENT_ID,
            initialContext: `Available choices: ${currentChoiceIds}. Use the makeDecision tool with one of these IDs to make a choice.`
          });
          console.log('ElevenLabs WebSocket connected');
        } catch (error) {
          // Bail out entirely: without a session there is nowhere to send audio.
          console.error('Error initializing ElevenLabs conversation:', error);
          return;
        }
      } else if (isSpeaking) {
        // Only handle stopping the agent if we're in conversation mode.
        // Interrupt the agent by tearing the session down and reopening it
        // directly via the raw conversational-AI WebSocket URL.
        await conversation.endSession();
        const wsUrl = `wss://api.elevenlabs.io/v1/convai/conversation?agent_id=${AGENT_ID}`;
        await conversation.startSession({ url: wsUrl });
      }

      // Only stop narration if it's actually playing
      // NOTE(review): narration was already paused at the top of this
      // function, so this second pause appears redundant — confirm intent.
      if (!isConversationMode && narrationAudioRef.current) {
        narrationAudioRef.current.pause();
        narrationAudioRef.current.currentTime = 0;
      }

      // Ask the browser for microphone access and start buffering chunks.
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      mediaRecorderRef.current = new MediaRecorder(stream);
      audioChunksRef.current = [];

      mediaRecorderRef.current.ondataavailable = (event) => {
        if (event.data.size > 0) {
          audioChunksRef.current.push(event.data);
        }
      };

      // Fires when stopRecording() calls recorder.stop(): assemble the clip
      // and ship it as base64.
      // NOTE(review): `isConversationMode` read below is captured from this
      // render's closure — the setIsConversationMode(true) above won't be
      // visible here until a re-render; verify the first clip is routed as
      // intended.
      mediaRecorderRef.current.onstop = async () => {
        const audioBlob = new Blob(audioChunksRef.current, { type: 'audio/wav' });
        const reader = new FileReader();

        reader.onload = async () => {
          // Drop the "data:audio/wav;base64," prefix from the data URL.
          const base64Audio = reader.result.split(',')[1];
          if (isConversationMode) {
            try {
              // Send audio to ElevenLabs conversation
              await conversation.send({
                type: 'audio',
                data: base64Audio
              });
            } catch (error) {
              console.error('Error sending audio to ElevenLabs:', error);
            }
          } else {
            // Otherwise use the original WebSocket connection
            if (wsRef.current && wsRef.current.readyState === WebSocket.OPEN) {
              console.log('Sending audio to server via WebSocket');
              wsRef.current.send(JSON.stringify({
                type: 'audio_input',
                audio: base64Audio,
                client_id: CLIENT_ID
              }));
            }
          }
        };

        reader.readAsDataURL(audioBlob);
      };

      mediaRecorderRef.current.start();
      setIsRecording(true);
    } catch (error) {
      // Covers getUserMedia permission denial and recorder setup failures.
      console.error('Error starting recording:', error);
    }
  };
|
| 262 |
+
|
| 263 |
+
const stopRecording = () => {
|
| 264 |
+
if (mediaRecorderRef.current && isRecording) {
|
| 265 |
+
mediaRecorderRef.current.stop();
|
| 266 |
+
setIsRecording(false);
|
| 267 |
+
mediaRecorderRef.current.stream.getTracks().forEach(track => track.stop());
|
| 268 |
+
}
|
| 269 |
+
};
|
| 270 |
+
|
| 271 |
const generateImagesForStory = async (
|
| 272 |
imagePrompts,
|
| 273 |
segmentIndex,
|
|
|
|
| 669 |
top: 16,
|
| 670 |
right: 16,
|
| 671 |
zIndex: 1000,
|
| 672 |
+
display: "flex",
|
| 673 |
+
gap: 1,
|
| 674 |
}}
|
| 675 |
>
|
| 676 |
+
<Tooltip title={isRecording ? "Stop Recording" : "Start Recording"}>
|
| 677 |
+
<IconButton
|
| 678 |
+
onClick={isRecording ? stopRecording : startRecording}
|
| 679 |
+
sx={{
|
| 680 |
+
border: "1px solid",
|
| 681 |
+
borderColor: isRecording ? "error.main" : "primary.main",
|
| 682 |
+
borderRadius: "8px",
|
| 683 |
+
backgroundColor: isRecording ? "error.main" : "transparent",
|
| 684 |
+
color: isRecording ? "white" : "primary.main",
|
| 685 |
+
padding: "8px",
|
| 686 |
+
"&:hover": {
|
| 687 |
+
backgroundColor: isRecording ? "error.dark" : "primary.main",
|
| 688 |
+
color: "background.paper",
|
| 689 |
+
},
|
| 690 |
+
}}
|
| 691 |
+
>
|
| 692 |
+
{isRecording ? "⏹" : "⏺"}
|
| 693 |
+
</IconButton>
|
| 694 |
+
</Tooltip>
|
| 695 |
<Tooltip title="Sauvegarder en PNG">
|
| 696 |
<IconButton
|
| 697 |
onClick={handleSaveAsImage}
|