Commit f7364633 authored by salma

update frontend

parent 2fc0442c
......@@ -23,14 +23,6 @@
text-align: center;
margin-bottom: 30px;
}
.note {
background: #e7f3ff;
border: 1px solid #b3d9ff;
padding: 15px;
border-radius: 5px;
margin-bottom: 20px;
color: #004085;
}
.controls {
display: flex;
flex-direction: column;
......@@ -89,10 +81,16 @@
}
/* Active-recording state for the start button: red background plus a pulsing halo. */
.recording {
background: #dc3545 !important;
animation: pulse 1.5s infinite;
}
/* Darker red on hover while recording. */
.recording:hover {
background: #c82333 !important;
}
/* Expanding, fading box-shadow ring that drives the .recording pulse. */
@keyframes pulse {
0% { box-shadow: 0 0 0 0 rgba(220, 53, 69, 0.7); }
70% { box-shadow: 0 0 0 10px rgba(220, 53, 69, 0); }
100% { box-shadow: 0 0 0 0 rgba(220, 53, 69, 0); }
}
.processing {
background: #ffc107 !important;
color: #212529 !important;
......@@ -189,7 +187,7 @@
BACKEND_URL: `${window.location.origin}/chat`,
AUDIO_RESPONSE_URL: `${window.location.origin}/get-audio-response`
};
// Status levels consumed by UIManager.showStatus (each maps to a CSS class).
// FIX: the diff left both the old and new declarations of StatusType in
// place, which is a `const` redeclaration SyntaxError; keep only the new
// version, which adds the RECORDING state.
const StatusType = { SUCCESS: 'success', ERROR: 'error', PROCESSING: 'processing', RECORDING: 'recording' };
class TextDecoderUtil {
static decode(str) {
......@@ -215,7 +213,70 @@
const err = await response.json().catch(() => ({ detail: `HTTP ${response.status}` }));
throw new Error(err.detail || 'Failed to get audio response');
}
return response; // Return the full response object
return response;
}
}
/**
 * Wraps the browser MediaRecorder API: acquires the microphone, buffers
 * audio chunks as they arrive, and on stop() yields a Blob plus a file
 * extension matching the negotiated MIME type.
 */
class AudioRecorder {
    constructor() {
        this.mediaRecorder = null;  // active MediaRecorder, or null when idle
        this.audioChunks = [];      // Blob chunks collected during recording
        this.mimeType = '';         // negotiated container/codec MIME type
    }

    /**
     * Request microphone access and begin recording.
     * @returns {Promise<boolean>} true if recording started, false on failure
     *          (permission denied, no device, or MediaRecorder setup error).
     */
    async start() {
        let stream = null;
        try {
            stream = await navigator.mediaDevices.getUserMedia({ audio: true });
            // Preferred formats in order; fall back to the browser default
            // when none is explicitly supported.
            const types = [
                'audio/webm;codecs=opus',
                'audio/webm',
                'audio/mp4',
                'audio/ogg'
            ];
            this.mimeType = types.find(type => MediaRecorder.isTypeSupported(type)) || '';

            if (this.mimeType) {
                this.mediaRecorder = new MediaRecorder(stream, { mimeType: this.mimeType });
            } else {
                this.mediaRecorder = new MediaRecorder(stream);
                this.mimeType = this.mediaRecorder.mimeType;
            }

            this.audioChunks = [];
            this.mediaRecorder.ondataavailable = (event) => {
                if (event.data.size > 0) this.audioChunks.push(event.data);
            };

            this.mediaRecorder.start();
            return true;
        } catch (error) {
            // FIX: if MediaRecorder setup throws AFTER getUserMedia succeeded,
            // the original code leaked the stream — the microphone stayed open
            // and the browser's recording indicator never turned off. Release
            // the tracks before reporting failure.
            if (stream) stream.getTracks().forEach(track => track.stop());
            console.error("Error accessing microphone:", error);
            alert("Cannot access microphone. Please ensure you have granted permission.");
            return false;
        }
    }

    /**
     * Stop recording and release the microphone.
     * @returns {Promise<{audioBlob: Blob, extension: string}|null>} the
     *          recorded audio, or null when start() never succeeded.
     */
    stop() {
        return new Promise((resolve) => {
            if (!this.mediaRecorder) return resolve(null);
            this.mediaRecorder.onstop = () => {
                const audioBlob = new Blob(this.audioChunks, { type: this.mimeType });
                // Map the negotiated MIME type to an extension the backend
                // recognizes; webm is the default.
                let extension = 'webm';
                if (this.mimeType.includes('mp4')) extension = 'mp4';
                else if (this.mimeType.includes('ogg')) extension = 'ogg';
                else if (this.mimeType.includes('wav')) extension = 'wav';
                // Release the microphone once the final chunk is flushed.
                this.mediaRecorder.stream.getTracks().forEach(track => track.stop());
                resolve({ audioBlob, extension });
            };
            this.mediaRecorder.stop();
        });
    }
}
......@@ -225,6 +286,8 @@
this.studentIdInput = document.getElementById('studentIdInput');
this.textInput = document.getElementById('textInput');
this.sendTextBtn = document.getElementById('sendTextBtn');
this.startBtn = document.getElementById('startBtn');
this.stopBtn = document.getElementById('stopBtn');
this.chatContainer = document.getElementById('chatContainer');
this.status = document.getElementById('status');
}
......@@ -236,19 +299,40 @@
addMessage(text, sender, audioUrl = null) {
const msgDiv = document.createElement('div');
msgDiv.className = `message ${sender}-message`;
const senderName = sender === 'user' ? 'أنت' : 'المساعد';
const senderName = sender === 'user' ? 'أنت' : 'عنان';
msgDiv.innerHTML = `<strong>${senderName}:</strong> <div class="message-content"></div>`;
msgDiv.querySelector('.message-content').innerHTML = text;
if (audioUrl) {
const audio = document.createElement('audio');
audio.controls = true;
audio.src = audioUrl;
msgDiv.appendChild(audio);
// --- MODIFICATION: Only autoplay if sender is NOT user ---
if (sender !== 'user') {
audio.play().catch(() => {});
}
}
this.chatContainer.appendChild(msgDiv);
this.chatContainer.scrollTop = this.chatContainer.scrollHeight;
}
setRecordingState(isRecording) {
this.startBtn.disabled = isRecording;
this.stopBtn.disabled = !isRecording;
this.sendTextBtn.disabled = isRecording;
if (isRecording) {
this.startBtn.classList.add('recording');
this.startBtn.textContent = 'جاري التسجيل...';
this.showStatus('جاري تسجيل الصوت...', StatusType.RECORDING);
} else {
this.startBtn.classList.remove('recording');
this.startBtn.textContent = 'بدء التسجيل';
}
}
getStudentId() { return this.studentIdInput.value.trim() || 'student_001'; }
getTextInput() { return this.textInput.value.trim(); }
clearTextInput() { this.textInput.value = ''; }
......@@ -263,7 +347,6 @@
try {
const response = await this.apiClient.sendFormData(formData);
if (response.status === 'success') {
// ALWAYS poll after a successful chat request.
await this.getAgentResponse(this.uiManager.getStudentId());
} else {
throw new Error(response.message || 'Unknown server error');
......@@ -282,9 +365,25 @@
await this.sendRequest(formData);
}
// --- THIS IS THE INTELLIGENT PART OF THE FRONT-END ---
async sendAudioMessage(recordingData, studentId) {
const { audioBlob, extension } = recordingData;
if (!audioBlob || !studentId) return;
this.uiManager.showStatus('جاري معالجة الصوت...', StatusType.PROCESSING);
// --- MODIFICATION: Create URL for playback, but UIManager handles autoplay logic ---
const userAudioUrl = URL.createObjectURL(audioBlob);
this.uiManager.addMessage('🎤 رسالة صوتية', 'user', userAudioUrl);
const formData = new FormData();
formData.append('file', audioBlob, `recording.${extension}`);
formData.append('student_id', studentId);
await this.sendRequest(formData);
}
async getAgentResponse(studentId) {
this.uiManager.showStatus('جاري جلب رد المساعد...', StatusType.PROCESSING);
this.uiManager.showStatus('جاري جلب رد عنان...', StatusType.PROCESSING);
try {
const response = await this.apiClient.fetchAudioResponse(studentId);
const responseType = response.headers.get('X-Response-Type');
......@@ -295,11 +394,9 @@
let displayText = "";
if (responseType === 'mcq') {
// Decode the Base64 string, then parse the JSON
const jsonString = TextDecoderUtil.decode(encodedText);
const questionData = JSON.parse(jsonString);
// Format the question text for display
displayText = `${questionData.question_text}\n`;
const options = [
questionData.correct_answer, questionData.wrong_answer_1,
......@@ -312,11 +409,12 @@
displayText = `<pre>${displayText}</pre>`;
this.uiManager.showStatus('✓ تم استلام السؤال!', StatusType.SUCCESS);
} else { // Default to 'text'
} else {
displayText = TextDecoderUtil.decode(encodedText);
this.uiManager.showStatus('✓ تم استلام الرد!', StatusType.SUCCESS);
}
// agent (Anan) messages
this.uiManager.addMessage(displayText, 'agent', audioUrl);
} catch (error) {
......@@ -329,6 +427,7 @@
// Application entry point: build the UI, API, and recorder collaborators,
// wire the chat service to them, then attach DOM event handlers.
constructor() {
this.ui = new UIManager();
this.api = new APIClient();
this.recorder = new AudioRecorder();
// ChatService depends on the API client and UI manager created above.
this.chatService = new ChatService(this.api, this.ui);
// Must run last: handlers reference this.ui / this.recorder / this.chatService.
this.initEventListeners();
}
......@@ -342,9 +441,20 @@
};
this.ui.textInput.onkeypress = (e) => { if (e.key === 'Enter') this.ui.sendTextBtn.click(); };
// Note: Simplified to remove audio recording logic for clarity
document.getElementById('startBtn').style.display = 'none';
document.getElementById('stopBtn').style.display = 'none';
this.ui.startBtn.onclick = async () => {
const success = await this.recorder.start();
if (success) {
this.ui.setRecordingState(true);
}
};
this.ui.stopBtn.onclick = async () => {
const recordingData = await this.recorder.stop();
this.ui.setRecordingState(false);
if (recordingData && recordingData.audioBlob) {
this.chatService.sendAudioMessage(recordingData, this.ui.getStudentId());
}
};
}
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment