Commit 13ca82ce authored by salma

Add Test Yourself (single player) and multiplayer live quiz features

parent 56712e0e
@@ -262,7 +262,7 @@ if __name__ == "__main__":
    json_file_path = "All_Curriculums_grouped.json"
    # Setup curriculum database with JSON data
-    setup_curriculum_database(json_file_path, drop_existing_table=True)
+    setup_curriculum_database(json_file_path, drop_existing_table=False)
    print("\n" + "=" * 60)
    print("🔍 Verifying Setup")
...
@@ -907,4 +907,135 @@ def run_full_pipeline(pdf_path: str, grade: int, subject: str, output_json_path:
    except Exception as e:
        logging.critical(f"Pipeline error: {e}", exc_info=True)
    logging.info(f"\n--- Pipeline finished for {pdf_path} ---")
\ No newline at end of file
def run_processing_pipeline(pdf_path: str, grade: int, subject: str) -> tuple[str, str]:
    """
    Runs the full PDF processing pipeline and returns paths to the generated CSV and JSON files.
    """
    temp_json_path = "temp_json.json"
    temp_csv_path = "temp_embeddings.csv"
    run_full_pipeline(pdf_path, grade, subject, temp_json_path, temp_csv_path, remove_lessons=True)
    return temp_csv_path, temp_json_path
from fastapi import BackgroundTasks
import os
import shutil
import tempfile
from pathlib import Path
import json
import pandas as pd
from services import DataIngestionService
from services import ConnectionPool
from curriculum_structure import convert_json_to_db_format
def process_pdf_curriculum_in_background(pdf_bytes: bytes, original_filename: str, grade: int, subject: str):
    """
    Background task to process an uploaded curriculum PDF.
    It runs after the API response is returned, so the upload endpoint is not blocked.
    """
    print(f"--- Background task started: Processing PDF '{original_filename}'. ---", flush=True)
    pool_handler = None
    try:
        # --- Setup Paths ---
        project_root = Path(__file__).parent
        embeddings_dir = project_root / "embeddings"
        main_json_path = project_root / "All_Curriculums_grouped.json"
        embeddings_dir.mkdir(exist_ok=True)

        # --- Create Dependencies ---
        pool_handler = ConnectionPool(
            dbname=os.getenv("POSTGRES_DB"),
            user=os.getenv("POSTGRES_USER"),
            password=os.getenv("POSTGRES_PASSWORD"),
            host=os.getenv("DB_HOST", "postgres"),
            port=int(os.getenv("DB_PORT", 5432))
        )
        ingestion_service = DataIngestionService(pool_handler=pool_handler)

        # --- 1. Save and Run Pipeline ---
        with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as temp_pdf:
            temp_pdf.write(pdf_bytes)
            temp_pdf_path = temp_pdf.name
        print(f"--- Background task: Saved temp PDF to {temp_pdf_path} ---", flush=True)
        temp_csv_path, temp_json_path = run_processing_pipeline(temp_pdf_path, grade, subject)

        # --- 2. Save the generated CSV ---
        csv_filename = Path(temp_csv_path).name
        csv_dest_path = embeddings_dir / csv_filename
        shutil.move(temp_csv_path, csv_dest_path)
        print(f"--- Background task: Saved new embeddings to '{csv_dest_path}' ---", flush=True)

        # --- 3. Read both JSON files ---
        print("--- Background task: Reading generated JSON structure... ---", flush=True)
        with open(temp_json_path, 'r', encoding='utf-8') as f:
            new_structure_data = json.load(f)
        print(f"--- Background task: New structure contains keys: {list(new_structure_data.keys())} ---", flush=True)

        # Load existing main JSON or start with an empty dict
        try:
            with open(main_json_path, 'r', encoding='utf-8') as f:
                existing_structure_data = json.load(f)
            print(f"--- Background task: Loaded existing structure with {len(existing_structure_data)} curricula ---", flush=True)
        except FileNotFoundError:
            print("--- Background task: Main JSON file not found. Creating new one. ---", flush=True)
            existing_structure_data = {}
        except json.JSONDecodeError:
            print("--- Background task: Main JSON file corrupted. Starting fresh. ---", flush=True)
            existing_structure_data = {}

        # Append new curriculum keys to the existing structure
        for curriculum_key, curriculum_content in new_structure_data.items():
            if curriculum_key in existing_structure_data:
                print(f"--- WARNING: Key '{curriculum_key}' already exists. Overwriting. ---", flush=True)
            else:
                print(f"--- Background task: Adding new curriculum '{curriculum_key}' to main JSON. ---", flush=True)
            existing_structure_data[curriculum_key] = curriculum_content

        # Write the updated data back to the file
        with open(main_json_path, 'w', encoding='utf-8') as f:
            json.dump(existing_structure_data, f, indent=2, ensure_ascii=False)
        print(f"--- Background task: Main JSON now contains {len(existing_structure_data)} curricula ---", flush=True)

        # --- 4. Ingest structure into DB ---
        print("--- Background task: Ingesting new structure into DB... ---", flush=True)
        db_formatted_structure = convert_json_to_db_format(new_structure_data)
        ingestion_service.ingest_curriculum_structure(db_formatted_structure)

        # --- 5. Ingest embeddings into DB ---
        print("--- Background task: Ingesting new embeddings into DB... ---", flush=True)
        embeddings_df = pd.read_csv(csv_dest_path)
        ingestion_service.ingest_embeddings_from_csv(embeddings_df)

        print("--- Background task: Verifying database insertions... ---", flush=True)
        from services.pgvector_service import PGVectorService
        pgvector_service = PGVectorService(pool_handler)
        pgvector_service.verify_recent_insertions()

        # --- 6. Cleanup ---
        os.unlink(temp_pdf_path)
        os.unlink(temp_json_path)
        print("--- Background task: Cleaned up temporary files ---", flush=True)
        print("--- ✅ Background task completed successfully. ---", flush=True)
    except Exception as e:
        import traceback
        print(f"--- ❌ FATAL ERROR in background task: {e} ---", flush=True)
        print(f"--- Traceback: {traceback.format_exc()} ---", flush=True)
    finally:
        if pool_handler:
            pool_handler.close_all()
            print("--- Background task: Database connection pool closed. ---", flush=True)
\ No newline at end of file
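# Illustrative only (not part of this commit): a minimal sketch of how an upload endpoint
# could hand work to process_pdf_curriculum_in_background via FastAPI's BackgroundTasks
# (imported above), so the HTTP response returns before the heavy pipeline runs. The route
# path and form fields are assumptions, not the app's actual API.
from fastapi import FastAPI, UploadFile, File, Form

app = FastAPI()

@app.post("/curriculum/upload")
async def upload_curriculum(
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    grade: int = Form(...),
    subject: str = Form(...),
):
    pdf_bytes = await file.read()
    # Schedule the pipeline to run after the response is sent.
    background_tasks.add_task(
        process_pdf_curriculum_in_background, pdf_bytes, file.filename, grade, subject
    )
    return {"status": "processing", "filename": file.filename}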
from pydantic import BaseModel
-from typing import List, Optional
+from typing import List, Optional, Dict
class QuestionResponse(BaseModel):
    """Defines the exact 11 fields to be returned for each question."""
@@ -28,4 +28,9 @@ class MCQListResponse(BaseModel):
    """Defines the structure for the GET /mcq endpoint."""
    status: str
    count: int
    questions: List[QuestionResponse]
\ No newline at end of file
class QuizSubmission(BaseModel):
    questions: List[Dict]
    answers: Dict[str, str]
\ No newline at end of file
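# Illustrative only (not part of this commit): a minimal sketch of a QuizSubmission
# payload and a naive grading pass. The "id" and "correct_answer" keys inside each
# question dict are assumptions for illustration; the model itself only requires a
# list of dicts plus a str-to-str answers mapping.
submission = QuizSubmission(
    questions=[{"id": "q1", "question": "2 + 2 = ?", "correct_answer": "4"}],
    answers={"q1": "4"},
)
score = sum(
    1
    for q in submission.questions
    if submission.answers.get(q.get("id")) == q.get("correct_answer")
)
print(f"Score: {score}/{len(submission.questions)}")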
@@ -10,4 +10,6 @@ from .chat_database_service import ChatDatabaseService
from .connection_pool import ConnectionPool
from .pedagogy_service import PedagogyService
from .segmentation_service import LanguageSegmentationService
from .data_ingestion_service import DataIngestionService
from .websocket_service import WebSocketManager
from .redis_client import redis_client, redis_listener, get_room_key, get_room_channel
\ No newline at end of file
import logging
-from services.agent_helpers.agent_prompts import tashkeel_agent_prompt
+tashkeel_agent_prompt = "شكل الكلام"
logger = logging.getLogger(__name__)
class TashkeelAgent:
@@ -15,7 +14,6 @@ class TashkeelAgent:
        if not self.openai_service.is_available():
            logger.warning("OpenAI service not available for TashkeelAgent")
            return text  # fallback: return original
        messages = [
            {"role": "system", "content": tashkeel_agent_prompt},
            {"role": "user", "content": text}
...
import redis
import redis.asyncio as aioredis
import os
import asyncio
import json  # required: redis_listener parses pub/sub payloads with json.loads
from .websocket_service import WebSocketManager
# Synchronous client for regular operations
try:
    redis_host = os.getenv("REDIS_HOST", "localhost")
    redis_port = int(os.getenv("REDIS_PORT", 6379))
    # decode_responses=True makes the client return strings instead of bytes
    redis_client = redis.Redis(host=redis_host, port=redis_port, db=0, decode_responses=True)
    redis_client.ping()
-    print(f"Successfully connected to Redis at {redis_host}:{redis_port}")
+    print(f"Successfully connected to Redis (sync) at {redis_host}:{redis_port}")
except redis.exceptions.ConnectionError as e:
    print(f"FATAL: Could not connect to Redis: {e}")
    redis_client = None
\ No newline at end of file
# Async client for pub/sub
async_redis_client = None
if redis_client:
    try:
        async_redis_client = aioredis.Redis(host=redis_host, port=redis_port, db=0, decode_responses=True)
        print("Created async Redis client for pub/sub")
    except Exception as e:
        print(f"Could not create async Redis client: {e}")

async def redis_listener(manager: WebSocketManager):
    """
    Listens to Redis Pub/Sub for messages and broadcasts them to local clients.
    This is the core of the multi-worker communication.
    """
    from services.redis_client import async_redis_client
    if not async_redis_client:
        print("ERROR: Async Redis client not available for pub/sub listener")
        return

    pubsub = async_redis_client.pubsub()
    await pubsub.psubscribe("quiz_channel:*")
    print("Redis listener started and subscribed to quiz_channel:*")
    try:
        while True:
            message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
            if message and message.get("type") == "pmessage":
                channel = message['channel']
                if isinstance(channel, bytes):
                    channel = channel.decode('utf-8')
                room_id = channel.split(':')[-1]
                data_raw = message['data']
                if isinstance(data_raw, bytes):
                    data_raw = data_raw.decode('utf-8')
                data = json.loads(data_raw)
                print(f"Redis listener received message for room {room_id}: {data.get('type')}")
                await manager.broadcast_local(room_id, data)
    except asyncio.CancelledError:
        print("Redis listener cancelled.")
    finally:
        # Subscribed with psubscribe, so the pattern must be released with punsubscribe.
        await pubsub.punsubscribe("quiz_channel:*")
        await pubsub.close()
        print("Redis listener stopped.")

# --- HELPER FUNCTIONS FOR REDIS INTERACTIONS ---
def get_room_key(room_id: str) -> str:
    return f"quiz_room:{room_id}"

def get_room_channel(room_id: str) -> str:
    return f"quiz_channel:{room_id}"
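# Illustrative only (not part of this commit): a small sketch of how the helpers above
# might be used to keep per-room state in Redis next to the pub/sub channel. The hash
# fields ("host", "status") and the one-hour TTL are assumptions, not the app's schema.
def create_room_example(room_id: str, host_name: str) -> None:
    if redis_client is None:
        return  # Redis connection failed at import time
    room_key = get_room_key(room_id)
    redis_client.hset(room_key, mapping={"host": host_name, "status": "waiting"})
    redis_client.expire(room_key, 3600)  # let abandoned rooms expire automatically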
from typing import Dict, List
from fastapi import WebSocket

class WebSocketManager:
    """Manages active WebSocket connections for each room on a single worker."""
    def __init__(self):
        self.active_connections: Dict[str, List[WebSocket]] = {}

    async def connect(self, websocket: WebSocket, room_id: str):
        await websocket.accept()
        if room_id not in self.active_connections:
            self.active_connections[room_id] = []
        self.active_connections[room_id].append(websocket)

    def disconnect(self, websocket: WebSocket, room_id: str):
        if room_id in self.active_connections:
            self.active_connections[room_id].remove(websocket)
            if not self.active_connections[room_id]:
                del self.active_connections[room_id]

    async def broadcast_local(self, room_id: str, message: Dict):
        """Broadcasts a message only to clients connected to this specific worker."""
        if room_id in self.active_connections:
            for connection in self.active_connections[room_id]:
                await connection.send_json(message)
\ No newline at end of file
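# Illustrative only (not part of this commit): a rough sketch of how a FastAPI WebSocket
# route could tie WebSocketManager to the Redis channel above. The route path, the
# module-level manager instance, and the assumption that redis_listener(manager) runs as
# a startup task are all illustrative, not the app's actual wiring.
import json
from fastapi import FastAPI, WebSocket, WebSocketDisconnect
from services import WebSocketManager, redis_client, get_room_channel

app = FastAPI()
manager = WebSocketManager()

@app.websocket("/ws/quiz/{room_id}")
async def quiz_socket(websocket: WebSocket, room_id: str):
    await manager.connect(websocket, room_id)
    try:
        while True:
            data = await websocket.receive_json()
            # Publish to Redis so every worker, not just this one, can broadcast it.
            redis_client.publish(get_room_channel(room_id), json.dumps(data))
    except WebSocketDisconnect:
        manager.disconnect(websocket, room_id)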
@@ -74,4 +74,4 @@ def setup_mcq_table(drop_existing_table: bool = False):
if __name__ == "__main__":
    print("Setting up the MCQ table structure...")
-    setup_mcq_table(drop_existing_table=True)
+    setup_mcq_table(drop_existing_table=False)
\ No newline at end of file
@@ -106,6 +106,16 @@
    const populateDropdown = (selectElement, options, placeholder) => {
        selectElement.innerHTML = `<option value="">-- ${placeholder} --</option>`;
        // For the Unit and Concept dropdowns, prepend an "All" option so the user
        // can request questions across an entire unit or concept.
        if ((selectElement.id === 'unitSelect' || selectElement.id === 'conceptSelect') && options.length > 0) {
            const allOpt = document.createElement('option');
            allOpt.value = 'All';
            allOpt.textContent = '-- All --';
            selectElement.appendChild(allOpt);
        }
        options.forEach(option => {
            const opt = document.createElement('option');
            opt.value = option;
@@ -183,15 +193,37 @@
    // --- Main Action: Generate Quiz ---
    generateButton.addEventListener('click', async () => {
-        const [curriculum, grade, subject, unit, concept, count, isArabic] = [
-            curriculumSelect.value, gradeSelect.value, subjectSelect.value,
-            unitSelect.value, conceptSelect.value, countInput.value, isArabicInput.checked
-        ];
-        if (!curriculum || !grade || !subject || !unit || !concept) {
-            showStatus('Please make a selection in all dropdown menus.', 'error');
-            return;
-        }
+        const curriculum = curriculumSelect.value;
+        const grade = gradeSelect.value;
+        const subject = subjectSelect.value;
+        let unit = unitSelect.value;
+        let concept = conceptSelect.value; // may still be "" at this point
+
+        // Curriculum, Grade, Subject, and Unit are always required.
+        if (!curriculum || !grade || !subject || !unit) {
+            showStatus('Please select a Curriculum, Grade, Subject, and Unit.', 'error');
+            return;
+        }
+
+        // If a specific unit is chosen but no concept, default the concept to "All".
+        if (unit !== 'All' && !concept) {
+            concept = 'All';
+        }
+
+        // If the Unit is "All", the Concept must also be "All".
+        if (unit === 'All') {
+            concept = 'All';
+        }
+
+        // Final check: if there is still no concept, ask the user to pick one.
+        if (!concept) {
+            showStatus('Please make a selection for the Concept.', 'error');
+            return;
+        }

        showStatus('Generating dynamic quiz... This may take a moment.', 'processing');
        generateButton.disabled = true;
@@ -205,8 +237,8 @@
        formData.append('subject', subject);
        formData.append('unit', unit);
        formData.append('concept', concept);
-        formData.append('count', count);
-        formData.append('is_arabic', isArabic);
+        formData.append('count', countInput.value); // read directly from the inputs
+        formData.append('is_arabic', isArabicInput.checked);
        try {
            const response = await fetch('/quiz/dynamic', { method: 'POST', body: formData });
...
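# Illustrative only (not part of this commit): a sketch of the kind of FastAPI route the
# form above posts to. The page sends curriculum, grade, subject, unit, concept, count and
# is_arabic as multipart form fields to POST /quiz/dynamic; the signature below only
# mirrors those fields, and the real handler's logic is not shown in this diff.
from fastapi import FastAPI, Form

app = FastAPI()

@app.post("/quiz/dynamic")
async def generate_dynamic_quiz(
    curriculum: str = Form(...),
    grade: str = Form(...),
    subject: str = Form(...),
    unit: str = Form(...),
    concept: str = Form(...),
    count: int = Form(...),
    is_arabic: bool = Form(...),
):
    # A value of "All" for unit/concept means "do not filter on this field".
    return {"status": "ok", "requested": count}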
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>AI Tutor Project Hub</title>
    <style>
        body {
            font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
            max-width: 800px;
            margin: 40px auto;
            padding: 20px;
            background-color: #f9f9f9;
            color: #333;
            line-height: 1.6;
        }
        .container {
            background: white;
            padding: 30px;
            border-radius: 8px;
            box-shadow: 0 4px 15px rgba(0,0,0,0.1);
        }
        h1 {
            text-align: center;
            color: #2c3e50;
            border-bottom: 2px solid #eee;
            padding-bottom: 20px;
            margin-bottom: 30px;
        }
        .link-list {
            list-style-type: none;
            padding: 0;
        }
        .link-list li {
            margin-bottom: 15px;
        }
        .link-list a {
            display: block;
            padding: 20px;
            background-color: #007bff;
            color: white;
            text-decoration: none;
            font-size: 18px;
            font-weight: bold;
            text-align: center;
            border-radius: 5px;
            transition: background-color 0.2s, transform 0.2s;
        }
        .link-list a:hover {
            background-color: #0056b3;
            transform: translateY(-2px);
        }
        /* Style different links with different colors for better distinction */
        .link-list a.chat { background-color: #007bff; }
        .link-list a.chat:hover { background-color: #0056b3; }
        .link-list a.dynamic-quiz { background-color: #6f42c1; }
        .link-list a.dynamic-quiz:hover { background-color: #5a32a3; }
        .link-list a.upload { background-color: #dc3545; }
        .link-list a.upload:hover { background-color: #c82333; }
        .link-list a.test-yourself { background-color: #28a745; }
        .link-list a.test-yourself:hover { background-color: #218838; }
        .link-list a.live-quiz { background-color: #fd7e14; }
        .link-list a.live-quiz:hover { background-color: #e36a04; }
    </style>
</head>
<body>
    <div class="container">
        <h1>SSLabs AI Feature Hub</h1>
        <ul class="link-list">
            <li><a href="/chat-interface" class="chat">Voice Chat Interface</a></li>
            <li><a href="/test-yourself" class="test-yourself">Test Yourself (Single Player)</a></li>
            <li><a href="/live-quiz" class="live-quiz">Live Quiz Challenge (Multiplayer)</a></li>
            <li><a href="/quiz-interface" class="dynamic-quiz">Dynamic Quiz Generator (for CSV)</a></li>
            <li><a href="/curriculum-upload" class="upload">Curriculum PDF Uploader</a></li>
        </ul>
    </div>
</body>
</html>
\ No newline at end of file
from .utils import DateTimeEncoder
\ No newline at end of file
from datetime import datetime
import json

class DateTimeEncoder(json.JSONEncoder):
    """Custom JSON encoder to handle datetime objects."""
    def default(self, obj):
        if isinstance(obj, datetime):
            return obj.isoformat()
        return super().default(obj)
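# Example: json.dumps raises TypeError on datetime values by default; passing
# cls=DateTimeEncoder serializes them as ISO 8601 strings instead.
if __name__ == "__main__":
    print(json.dumps({"created_at": datetime(2024, 1, 1, 12, 0)}, cls=DateTimeEncoder))
    # -> {"created_at": "2024-01-01T12:00:00"}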