Commit 13ca82ce authored by salma

add test yourself and multiplayer test features

parent 56712e0e
@@ -262,7 +262,7 @@ if __name__ == "__main__":
    json_file_path = "All_Curriculums_grouped.json"
    # Setup curriculum database with JSON data
-   setup_curriculum_database(json_file_path, drop_existing_table=True)
+   setup_curriculum_database(json_file_path, drop_existing_table=False)
    print("\n" + "=" * 60)
    print("🔍 Verifying Setup")
...
This diff is collapsed.
@@ -908,3 +908,134 @@ def run_full_pipeline(pdf_path: str, grade: int, subject: str, output_json_path:
        logging.critical(f"Pipeline error: {e}", exc_info=True)
    logging.info(f"\n--- Pipeline finished for {pdf_path} ---")
def run_processing_pipeline(pdf_path: str, grade: int, subject: str) -> tuple[str, str]:
"""
Runs the full PDF processing pipeline and returns paths to the generated CSV and JSON files.
"""
temp_json_path = "temp_json.json"
temp_csv_path = "temp_embeddings.csv"
run_full_pipeline(pdf_path, grade, subject, temp_json_path, temp_csv_path, remove_lessons=True)
return temp_csv_path, temp_json_path
from fastapi import BackgroundTasks
import os
import shutil
import tempfile
from pathlib import Path
import json
import pandas as pd
from services import DataIngestionService
from services import ConnectionPool
from curriculum_structure import convert_json_to_db_format
def process_pdf_curriculum_in_background(pdf_bytes: bytes, original_filename: str, grade: int, subject: str):
"""
Background task to process uploaded curriculum PDF.
This function runs asynchronously and won't block the API response.
"""
print(f"--- Background task started: Processing PDF '{original_filename}'. ---", flush=True)
pool_handler = None
try:
# --- Setup Paths ---
project_root = Path(__file__).parent
embeddings_dir = project_root / "embeddings"
main_json_path = project_root / "All_Curriculums_grouped.json"
embeddings_dir.mkdir(exist_ok=True)
# --- Create Dependencies ---
pool_handler = ConnectionPool(
dbname=os.getenv("POSTGRES_DB"),
user=os.getenv("POSTGRES_USER"),
password=os.getenv("POSTGRES_PASSWORD"),
host=os.getenv("DB_HOST", "postgres"),
port=int(os.getenv("DB_PORT", 5432))
)
ingestion_service = DataIngestionService(pool_handler=pool_handler)
# --- 1. Save and Run Pipeline ---
with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as temp_pdf:
temp_pdf.write(pdf_bytes)
temp_pdf_path = temp_pdf.name
print(f"--- Background task: Saved temp PDF to {temp_pdf_path} ---", flush=True)
temp_csv_path, temp_json_path = run_processing_pipeline(temp_pdf_path, grade, subject)
# --- 2. Save the generated CSV ---
csv_filename = Path(temp_csv_path).name
csv_dest_path = embeddings_dir / csv_filename
shutil.move(temp_csv_path, csv_dest_path)
print(f"--- Background task: Saved new embeddings to '{csv_dest_path}' ---", flush=True)
# --- 3. Read both JSON files ---
print("--- Background task: Reading generated JSON structure... ---", flush=True)
with open(temp_json_path, 'r', encoding='utf-8') as f:
new_structure_data = json.load(f)
print(f"--- Background task: New structure contains keys: {list(new_structure_data.keys())} ---", flush=True)
# Load existing main JSON or start with empty dict
try:
with open(main_json_path, 'r', encoding='utf-8') as f:
existing_structure_data = json.load(f)
print(f"--- Background task: Loaded existing structure with {len(existing_structure_data)} curricula ---", flush=True)
except FileNotFoundError:
print("--- Background task: Main JSON file not found. Creating new one. ---", flush=True)
existing_structure_data = {}
except json.JSONDecodeError:
print("--- Background task: Main JSON file corrupted. Starting fresh. ---", flush=True)
existing_structure_data = {}
# Append new curriculum keys to the existing structure
for curriculum_key, curriculum_content in new_structure_data.items():
if curriculum_key in existing_structure_data:
print(f"--- WARNING: Key '{curriculum_key}' already exists. Overwriting. ---", flush=True)
else:
print(f"--- Background task: Adding new curriculum '{curriculum_key}' to main JSON. ---", flush=True)
existing_structure_data[curriculum_key] = curriculum_content
# Write the updated data back to the file
with open(main_json_path, 'w', encoding='utf-8') as f:
json.dump(existing_structure_data, f, indent=2, ensure_ascii=False)
print(f"--- Background task: Main JSON now contains {len(existing_structure_data)} curricula ---", flush=True)
# ==========================================================
# --- 4. Ingest structure into DB ---
print("--- Background task: Ingesting new structure into DB... ---", flush=True)
db_formatted_structure = convert_json_to_db_format(new_structure_data)
ingestion_service.ingest_curriculum_structure(db_formatted_structure)
# --- 5. Ingest embeddings into DB ---
print("--- Background task: Ingesting new embeddings into DB... ---", flush=True)
embeddings_df = pd.read_csv(csv_dest_path)
ingestion_service.ingest_embeddings_from_csv(embeddings_df)
print("--- Background task: Verifying database insertions... ---", flush=True)
from services.pgvector_service import PGVectorService
pgvector_service = PGVectorService(pool_handler)
pgvector_service.verify_recent_insertions()
# --- 6. Cleanup ---
os.unlink(temp_pdf_path)
os.unlink(temp_json_path)
print("--- Background task: Cleaned up temporary files ---", flush=True)
print("--- ✅ Background task completed successfully. ---", flush=True)
except Exception as e:
import traceback
print(f"--- ❌ FATAL ERROR in background task: {e} ---", flush=True)
print(f"--- Traceback: {traceback.format_exc()} ---", flush=True)
finally:
if pool_handler:
pool_handler.close_all()
print("--- Background task: Database connection pool closed. ---", flush=True)
\ No newline at end of file
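The module imports FastAPI's BackgroundTasks, but the endpoint that schedules this task is not part of this commit. A minimal sketch of how it might be wired, assuming a hypothetical /curriculum/upload route and form fields:

    # Hypothetical wiring sketch; the route name and form fields are assumptions.
    from fastapi import FastAPI, UploadFile, File, Form, BackgroundTasks

    app = FastAPI()

    @app.post("/curriculum/upload")
    async def upload_curriculum(
        background_tasks: BackgroundTasks,
        file: UploadFile = File(...),
        grade: int = Form(...),
        subject: str = Form(...),
    ):
        # Read the bytes up front so the task owns a stable copy of the PDF.
        pdf_bytes = await file.read()
        background_tasks.add_task(
            process_pdf_curriculum_in_background, pdf_bytes, file.filename, grade, subject
        )
        # Returns immediately; processing continues after the response is sent.
        return {"status": "processing", "filename": file.filename}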
from pydantic import BaseModel
-from typing import List, Optional
+from typing import List, Optional, Dict

class QuestionResponse(BaseModel):
    """Defines the exact 11 fields to be returned for each question."""
@@ -29,3 +29,8 @@ class MCQListResponse(BaseModel):
    status: str
    count: int
    questions: List[QuestionResponse]

+class QuizSubmission(BaseModel):
+    questions: List[Dict]
+    answers: Dict[str, str]
\ No newline at end of file
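For reference, a payload that satisfies QuizSubmission might look like this; the keys inside each question dict are illustrative, since questions is an untyped List[Dict]:

    # Illustrative only: "question_id" and the answer mapping are assumed shapes.
    submission = QuizSubmission(
        questions=[{"question_id": "q1", "question_text": "2 + 2 = ?"}],
        answers={"q1": "4"},  # question id -> the student's chosen answer
    )
    print(submission)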
@@ -11,3 +11,5 @@ from .connection_pool import ConnectionPool
from .pedagogy_service import PedagogyService
from .segmentation_service import LanguageSegmentationService
from .data_ingestion_service import DataIngestionService
+from .websocket_service import WebSocketManager
+from .redis_client import redis_client, redis_listener, get_room_key, get_room_channel
\ No newline at end of file
import logging
-from services.agent_helpers.agent_prompts import tashkeel_agent_prompt
+tashkeel_agent_prompt = "شكل الكلام"  # Arabic: "Add diacritics to the text."

logger = logging.getLogger(__name__)

class TashkeelAgent:
@@ -15,7 +14,6 @@ class TashkeelAgent:
        if not self.openai_service.is_available():
            logger.warning("OpenAI service not available for TashkeelAgent")
            return text  # fallback: return original

        messages = [
            {"role": "system", "content": tashkeel_agent_prompt},
            {"role": "user", "content": text}
...
@@ -269,9 +269,26 @@ class AgentService:
            )
            response_content = response.choices[0].message.content
            json_response = json.loads(response_content)
-           generated_questions = next((v for v in json_response.values() if isinstance(v, list)), None)
+           # More robust parsing: accept a list, a dict wrapping a list, or a single question object.
+           generated_questions = []
+           if isinstance(json_response, list):
+               # Case 1: The root of the JSON is already a list of questions.
+               generated_questions = json_response
+           elif isinstance(json_response, dict):
+               # Case 2: The root is a dictionary.
+               # First, try to find a list within the dictionary's values.
+               found_list = next((v for v in json_response.values() if isinstance(v, list)), None)
+               if found_list:
+                   generated_questions = found_list
+               # If no list is found, the dictionary itself may be the single question.
+               elif "question_text" in json_response:
+                   generated_questions = [json_response]  # Wrap the single object in a list.

            if not generated_questions:
-               raise ValueError("LLM did not return a list of questions in the JSON response.")
+               # If we still have nothing, the format is truly unknown.
+               raise ValueError("LLM response did not contain a recognizable question list or object.")
        except (json.JSONDecodeError, ValueError, KeyError, StopIteration) as e:
            logger.error(f"Failed to parse MCQ response from LLM: {e}\nRaw Response: {response_content}")
            raise HTTPException(status_code=500, detail="Failed to generate or parse MCQs from AI.")
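The fallback chain above accepts three response shapes: a bare list, a dict wrapping a list, and a single question object. A self-contained sketch of the same logic:

    import json

    def extract_questions(raw: str) -> list:
        # Mirrors the parsing fallbacks above.
        data = json.loads(raw)
        if isinstance(data, list):
            return data
        if isinstance(data, dict):
            found = next((v for v in data.values() if isinstance(v, list)), None)
            if found:
                return found
            if "question_text" in data:
                return [data]
        return []

    assert extract_questions('[{"question_text": "Q1"}]') == [{"question_text": "Q1"}]
    assert extract_questions('{"questions": [{"question_text": "Q1"}]}') == [{"question_text": "Q1"}]
    assert extract_questions('{"question_text": "Q1"}') == [{"question_text": "Q1"}]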
@@ -437,17 +454,73 @@ class AgentService:
        self, curriculum: str, grade: str, subject: str, unit: str, concept: str, is_arabic: bool, count: int
    ) -> List[Dict]:
        """
-       Generates a dynamic quiz of 'count' questions using a hybrid approach with BATCHED generation.
+       Generates a dynamic quiz. Handles "All" for unit or concept by recursively
+       calling itself for each sub-topic and dividing the question count.
        """
        if not self.pgvector:
            raise HTTPException(status_code=503, detail="Vector service is not available for this feature.")

-       MAX_QUESTIONS_PER_BATCH = 10
-       num_fresh_questions = min(max(1, math.floor(count / 3)), 5)
-       logger.info(f"Request for {count} questions. Step 1: Generating {num_fresh_questions} new 'freshness' questions.")
+       # --- RECURSIVE AGGREGATION LOGIC ---

+       # Case 1: Broadest scope - All Units in a Subject
+       if unit == "All":
+           logger.info(f"Broad scope: All Units for Subject '{subject}'. Fetching units...")
+           units = self.pgvector.get_distinct_units_from_structure(curriculum, grade, subject)
+           if not units:
+               raise HTTPException(status_code=404, detail=f"No units found for {subject}.")
+           final_quiz = []
+           num_parts = len(units)
+           base_count = count // num_parts
+           remainder = count % num_parts
+           for i, u in enumerate(units):
+               q_count = base_count + (1 if i < remainder else 0)
+               if q_count > 0:
+                   # Recursive call for each unit, passing "All" for concept.
+                   logger.info(f"Fetching {q_count} questions for Unit '{u}'...")
+                   final_quiz.extend(self.get_dynamic_quiz(
+                       curriculum, grade, subject, u, "All", is_arabic, q_count
+                   ))
+           random.shuffle(final_quiz)
+           return final_quiz[:count]

+       # Case 2: Medium scope - All Concepts in a Unit
+       elif concept == "All":
+           logger.info(f"Medium scope: All Concepts for Unit '{unit}'. Fetching concepts...")
+           concepts = self.pgvector.get_distinct_concepts_from_structure(curriculum, grade, subject, unit)
+           if not concepts:
+               raise HTTPException(status_code=404, detail=f"No concepts found for {unit}.")
+           final_quiz = []
+           num_parts = len(concepts)
+           base_count = count // num_parts
+           remainder = count % num_parts
+           for i, c in enumerate(concepts):
+               q_count = base_count + (1 if i < remainder else 0)
+               if q_count > 0:
+                   # Recursive call for each concept (this hits the base case below).
+                   logger.info(f"Fetching {q_count} questions for Concept '{c}'...")
+                   final_quiz.extend(self.get_dynamic_quiz(
+                       curriculum, grade, subject, unit, c, is_arabic, q_count
+                   ))
+           random.shuffle(final_quiz)
+           return final_quiz[:count]

+       # --- BASE CASE: A SINGLE, SPECIFIC CONCEPT ---
+       # This preserves the original single-concept logic.
+       else:
+           logger.info(f"Base Case: Fetching {count} questions for specific Concept '{concept}'.")
+           MAX_QUESTIONS_PER_BATCH = 10

+           # Generate a proportional number of freshness questions.
+           num_fresh_questions = min(max(1, math.floor(count / 3)), 5) if count > 0 else 0
+           if num_fresh_questions > 0:
+               logger.info(f"Generating {num_fresh_questions} new 'freshness' questions.")
                try:
+                   # FIX #1: removed the erroneous 'difficulty_level' argument.
                    self.generate_and_store_mcqs(
                        curriculum=curriculum, grade=grade, subject=subject, unit=unit, concept=concept,
                        is_arabic=is_arabic, num_questions=num_fresh_questions
@@ -455,54 +528,41 @@ class AgentService:
                except Exception as e:
                    logger.warning(f"Could not generate 'freshness' questions for the quiz due to an error: {e}")

-       all_mcqs_after_freshness = self.pgvector.get_mcqs(
-           curriculum=curriculum,
-           grade=grade, subject=subject, unit=unit, concept=concept,
-           is_arabic=is_arabic, limit=None
-       )
-       questions_still_needed = count - len(all_mcqs_after_freshness)
+           # Fetch all available questions for this specific concept.
+           final_pool = self.pgvector.get_mcqs(
+               curriculum=curriculum, grade=grade, subject=subject, unit=unit, concept=concept,
+               is_arabic=is_arabic, limit=None
+           )

+           if not final_pool:
+               # If no questions exist at all, we can't proceed for this part.
+               logger.warning(f"No questions could be found or generated for '{concept}'. Returning empty list for this part.")
+               return []

+           # If we still don't have enough after freshness, generate more in batches.
+           questions_still_needed = count - len(final_pool)
            if questions_still_needed > 0:
-               logger.info(f"After freshness batch, have {len(all_mcqs_after_freshness)} questions. Need to generate {questions_still_needed} more to meet count of {count}.")
+               logger.info(f"Need to generate {questions_still_needed} more to meet count of {count}.")
                remaining = questions_still_needed
                while remaining > 0:
                    batch_size = min(remaining, MAX_QUESTIONS_PER_BATCH)
                    try:
-                       logger.info(f"Generating batch of {remaining // MAX_QUESTIONS_PER_BATCH + 1} of {batch_size} questions...")
+                       # FIX #2: added the missing 'curriculum' argument.
                        self.generate_and_store_mcqs(
-                           curriculum=curriculum,
-                           grade=grade,
-                           subject=subject,
-                           unit=unit,
-                           concept=concept,
-                           is_arabic=is_arabic,
-                           num_questions=batch_size
+                           curriculum=curriculum, grade=grade, subject=subject, unit=unit, concept=concept,
+                           is_arabic=is_arabic, num_questions=batch_size
                        )
                        remaining -= batch_size
                    except Exception as e:
                        logger.error(f"Failed to generate batch of {batch_size} questions: {e}")
-                       # Break the loop if generation fails to prevent an infinite loop
-                       break
+                       break  # Prevent an infinite loop on repeated failure.

+           # Re-fetch the pool after batch generation.
            final_pool = self.pgvector.get_mcqs(
-               curriculum=curriculum,
-               grade=grade, subject=subject, unit=unit, concept=concept,
+               curriculum=curriculum, grade=grade, subject=subject, unit=unit, concept=concept,
                is_arabic=is_arabic, limit=None
            )

-       if not final_pool:
-           raise HTTPException(status_code=404, detail="No questions could be found or generated for this topic.")
-       if len(final_pool) < count:
-           logger.warning(f"Could only gather {len(final_pool)} questions out of {count} requested. Returning all available questions.")
            random.shuffle(final_pool)
-       final_quiz = final_pool[:min(count, len(final_pool))]
-       logger.info(f"Returning a dynamic quiz of {len(final_quiz)} questions for '{concept}'.")
-       return final_quiz
+           # Return the number of questions requested for this part of the recursion.
+           return final_pool[:min(count, len(final_pool))]
\ No newline at end of file
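The even-split arithmetic used in both recursive cases (floor division, then one extra question for the first remainder sub-topics) can be seen in isolation:

    def split_count(total: int, parts: int) -> list[int]:
        # Same scheme as base_count/remainder above.
        base, remainder = divmod(total, parts)
        return [base + (1 if i < remainder else 0) for i in range(parts)]

    print(split_count(10, 3))  # [4, 3, 3] -- e.g. 10 questions across 3 units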
import redis
import redis.asyncio as aioredis
import os
import asyncio
import json  # used by redis_listener below
from .websocket_service import WebSocketManager

# Synchronous client for regular operations
try:
    redis_host = os.getenv("REDIS_HOST", "localhost")
    redis_port = int(os.getenv("REDIS_PORT", 6379))
    # decode_responses=True makes the client return strings instead of bytes
    redis_client = redis.Redis(host=redis_host, port=redis_port, db=0, decode_responses=True)
    redis_client.ping()
-   print(f"Successfully connected to Redis at {redis_host}:{redis_port}")
+   print(f"Successfully connected to Redis (sync) at {redis_host}:{redis_port}")
except redis.exceptions.ConnectionError as e:
    print(f"FATAL: Could not connect to Redis: {e}")
    redis_client = None
# Async client for pub/sub
async_redis_client = None
if redis_client:
try:
async_redis_client = aioredis.Redis(host=redis_host, port=redis_port, db=0, decode_responses=True)
print(f"Created async Redis client for pub/sub")
except Exception as e:
print(f"Could not create async Redis client: {e}")
async def redis_listener(manager: WebSocketManager):
"""
Listens to Redis Pub/Sub for messages and broadcasts them to local clients.
This is the core of the multi-worker communication.
"""
from services.redis_client import async_redis_client
if not async_redis_client:
print("ERROR: Async Redis client not available for pub/sub listener")
return
pubsub = async_redis_client.pubsub()
await pubsub.psubscribe("quiz_channel:*")
print("Redis listener started and subscribed to quiz_channel:*")
try:
while True:
message = await pubsub.get_message(ignore_subscribe_messages=True, timeout=1.0)
if message and message.get("type") == "pmessage":
channel = message['channel']
if isinstance(channel, bytes):
channel = channel.decode('utf-8')
room_id = channel.split(':')[-1]
data_raw = message['data']
if isinstance(data_raw, bytes):
data_raw = data_raw.decode('utf-8')
data = json.loads(data_raw)
print(f"Redis listener received message for room {room_id}: {data.get('type')}")
await manager.broadcast_local(room_id, data)
except asyncio.CancelledError:
print("Redis listener cancelled.")
finally:
await pubsub.unsubscribe("quiz_channel:*")
await pubsub.close()
print("Redis listener stopped.")
# --- HELPER FUNCTIONS FOR REDIS INTERACTIONS ---
def get_room_key(room_id: str) -> str:
return f"quiz_room:{room_id}"
def get_room_channel(room_id: str) -> str:
return f"quiz_channel:{room_id}"
from typing import Dict, List
from fastapi import WebSocket
class WebSocketManager:
"""Manages active WebSocket connections for each room on a single worker."""
def __init__(self):
self.active_connections: Dict[str, List[WebSocket]] = {}
async def connect(self, websocket: WebSocket, room_id: str):
await websocket.accept()
if room_id not in self.active_connections:
self.active_connections[room_id] = []
self.active_connections[room_id].append(websocket)
def disconnect(self, websocket: WebSocket, room_id: str):
if room_id in self.active_connections:
self.active_connections[room_id].remove(websocket)
if not self.active_connections[room_id]:
del self.active_connections[room_id]
async def broadcast_local(self, room_id: str, message: Dict):
"""Broadcasts a message only to clients connected to this specific worker."""
if room_id in self.active_connections:
for connection in self.active_connections[room_id]:
await connection.send_json(message)
\ No newline at end of file
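How the manager and listener are wired into the app is not shown in this diff. A minimal sketch, assuming a FastAPI app with a startup hook and a hypothetical /ws/{room_id} route:

    # Hypothetical wiring; the route path and startup hook are assumptions.
    import asyncio
    from fastapi import FastAPI, WebSocket, WebSocketDisconnect
    from services import WebSocketManager, redis_listener

    app = FastAPI()
    manager = WebSocketManager()

    @app.on_event("startup")
    async def start_listener():
        # One listener task per worker mirrors Redis pub/sub into local sockets.
        asyncio.create_task(redis_listener(manager))

    @app.websocket("/ws/{room_id}")
    async def quiz_socket(websocket: WebSocket, room_id: str):
        await manager.connect(websocket, room_id)
        try:
            while True:
                await websocket.receive_json()  # client messages handled elsewhere
        except WebSocketDisconnect:
            manager.disconnect(websocket, room_id)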
@@ -74,4 +74,4 @@ def setup_mcq_table(drop_existing_table: bool = False):
if __name__ == "__main__":
    print("Setting up the MCQ table structure...")
-   setup_mcq_table(drop_existing_table=True)
+   setup_mcq_table(drop_existing_table=False)
\ No newline at end of file
@@ -106,6 +106,16 @@
        const populateDropdown = (selectElement, options, placeholder) => {
            selectElement.innerHTML = `<option value="">-- ${placeholder} --</option>`;

+           // Offer an "All" option in the unit and concept dropdowns.
+           if ((selectElement.id === 'unitSelect' || selectElement.id === 'conceptSelect') && options.length > 0) {
+               const allOpt = document.createElement('option');
+               allOpt.value = 'All';
+               allOpt.textContent = '-- All --';
+               selectElement.appendChild(allOpt);
+           }
+
            options.forEach(option => {
                const opt = document.createElement('option');
                opt.value = option;
@@ -183,15 +193,37 @@
        // --- Main Action: Generate Quiz ---
        generateButton.addEventListener('click', async () => {
-           const [curriculum, grade, subject, unit, concept, count, isArabic] = [
-               curriculumSelect.value, gradeSelect.value, subjectSelect.value,
-               unitSelect.value, conceptSelect.value, countInput.value, isArabicInput.checked
-           ];
+           // Smarter validation: unit and concept may now be "All".
+           const curriculum = curriculumSelect.value;
+           const grade = gradeSelect.value;
+           const subject = subjectSelect.value;
+           let unit = unitSelect.value;
+           let concept = conceptSelect.value; // May be "" initially.

+           // Basic validation: the first four dropdowns are always required.
+           if (!curriculum || !grade || !subject || !unit) {
+               showStatus('Please select a Curriculum, Grade, Subject, and Unit.', 'error');
+               return;
+           }

+           // If a specific unit is chosen but no concept, default the concept to "All".
+           if (unit !== 'All' && !concept) {
+               concept = 'All';
+           }

+           // If Unit is "All", Concept must also be "All".
+           if (unit === 'All') {
+               concept = 'All';
+           }

-           if (!curriculum || !grade || !subject || !unit || !concept) {
-               showStatus('Please make a selection in all dropdown menus.', 'error');
+           // Final check: if we still have no concept, it's an error (should now be rare).
+           if (!concept) {
+               showStatus('Please make a selection for the Concept.', 'error');
                return;
            }

            showStatus('Generating dynamic quiz... This may take a moment.', 'processing');
            generateButton.disabled = true;
@@ -205,8 +237,8 @@
            formData.append('subject', subject);
            formData.append('unit', unit);
            formData.append('concept', concept);
-           formData.append('count', count);
-           formData.append('is_arabic', isArabic);
+           formData.append('count', countInput.value);
+           formData.append('is_arabic', isArabicInput.checked);
            try {
                const response = await fetch('/quiz/dynamic', { method: 'POST', body: formData });
...
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AI Tutor Project Hub</title>
<style>
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
max-width: 800px;
margin: 40px auto;
padding: 20px;
background-color: #f9f9f9;
color: #333;
line-height: 1.6;
}
.container {
background: white;
padding: 30px;
border-radius: 8px;
box-shadow: 0 4px 15px rgba(0,0,0,0.1);
}
h1 {
text-align: center;
color: #2c3e50;
border-bottom: 2px solid #eee;
padding-bottom: 20px;
margin-bottom: 30px;
}
.link-list {
list-style-type: none;
padding: 0;
}
.link-list li {
margin-bottom: 15px;
}
.link-list a {
display: block;
padding: 20px;
background-color: #007bff;
color: white;
text-decoration: none;
font-size: 18px;
font-weight: bold;
text-align: center;
border-radius: 5px;
transition: background-color 0.2s, transform 0.2s;
}
.link-list a:hover {
background-color: #0056b3;
transform: translateY(-2px);
}
/* Style different links with different colors for better distinction */
.link-list a.chat { background-color: #007bff; }
.link-list a.chat:hover { background-color: #0056b3; }
.link-list a.dynamic-quiz { background-color: #6f42c1; }
.link-list a.dynamic-quiz:hover { background-color: #5a32a3; }
.link-list a.upload { background-color: #dc3545; }
.link-list a.upload:hover { background-color: #c82333; }
.link-list a.test-yourself { background-color: #28a745; }
.link-list a.test-yourself:hover { background-color: #218838; }
.link-list a.live-quiz { background-color: #fd7e14; }
.link-list a.live-quiz:hover { background-color: #e36a04; }
</style>
</head>
<body>
<div class="container">
<h1>SSLabs AI Feature Hub</h1>
<ul class="link-list">
<li><a href="/chat-interface" class="chat">Voice Chat Interface</a></li>
<li><a href="/test-yourself" class="test-yourself">Test Yourself (Single Player)</a></li>
<li><a href="/live-quiz" class="live-quiz">Live Quiz Challenge (Multiplayer)</a></li>
<li><a href="/quiz-interface" class="dynamic-quiz">Dynamic Quiz Generator (for CSV)</a></li>
<li><a href="/curriculum-upload" class="upload">Curriculum PDF Uploader</a></li>
</ul>
</div>
</body>
</html>
\ No newline at end of file
This diff is collapsed.
from .utils import DateTimeEncoder
\ No newline at end of file
from datetime import datetime
import json
class DateTimeEncoder(json.JSONEncoder):
""" Custom JSON encoder to handle datetime objects """
def default(self, obj):
if isinstance(obj, datetime):
return obj.isoformat()
return super().default(obj)
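Usage is the standard cls= hook of json.dumps:

    from datetime import datetime
    import json

    payload = {"started_at": datetime(2024, 5, 1, 12, 30)}
    print(json.dumps(payload, cls=DateTimeEncoder))  # {"started_at": "2024-05-01T12:30:00"}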