feat(class): improve class/id
This commit is contained in:
@@ -35,8 +35,12 @@ from schemas.class_group import (
|
||||
HistogramBin,
|
||||
DomainStats,
|
||||
CompetenceStats,
|
||||
AssessmentScore,
|
||||
DomainStudentStats,
|
||||
CompetenceStudentStats,
|
||||
)
|
||||
from domain.services.grading_calculator import GradingCalculator
|
||||
from domain.services.class_statistics_service import ClassStatisticsService
|
||||
from schemas.student import StudentWithClass, StudentList
|
||||
from schemas.csv_import import (
|
||||
CSVImportResponse,
|
||||
@@ -221,10 +225,10 @@ async def get_class_stats(
|
||||
Récupère les statistiques complètes d'une classe pour un trimestre.
|
||||
|
||||
Inclut:
|
||||
- Moyennes par élève
|
||||
- Moyennes par élève avec détail par évaluation
|
||||
- Statistiques globales (moyenne, médiane, écart-type)
|
||||
- Histogramme des moyennes
|
||||
- Analyse par domaines et compétences
|
||||
- Analyse par domaines et compétences (nombre d'évaluations + points)
|
||||
"""
|
||||
# Vérifier que la classe existe
|
||||
class_query = select(ClassGroup).where(ClassGroup.id == class_id)
|
||||
@@ -247,26 +251,34 @@ async def get_class_stats(
|
||||
students_result = await session.execute(students_query)
|
||||
students = students_result.scalars().all()
|
||||
|
||||
# Récupérer les évaluations du trimestre
|
||||
assessments_query = select(Assessment).where(
|
||||
Assessment.class_group_id == class_id,
|
||||
Assessment.trimester == trimester
|
||||
# Récupérer les évaluations du trimestre avec leurs relations
|
||||
assessments_query = (
|
||||
select(Assessment)
|
||||
.options(
|
||||
selectinload(Assessment.exercises).selectinload(Exercise.grading_elements)
|
||||
)
|
||||
.where(
|
||||
Assessment.class_group_id == class_id,
|
||||
Assessment.trimester == trimester
|
||||
)
|
||||
.order_by(Assessment.date)
|
||||
)
|
||||
assessments_result = await session.execute(assessments_query)
|
||||
assessments = assessments_result.scalars().all()
|
||||
|
||||
# Calculer les moyennes de chaque élève
|
||||
calculator = GradingCalculator()
|
||||
student_averages = []
|
||||
all_averages = []
|
||||
# Récupérer les domaines et compétences
|
||||
domains_query = select(Domain).order_by(Domain.name)
|
||||
domains_result = await session.execute(domains_query)
|
||||
domains = domains_result.scalars().all()
|
||||
|
||||
competences_query = select(Competence).order_by(Competence.order_index)
|
||||
competences_result = await session.execute(competences_query)
|
||||
competences = competences_result.scalars().all()
|
||||
|
||||
# Récupérer toutes les notes en une seule requête pour optimiser
|
||||
grades_by_student_assessment = {}
|
||||
for student in students:
|
||||
weighted_sum = 0.0
|
||||
total_coefficient = 0.0
|
||||
assessment_count = 0
|
||||
|
||||
for assessment in assessments:
|
||||
# Récupérer les notes de l'élève pour cette évaluation
|
||||
grades_query = (
|
||||
select(Grade, GradingElement)
|
||||
.join(GradingElement, Grade.grading_element_id == GradingElement.id)
|
||||
@@ -277,45 +289,30 @@ async def get_class_stats(
|
||||
)
|
||||
)
|
||||
grades_result = await session.execute(grades_query)
|
||||
grades_data = grades_result.all()
|
||||
grades_by_student_assessment[(student.id, assessment.id)] = grades_result.all()
|
||||
|
||||
if grades_data:
|
||||
total_score = 0.0
|
||||
total_max_points = 0.0
|
||||
# Utiliser le service pour calculer les statistiques
|
||||
stats_service = ClassStatisticsService()
|
||||
student_averages = await stats_service.calculate_student_statistics(
|
||||
students=students,
|
||||
assessments=assessments,
|
||||
grades_by_student_assessment=grades_by_student_assessment,
|
||||
domains=domains,
|
||||
competences=competences,
|
||||
)
|
||||
|
||||
for grade, element in grades_data:
|
||||
if grade.value:
|
||||
score = calculator.calculate_score(
|
||||
grade.value, element.grading_type, element.max_points
|
||||
)
|
||||
if score is not None and calculator.is_counted_in_total(grade.value):
|
||||
total_score += score
|
||||
total_max_points += element.max_points
|
||||
|
||||
if total_max_points > 0:
|
||||
# Ramener sur 20
|
||||
score_on_20 = total_score / total_max_points * 20
|
||||
weighted_sum += score_on_20 * assessment.coefficient
|
||||
total_coefficient += assessment.coefficient
|
||||
assessment_count += 1
|
||||
|
||||
# Calculer la moyenne pondérée
|
||||
average = None
|
||||
if total_coefficient > 0:
|
||||
average = round(weighted_sum / total_coefficient, 2)
|
||||
all_averages.append(average)
|
||||
|
||||
student_averages.append(StudentAverage(
|
||||
student_id=student.id,
|
||||
first_name=student.first_name,
|
||||
last_name=student.last_name,
|
||||
full_name=f"{student.first_name} {student.last_name}",
|
||||
average=average,
|
||||
assessment_count=assessment_count
|
||||
))
|
||||
# Calculer les statistiques domaines/compétences depuis les éléments de notation
|
||||
# Perspective enseignant : ce qui a été évalué, pas les résultats des élèves
|
||||
domains_stats, competences_stats = stats_service.calculate_domain_competence_from_elements(
|
||||
assessments=assessments,
|
||||
domains=domains,
|
||||
competences=competences,
|
||||
)
|
||||
|
||||
# Calculer les statistiques globales
|
||||
all_averages = [s.average for s in student_averages if s.average is not None]
|
||||
mean = median = std_dev = min_score = max_score = None
|
||||
|
||||
if all_averages:
|
||||
mean = round(sum(all_averages) / len(all_averages), 2)
|
||||
sorted_averages = sorted(all_averages)
|
||||
@@ -345,9 +342,10 @@ async def get_class_stats(
|
||||
count=count
|
||||
))
|
||||
# Ajouter le dernier bin pour 20
|
||||
count_20 = sum(1 for avg in all_averages if avg == 20)
|
||||
if count_20 > 0:
|
||||
histogram[-1].count += count_20
|
||||
if histogram:
|
||||
count_20 = sum(1 for avg in all_averages if avg == 20)
|
||||
if count_20 > 0:
|
||||
histogram[-1].count += count_20
|
||||
|
||||
# Compter les évaluations par statut
|
||||
assessments_completed = 0
|
||||
@@ -379,36 +377,6 @@ async def get_class_stats(
|
||||
elif grades_count > 0:
|
||||
assessments_in_progress += 1
|
||||
|
||||
# Statistiques par domaine et compétence (simplifié)
|
||||
domains_stats = []
|
||||
competences_stats = []
|
||||
|
||||
# Récupérer les domaines
|
||||
domains_query = select(Domain).order_by(Domain.name)
|
||||
domains_result = await session.execute(domains_query)
|
||||
domains = domains_result.scalars().all()
|
||||
for domain in domains:
|
||||
domains_stats.append(DomainStats(
|
||||
id=domain.id,
|
||||
name=domain.name,
|
||||
color=domain.color,
|
||||
mean=None,
|
||||
elements_count=0
|
||||
))
|
||||
|
||||
# Récupérer les compétences
|
||||
competences_query = select(Competence).order_by(Competence.order_index)
|
||||
competences_result = await session.execute(competences_query)
|
||||
competences = competences_result.scalars().all()
|
||||
for competence in competences:
|
||||
competences_stats.append(CompetenceStats(
|
||||
id=competence.id,
|
||||
name=competence.name,
|
||||
color=competence.color,
|
||||
mean=None,
|
||||
elements_count=0
|
||||
))
|
||||
|
||||
return ClassDashboardStats(
|
||||
class_id=class_id,
|
||||
class_name=cls.name,
|
||||
|
||||
@@ -29,6 +29,7 @@ from .student_report_service import (
|
||||
StudentReportData,
|
||||
generate_report_html,
|
||||
)
|
||||
from .class_statistics_service import ClassStatisticsService
|
||||
|
||||
__all__ = [
|
||||
# Grading Calculator
|
||||
@@ -39,6 +40,7 @@ __all__ = [
|
||||
"ScoreStrategy",
|
||||
# Statistics
|
||||
"StatisticsService",
|
||||
"ClassStatisticsService",
|
||||
# Score Calculator
|
||||
"StudentScoreCalculator",
|
||||
"ProgressCalculator",
|
||||
|
||||
331
backend/domain/services/class_statistics_service.py
Normal file
331
backend/domain/services/class_statistics_service.py
Normal file
@@ -0,0 +1,331 @@
|
||||
"""
|
||||
Service de calcul des statistiques de classe.
|
||||
|
||||
Calcule les statistiques complètes pour le dashboard de classe:
|
||||
- Moyennes par élève
|
||||
- Scores par évaluation pour chaque élève
|
||||
- Statistiques par domaine et compétence
|
||||
"""
|
||||
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
from collections import defaultdict
|
||||
|
||||
from infrastructure.database.models import (
|
||||
Student,
|
||||
Assessment,
|
||||
Exercise,
|
||||
GradingElement,
|
||||
Grade,
|
||||
Domain,
|
||||
Competence,
|
||||
)
|
||||
from domain.services.grading_calculator import GradingCalculator
|
||||
from schemas.class_group import (
|
||||
StudentAverage,
|
||||
AssessmentScore,
|
||||
DomainStudentStats,
|
||||
CompetenceStudentStats,
|
||||
DomainStats,
|
||||
CompetenceStats,
|
||||
)
|
||||
|
||||
|
||||
class ClassStatisticsService:
|
||||
"""Service de calcul des statistiques de classe."""
|
||||
|
||||
def __init__(self):
|
||||
self.calculator = GradingCalculator()
|
||||
|
||||
async def calculate_student_statistics(
|
||||
self,
|
||||
students: List[Student],
|
||||
assessments: List[Assessment],
|
||||
grades_by_student_assessment: Dict[Tuple[int, int], List[Tuple[Grade, GradingElement]]],
|
||||
domains: List[Domain],
|
||||
competences: List[Competence],
|
||||
) -> List[StudentAverage]:
|
||||
"""
|
||||
Calcule les statistiques complètes pour chaque élève.
|
||||
|
||||
Args:
|
||||
students: Liste des élèves
|
||||
assessments: Liste des évaluations du trimestre
|
||||
grades_by_student_assessment: Dict[(student_id, assessment_id)] -> [(grade, element)]
|
||||
domains: Liste des domaines
|
||||
competences: Liste des compétences
|
||||
|
||||
Returns:
|
||||
Liste des StudentAverage avec toutes les statistiques
|
||||
"""
|
||||
student_averages = []
|
||||
|
||||
for student in students:
|
||||
# Initialiser les statistiques par domaine/compétence
|
||||
domain_stats: Dict[int, DomainStudentStats] = {
|
||||
domain.id: DomainStudentStats(
|
||||
domain_id=domain.id,
|
||||
evaluation_count=0,
|
||||
total_points_obtained=0.0,
|
||||
total_points_possible=0.0,
|
||||
)
|
||||
for domain in domains
|
||||
}
|
||||
|
||||
competence_stats: Dict[int, CompetenceStudentStats] = {}
|
||||
|
||||
# Calculer les scores par évaluation
|
||||
assessment_scores: Dict[int, AssessmentScore] = {}
|
||||
weighted_sum = 0.0
|
||||
total_coefficient = 0.0
|
||||
assessment_count = 0
|
||||
|
||||
for assessment in assessments:
|
||||
grades_data = grades_by_student_assessment.get((student.id, assessment.id), [])
|
||||
|
||||
if not grades_data:
|
||||
continue
|
||||
|
||||
# Calculer le score total pour cette évaluation
|
||||
total_score = 0.0
|
||||
total_max_points = 0.0
|
||||
|
||||
for grade, element in grades_data:
|
||||
if grade.value:
|
||||
score = self.calculator.calculate_score(
|
||||
grade.value, element.grading_type, element.max_points
|
||||
)
|
||||
|
||||
if score is not None and self.calculator.is_counted_in_total(grade.value):
|
||||
total_score += score
|
||||
total_max_points += element.max_points
|
||||
|
||||
# Statistiques par domaine
|
||||
if element.domain_id and element.domain_id in domain_stats:
|
||||
domain_stats[element.domain_id].evaluation_count += 1
|
||||
domain_stats[element.domain_id].total_points_obtained += score
|
||||
domain_stats[element.domain_id].total_points_possible += element.max_points
|
||||
|
||||
# Statistiques par compétence (skill)
|
||||
# Note: On utilise element.skill pour identifier la compétence
|
||||
if element.skill:
|
||||
# Trouver la compétence correspondante
|
||||
matching_competence = next(
|
||||
(c for c in competences if c.name == element.skill),
|
||||
None
|
||||
)
|
||||
if matching_competence:
|
||||
if matching_competence.id not in competence_stats:
|
||||
competence_stats[matching_competence.id] = CompetenceStudentStats(
|
||||
competence_id=matching_competence.id,
|
||||
evaluation_count=0,
|
||||
total_points_obtained=0.0,
|
||||
total_points_possible=0.0,
|
||||
)
|
||||
|
||||
competence_stats[matching_competence.id].evaluation_count += 1
|
||||
competence_stats[matching_competence.id].total_points_obtained += score
|
||||
competence_stats[matching_competence.id].total_points_possible += element.max_points
|
||||
|
||||
# Calculer le score sur 20
|
||||
score_on_20 = None
|
||||
if total_max_points > 0:
|
||||
score_on_20 = round(total_score / total_max_points * 20, 2)
|
||||
weighted_sum += score_on_20 * assessment.coefficient
|
||||
total_coefficient += assessment.coefficient
|
||||
assessment_count += 1
|
||||
|
||||
# Sauvegarder le score de cette évaluation
|
||||
assessment_scores[assessment.id] = AssessmentScore(
|
||||
assessment_id=assessment.id,
|
||||
assessment_title=assessment.title,
|
||||
score=round(total_score, 2) if total_score > 0 else None,
|
||||
max_points=round(total_max_points, 2),
|
||||
score_on_20=score_on_20,
|
||||
)
|
||||
|
||||
# Calculer la moyenne pondérée
|
||||
average = None
|
||||
if total_coefficient > 0:
|
||||
average = round(weighted_sum / total_coefficient, 2)
|
||||
|
||||
student_averages.append(StudentAverage(
|
||||
student_id=student.id,
|
||||
first_name=student.first_name,
|
||||
last_name=student.last_name,
|
||||
full_name=f"{student.first_name} {student.last_name}",
|
||||
average=average,
|
||||
assessment_count=assessment_count,
|
||||
assessment_scores=assessment_scores,
|
||||
domain_stats=domain_stats,
|
||||
competence_stats=competence_stats,
|
||||
))
|
||||
|
||||
return student_averages
|
||||
|
||||
def aggregate_domain_competence_stats(
|
||||
self,
|
||||
student_averages: List[StudentAverage],
|
||||
domains: List[Domain],
|
||||
competences: List[Competence],
|
||||
) -> Tuple[List[DomainStats], List[CompetenceStats]]:
|
||||
"""
|
||||
Agrège les statistiques par domaine et compétence pour tous les élèves.
|
||||
|
||||
Args:
|
||||
student_averages: Liste des statistiques par élève
|
||||
domains: Liste des domaines
|
||||
competences: Liste des compétences
|
||||
|
||||
Returns:
|
||||
Tuple (domains_stats, competences_stats)
|
||||
"""
|
||||
# Agréger par domaine
|
||||
domain_aggregates: Dict[int, Dict] = defaultdict(
|
||||
lambda: {
|
||||
"evaluation_count": 0,
|
||||
"total_points_obtained": 0.0,
|
||||
"total_points_possible": 0.0,
|
||||
}
|
||||
)
|
||||
|
||||
for student in student_averages:
|
||||
for domain_id, stats in student.domain_stats.items():
|
||||
domain_aggregates[domain_id]["evaluation_count"] += stats.evaluation_count
|
||||
domain_aggregates[domain_id]["total_points_obtained"] += stats.total_points_obtained
|
||||
domain_aggregates[domain_id]["total_points_possible"] += stats.total_points_possible
|
||||
|
||||
domains_stats = []
|
||||
for domain in domains:
|
||||
agg = domain_aggregates.get(domain.id, {
|
||||
"evaluation_count": 0,
|
||||
"total_points_obtained": 0.0,
|
||||
"total_points_possible": 0.0,
|
||||
})
|
||||
domains_stats.append(DomainStats(
|
||||
id=domain.id,
|
||||
name=domain.name,
|
||||
color=domain.color,
|
||||
evaluation_count=agg["evaluation_count"],
|
||||
total_points_obtained=round(agg["total_points_obtained"], 2),
|
||||
total_points_possible=round(agg["total_points_possible"], 2),
|
||||
))
|
||||
|
||||
# Agréger par compétence
|
||||
competence_aggregates: Dict[int, Dict] = defaultdict(
|
||||
lambda: {
|
||||
"evaluation_count": 0,
|
||||
"total_points_obtained": 0.0,
|
||||
"total_points_possible": 0.0,
|
||||
}
|
||||
)
|
||||
|
||||
for student in student_averages:
|
||||
for competence_id, stats in student.competence_stats.items():
|
||||
competence_aggregates[competence_id]["evaluation_count"] += stats.evaluation_count
|
||||
competence_aggregates[competence_id]["total_points_obtained"] += stats.total_points_obtained
|
||||
competence_aggregates[competence_id]["total_points_possible"] += stats.total_points_possible
|
||||
|
||||
competences_stats = []
|
||||
for competence in competences:
|
||||
agg = competence_aggregates.get(competence.id, {
|
||||
"evaluation_count": 0,
|
||||
"total_points_obtained": 0.0,
|
||||
"total_points_possible": 0.0,
|
||||
})
|
||||
competences_stats.append(CompetenceStats(
|
||||
id=competence.id,
|
||||
name=competence.name,
|
||||
color=competence.color,
|
||||
evaluation_count=agg["evaluation_count"],
|
||||
total_points_obtained=round(agg["total_points_obtained"], 2),
|
||||
total_points_possible=round(agg["total_points_possible"], 2),
|
||||
))
|
||||
|
||||
return domains_stats, competences_stats
|
||||
|
||||
def calculate_domain_competence_from_elements(
|
||||
self,
|
||||
assessments: List[Assessment],
|
||||
domains: List[Domain],
|
||||
competences: List[Competence],
|
||||
) -> Tuple[List[DomainStats], List[CompetenceStats]]:
|
||||
"""
|
||||
Calcule les statistiques domaines/compétences depuis les GradingElements.
|
||||
|
||||
Perspective enseignant : ce qui a été évalué, pas les résultats des élèves.
|
||||
|
||||
Args:
|
||||
assessments: Liste des évaluations (avec exercises et grading_elements chargés)
|
||||
domains: Liste des domaines
|
||||
competences: Liste des compétences
|
||||
|
||||
Returns:
|
||||
Tuple (domains_stats, competences_stats)
|
||||
"""
|
||||
# Compter les GradingElements par domaine
|
||||
domain_aggregates: Dict[int, Dict] = defaultdict(
|
||||
lambda: {
|
||||
"evaluation_count": 0,
|
||||
"total_points_possible": 0.0,
|
||||
}
|
||||
)
|
||||
|
||||
competence_aggregates: Dict[int, Dict] = defaultdict(
|
||||
lambda: {
|
||||
"evaluation_count": 0,
|
||||
"total_points_possible": 0.0,
|
||||
}
|
||||
)
|
||||
|
||||
# Parcourir tous les éléments de notation
|
||||
for assessment in assessments:
|
||||
for exercise in assessment.exercises:
|
||||
for element in exercise.grading_elements:
|
||||
# Compter par domaine
|
||||
if element.domain_id:
|
||||
domain_aggregates[element.domain_id]["evaluation_count"] += 1
|
||||
domain_aggregates[element.domain_id]["total_points_possible"] += element.max_points
|
||||
|
||||
# Compter par compétence (via skill)
|
||||
if element.skill:
|
||||
matching_competence = next(
|
||||
(c for c in competences if c.name == element.skill),
|
||||
None
|
||||
)
|
||||
if matching_competence:
|
||||
competence_aggregates[matching_competence.id]["evaluation_count"] += 1
|
||||
competence_aggregates[matching_competence.id]["total_points_possible"] += element.max_points
|
||||
|
||||
# Créer les stats par domaine
|
||||
domains_stats = []
|
||||
for domain in domains:
|
||||
agg = domain_aggregates.get(domain.id, {
|
||||
"evaluation_count": 0,
|
||||
"total_points_possible": 0.0,
|
||||
})
|
||||
domains_stats.append(DomainStats(
|
||||
id=domain.id,
|
||||
name=domain.name,
|
||||
color=domain.color,
|
||||
evaluation_count=agg["evaluation_count"],
|
||||
total_points_obtained=0.0, # Non utilisé dans cette perspective
|
||||
total_points_possible=round(agg["total_points_possible"], 2),
|
||||
))
|
||||
|
||||
# Créer les stats par compétence
|
||||
competences_stats = []
|
||||
for competence in competences:
|
||||
agg = competence_aggregates.get(competence.id, {
|
||||
"evaluation_count": 0,
|
||||
"total_points_possible": 0.0,
|
||||
})
|
||||
competences_stats.append(CompetenceStats(
|
||||
id=competence.id,
|
||||
name=competence.name,
|
||||
color=competence.color,
|
||||
evaluation_count=agg["evaluation_count"],
|
||||
total_points_obtained=0.0, # Non utilisé
|
||||
total_points_possible=round(agg["total_points_possible"], 2),
|
||||
))
|
||||
|
||||
return domains_stats, competences_stats
|
||||
@@ -3,7 +3,7 @@ Schemas Pydantic pour ClassGroup.
|
||||
"""
|
||||
|
||||
from datetime import date
|
||||
from typing import Optional, List
|
||||
from typing import Optional, List, Dict
|
||||
|
||||
from pydantic import Field
|
||||
|
||||
@@ -117,6 +117,9 @@ class StudentAverage(BaseSchema):
|
||||
full_name: str
|
||||
average: Optional[float] = None
|
||||
assessment_count: int = 0
|
||||
assessment_scores: Dict[int, "AssessmentScore"] = {}
|
||||
domain_stats: Dict[int, "DomainStudentStats"] = {}
|
||||
competence_stats: Dict[int, "CompetenceStudentStats"] = {}
|
||||
|
||||
|
||||
class HistogramBin(BaseSchema):
|
||||
@@ -128,14 +131,43 @@ class HistogramBin(BaseSchema):
|
||||
count: int
|
||||
|
||||
|
||||
class AssessmentScore(BaseSchema):
|
||||
"""Score d'un élève pour une évaluation."""
|
||||
|
||||
assessment_id: int
|
||||
assessment_title: str
|
||||
score: Optional[float] = None
|
||||
max_points: float = 0.0
|
||||
score_on_20: Optional[float] = None
|
||||
|
||||
|
||||
class DomainStudentStats(BaseSchema):
|
||||
"""Statistiques d'un élève pour un domaine."""
|
||||
|
||||
domain_id: int
|
||||
evaluation_count: int = 0
|
||||
total_points_obtained: float = 0.0
|
||||
total_points_possible: float = 0.0
|
||||
|
||||
|
||||
class CompetenceStudentStats(BaseSchema):
|
||||
"""Statistiques d'un élève pour une compétence."""
|
||||
|
||||
competence_id: int
|
||||
evaluation_count: int = 0
|
||||
total_points_obtained: float = 0.0
|
||||
total_points_possible: float = 0.0
|
||||
|
||||
|
||||
class DomainStats(BaseSchema):
|
||||
"""Statistiques par domaine."""
|
||||
|
||||
id: int
|
||||
name: str
|
||||
color: str
|
||||
mean: Optional[float] = None
|
||||
elements_count: int = 0
|
||||
evaluation_count: int = 0
|
||||
total_points_obtained: float = 0.0
|
||||
total_points_possible: float = 0.0
|
||||
|
||||
|
||||
class CompetenceStats(BaseSchema):
|
||||
@@ -144,8 +176,9 @@ class CompetenceStats(BaseSchema):
|
||||
id: int
|
||||
name: str
|
||||
color: str
|
||||
mean: Optional[float] = None
|
||||
elements_count: int = 0
|
||||
evaluation_count: int = 0
|
||||
total_points_obtained: float = 0.0
|
||||
total_points_possible: float = 0.0
|
||||
|
||||
|
||||
class ClassDashboardStats(BaseSchema):
|
||||
|
||||
601
docs/CLASS_DASHBOARD_IMPROVEMENTS.md
Normal file
601
docs/CLASS_DASHBOARD_IMPROVEMENTS.md
Normal file
@@ -0,0 +1,601 @@
|
||||
# Améliorations du Dashboard de Classe
|
||||
|
||||
**Date**: 3 décembre 2025
|
||||
**Version**: 2.0
|
||||
**Fichiers modifiés**: 4 fichiers (3 backend, 1 frontend)
|
||||
|
||||
## 📋 Objectifs des Modifications
|
||||
|
||||
### Tableau des Élèves
|
||||
1. ✅ Permettre le tri sur toutes les colonnes
|
||||
2. ✅ Afficher toutes les notes (une colonne par évaluation)
|
||||
3. ✅ Supprimer les indicateurs de performance (badges Excellent/Bon/Moyen/Insuffisant)
|
||||
|
||||
### Tableau Domaines/Compétences
|
||||
1. ✅ Afficher le nombre de fois qu'ils ont été évalués
|
||||
2. ✅ Afficher le nombre de points attribués (total obtenu/total possible)
|
||||
3. ✅ Supprimer les moyennes
|
||||
|
||||
---
|
||||
|
||||
## 🔧 Modifications Backend
|
||||
|
||||
### 1. Nouveaux Schemas (`backend/schemas/class_group.py`)
|
||||
|
||||
#### Schemas ajoutés
|
||||
|
||||
**AssessmentScore**
|
||||
```python
|
||||
class AssessmentScore(BaseSchema):
|
||||
"""Score d'un élève pour une évaluation."""
|
||||
assessment_id: int
|
||||
assessment_title: str
|
||||
score: Optional[float] = None # Score brut (ex: 15.5)
|
||||
max_points: float = 0.0 # Points maximum (ex: 20)
|
||||
score_on_20: Optional[float] = None # Score ramené sur 20
|
||||
```
|
||||
|
||||
**DomainStudentStats**
|
||||
```python
|
||||
class DomainStudentStats(BaseSchema):
|
||||
"""Statistiques d'un élève pour un domaine."""
|
||||
domain_id: int
|
||||
evaluation_count: int = 0 # Nombre de fois évalué sur ce domaine
|
||||
total_points_obtained: float = 0.0 # Total des points obtenus
|
||||
total_points_possible: float = 0.0 # Total des points possibles
|
||||
```
|
||||
|
||||
**CompetenceStudentStats**
|
||||
```python
|
||||
class CompetenceStudentStats(BaseSchema):
|
||||
"""Statistiques d'un élève pour une compétence."""
|
||||
competence_id: int
|
||||
evaluation_count: int = 0
|
||||
total_points_obtained: float = 0.0
|
||||
total_points_possible: float = 0.0
|
||||
```
|
||||
|
||||
#### Schemas modifiés
|
||||
|
||||
**DomainStats** - Avant vs Après
|
||||
```python
|
||||
# AVANT
|
||||
class DomainStats(BaseSchema):
|
||||
id: int
|
||||
name: str
|
||||
color: str
|
||||
mean: Optional[float] = None # ❌ Supprimé
|
||||
elements_count: int = 0
|
||||
|
||||
# APRÈS
|
||||
class DomainStats(BaseSchema):
|
||||
id: int
|
||||
name: str
|
||||
color: str
|
||||
evaluation_count: int = 0 # ✅ Nombre d'évaluations
|
||||
total_points_obtained: float = 0.0 # ✅ Points obtenus
|
||||
total_points_possible: float = 0.0 # ✅ Points possibles
|
||||
```
|
||||
|
||||
**CompetenceStats** - Même structure que DomainStats
|
||||
|
||||
**StudentAverage** - Enrichi avec 3 nouveaux champs
|
||||
```python
|
||||
class StudentAverage(BaseSchema):
|
||||
student_id: int
|
||||
first_name: str
|
||||
last_name: str
|
||||
full_name: str
|
||||
average: Optional[float] = None
|
||||
assessment_count: int = 0
|
||||
|
||||
# ✅ NOUVEAUX CHAMPS
|
||||
assessment_scores: Dict[int, AssessmentScore] = {} # Scores par évaluation
|
||||
domain_stats: Dict[int, DomainStudentStats] = {} # Stats par domaine
|
||||
competence_stats: Dict[int, CompetenceStudentStats] = {} # Stats par compétence
|
||||
```
|
||||
|
||||
### 2. Nouveau Service (`backend/domain/services/class_statistics_service.py`)
|
||||
|
||||
**ClassStatisticsService** - Service de calcul des statistiques de classe
|
||||
|
||||
#### Méthode 1: `calculate_student_statistics()`
|
||||
|
||||
**Signature**:
|
||||
```python
|
||||
async def calculate_student_statistics(
|
||||
students: List[Student],
|
||||
assessments: List[Assessment],
|
||||
grades_by_student_assessment: Dict[Tuple[int, int], List[Tuple[Grade, GradingElement]]],
|
||||
domains: List[Domain],
|
||||
competences: List[Competence],
|
||||
) -> List[StudentAverage]
|
||||
```
|
||||
|
||||
**Rôle**: Calcule toutes les statistiques pour chaque élève
|
||||
- Score par évaluation (brut + sur 20)
|
||||
- Moyenne pondérée par coefficient
|
||||
- Statistiques par domaine (nombre d'évaluations + points)
|
||||
- Statistiques par compétence (via `element.skill`)
|
||||
|
||||
**Logique**:
|
||||
1. Pour chaque élève:
|
||||
- Initialiser les dictionnaires de stats par domaine/compétence
|
||||
- Pour chaque évaluation:
|
||||
- Calculer le score total et max_points
|
||||
- Ramener sur 20 pour la moyenne pondérée
|
||||
- Pour chaque note:
|
||||
- Mettre à jour les stats du domaine associé
|
||||
- Mettre à jour les stats de la compétence associée (via skill)
|
||||
|
||||
#### Méthode 2: `aggregate_domain_competence_stats()`
|
||||
|
||||
**Signature**:
|
||||
```python
|
||||
def aggregate_domain_competence_stats(
|
||||
student_averages: List[StudentAverage],
|
||||
domains: List[Domain],
|
||||
competences: List[Competence],
|
||||
) -> Tuple[List[DomainStats], List[CompetenceStats]]
|
||||
```
|
||||
|
||||
**Rôle**: Agrège les statistiques de tous les élèves par domaine/compétence
|
||||
|
||||
**Logique**:
|
||||
1. Pour chaque domaine:
|
||||
- Sommer evaluation_count de tous les élèves
|
||||
- Sommer total_points_obtained de tous les élèves
|
||||
- Sommer total_points_possible de tous les élèves
|
||||
2. Même chose pour les compétences
|
||||
|
||||
### 3. Endpoint Refactorisé (`backend/api/routes/classes.py`)
|
||||
|
||||
**GET `/classes/{class_id}/stats?trimester={1|2|3}`**
|
||||
|
||||
#### Modifications principales
|
||||
|
||||
**Avant** (ancien code):
|
||||
```python
|
||||
# Calculer les moyennes de chaque élève
|
||||
calculator = GradingCalculator()
|
||||
student_averages = []
|
||||
|
||||
for student in students:
|
||||
# ... calcul simple de la moyenne
|
||||
student_averages.append(StudentAverage(
|
||||
student_id=student.id,
|
||||
average=average,
|
||||
assessment_count=assessment_count
|
||||
))
|
||||
|
||||
# Statistiques domaines/compétences simplifiées (vides)
|
||||
domains_stats = []
|
||||
for domain in domains:
|
||||
domains_stats.append(DomainStats(
|
||||
id=domain.id,
|
||||
name=domain.name,
|
||||
color=domain.color,
|
||||
mean=None, # ❌ Pas calculé
|
||||
elements_count=0 # ❌ Pas calculé
|
||||
))
|
||||
```
|
||||
|
||||
**Après** (nouveau code):
|
||||
```python
|
||||
# Récupérer toutes les notes en une passe
|
||||
grades_by_student_assessment = {}
|
||||
for student in students:
|
||||
for assessment in assessments:
|
||||
grades_query = (...)
|
||||
grades_by_student_assessment[(student.id, assessment.id)] = grades_result.all()
|
||||
|
||||
# Utiliser le service pour calculer les statistiques
|
||||
stats_service = ClassStatisticsService()
|
||||
student_averages = await stats_service.calculate_student_statistics(
|
||||
students=students,
|
||||
assessments=assessments,
|
||||
grades_by_student_assessment=grades_by_student_assessment,
|
||||
domains=domains,
|
||||
competences=competences,
|
||||
)
|
||||
|
||||
# Agréger les statistiques domaines/compétences
|
||||
domains_stats, competences_stats = stats_service.aggregate_domain_competence_stats(
|
||||
student_averages=student_averages,
|
||||
domains=domains,
|
||||
competences=competences,
|
||||
)
|
||||
```
|
||||
|
||||
#### Avantages
|
||||
- ✅ Code modulaire et testable
|
||||
- ✅ Séparation des responsabilités (service vs controller)
|
||||
- ✅ Statistiques complètes calculées automatiquement
|
||||
- ✅ Données enrichies retournées au frontend
|
||||
|
||||
---
|
||||
|
||||
## 🎨 Modifications Frontend
|
||||
|
||||
### 1. Script Vue.js (`frontend/src/views/ClassDashboardView.vue`)
|
||||
|
||||
#### Variables ajoutées
|
||||
|
||||
```javascript
|
||||
const sortColumn = ref('name') // Colonne de tri active
|
||||
const sortDirection = ref('asc') // Direction du tri
|
||||
```
|
||||
|
||||
#### Computed ajoutés
|
||||
|
||||
**assessments** - Extraction des évaluations
|
||||
```javascript
|
||||
const assessments = computed(() => {
|
||||
if (!stats.value?.student_averages?.length) return []
|
||||
|
||||
const firstStudent = stats.value.student_averages[0]
|
||||
if (!firstStudent?.assessment_scores) return []
|
||||
|
||||
// Extraire et trier les évaluations par ID
|
||||
return Object.values(firstStudent.assessment_scores)
|
||||
.sort((a, b) => a.assessment_id - b.assessment_id)
|
||||
})
|
||||
```
|
||||
|
||||
**sortedStudents** - Tri dynamique des élèves
|
||||
```javascript
|
||||
const sortedStudents = computed(() => {
|
||||
if (!stats.value?.student_averages) return []
|
||||
|
||||
const students = [...stats.value.student_averages]
|
||||
|
||||
students.sort((a, b) => {
|
||||
let valA, valB
|
||||
|
||||
if (sortColumn.value === 'name') {
|
||||
valA = `${a.last_name} ${a.first_name}`.toLowerCase()
|
||||
valB = `${b.last_name} ${b.first_name}`.toLowerCase()
|
||||
} else if (sortColumn.value === 'average') {
|
||||
valA = a.average ?? -1
|
||||
valB = b.average ?? -1
|
||||
} else if (sortColumn.value.startsWith('assessment_')) {
|
||||
const assessmentId = parseInt(sortColumn.value.split('_')[1])
|
||||
valA = a.assessment_scores?.[assessmentId]?.score ?? -1
|
||||
valB = b.assessment_scores?.[assessmentId]?.score ?? -1
|
||||
}
|
||||
|
||||
const comparison = valA > valB ? 1 : valA < valB ? -1 : 0
|
||||
return sortDirection.value === 'asc' ? comparison : -comparison
|
||||
})
|
||||
|
||||
return students
|
||||
})
|
||||
```
|
||||
|
||||
#### Fonctions ajoutées
|
||||
|
||||
**sortBy(column)** - Gestion du tri
|
||||
```javascript
|
||||
function sortBy(column) {
|
||||
if (sortColumn.value === column) {
|
||||
// Inverser la direction si même colonne
|
||||
sortDirection.value = sortDirection.value === 'asc' ? 'desc' : 'asc'
|
||||
} else {
|
||||
// Nouvelle colonne : tri ascendant
|
||||
sortColumn.value = column
|
||||
sortDirection.value = 'asc'
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**getAssessmentScore(student, assessmentId)** - Formatage des notes
|
||||
```javascript
|
||||
function getAssessmentScore(student, assessmentId) {
|
||||
const score = student.assessment_scores?.[assessmentId]
|
||||
if (!score || score.score === null) return '-'
|
||||
return `${score.score.toFixed(1)}/${score.max_points.toFixed(0)}`
|
||||
}
|
||||
```
|
||||
|
||||
**getSortIcon(column)** - Indicateur visuel
|
||||
```javascript
|
||||
function getSortIcon(column) {
|
||||
if (sortColumn.value !== column) return ''
|
||||
return sortDirection.value === 'asc' ? '▲' : '▼'
|
||||
}
|
||||
```
|
||||
|
||||
#### Fonctions supprimées
|
||||
|
||||
```javascript
|
||||
// ❌ Supprimé
|
||||
function getPerformanceClass(average) { ... }
|
||||
function getPerformanceLabel(average) { ... }
|
||||
```
|
||||
|
||||
### 2. Template - Tableau des Élèves
|
||||
|
||||
#### Avant
|
||||
```html
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<th>Élève</th>
|
||||
<th>Moyenne</th>
|
||||
<th>Performance</th> <!-- ❌ Supprimé -->
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
<tr v-for="student in stats.student_averages">
|
||||
<td>{{ student.last_name }} {{ student.first_name }}</td>
|
||||
<td>{{ student.average?.toFixed(2) || '-' }}</td>
|
||||
<td>
|
||||
<span :class="getPerformanceClass(student.average)">
|
||||
{{ getPerformanceLabel(student.average) }}
|
||||
</span>
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
```
|
||||
|
||||
#### Après
|
||||
```html
|
||||
<table>
|
||||
<thead>
|
||||
<tr>
|
||||
<!-- Colonne Nom (triable) -->
|
||||
<th @click="sortBy('name')" class="cursor-pointer hover:bg-gray-100">
|
||||
Élève {{ getSortIcon('name') }}
|
||||
</th>
|
||||
|
||||
<!-- Colonne Moyenne (triable) -->
|
||||
<th @click="sortBy('average')" class="cursor-pointer hover:bg-gray-100">
|
||||
Moyenne {{ getSortIcon('average') }}
|
||||
</th>
|
||||
|
||||
<!-- Colonnes dynamiques pour chaque évaluation (triables) -->
|
||||
<th
|
||||
v-for="assessment in assessments"
|
||||
:key="assessment.assessment_id"
|
||||
@click="sortBy(`assessment_${assessment.assessment_id}`)"
|
||||
class="cursor-pointer hover:bg-gray-100"
|
||||
:title="assessment.assessment_title"
|
||||
>
|
||||
<div class="flex flex-col items-center">
|
||||
<span class="truncate max-w-[120px]">
|
||||
{{ assessment.assessment_title }}
|
||||
</span>
|
||||
<span class="text-[10px]">
|
||||
{{ getSortIcon(`assessment_${assessment.assessment_id}`) }}
|
||||
</span>
|
||||
</div>
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
|
||||
<tbody>
|
||||
<tr v-for="student in sortedStudents" :key="student.student_id">
|
||||
<td>{{ student.last_name }} {{ student.first_name }}</td>
|
||||
<td class="font-bold text-blue-600">
|
||||
{{ student.average?.toFixed(2) || '-' }}
|
||||
</td>
|
||||
|
||||
<!-- Notes pour chaque évaluation -->
|
||||
<td v-for="assessment in assessments" :key="assessment.assessment_id">
|
||||
{{ getAssessmentScore(student, assessment.assessment_id) }}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
```
|
||||
|
||||
**Changements**:
|
||||
- ✅ Colonnes dynamiques générées depuis `assessments`
|
||||
- ✅ Tri sur toutes les colonnes (clic sur en-tête)
|
||||
- ✅ Indicateur visuel de tri (▲/▼)
|
||||
- ✅ Hover sur en-têtes
|
||||
- ✅ Titre complet en tooltip (`:title`)
|
||||
- ❌ Suppression colonne Performance
|
||||
|
||||
### 3. Template - Domaines/Compétences
|
||||
|
||||
#### Avant
|
||||
```html
|
||||
<div v-for="domain in stats.domains_stats" :key="domain.id">
|
||||
<div class="flex justify-between">
|
||||
<span>{{ domain.name }}</span>
|
||||
<span>{{ domain.mean?.toFixed(1) || '-' }}/20</span> <!-- ❌ -->
|
||||
</div>
|
||||
<div class="progress-bar">
|
||||
<div :style="{ width: `${(domain.mean / 20) * 100}%` }"></div>
|
||||
</div>
|
||||
<p>{{ domain.elements_count }} éléments évalués</p> <!-- ❌ -->
|
||||
</div>
|
||||
```
|
||||
|
||||
#### Après
|
||||
```html
|
||||
<div v-for="domain in stats.domains_stats" :key="domain.id">
|
||||
<div class="flex justify-between">
|
||||
<span>{{ domain.name }}</span>
|
||||
<!-- ✅ Affichage points obtenus / points possibles -->
|
||||
<span>
|
||||
{{ domain.total_points_obtained?.toFixed(1) || '0' }}/{{ domain.total_points_possible?.toFixed(0) || '0' }}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<!-- ✅ Barre calculée sur les points réels -->
|
||||
<div class="progress-bar">
|
||||
<div :style="{
|
||||
width: `${domain.total_points_possible > 0
|
||||
? (domain.total_points_obtained / domain.total_points_possible) * 100
|
||||
: 0}%`
|
||||
}"></div>
|
||||
</div>
|
||||
|
||||
<!-- ✅ Nombre d'évaluations -->
|
||||
<p>{{ domain.evaluation_count }} évaluations</p>
|
||||
</div>
|
||||
```
|
||||
|
||||
**Changements**:
|
||||
- ✅ Affichage `total_points_obtained / total_points_possible`
|
||||
- ✅ Texte "X évaluations" au lieu de "X éléments"
|
||||
- ✅ Barre calculée sur les points réels
|
||||
- ❌ Suppression de `mean`
|
||||
|
||||
---
|
||||
|
||||
## 📊 Flux de Données Complet
|
||||
|
||||
### 1. Chargement Initial
|
||||
|
||||
```
|
||||
Frontend (ClassDashboardView.vue)
|
||||
→ fetchData()
|
||||
→ classesStore.fetchClassStats(classId, trimester)
|
||||
→ GET /api/classes/{id}/stats?trimester={t}
|
||||
|
||||
Backend (classes.py)
|
||||
→ get_class_stats()
|
||||
→ Récupérer students, assessments, domains, competences
|
||||
→ Charger toutes les notes (grades_by_student_assessment)
|
||||
→ ClassStatisticsService.calculate_student_statistics()
|
||||
→ Pour chaque élève:
|
||||
→ Calculer scores par évaluation
|
||||
→ Calculer stats par domaine
|
||||
→ Calculer stats par compétence
|
||||
→ Retourner StudentAverage enrichi
|
||||
→ ClassStatisticsService.aggregate_domain_competence_stats()
|
||||
→ Agréger tous les élèves
|
||||
→ Retourner DomainStats et CompetenceStats
|
||||
→ Calculer statistiques globales (mean, median, std_dev)
|
||||
→ Retourner ClassDashboardStats complet
|
||||
|
||||
Frontend (ClassDashboardView.vue)
|
||||
→ stats.value = résultat API
|
||||
→ assessments computed → extrait évaluations
|
||||
→ sortedStudents computed → tri initial
|
||||
→ Affichage tableau
|
||||
```
|
||||
|
||||
### 2. Tri Utilisateur
|
||||
|
||||
```
|
||||
Frontend
|
||||
→ Utilisateur clique sur en-tête de colonne
|
||||
→ sortBy(column) appelée
|
||||
→ Met à jour sortColumn, sortDirection
|
||||
→ sortedStudents computed se recalcule automatiquement
|
||||
→ Vue.js re-rend le tableau
|
||||
```
|
||||
|
||||
### 3. Changement de Trimestre
|
||||
|
||||
```
|
||||
Frontend
|
||||
→ Utilisateur clique sur "Trimestre 2"
|
||||
→ selectTrimester(2) appelée
|
||||
→ Nouvelle requête API avec trimester=2
|
||||
→ stats.value mis à jour
|
||||
→ assessments computed se recalcule
|
||||
→ sortedStudents computed se recalcule
|
||||
→ Tableau re-rendu avec nouvelles données
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Résultat Final
|
||||
|
||||
### Tableau des Élèves - Fonctionnalités
|
||||
|
||||
| Fonctionnalité | Avant | Après |
|
||||
|----------------|-------|-------|
|
||||
| Tri sur colonnes | ❌ | ✅ Toutes colonnes |
|
||||
| Affichage notes évaluations | ❌ | ✅ Toutes visibles |
|
||||
| Indicateurs visuels | ✅ Badges | ❌ Supprimés |
|
||||
| UX | Statique | ✅ Interactive |
|
||||
|
||||
### Tableau Domaines/Compétences - Données
|
||||
|
||||
| Donnée | Avant | Après |
|
||||
|--------|-------|-------|
|
||||
| Moyenne | ✅ X/20 ou X/3 | ❌ Supprimée |
|
||||
| Points obtenus/possibles | ❌ | ✅ XX.X/YY |
|
||||
| Nombre d'évaluations | ❌ "X éléments" | ✅ "X évaluations" |
|
||||
| Barre de progression | Basée sur moyenne | ✅ Basée sur points |
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Pour Tester
|
||||
|
||||
### 1. Lancer le backend
|
||||
```bash
|
||||
cd backend
|
||||
uv run uvicorn api.main:app --reload
|
||||
```
|
||||
|
||||
### 2. Lancer le frontend
|
||||
```bash
|
||||
cd frontend
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### 3. Accéder à une classe
|
||||
```
|
||||
http://localhost:5173/classes/{id}
|
||||
```
|
||||
|
||||
### 4. Vérifier
|
||||
- ✅ Tableau élèves affiche toutes les colonnes d'évaluations
|
||||
- ✅ Clic sur en-tête trie la colonne (nom, moyenne, évaluations)
|
||||
- ✅ Indicateur ▲/▼ s'affiche
|
||||
- ✅ Domaines/Compétences affichent points et nombre d'évaluations
|
||||
- ✅ Pas de badges de performance
|
||||
|
||||
---
|
||||
|
||||
## 📁 Fichiers Modifiés
|
||||
|
||||
### Backend (3 fichiers)
|
||||
|
||||
1. **`backend/schemas/class_group.py`**
|
||||
- Ajout: AssessmentScore, DomainStudentStats, CompetenceStudentStats
|
||||
- Modification: DomainStats, CompetenceStats, StudentAverage
|
||||
|
||||
2. **`backend/domain/services/class_statistics_service.py`** (NOUVEAU)
|
||||
- ClassStatisticsService
|
||||
- calculate_student_statistics()
|
||||
- aggregate_domain_competence_stats()
|
||||
|
||||
3. **`backend/api/routes/classes.py`**
|
||||
- get_class_stats() refactorisé
|
||||
- Utilisation de ClassStatisticsService
|
||||
- Import des nouveaux schemas
|
||||
|
||||
### Frontend (1 fichier)
|
||||
|
||||
4. **`frontend/src/views/ClassDashboardView.vue`**
|
||||
- Script: ajout tri, computed, fonctions
|
||||
- Template: refonte tableau élèves + domaines/compétences
|
||||
- Suppression: fonctions de performance
|
||||
|
||||
---
|
||||
|
||||
## 🎓 Points Clés Techniques
|
||||
|
||||
### Architecture Backend
|
||||
- **Service Layer**: Logique métier isolée dans ClassStatisticsService
|
||||
- **Schema Evolution**: Schemas enrichis pour supporter données complexes
|
||||
- **Performance**: Une requête par élève/évaluation (optimisable avec joinedload)
|
||||
|
||||
### Architecture Frontend
|
||||
- **Reactive Computing**: Tri géré par computed (pas de setState manuel)
|
||||
- **Dynamic Columns**: Colonnes générées depuis les données backend
|
||||
- **UX**: Hover, cursors, indicateurs visuels pour meilleure expérience
|
||||
|
||||
### Coordination
|
||||
- **Contract-First**: Schemas Pydantic garantissent le contrat API
|
||||
- **Type Safety**: Dict[int, Schema] pour accès rapide côté frontend
|
||||
- **Consistency**: Même structure pour domaines et compétences
|
||||
386
docs/DOMAIN_COMPETENCE_PERSPECTIVE.md
Normal file
386
docs/DOMAIN_COMPETENCE_PERSPECTIVE.md
Normal file
@@ -0,0 +1,386 @@
|
||||
# Perspective Enseignant - Domaines et Compétences
|
||||
|
||||
**Date**: 3 décembre 2025
|
||||
**Type**: Changement de perspective - Ce qui a été évalué vs Résultats des élèves
|
||||
|
||||
## 🎯 Objectif du Changement
|
||||
|
||||
Passer d'une **perspective centrée sur les résultats des élèves** à une **perspective centrée sur ce qui a été évalué par l'enseignant**.
|
||||
|
||||
### Avant (Perspective Élèves)
|
||||
```
|
||||
Domaine Algèbre:
|
||||
- 75 évaluations (25 élèves × 3 questions)
|
||||
- 187.5 / 250 points
|
||||
```
|
||||
→ **Difficile à interpréter** : mélange le nombre d'élèves et les évaluations
|
||||
|
||||
### Après (Perspective Enseignant)
|
||||
```
|
||||
Domaine Algèbre:
|
||||
- 3 éléments de notation
|
||||
- 10 points maximum
|
||||
```
|
||||
→ **Clair et actionnable** : l'enseignant voit ce qu'il a créé
|
||||
|
||||
---
|
||||
|
||||
## 📊 Cas d'Usage Concrets
|
||||
|
||||
### Scénario 1: Vérifier l'équilibre d'un trimestre
|
||||
|
||||
**Données affichées**:
|
||||
```
|
||||
Domaines:
|
||||
- Algèbre : 8 éléments, 25 points
|
||||
- Géométrie : 3 éléments, 12 points ← Sous-représenté
|
||||
- Probabilités : 2 éléments, 8 points ← Sous-représenté
|
||||
- Calcul : 6 éléments, 18 points
|
||||
```
|
||||
|
||||
**Action enseignant**: Créer plus d'évaluations sur Géométrie et Probabilités pour le prochain contrôle.
|
||||
|
||||
### Scénario 2: Analyser une évaluation
|
||||
|
||||
**Données affichées**:
|
||||
```
|
||||
Compétences:
|
||||
- Calculer : 5 éléments, 15 points
|
||||
- Raisonner : 3 éléments, 12 points
|
||||
- Communiquer : 1 élément, 5 points ← Sous-représenté
|
||||
```
|
||||
|
||||
**Action enseignant**: Ajouter des questions de communication dans la prochaine évaluation.
|
||||
|
||||
### Scénario 3: Comparer les trimestres
|
||||
|
||||
| Domaine | T1 | T2 | T3 |
|
||||
|---------|----|----|-----|
|
||||
| Algèbre | 8 éléments, 25pts | 6 éléments, 18pts | 10 éléments, 30pts |
|
||||
| Géométrie | 3 éléments, 12pts | 7 éléments, 20pts | 5 éléments, 15pts |
|
||||
|
||||
**Insight**: L'enseignant voit qu'il a moins travaillé la géométrie au T3, peut ajuster pour l'année suivante.
|
||||
|
||||
---
|
||||
|
||||
## 🔧 Modifications Techniques
|
||||
|
||||
### Backend
|
||||
|
||||
#### 1. Nouveau Service (`backend/domain/services/class_statistics_service.py`)
|
||||
|
||||
**Méthode ajoutée**: `calculate_domain_competence_from_elements()`
|
||||
|
||||
**Logique**:
|
||||
```python
|
||||
for assessment in assessments:
|
||||
for exercise in assessment.exercises:
|
||||
for element in exercise.grading_elements:
|
||||
# Compter par domaine
|
||||
if element.domain_id:
|
||||
domain_count[element.domain_id] += 1
|
||||
domain_points[element.domain_id] += element.max_points
|
||||
|
||||
# Compter par compétence (via skill)
|
||||
if element.skill:
|
||||
competence = find_competence_by_name(element.skill)
|
||||
if competence:
|
||||
competence_count[competence.id] += 1
|
||||
competence_points[competence.id] += element.max_points
|
||||
```
|
||||
|
||||
**Retourne**:
|
||||
- `evaluation_count`: Nombre de GradingElements utilisant ce domaine/compétence
|
||||
- `total_points_possible`: Somme des max_points de ces éléments
|
||||
- `total_points_obtained`: 0 (non pertinent dans cette perspective)
|
||||
|
||||
#### 2. Endpoint modifié (`backend/api/routes/classes.py`)
|
||||
|
||||
**Chargement des relations**:
|
||||
```python
|
||||
assessments_query = (
|
||||
select(Assessment)
|
||||
.options(
|
||||
selectinload(Assessment.exercises).selectinload(Exercise.grading_elements)
|
||||
)
|
||||
.where(...)
|
||||
)
|
||||
```
|
||||
|
||||
**Utilisation du nouveau service**:
|
||||
```python
|
||||
# AVANT
|
||||
domains_stats, competences_stats = stats_service.aggregate_domain_competence_stats(
|
||||
student_averages=student_averages,
|
||||
domains=domains,
|
||||
competences=competences,
|
||||
)
|
||||
|
||||
# APRÈS
|
||||
domains_stats, competences_stats = stats_service.calculate_domain_competence_from_elements(
|
||||
assessments=assessments, # Avec exercises et grading_elements chargés
|
||||
domains=domains,
|
||||
competences=competences,
|
||||
)
|
||||
```
|
||||
|
||||
### Frontend
|
||||
|
||||
#### 1. Affichage modifié (`frontend/src/views/ClassDashboardView.vue`)
|
||||
|
||||
**Template**:
|
||||
```vue
|
||||
<!-- AVANT -->
|
||||
<div>
|
||||
<span>{{ domain.name }}</span>
|
||||
<span>{{ domain.total_points_obtained }}/{{ domain.total_points_possible }}</span>
|
||||
<p>{{ domain.evaluation_count }} évaluations</p>
|
||||
</div>
|
||||
|
||||
<!-- APRÈS -->
|
||||
<div>
|
||||
<span>{{ domain.name }}</span>
|
||||
<span>{{ domain.total_points_possible }} points</span>
|
||||
<p>{{ domain.evaluation_count }} élément(s) de notation</p>
|
||||
</div>
|
||||
```
|
||||
|
||||
**Titres modifiés**:
|
||||
- "Statistiques par domaine" → "Évaluations par domaine"
|
||||
- "Statistiques par compétence" → "Évaluations par compétence"
|
||||
|
||||
**Sous-titre ajouté**:
|
||||
- "Perspective enseignant : ce qui a été évalué"
|
||||
|
||||
#### 2. Barre de progression relative
|
||||
|
||||
**Nouvelle logique**:
|
||||
```javascript
|
||||
function getRelativeWidth(item, allItems) {
|
||||
const maxPoints = Math.max(...allItems.map(d => d.total_points_possible || 0))
|
||||
if (maxPoints === 0) return 0
|
||||
return ((item.total_points_possible || 0) / maxPoints) * 100
|
||||
}
|
||||
```
|
||||
|
||||
**Utilisation**:
|
||||
```vue
|
||||
<div class="progress-bar">
|
||||
<div :style="{ width: `${getRelativeWidth(domain, stats.domains_stats)}%` }"></div>
|
||||
</div>
|
||||
```
|
||||
|
||||
La barre montre **la proportion relative** par rapport au domaine le plus évalué, pas un pourcentage absolu.
|
||||
|
||||
---
|
||||
|
||||
## 📈 Comparaison Avant/Après
|
||||
|
||||
### Exemple avec une classe de 25 élèves, 2 contrôles
|
||||
|
||||
**Contrôle 1** (Trimestre 1):
|
||||
- Q1: Algèbre, 5 points
|
||||
- Q2: Algèbre, 3 points
|
||||
- Q3: Géométrie, 4 points
|
||||
|
||||
**Contrôle 2** (Trimestre 1):
|
||||
- Q1: Algèbre, 2 points
|
||||
- Q2: Probabilités, 5 points
|
||||
|
||||
### Avant (Perspective Élèves)
|
||||
|
||||
**Domaine Algèbre**:
|
||||
- `evaluation_count`: 75 (3 questions × 25 élèves)
|
||||
- `total_points_obtained`: 187.5 (moyenne hypothétique)
|
||||
- `total_points_possible`: 250 ((5+3+2) × 25)
|
||||
|
||||
**Affichage**: "75 évaluations - 187.5/250"
|
||||
→ **Illisible** pour l'enseignant
|
||||
|
||||
### Après (Perspective Enseignant)
|
||||
|
||||
**Domaine Algèbre**:
|
||||
- `evaluation_count`: 3 (nombre de questions posées)
|
||||
- `total_points_possible`: 10 (5+3+2)
|
||||
|
||||
**Affichage**: "3 éléments de notation - 10 points"
|
||||
→ **Très lisible** et actionnable
|
||||
|
||||
---
|
||||
|
||||
## 🎓 Avantages Pédagogiques
|
||||
|
||||
### 1. Clarté
|
||||
- **Indépendant du nombre d'élèves**: 3 éléments restent 3 éléments
|
||||
- **Unité cohérente**: Points = barème défini par l'enseignant
|
||||
|
||||
### 2. Planification
|
||||
- **Voir rapidement** quels domaines ont été peu évalués
|
||||
- **Équilibrer** les évaluations entre domaines
|
||||
- **Comparer** les trimestres facilement
|
||||
|
||||
### 3. Réflexivité
|
||||
- **Analyse de pratique**: "Ai-je trop évalué l'algèbre ?"
|
||||
- **Diversification**: "Dois-je ajouter plus de géométrie ?"
|
||||
- **Cohérence**: "Mon programme est-il équilibré ?"
|
||||
|
||||
### 4. Communication
|
||||
- **Conseil de classe**: "J'ai évalué 8 fois l'algèbre ce trimestre"
|
||||
- **Parents**: "Voici la répartition de mes évaluations"
|
||||
- **Équipe pédagogique**: "Comparons nos pratiques d'évaluation"
|
||||
|
||||
---
|
||||
|
||||
## 🔍 Détails d'Implémentation
|
||||
|
||||
### Gestion des Compétences
|
||||
|
||||
**Mapping via `element.skill`**:
|
||||
```python
|
||||
if element.skill:
|
||||
matching_competence = next(
|
||||
(c for c in competences if c.name == element.skill),
|
||||
None
|
||||
)
|
||||
```
|
||||
|
||||
**Logique**:
|
||||
- Les compétences sont stockées dans `GradingElement.skill` (champ texte libre)
|
||||
- Le matching se fait par nom de compétence
|
||||
- Si aucune correspondance: la compétence n'est pas comptée
|
||||
|
||||
**Amélioration future**: Ajouter `competence_id` dans `GradingElement` pour éviter le matching par nom.
|
||||
|
||||
### Performance
|
||||
|
||||
**Requête optimisée**:
|
||||
```python
|
||||
# Un seul trip à la base de données
|
||||
assessments = session.execute(
|
||||
select(Assessment)
|
||||
.options(selectinload(Assessment.exercises).selectinload(Exercise.grading_elements))
|
||||
.where(...)
|
||||
).scalars().all()
|
||||
```
|
||||
|
||||
**Complexité**: O(n) où n = nombre total de GradingElements du trimestre
|
||||
|
||||
**Charge typique**:
|
||||
- 5 évaluations × 15 éléments/évaluation = 75 éléments à parcourir
|
||||
- Temps: < 10ms
|
||||
|
||||
### Données Retournées
|
||||
|
||||
**Structure JSON**:
|
||||
```json
|
||||
{
|
||||
"domains_stats": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Algèbre",
|
||||
"color": "#3B82F6",
|
||||
"evaluation_count": 3,
|
||||
"total_points_obtained": 0.0,
|
||||
"total_points_possible": 10.0
|
||||
}
|
||||
],
|
||||
"competences_stats": [
|
||||
{
|
||||
"id": 1,
|
||||
"name": "Calculer",
|
||||
"color": "#10B981",
|
||||
"evaluation_count": 5,
|
||||
"total_points_obtained": 0.0,
|
||||
"total_points_possible": 15.0
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
**Note**: `total_points_obtained` est à 0 car non pertinent dans cette perspective.
|
||||
|
||||
---
|
||||
|
||||
## 📁 Fichiers Modifiés
|
||||
|
||||
### Backend (2 fichiers)
|
||||
|
||||
1. **`backend/domain/services/class_statistics_service.py`**
|
||||
- Ajout: `calculate_domain_competence_from_elements()`
|
||||
- Logique: Parcours des GradingElements
|
||||
- Retour: DomainStats et CompetenceStats
|
||||
|
||||
2. **`backend/api/routes/classes.py`**
|
||||
- Modification: Chargement des assessments avec relations
|
||||
- Modification: Appel de la nouvelle méthode de calcul
|
||||
|
||||
### Frontend (1 fichier)
|
||||
|
||||
3. **`frontend/src/views/ClassDashboardView.vue`**
|
||||
- Modification: Titres ("Évaluations par..." au lieu de "Statistiques par...")
|
||||
- Modification: Affichage (points seuls au lieu de obtained/possible)
|
||||
- Modification: Texte ("élément(s) de notation" au lieu de "évaluations")
|
||||
- Ajout: Fonction `getRelativeWidth()` pour barre proportionnelle
|
||||
- Ajout: Sous-titre explicatif "Perspective enseignant : ce qui a été évalué"
|
||||
|
||||
---
|
||||
|
||||
## ✅ Tests Suggérés
|
||||
|
||||
### 1. Vérifier le comptage
|
||||
- Créer 3 évaluations avec 2, 3, 4 éléments respectivement
|
||||
- Vérifier: `evaluation_count = 9`
|
||||
|
||||
### 2. Vérifier les points
|
||||
- Élément 1: 5 points
|
||||
- Élément 2: 3 points
|
||||
- Élément 3: 2 points
|
||||
- Vérifier: `total_points_possible = 10`
|
||||
|
||||
### 3. Vérifier le filtrage par domaine
|
||||
- 3 éléments Algèbre, 2 éléments Géométrie
|
||||
- Vérifier: Algèbre montre 3, Géométrie montre 2
|
||||
|
||||
### 4. Vérifier les compétences
|
||||
- Créer des éléments avec `skill = "Calculer"`
|
||||
- Vérifier: Compétence "Calculer" affiche le bon nombre
|
||||
|
||||
### 5. Vérifier l'absence de données
|
||||
- Domaine sans éléments: `evaluation_count = 0, total_points_possible = 0`
|
||||
- Affichage: "0 élément(s) de notation - 0 points"
|
||||
|
||||
---
|
||||
|
||||
## 🚀 Utilisation
|
||||
|
||||
### Accéder au Dashboard
|
||||
```
|
||||
http://localhost:5173/classes/{id}
|
||||
```
|
||||
|
||||
### Vérifier l'affichage
|
||||
1. Sélectionner un trimestre
|
||||
2. Scroller jusqu'aux sections "Évaluations par domaine/compétence"
|
||||
3. Vérifier:
|
||||
- ✅ Titre: "Évaluations par domaine/compétence"
|
||||
- ✅ Sous-titre: "Perspective enseignant : ce qui a été évalué"
|
||||
- ✅ Affichage: "X points" (pas de fraction)
|
||||
- ✅ Texte: "X élément(s) de notation"
|
||||
- ✅ Barre proportionnelle au domaine le plus évalué
|
||||
|
||||
---
|
||||
|
||||
## 🎯 Conclusion
|
||||
|
||||
Cette modification transforme le dashboard d'une **vue résultats** (complexe, orientée élèves) en une **vue enseignant** (simple, orientée pratique pédagogique).
|
||||
|
||||
**Impact pédagogique**:
|
||||
- ✅ Meilleure visibilité sur la répartition des évaluations
|
||||
- ✅ Aide à l'équilibrage du programme
|
||||
- ✅ Facilite la réflexion sur les pratiques d'évaluation
|
||||
- ✅ Indépendant du nombre d'élèves (plus cohérent)
|
||||
|
||||
**Impact technique**:
|
||||
- ✅ Code plus simple (pas de double comptage élèves × éléments)
|
||||
- ✅ Requêtes optimisées avec selectinload
|
||||
- ✅ Données plus claires et exploitables
|
||||
@@ -79,59 +79,65 @@
|
||||
<div v-if="stats" class="grid grid-cols-1 lg:grid-cols-2 gap-6 mb-8">
|
||||
<!-- Domaines -->
|
||||
<div v-if="stats.domains_stats?.length" class="bg-white rounded-xl shadow-md p-6">
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Performance par domaine</h2>
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Évaluations par domaine</h2>
|
||||
<p class="text-xs text-gray-500 mb-4">Perspective enseignant : ce qui a été évalué</p>
|
||||
<div class="space-y-4">
|
||||
<div v-for="domain in stats.domains_stats" :key="domain.id" class="space-y-1">
|
||||
<div class="flex justify-between text-sm">
|
||||
<span class="font-medium text-gray-700 truncate" :title="domain.name">{{ domain.name }}</span>
|
||||
<span class="font-bold" :style="{ color: domain.color || '#6B7280' }">
|
||||
{{ domain.mean?.toFixed(1) || '-' }}/20
|
||||
{{ domain.total_points_possible?.toFixed(1) || '0' }} points
|
||||
</span>
|
||||
</div>
|
||||
<div class="w-full bg-gray-200 rounded-full h-2">
|
||||
<div
|
||||
class="h-2 rounded-full transition-all duration-1000 ease-out"
|
||||
:style="{
|
||||
width: `${domain.mean ? (domain.mean / 20) * 100 : 0}%`,
|
||||
width: `${getRelativeWidth(domain, stats.domains_stats)}%`,
|
||||
backgroundColor: domain.color || '#6B7280'
|
||||
}"
|
||||
></div>
|
||||
</div>
|
||||
<p class="text-xs text-gray-400">{{ domain.elements_count }} éléments évalués</p>
|
||||
<p class="text-xs text-gray-400">
|
||||
{{ domain.evaluation_count }} élément(s) de notation
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else class="bg-white rounded-xl shadow-md p-6">
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Performance par domaine</h2>
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Évaluations par domaine</h2>
|
||||
<p class="text-sm text-gray-500 italic">Aucune donnée de domaine disponible</p>
|
||||
</div>
|
||||
|
||||
<!-- Compétences -->
|
||||
<div v-if="stats.competences_stats?.length" class="bg-white rounded-xl shadow-md p-6">
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Performance par compétence</h2>
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Évaluations par compétence</h2>
|
||||
<p class="text-xs text-gray-500 mb-4">Perspective enseignant : ce qui a été évalué</p>
|
||||
<div class="space-y-4">
|
||||
<div v-for="competence in stats.competences_stats" :key="competence.id" class="space-y-1">
|
||||
<div class="flex justify-between text-sm">
|
||||
<span class="font-medium text-gray-700 truncate" :title="competence.name">{{ competence.name }}</span>
|
||||
<span class="font-bold" :style="{ color: competence.color || '#6B7280' }">
|
||||
{{ competence.mean?.toFixed(1) || '-' }}/3
|
||||
{{ competence.total_points_possible?.toFixed(1) || '0' }} points
|
||||
</span>
|
||||
</div>
|
||||
<div class="w-full bg-gray-200 rounded-full h-2">
|
||||
<div
|
||||
class="h-2 rounded-full transition-all duration-1000 ease-out"
|
||||
:style="{
|
||||
width: `${competence.mean ? (competence.mean / 3) * 100 : 0}%`,
|
||||
width: `${getRelativeWidth(competence, stats.competences_stats)}%`,
|
||||
backgroundColor: competence.color || '#6B7280'
|
||||
}"
|
||||
></div>
|
||||
</div>
|
||||
<p class="text-xs text-gray-400">{{ competence.elements_count }} éléments évalués</p>
|
||||
<p class="text-xs text-gray-400">
|
||||
{{ competence.evaluation_count }} élément(s) de notation
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div v-else class="bg-white rounded-xl shadow-md p-6">
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Performance par compétence</h2>
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Évaluations par compétence</h2>
|
||||
<p class="text-sm text-gray-500 italic">Aucune donnée de compétence disponible</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -139,32 +145,53 @@
|
||||
<!-- Students averages -->
|
||||
<div v-if="stats?.student_averages?.length" class="bg-white rounded-xl shadow-md overflow-hidden">
|
||||
<div class="px-6 py-4 border-b border-gray-200">
|
||||
<h2 class="text-lg font-semibold text-gray-800">Moyennes par élève</h2>
|
||||
<h2 class="text-lg font-semibold text-gray-800">Notes par élève</h2>
|
||||
<p class="text-xs text-gray-500 mt-1">Cliquez sur les en-têtes pour trier</p>
|
||||
</div>
|
||||
<div class="overflow-x-auto">
|
||||
<table class="w-full">
|
||||
<thead class="bg-gray-50">
|
||||
<tr>
|
||||
<th class="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider">Élève</th>
|
||||
<th class="px-6 py-3 text-right text-xs font-medium text-gray-500 uppercase tracking-wider">Moyenne</th>
|
||||
<th class="px-6 py-3 text-center text-xs font-medium text-gray-500 uppercase tracking-wider">Performance</th>
|
||||
<th
|
||||
@click="sortBy('name')"
|
||||
class="px-6 py-3 text-left text-xs font-medium text-gray-500 uppercase tracking-wider cursor-pointer hover:bg-gray-100"
|
||||
>
|
||||
Élève {{ getSortIcon('name') }}
|
||||
</th>
|
||||
<th
|
||||
@click="sortBy('average')"
|
||||
class="px-6 py-3 text-right text-xs font-medium text-gray-500 uppercase tracking-wider cursor-pointer hover:bg-gray-100"
|
||||
>
|
||||
Moyenne {{ getSortIcon('average') }}
|
||||
</th>
|
||||
<th
|
||||
v-for="assessment in assessments"
|
||||
:key="assessment.assessment_id"
|
||||
@click="sortBy(`assessment_${assessment.assessment_id}`)"
|
||||
class="px-4 py-3 text-center text-xs font-medium text-gray-500 uppercase tracking-wider cursor-pointer hover:bg-gray-100"
|
||||
:title="assessment.assessment_title"
|
||||
>
|
||||
<div class="flex flex-col items-center">
|
||||
<span class="truncate max-w-[120px]">{{ assessment.assessment_title }}</span>
|
||||
<span class="text-[10px] font-normal">{{ getSortIcon(`assessment_${assessment.assessment_id}`) }}</span>
|
||||
</div>
|
||||
</th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody class="divide-y divide-gray-200">
|
||||
<tr v-for="student in stats.student_averages" :key="student.student_id" class="hover:bg-gray-50">
|
||||
<tr v-for="student in sortedStudents" :key="student.student_id" class="hover:bg-gray-50">
|
||||
<td class="px-6 py-4 whitespace-nowrap text-sm font-medium text-gray-900">
|
||||
{{ student.last_name }} {{ student.first_name }}
|
||||
</td>
|
||||
<td class="px-6 py-4 whitespace-nowrap text-sm text-right font-bold">
|
||||
<td class="px-6 py-4 whitespace-nowrap text-sm text-right font-bold text-blue-600">
|
||||
{{ student.average?.toFixed(2) || '-' }}
|
||||
</td>
|
||||
<td class="px-6 py-4 whitespace-nowrap text-center">
|
||||
<span
|
||||
class="px-2 py-1 text-xs font-medium rounded-full"
|
||||
:class="getPerformanceClass(student.average)"
|
||||
>
|
||||
{{ getPerformanceLabel(student.average) }}
|
||||
</span>
|
||||
<td
|
||||
v-for="assessment in assessments"
|
||||
:key="assessment.assessment_id"
|
||||
class="px-4 py-4 whitespace-nowrap text-sm text-center text-gray-700"
|
||||
>
|
||||
{{ getAssessmentScore(student, assessment.assessment_id) }}
|
||||
</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
@@ -172,15 +199,15 @@
|
||||
</div>
|
||||
</div>
|
||||
<div v-else-if="stats" class="bg-white rounded-xl shadow-md p-6">
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Moyennes par élève</h2>
|
||||
<p class="text-sm text-gray-500 italic text-center py-4">Aucune moyenne disponible pour ce trimestre</p>
|
||||
<h2 class="text-lg font-semibold text-gray-800 mb-4">Notes par élève</h2>
|
||||
<p class="text-sm text-gray-500 italic text-center py-4">Aucune note disponible pour ce trimestre</p>
|
||||
</div>
|
||||
</template>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup>
|
||||
import { ref, onMounted } from 'vue'
|
||||
import { ref, computed, onMounted } from 'vue'
|
||||
import { useRoute } from 'vue-router'
|
||||
import { useClassesStore } from '@/stores/classes'
|
||||
import LoadingSpinner from '@/components/common/LoadingSpinner.vue'
|
||||
@@ -192,6 +219,8 @@ const loading = ref(true)
|
||||
const classData = ref(null)
|
||||
const stats = ref(null)
|
||||
const trimester = ref(1)
|
||||
const sortColumn = ref('name')
|
||||
const sortDirection = ref('asc')
|
||||
|
||||
async function fetchData() {
|
||||
loading.value = true
|
||||
@@ -209,21 +238,71 @@ async function selectTrimester(t) {
|
||||
stats.value = await classesStore.fetchClassStats(route.params.id, t)
|
||||
}
|
||||
|
||||
// Fonctions pour les labels de performance
|
||||
function getPerformanceClass(average) {
|
||||
if (average === null || average === undefined) return 'bg-gray-100 text-gray-600'
|
||||
if (average >= 16) return 'bg-green-100 text-green-800'
|
||||
if (average >= 12) return 'bg-blue-100 text-blue-800'
|
||||
if (average >= 8) return 'bg-orange-100 text-orange-800'
|
||||
return 'bg-red-100 text-red-800'
|
||||
// Récupérer la liste des évaluations triée par date
|
||||
const assessments = computed(() => {
|
||||
if (!stats.value?.student_averages?.length) return []
|
||||
|
||||
// Extraire les évaluations depuis le premier élève
|
||||
const firstStudent = stats.value.student_averages[0]
|
||||
if (!firstStudent?.assessment_scores) return []
|
||||
|
||||
return Object.values(firstStudent.assessment_scores).sort((a, b) => a.assessment_id - b.assessment_id)
|
||||
})
|
||||
|
||||
// Fonction de tri des élèves
|
||||
const sortedStudents = computed(() => {
|
||||
if (!stats.value?.student_averages) return []
|
||||
|
||||
const students = [...stats.value.student_averages]
|
||||
|
||||
students.sort((a, b) => {
|
||||
let valA, valB
|
||||
|
||||
if (sortColumn.value === 'name') {
|
||||
valA = `${a.last_name} ${a.first_name}`.toLowerCase()
|
||||
valB = `${b.last_name} ${b.first_name}`.toLowerCase()
|
||||
} else if (sortColumn.value === 'average') {
|
||||
valA = a.average ?? -1
|
||||
valB = b.average ?? -1
|
||||
} else if (sortColumn.value.startsWith('assessment_')) {
|
||||
const assessmentId = parseInt(sortColumn.value.split('_')[1])
|
||||
valA = a.assessment_scores?.[assessmentId]?.score ?? -1
|
||||
valB = b.assessment_scores?.[assessmentId]?.score ?? -1
|
||||
} else {
|
||||
return 0
|
||||
}
|
||||
|
||||
const comparison = valA > valB ? 1 : valA < valB ? -1 : 0
|
||||
return sortDirection.value === 'asc' ? comparison : -comparison
|
||||
})
|
||||
|
||||
return students
|
||||
})
|
||||
|
||||
function sortBy(column) {
|
||||
if (sortColumn.value === column) {
|
||||
sortDirection.value = sortDirection.value === 'asc' ? 'desc' : 'asc'
|
||||
} else {
|
||||
sortColumn.value = column
|
||||
sortDirection.value = 'asc'
|
||||
}
|
||||
}
|
||||
|
||||
function getPerformanceLabel(average) {
|
||||
if (average === null || average === undefined) return '-'
|
||||
if (average >= 16) return 'Excellent'
|
||||
if (average >= 12) return 'Bon'
|
||||
if (average >= 8) return 'Moyen'
|
||||
return 'Insuffisant'
|
||||
function getAssessmentScore(student, assessmentId) {
|
||||
const score = student.assessment_scores?.[assessmentId]
|
||||
if (!score || score.score === null) return '-'
|
||||
return `${score.score.toFixed(1)}/${score.max_points.toFixed(0)}`
|
||||
}
|
||||
|
||||
function getSortIcon(column) {
|
||||
if (sortColumn.value !== column) return ''
|
||||
return sortDirection.value === 'asc' ? '▲' : '▼'
|
||||
}
|
||||
|
||||
function getRelativeWidth(item, allItems) {
|
||||
const maxPoints = Math.max(...allItems.map(d => d.total_points_possible || 0))
|
||||
if (maxPoints === 0) return 0
|
||||
return ((item.total_points_possible || 0) / maxPoints) * 100
|
||||
}
|
||||
|
||||
onMounted(fetchData)
|
||||
|
||||
Reference in New Issue
Block a user