Compare commits
57 Commits
Author | SHA1 | Date | |
---|---|---|---|
b8eef461f8 | |||
a953631d19 | |||
ccb59975f7 | |||
0ebc24ff29 | |||
07595f1fd8 | |||
f3302e2132 | |||
3b98a881e7 | |||
356db507eb | |||
066990d109 | |||
94c942d055 | |||
12b3220170 | |||
6eec1f83bb | |||
36e90a004e | |||
a95ce91b29 | |||
b8a769b96d | |||
6f5b479426 | |||
5cf062c7a0 | |||
c541d0063f | |||
c7eb8e44d2 | |||
febe686688 | |||
ccf1655cf4 | |||
4a16444835 | |||
9ec183c3a5 | |||
e5a50e0be8 | |||
fd567c292d | |||
4e13d0e32f | |||
de9a4bc4be | |||
2dc1cf6fb8 | |||
7d908775a9 | |||
dfd0bb2b81 | |||
f73ad3a34d | |||
fe92433311 | |||
6fbe238e59 | |||
566ba8e2f5 | |||
2444bf38a1 | |||
6f486a6f3c | |||
5735d344c5 | |||
b3bb1f0cc8 | |||
21821c275e | |||
8911f8ddeb | |||
4e8addd6cb | |||
ba750989d8 | |||
309e9627e7 | |||
b4df4d6c09 | |||
723092c38f | |||
77b26f1180 | |||
b50227a658 | |||
4f4fc53253 | |||
67b9a1e8a0 | |||
a3c44321bf | |||
e496d86828 | |||
c4fcb6a0ef | |||
a2a0269f39 | |||
64c28427c9 | |||
997e194b6d | |||
6a44ca033f | |||
a7aeb12844 |
5
.gitignore
vendored
5
.gitignore
vendored
@@ -123,6 +123,5 @@ dmypy.json
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# vim
|
||||
.vim
|
||||
|
||||
# temporary database
|
||||
sqlite.db
|
||||
|
19
.pre-commit-config.yaml
Normal file
19
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
# See https://pre-commit.com for more information
|
||||
# See https://pre-commit.com/hooks.html for more hooks
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.0.1
|
||||
hooks:
|
||||
- id: check-toml
|
||||
- id: check-yaml
|
||||
- id: end-of-file-fixer
|
||||
- id: mixed-line-ending
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 22.3.0
|
||||
hooks:
|
||||
- id: black
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.10.1
|
||||
hooks:
|
||||
- id: isort
|
||||
args: ["--profile", "black"]
|
35
README.md
35
README.md
@@ -1,34 +1,19 @@
|
||||
# Encore une autre façon d'enregistrer et d'analyser mes notes
|
||||
# Recopytex
|
||||
|
||||
Cette fois ci, on utilise:
|
||||
One more rewrite of Opytex. This time, it is more a learning project than an operational project.
|
||||
|
||||
- Des fichiers csv pour stocker les notes
|
||||
- Des fichiers yaml pour les infos sur les élèves
|
||||
- Des notebooks pour l'analyse
|
||||
- Papermill pour produire les notesbooks à partir de template
|
||||
I am following the book *Architecture Patterns with Python* by Harry J.W. Percival and Bob Gregory. The project will follow the TDD method using DDD.
|
||||
|
||||
## Les fichiers CSV
|
||||
## Backend API
|
||||
|
||||
les paramètres sont décris dans ./recopytex/config.py
|
||||
It uses **fastapi**, **sqlalchemy**.
|
||||
|
||||
### Descriptions des questions
|
||||
### Installing
|
||||
|
||||
- Trimestre
|
||||
- Nom
|
||||
- Date
|
||||
- Exercice
|
||||
- Question
|
||||
- Competence
|
||||
- Domaine
|
||||
- Commentaire
|
||||
- Bareme
|
||||
- Est_nivele
|
||||
poetry install
|
||||
|
||||
### Fire up
|
||||
|
||||
### Valeurs pour notes les élèves
|
||||
|
||||
- Score: 0, 1, 2, 3
|
||||
- Pas de réponses: .
|
||||
- Absent: a
|
||||
- Dispensé: (vide)
|
||||
uvicorn backend.api.main:app --reload
|
||||
|
||||
## Frontend
|
||||
|
49
backend/adapters/orm.py
Normal file
49
backend/adapters/orm.py
Normal file
@@ -0,0 +1,49 @@
|
||||
from sqlalchemy import Column, ForeignKey, MetaData, String, Table
|
||||
from sqlalchemy.orm import backref, registry, relationship
|
||||
|
||||
from backend.model.assessment import Assessment
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
|
||||
# Classical (imperative) SQLAlchemy mapping: the tables are declared here and
# bound to the plain domain dataclasses by start_mappers() below.
metadata = MetaData()
mapper_registry = registry()

# One row per class group; the tribe name doubles as the primary key.
tribes_table = Table(
    "tribes",
    metadata,
    Column("name", String(255), primary_key=True),
    Column("level", String(255)),
)

# Assessments reference their tribe through tribe_name.
assessments_table = Table(
    "assessments",
    metadata,
    Column("id", String(255), primary_key=True),
    Column("name", String(255)),
    Column("tribe_name", String(255), ForeignKey("tribes.name")),
)

# Students reference their tribe through tribe_name.
students_table = Table(
    "students",
    metadata,
    Column("id", String(255), primary_key=True),
    Column("name", String(255)),
    Column("tribe_name", String(255), ForeignKey("tribes.name")),
)
|
||||
|
||||
|
||||
def start_mappers():
    """Bind the domain dataclasses to their tables (imperative mapping).

    Tribe gets one-to-many ``students`` and ``assessments`` relationships;
    the child classes carry a ``tribes`` backref.

    Fix: dropped the unused ``tribes_mapper``/``students_mapper``/
    ``assessments_mapper`` local bindings — the registry keeps the mappers.
    """
    mapper_registry.map_imperatively(
        Tribe,
        tribes_table,
        properties={
            "students": relationship(
                Student, backref="tribes", order_by=students_table.c.id
            ),
            "assessments": relationship(
                Assessment, backref="tribes", order_by=assessments_table.c.id
            ),
        },
    )
    mapper_registry.map_imperatively(Student, students_table)
    mapper_registry.map_imperatively(Assessment, assessments_table)
|
33
backend/adapters/sqlite.py
Normal file
33
backend/adapters/sqlite.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import sqlite3
|
||||
|
||||
|
||||
def create_tribe_table(conn) -> None:
    """Create the ``tribes`` table (name PK, level) if it does not exist yet."""
    conn.cursor().execute(
        """
        CREATE TABLE IF NOT EXISTS tribes(
            name VARCHAR PRIMARY KEY UNIQUE,
            level VARCHAR
        )
        """
    )
    conn.commit()
|
||||
|
||||
|
||||
def create_student_table(conn) -> None:
    """Create the ``students`` table (id PK, name, tribe_name) if missing."""
    conn.cursor().execute(
        """
        CREATE TABLE IF NOT EXISTS students(
            id VARCHAR(500) PRIMARY KEY UNIQUE,
            name VARCHAR,
            tribe_name VARCHAR
        )
        """
    )
    conn.commit()
|
||||
|
||||
|
||||
def create_db(conn) -> None:
    """Ensure every application table exists on *conn*."""
    for build_table in (create_tribe_table, create_student_table):
        build_table(conn)
|
194
backend/api/main.py
Normal file
194
backend/api/main.py
Normal file
@@ -0,0 +1,194 @@
|
||||
import sqlite3
|
||||
|
||||
from fastapi import FastAPI, status
|
||||
from fastapi.responses import JSONResponse, RedirectResponse, Response
|
||||
|
||||
from backend.adapters.sqlite import create_db
|
||||
from backend.api.model import StudentModel, TribeModel
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.student_sqlite_repository import StudentSQLiteRepository
|
||||
from backend.repository.tribe_sqlite_repository import (
|
||||
TribeRepositoryError,
|
||||
TribeSQLiteRepository,
|
||||
)
|
||||
from backend.service import services
|
||||
from backend.service.services import StudentDoesExist, TribeDoesNotExist, TribeExists
|
||||
|
||||
# from sqlalchemy import create_engine
|
||||
# from sqlalchemy.orm import clear_mappers, sessionmaker
|
||||
# import backend.adapters.orm as orm
|
||||
# from backend.repository.tribe_sqlalchemy_repository import TribeSQLAlchemyRepository
|
||||
|
||||
# orm.start_mappers()
|
||||
# engine = create_engine("sqlite:///:memory:")
|
||||
# orm.metadata.create_all(engine)
|
||||
# session = sessionmaker(bind=engine)()
|
||||
# tribe_repo = TribeSQLAlchemyRepository(session)
|
||||
|
||||
# Module-level side effect: importing this module opens (or creates) the
# on-disk database sqlite.db and ensures the schema exists.
conn = sqlite3.connect("sqlite.db")
create_db(conn)

# Both repositories share the single module-level connection; commits are
# issued by the service layer.
tribe_repo = TribeSQLiteRepository(conn)
student_repo = StudentSQLiteRepository(conn)

app = FastAPI()
|
||||
|
||||
|
||||
@app.post("/tribes", response_class=RedirectResponse, status_code=status.HTTP_302_FOUND)
async def post_tribe(item: TribeModel):
    """Create a tribe and redirect to its resource URL; 409 on duplicates."""
    try:
        created = services.add_tribe(
            name=item.name, level=item.level, tribe_repo=tribe_repo, conn=conn
        )
    except TribeExists:
        conflict = f"The tribe {item.name} already exists"
        return JSONResponse(status_code=status.HTTP_409_CONFLICT, content=conflict)
    return f"/tribes/{created.name}"
|
||||
|
||||
|
||||
@app.put(
    "/tribes/{name}", response_class=RedirectResponse, status_code=status.HTTP_302_FOUND
)
async def put_tribe(name: str, item: TribeModel):
    """Update the tribe addressed by the path; 400 on id mismatch, 409 if unknown.

    Bug fix: the original passed ``item.name`` to the service and ignored the
    path parameter, so ``PUT /tribes/A`` with body name ``B`` silently updated
    tribe B.  Mirror ``put_student``: reject mismatched identifiers.
    """
    if name != item.name:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content="Url name and tribe name are not the same",
        )
    try:
        tribe = services.update_tribe(
            name=name, level=item.level, tribe_repo=tribe_repo, conn=conn
        )
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {name} does not exists",
        )

    return f"/tribes/{tribe.name}"
|
||||
|
||||
|
||||
@app.delete("/tribes/{name}")
async def delete_tribe(name: str):
    """Delete the named tribe: 204 on success, 409 when it is unknown."""
    try:
        services.delete_tribe(name=name, tribe_repo=tribe_repo, conn=conn)
    except TribeDoesNotExist:
        message = f"The tribe {name} does not exists"
        return JSONResponse(status_code=status.HTTP_409_CONFLICT, content=message)
    return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
|
||||
@app.get("/tribes", response_model=list[TribeModel])
async def list_tribes():
    """Return every stored tribe as a plain dict payload."""
    return [tribe.to_dict() for tribe in tribe_repo.list()]
|
||||
|
||||
|
||||
@app.get("/tribes/{name}", response_model=TribeModel)
async def get_tribe(name: str):
    """Fetch one tribe by name.

    Fix: an unknown name used to let TribeRepositoryError escape and produce
    a 500; it now yields an explicit 404.
    """
    try:
        tribe = tribe_repo.get(name)
    except TribeRepositoryError:
        return JSONResponse(
            status_code=status.HTTP_404_NOT_FOUND,
            content=f"The tribe {name} does not exists",
        )

    return tribe.to_dict()
|
||||
|
||||
|
||||
@app.post(
    "/students", response_class=RedirectResponse, status_code=status.HTTP_302_FOUND
)
async def post_student(item: StudentModel):
    """Create a student in an existing tribe and redirect to its URL.

    409 when the body carries an id (ids are server-generated) or when the
    tribe is unknown.  Fix: removed the pointless f-prefix on the constant
    message string.
    """
    if item.id is not None:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content="You can't post a student with an id. It is already registrered. Use PUT to modify it.",
        )

    try:
        student = services.add_student(
            name=item.name,
            tribe=item.tribe_name,
            tribe_repo=tribe_repo,
            student_repo=student_repo,
            conn=conn,
        )
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {item.tribe_name} does not exists. You can't add a student in it.",
        )

    return f"/students/{student.id}"
|
||||
|
||||
|
||||
@app.get("/students/{id}", status_code=status.HTTP_200_OK, response_model=StudentModel)
async def get_student(id: str):
    """Fetch one student; tribes are loaded first to rebuild the domain object."""
    all_tribes = tribe_repo.list()
    return student_repo.get(id, all_tribes).to_dict()
|
||||
|
||||
|
||||
@app.get("/students", status_code=status.HTTP_200_OK, response_model=list[StudentModel])
async def list_students():
    """Return every student; tribes are fetched once to resolve references."""
    all_tribes = tribe_repo.list()
    return [student.to_dict() for student in student_repo.list(all_tribes)]
|
||||
|
||||
|
||||
@app.put(
    "/students/{student_id}",
    response_class=RedirectResponse,
    status_code=status.HTTP_302_FOUND,
)
async def put_student(student_id, item: StudentModel):
    """Update the student addressed by the path; redirect on success.

    Fixes two bugs: the 400 message claimed the ids "are the same" when they
    differ, and the tribe-missing branch referenced an undefined local
    ``tribe_name`` (NameError at runtime) instead of ``item.tribe_name``.
    """
    if student_id != item.id:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content="Url and student id are not the same",
        )

    try:
        student = services.update_student(
            id=item.id,
            name=item.name,
            tribe=item.tribe_name,
            tribe_repo=tribe_repo,
            student_repo=student_repo,
            conn=conn,
        )
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {item.tribe_name} does not exists. You can't add a student in it.",
        )
    except StudentDoesExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The student {item.name} ({item.id=}) does not exists. You can't modify it.",
        )

    return f"/students/{student.id}"
|
||||
|
||||
|
||||
@app.delete(
    "/students/{student_id}",
)
async def delete_student(student_id):
    """Delete a student: 204 on success, 409 when the id is unknown.

    Fix: dropped the unused ``student =`` binding — the service returns None.
    """
    try:
        services.delete_student(
            id=student_id,
            student_repo=student_repo,
            conn=conn,
        )
    except StudentDoesExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The student ({student_id=}) does not exists. You can't delete it.",
        )

    return Response(
        status_code=status.HTTP_204_NO_CONTENT,
    )
|
16
backend/api/model.py
Normal file
16
backend/api/model.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from typing import Optional
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class StudentModel(BaseModel):
    """API payload for a student.

    ``id`` is omitted on POST (the server generates it) and required on PUT.
    """

    name: str
    tribe_name: str
    id: Optional[str]
|
||||
|
||||
|
||||
class TribeModel(BaseModel):
    """API payload for a tribe; nested collections default to empty lists."""

    name: str
    level: str
    students: list[StudentModel] | None = []
    assessments: list | None = []
|
16
backend/config.py
Normal file
16
backend/config.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
from backend.adapters.sqlite import create_db
|
||||
|
||||
|
||||
def sqlite_conn(sqlite_file: str = ":memory:"):
    """Open *sqlite_file* (default: an in-memory database) and ensure the schema.

    Bug fix: the default was ``":memory"`` — missing the trailing colon —
    which made sqlite3 create an on-disk file literally named ``:memory``
    instead of an in-memory database.
    """
    conn = sqlite3.connect(sqlite_file)
    create_db(conn)
    return conn
|
||||
|
||||
|
||||
def get_api_url():
    """Build the backend base URL from the API_HOST env var.

    Defaults to http://localhost:8000; any other host is assumed on port 80.
    """
    host = os.environ.get("API_HOST", "localhost")
    if host == "localhost":
        port = 8000
    else:
        port = 80
    return f"http://{host}:{port}"
|
69
backend/model/assessment.py
Normal file
69
backend/model/assessment.py
Normal file
@@ -0,0 +1,69 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import date
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from backend.model.tribe import Tribe
|
||||
|
||||
|
||||
@dataclass
class Assessment:
    """An assessment given to a tribe during a term.

    Registers itself with its tribe at creation time.
    """

    name: str
    tribe: Tribe
    term: int
    exercises: list[Exercise] = field(default_factory=list)

    def __post_init__(self) -> None:
        # Side effect: the owning tribe tracks this assessment.
        self.tribe.register_assessment(self)

    def register_exercise(self, exercise: Exercise):
        """Attach *exercise* to this assessment."""
        self.exercises.append(exercise)

    @property
    def questions(self) -> list:
        """All questions of all exercises, flattened into one list.

        Bug fix: ``sum([exercise.questions ...])`` raised TypeError
        (``0 + list``) as soon as an exercise existed; it now returns a flat
        list ([] when there are no exercises).
        """
        return [q for exercise in self.exercises for q in exercise.questions]
|
||||
|
||||
|
||||
@dataclass
class Exercise:
    """One exercise of an assessment.

    Registers itself with its assessment at creation time.
    """

    name: str
    assessment: Assessment
    # May be None — presumably when the exercise is not scheduled yet; TODO confirm.
    date: Optional[date]
    questions: list[Question] = field(default_factory=list)

    def __post_init__(self) -> None:
        # Side effect: the owning assessment tracks this exercise.
        self.assessment.register_exercise(self)

    def register_question(self, question: Question):
        """Attach *question* to this exercise."""
        self.questions.append(question)
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Domain:
    """Immutable (hence hashable) knowledge domain a question belongs to."""

    name: str
    description: str
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Skill:
    """Immutable (hence hashable) skill evaluated by a question."""

    name: str
    description: str
|
||||
|
||||
|
||||
@dataclass
class Question:
    """One graded question of an exercise.

    Registers itself with its exercise at creation time.
    Fix: cleaned the malformed ``__post_init__(self,)`` signature (stray
    trailing comma, line break and blank line).
    """

    name: str
    exercise: Exercise
    description: str
    skill: Skill
    domain: Domain
    is_leveled: bool
    scale: int

    def __post_init__(self) -> None:
        # Side effect: the owning exercise tracks this question.
        self.exercise.register_question(self)
|
33
backend/model/student.py
Normal file
33
backend/model/student.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
from uuid import UUID, uuid4
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from backend.model.tribe import Tribe
|
||||
|
||||
|
||||
@dataclass
class Student:
    """A student belonging to a tribe.

    Registers itself with its tribe at creation time; identity is the
    auto-generated UUID string ``id``.
    """

    name: str
    tribe: Tribe
    # Stable identity used by __eq__/__hash__ and as the SQLite primary key.
    id: str = field(default_factory=lambda: str(uuid4()))

    def __post_init__(self) -> None:
        # Side effect: the owning tribe tracks (or replaces) this student.
        self.tribe.register_student(self)

    def __eq__(self, other: object) -> bool:
        """Students are equal iff their ids match; never equal to other types."""
        if isinstance(other, Student):
            return self.id == other.id
        return False

    def __hash__(self) -> int:
        return hash(self.id)

    def to_tuple(self) -> tuple:
        """Row shape used by the SQLite repository: (id, name, tribe_name)."""
        return (self.id, self.name, self.tribe.name)

    def to_dict(self, full_tribe=False) -> dict:
        """Serialize for the API. NOTE(review): ``full_tribe`` is currently
        ignored — presumably meant to inline the whole tribe; confirm intent."""
        return {"id": self.id, "name": self.name, "tribe_name": self.tribe.name}
|
55
backend/model/tribe.py
Normal file
55
backend/model/tribe.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from backend.model.assessment import Assessment
|
||||
from backend.model.student import Student
|
||||
|
||||
|
||||
class TribeError(Exception):
    """Domain-level error for invalid tribe operations."""

    pass
|
||||
|
||||
|
||||
@dataclass
class Tribe:
    """A class group: a named level with its students and assessments.

    Identity (equality and hash) is the tribe name alone.
    """

    name: str
    level: str
    students: list[Student] = field(default_factory=list)
    assessments: list[Assessment] = field(default_factory=list)

    def register_assessment(self, assessment: Assessment) -> None:
        """Attach *assessment* to this tribe."""
        self.assessments.append(assessment)

    @property
    def students_id(self) -> list[str]:
        """Ids of every registered student."""
        return [student.id for student in self.students]

    def register_student(self, student: Student) -> None:
        """Register a student.

        A previously registered student with the same id is replaced.
        """
        for registered in self.students:
            if registered.id == student.id:
                self.students.remove(registered)
                break
        self.students.append(student)

    def __eq__(self, other: object) -> bool:
        # Two tribes are the same tribe iff they share a name.
        return isinstance(other, Tribe) and self.name == other.name

    def __hash__(self) -> int:
        return hash(self.name)

    def to_dict(self) -> dict:
        """Serialize for the API (students/assessments are not included)."""
        return {"name": self.name, "level": self.level}

    def to_tuple(self) -> tuple:
        """Row shape used by the SQLite repository: (name, level)."""
        return (self.name, self.level)
|
23
backend/repository/abstract_repository.py
Normal file
23
backend/repository/abstract_repository.py
Normal file
@@ -0,0 +1,23 @@
|
||||
import abc
|
||||
|
||||
|
||||
class AbstractRepository(abc.ABC):
    """Port shared by every persistence adapter (add/update/list/get/delete)."""

    @abc.abstractmethod
    def add(self, element):
        """Persist a new *element*."""
        raise NotImplementedError

    @abc.abstractmethod
    def update(self, reference, element):
        """Replace the element identified by *reference* with *element*."""
        raise NotImplementedError

    @abc.abstractmethod
    def list(self):
        """Return every stored element."""
        raise NotImplementedError

    @abc.abstractmethod
    def get(self, reference):
        """Return the element identified by *reference*."""
        raise NotImplementedError

    @abc.abstractmethod
    def delete(self, element):
        """Remove *element* from storage."""
        raise NotImplementedError
|
105
backend/repository/student_sqlite_repository.py
Normal file
105
backend/repository/student_sqlite_repository.py
Normal file
@@ -0,0 +1,105 @@
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.abstract_repository import AbstractRepository
|
||||
|
||||
|
||||
class StudentRepositoryError(Exception):
    """Raised by StudentSQLiteRepository for unknown or conflicting students."""

    pass
|
||||
|
||||
|
||||
class StudentSQLiteRepository(AbstractRepository):
    """SQLite adapter for students.

    Rows only store the tribe *name*, so ``get``/``list`` take the already
    loaded tribes to rebuild full Student objects.  Commits are left to the
    caller (the service layer).
    """

    def __init__(self, conn) -> None:
        # Raw sqlite3 connection shared with the other repositories.
        self.conn = conn

    def add(self, student: Student) -> None:
        """Insert *student* as a new row."""
        self.conn.execute(
            """
            INSERT INTO students(id, name, tribe_name) VALUES (?, ?, ?)
            """,
            (
                student.id,
                student.name,
                student.tribe.name,
            ),
        )

    def update(self, student: Student) -> None:
        """Overwrite name and tribe of the row matching ``student.id``.

        Raises:
            StudentRepositoryError: when the id is not stored.
        """
        search_student = self.conn.execute(
            """
            SELECT id FROM students WHERE id=:id
            """,
            {"id": student.id},
        ).fetchone()

        if search_student is None:
            raise StudentRepositoryError(f"The student ({student.id=}) does not exists")

        self.conn.execute(
            """
            UPDATE students SET name=:newname, tribe_name=:newtribe WHERE id=:id
            """,
            {
                "newname": student.name,
                "newtribe": student.tribe.name,
                "id": student.id,
            },
        )

    def _rebuild_student(self, row: tuple, tribes: list[Tribe]) -> Student:
        """Turn a (id, name, tribe_name) row back into a Student.

        Fix: removed the leftover debug ``print`` calls.
        """
        tribe = next(filter(lambda t: t.name == row[2], tribes))
        return Student(id=row[0], name=row[1], tribe=tribe)

    def get(self, id: str, tribes: list[Tribe]) -> Student:
        """Return the student with *id*.

        Fix: raises StudentRepositoryError (was ValueError) for consistency
        with update/delete and with TribeSQLiteRepository.
        """
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT id, name, tribe_name FROM students WHERE id=?
            """,
            (id,),
        )

        row = cursor.fetchone()
        if row:
            return self._rebuild_student(row, tribes)

        raise StudentRepositoryError(f"The student ({id=}) does not exists")

    def list(self, tribes: list[Tribe]) -> list[Student]:
        """Return every stored student."""
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT * FROM students
            """
        )

        rows = cursor.fetchall()
        return [self._rebuild_student(r, tribes) for r in rows]

    def list_id(self):
        """Return the id of every stored student."""
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT id FROM students
            """
        )

        rows = cursor.fetchall()
        return [r[0] for r in rows]

    def delete(self, id: str) -> None:
        """Delete the row with *id*; raise StudentRepositoryError if absent."""
        students_id = self.list_id()
        if id not in students_id:
            raise StudentRepositoryError(
                f"The student {id} doesn't exists. Can't delete it."
            )

        self.conn.execute(
            """
            DELETE FROM students WHERE id=:id
            """,
            {
                "id": id,
            },
        )
|
23
backend/repository/tribe_sqlalchemy_repository.py
Normal file
23
backend/repository/tribe_sqlalchemy_repository.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.abstract_repository import AbstractRepository
|
||||
|
||||
|
||||
class TribeSQLAlchemyRepository(AbstractRepository):
    """SQLAlchemy-session-backed adapter for tribes."""

    def __init__(self, session) -> None:
        self.session = session

    def _by_name(self, name: str):
        # Shared name-filtered query used by update/get.
        return self.session.query(Tribe).filter_by(name=name)

    def add(self, tribe: Tribe) -> None:
        """Stage *tribe* for insertion on the session."""
        self.session.add(tribe)

    def update(self, name: str, tribe: Tribe) -> None:
        """Overwrite the row *name* with the fields of *tribe*."""
        self._by_name(name).update(tribe.to_dict())

    def get(self, name: str) -> Tribe:
        """Return the tribe *name*; raises if absent or duplicated."""
        return self._by_name(name).one()

    def list(self) -> list[Tribe]:
        """Return every stored tribe."""
        return self.session.query(Tribe).all()

    def delete(self, tribe: Tribe) -> None:
        """Delete the stored row matching ``tribe.name``."""
        self.session.delete(self.get(tribe.name))
|
89
backend/repository/tribe_sqlite_repository.py
Normal file
89
backend/repository/tribe_sqlite_repository.py
Normal file
@@ -0,0 +1,89 @@
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.abstract_repository import AbstractRepository
|
||||
|
||||
|
||||
class TribeRepositoryError(Exception):
    """Raised by TribeSQLiteRepository for unknown or conflicting tribes."""

    pass
|
||||
|
||||
|
||||
class TribeSQLiteRepository(AbstractRepository):
    """SQLite adapter for tribes; commits are the caller's responsibility."""

    def __init__(self, conn) -> None:
        self.conn = conn

    def _known_names(self) -> list:
        # Names currently stored; used by the existence checks below.
        return [tribe.name for tribe in self.list()]

    def add(self, tribe: Tribe) -> None:
        """Insert *tribe*; raise TribeRepositoryError when the name is taken."""
        if tribe.name in self._known_names():
            raise TribeRepositoryError(
                f"The tribe {tribe.name} already exists. Can't add it"
            )

        self.conn.execute(
            """
            INSERT INTO tribes(name, level) VALUES (?, ?)
            """,
            (
                tribe.name,
                tribe.level,
            ),
        )

    def update(self, name: str, tribe: Tribe) -> None:
        """Rewrite the row *name* with *tribe* (renaming is allowed)."""
        if name not in self._known_names():
            raise TribeRepositoryError(
                f"The tribe {name} doesn't exists. Can't update it"
            )

        self.conn.execute(
            """
            UPDATE tribes SET name=:newname, level=:newlevel WHERE name=:name
            """,
            {
                "newname": tribe.name,
                "newlevel": tribe.level,
                "name": name,
            },
        )

    def get(self, name: str) -> Tribe:
        """Return the tribe *name* or raise TribeRepositoryError."""
        cur = self.conn.cursor()
        cur.execute(
            """
            SELECT * FROM tribes WHERE name=?
            """,
            (name,),
        )
        row = cur.fetchone()
        if row is None:
            raise TribeRepositoryError(f"The tribe {name} does not exists")
        return Tribe(*row)

    def list(self) -> list[Tribe]:
        """Return every stored tribe."""
        cur = self.conn.cursor()
        cur.execute(
            """
            SELECT * FROM tribes
            """
        )
        return [Tribe(*record) for record in cur.fetchall()]

    def delete(self, name: str) -> None:
        """Delete the tribe *name* or raise TribeRepositoryError."""
        if name not in self._known_names():
            raise TribeRepositoryError(
                f"The tribe {name} doesn't exists. Can't delete it."
            )

        self.conn.execute(
            """
            DELETE FROM tribes WHERE name=:name
            """,
            {
                "name": name,
            },
        )
|
116
backend/service/services.py
Normal file
116
backend/service/services.py
Normal file
@@ -0,0 +1,116 @@
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.abstract_repository import AbstractRepository
|
||||
from backend.repository.student_sqlite_repository import StudentRepositoryError
|
||||
from backend.repository.tribe_sqlite_repository import TribeRepositoryError
|
||||
|
||||
|
||||
class TribeExists(Exception):
    """Raised when adding a tribe whose name is already stored."""

    pass


class TribeDoesNotExist(Exception):
    """Raised when the referenced tribe is not in the repository."""

    pass


class StudentExists(Exception):
    """Raised when adding a student that is already stored."""

    pass


class StudentDoesExist(Exception):
    # NOTE(review): misleading name — this is raised when the student does
    # NOT exist.  Kept as-is because the API layer imports it by this name.
    pass
|
||||
|
||||
|
||||
def add_tribe(name: str, level: str, tribe_repo: AbstractRepository, conn) -> Tribe:
    """Create a tribe, persist it and commit.

    Raises TribeExists when the name is already taken.
    """
    new_tribe = Tribe(name=name, level=level)
    try:
        tribe_repo.add(new_tribe)
    except TribeRepositoryError:
        raise TribeExists(f"The tribe {new_tribe.name} already exists")
    conn.commit()
    return new_tribe
|
||||
|
||||
|
||||
def update_tribe(name: str, level: str, tribe_repo: AbstractRepository, conn) -> Tribe:
    """Replace the stored tribe *name* and commit.

    Raises TribeDoesNotExist when the name is unknown.
    """
    updated = Tribe(name=name, level=level)
    try:
        tribe_repo.update(name=name, tribe=updated)
    except TribeRepositoryError:
        raise TribeDoesNotExist(f"The tribe {name} does not exists you can't update it")
    conn.commit()
    return updated
|
||||
|
||||
|
||||
def delete_tribe(name: str, tribe_repo: AbstractRepository, conn) -> None:
    """Delete the named tribe and commit.

    Raises TribeDoesNotExist when the name is unknown.
    """
    try:
        tribe_repo.delete(name=name)
    except TribeRepositoryError:
        raise TribeDoesNotExist(f"The tribe {name} does not exists you can't delete it")
    conn.commit()
|
||||
|
||||
|
||||
def add_student(
    name: str,
    tribe: str,
    student_repo: AbstractRepository,
    tribe_repo: AbstractRepository,
    conn,
) -> Student:
    """Create a student inside an existing tribe, persist it and commit.

    Raises TribeDoesNotExist when the tribe name is unknown, StudentExists
    when the student is already stored.
    """
    try:
        owning_tribe = tribe_repo.get(tribe)
    except TribeRepositoryError:
        raise TribeDoesNotExist(
            f"The tribe {tribe} does not exists. Can't add a student in it"
        )

    student = Student(name=name, tribe=owning_tribe)
    try:
        student_repo.add(student)
    except StudentRepositoryError:
        raise StudentExists(f"The student {student.name} already exists. Can't add it.")

    conn.commit()
    return student
|
||||
|
||||
|
||||
def update_student(
    id: str,
    name: str,
    tribe: str,
    student_repo: AbstractRepository,
    tribe_repo: AbstractRepository,
    conn,
) -> Student:
    """Overwrite the stored student *id* with a new name/tribe and commit.

    Raises TribeDoesNotExist when the tribe is unknown, StudentDoesExist
    when the student id is unknown.
    """
    try:
        owning_tribe = tribe_repo.get(tribe)
    except TribeRepositoryError:
        raise TribeDoesNotExist(
            f"The tribe {tribe} does not exists. Can't update a student with it"
        )

    # Keep the local named ``student``: the message below uses the {=} debug
    # spec, which embeds the expression text in the produced string.
    student = Student(id=id, name=name, tribe=owning_tribe)
    try:
        student_repo.update(student)
    except StudentRepositoryError:
        raise StudentDoesExist(
            f"The student {student.name} ({student.id=}) does not exists. Can't update it."
        )

    conn.commit()
    return student
|
||||
|
||||
|
||||
def delete_student(
    id: str,
    student_repo: AbstractRepository,
    conn,
) -> None:
    """Delete the student with *id* and commit.

    Raises StudentDoesExist when the id is unknown.

    Fixes: the error message was missing its f-prefix so ``{id}`` was emitted
    literally, and the return annotation claimed ``Student`` although nothing
    is returned.
    """
    try:
        student_repo.delete(id=id)
    except StudentRepositoryError:
        raise StudentDoesExist(f"The student with id {id} does not exists")
    conn.commit()
|
@@ -1,32 +0,0 @@
|
||||
---
|
||||
source: ./
|
||||
output: ./
|
||||
templates: templates/
|
||||
|
||||
competences:
|
||||
Chercher:
|
||||
name: Chercher
|
||||
abrv: Cher
|
||||
Représenter:
|
||||
name: Représenter
|
||||
abrv: Rep
|
||||
Modéliser:
|
||||
name: Modéliser
|
||||
abrv: Mod
|
||||
Raisonner:
|
||||
name: Raisonner
|
||||
abrv: Rai
|
||||
Calculer:
|
||||
name: Calculer
|
||||
abrv: Cal
|
||||
Communiquer:
|
||||
name: Communiquer
|
||||
abrv: Com
|
||||
|
||||
|
||||
tribes:
|
||||
- name: Tribe1
|
||||
type: Type1
|
||||
students: tribe1.csv
|
||||
- name: Tribe2
|
||||
students: tribe2.csv
|
@@ -1,21 +0,0 @@
|
||||
Nom,email
|
||||
Star Tice,stice0@jalbum.net
|
||||
Umberto Dingate,udingate1@tumblr.com
|
||||
Starlin Crangle,scrangle2@wufoo.com
|
||||
Humbert Bourcq,hbourcq3@g.co
|
||||
Gabriella Handyside,ghandyside4@patch.com
|
||||
Stewart Eaves,seaves5@ycombinator.com
|
||||
Erick Going,egoing6@va.gov
|
||||
Ase Praton,apraton7@va.gov
|
||||
Rollins Planks,rplanks8@delicious.com
|
||||
Dunstan Sarjant,dsarjant9@naver.com
|
||||
Stacy Guiton,sguitona@themeforest.net
|
||||
Ange Stanes,astanesb@marriott.com
|
||||
Amabelle Elleton,aelletonc@squidoo.com
|
||||
Darn Broomhall,dbroomhalld@cisco.com
|
||||
Dyan Chatto,dchattoe@npr.org
|
||||
Keane Rennebach,krennebachf@dot.gov
|
||||
Nari Paulton,npaultong@gov.uk
|
||||
Brandy Wase,bwaseh@ftc.gov
|
||||
Jaclyn Firidolfi,jfiridolfii@reuters.com
|
||||
Violette Lockney,vlockneyj@chron.com
|
|
@@ -1,21 +0,0 @@
|
||||
Nom,email
|
||||
Elle McKintosh,emckintosh0@1und1.de
|
||||
Ty Megany,tmegany1@reuters.com
|
||||
Pippa Borrows,pborrows2@a8.net
|
||||
Sonny Eskrick,seskrick3@123-reg.co.uk
|
||||
Mollee Britch,mbritch4@usda.gov
|
||||
Ingram Plaistowe,iplaistowe5@purevolume.com
|
||||
Fay Vanyard,fvanyard6@sbwire.com
|
||||
Nancy Rase,nrase7@omniture.com
|
||||
Rachael Ruxton,rruxton8@bravesites.com
|
||||
Tallie Rushmer,trushmer9@home.pl
|
||||
Seward MacIlhagga,smacilhaggaa@hatena.ne.jp
|
||||
Lizette Searl,lsearlb@list-manage.com
|
||||
Talya Mannagh,tmannaghc@webnode.com
|
||||
Jordan Witherbed,jwitherbedd@unesco.org
|
||||
Reagan Botcherby,rbotcherbye@scientificamerican.com
|
||||
Libbie Shoulder,lshoulderf@desdev.cn
|
||||
Abner Khomich,akhomichg@youtube.com
|
||||
Zollie Kitman,zkitmanh@forbes.com
|
||||
Fiorenze Durden,fdurdeni@feedburner.com
|
||||
Kevyn Race,kracej@seattletimes.com
|
|
872
poetry.lock
generated
Normal file
872
poetry.lock
generated
Normal file
@@ -0,0 +1,872 @@
|
||||
# This file is automatically @generated by Poetry and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "anyio"
|
||||
version = "3.6.2"
|
||||
description = "High level compatibility layer for multiple asynchronous event loop implementations"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6.2"
|
||||
files = [
|
||||
{file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"},
|
||||
{file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
idna = ">=2.8"
|
||||
sniffio = ">=1.1"
|
||||
|
||||
[package.extras]
|
||||
doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
|
||||
test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
|
||||
trio = ["trio (>=0.16,<0.22)"]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "22.1.0"
|
||||
description = "Classes Without Boilerplate"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
|
||||
{file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
|
||||
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
|
||||
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
|
||||
tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
|
||||
|
||||
[[package]]
|
||||
name = "certifi"
|
||||
version = "2022.12.7"
|
||||
description = "Python package for providing Mozilla's CA Bundle."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
|
||||
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfgv"
|
||||
version = "3.3.1"
|
||||
description = "Validate configuration and produce human readable error messages."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6.1"
|
||||
files = [
|
||||
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
|
||||
{file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "charset-normalizer"
|
||||
version = "2.1.1"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6.0"
|
||||
files = [
|
||||
{file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
|
||||
{file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
unicode-backport = ["unicodedata2"]
|
||||
|
||||
[[package]]
|
||||
name = "click"
|
||||
version = "8.1.3"
|
||||
description = "Composable command line interface toolkit"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
|
||||
{file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
colorama = {version = "*", markers = "platform_system == \"Windows\""}
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
description = "Cross-platform colored terminal text."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
|
||||
files = [
|
||||
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coverage"
|
||||
version = "7.0.1"
|
||||
description = "Code coverage measurement for Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "coverage-7.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3695c4f4750bca943b3e1f74ad4be8d29e4aeab927d50772c41359107bd5d5c"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa6a5a224b7f4cfb226f4fc55a57e8537fcc096f42219128c2c74c0e7d0953e1"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74f70cd92669394eaf8d7756d1b195c8032cf7bbbdfce3bc489d4e15b3b8cf73"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b66bb21a23680dee0be66557dc6b02a3152ddb55edf9f6723fa4a93368f7158d"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87717959d4d0ee9db08a0f1d80d21eb585aafe30f9b0a54ecf779a69cb015f6"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:854f22fa361d1ff914c7efa347398374cc7d567bdafa48ac3aa22334650dfba2"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e414dc32ee5c3f36544ea466b6f52f28a7af788653744b8570d0bf12ff34bc0"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6c5ad996c6fa4d8ed669cfa1e8551348729d008a2caf81489ab9ea67cfbc7498"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-win32.whl", hash = "sha256:691571f31ace1837838b7e421d3a09a8c00b4aac32efacb4fc9bd0a5c647d25a"},
|
||||
{file = "coverage-7.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:89caf4425fe88889e2973a8e9a3f6f5f9bbe5dd411d7d521e86428c08a873a4a"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:63d56165a7c76265468d7e0c5548215a5ba515fc2cba5232d17df97bffa10f6c"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f943a3b2bc520102dd3e0bb465e1286e12c9a54f58accd71b9e65324d9c7c01"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:830525361249dc4cd013652b0efad645a385707a5ae49350c894b67d23fbb07c"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd1b9c5adc066db699ccf7fa839189a649afcdd9e02cb5dc9d24e67e7922737d"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00c14720b8b3b6c23b487e70bd406abafc976ddc50490f645166f111c419c39"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d55d840e1b8c0002fce66443e124e8581f30f9ead2e54fbf6709fb593181f2c"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66b18c3cf8bbab0cce0d7b9e4262dc830e93588986865a8c78ab2ae324b3ed56"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:12a5aa77783d49e05439fbe6e6b427484f8a0f9f456b46a51d8aac022cfd024d"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-win32.whl", hash = "sha256:b77015d1cb8fe941be1222a5a8b4e3fbca88180cfa7e2d4a4e58aeabadef0ab7"},
|
||||
{file = "coverage-7.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb992c47cb1e5bd6a01e97182400bcc2ba2077080a17fcd7be23aaa6e572e390"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e78e9dcbf4f3853d3ae18a8f9272111242531535ec9e1009fa8ec4a2b74557dc"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60bef2e2416f15fdc05772bf87db06c6a6f9870d1db08fdd019fbec98ae24a9"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9823e4789ab70f3ec88724bba1a203f2856331986cd893dedbe3e23a6cfc1e4e"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9158f8fb06747ac17bd237930c4372336edc85b6e13bdc778e60f9d685c3ca37"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:486ee81fa694b4b796fc5617e376326a088f7b9729c74d9defa211813f3861e4"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1285648428a6101b5f41a18991c84f1c3959cee359e51b8375c5882fc364a13f"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2c44fcfb3781b41409d0f060a4ed748537557de9362a8a9282182fafb7a76ab4"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-win32.whl", hash = "sha256:d6814854c02cbcd9c873c0f3286a02e3ac1250625cca822ca6bc1018c5b19f1c"},
|
||||
{file = "coverage-7.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66460f17c9319ea4f91c165d46840314f0a7c004720b20be58594d162a441d8"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b373c9345c584bb4b5f5b8840df7f4ab48c4cbb7934b58d52c57020d911b856"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d3022c3007d3267a880b5adcf18c2a9bf1fc64469b394a804886b401959b8742"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92651580bd46519067e36493acb394ea0607b55b45bd81dd4e26379ed1871f55"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cfc595d2af13856505631be072835c59f1acf30028d1c860b435c5fc9c15b69"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b4b3a4d9915b2be879aff6299c0a6129f3d08a775d5a061f503cf79571f73e4"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b6f22bb64cc39bcb883e5910f99a27b200fdc14cdd79df8696fa96b0005c9444"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72d1507f152abacea81f65fee38e4ef3ac3c02ff8bc16f21d935fd3a8a4ad910"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a79137fc99815fff6a852c233628e735ec15903cfd16da0f229d9c4d45926ab"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-win32.whl", hash = "sha256:b3763e7fcade2ff6c8e62340af9277f54336920489ceb6a8cd6cc96da52fcc62"},
|
||||
{file = "coverage-7.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:09f6b5a8415b6b3e136d5fec62b552972187265cb705097bf030eb9d4ffb9b60"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:978258fec36c154b5e250d356c59af7d4c3ba02bef4b99cda90b6029441d797d"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19ec666533f0f70a0993f88b8273057b96c07b9d26457b41863ccd021a043b9a"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfded268092a84605f1cc19e5c737f9ce630a8900a3589e9289622db161967e9"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bcfb1d8ac94af886b54e18a88b393f6a73d5959bb31e46644a02453c36e475"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b4a923cc7566bbc7ae2dfd0ba5a039b61d19c740f1373791f2ebd11caea59"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aec2d1515d9d39ff270059fd3afbb3b44e6ec5758af73caf18991807138c7118"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c20cfebcc149a4c212f6491a5f9ff56f41829cd4f607b5be71bb2d530ef243b1"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd556ff16a57a070ce4f31c635953cc44e25244f91a0378c6e9bdfd40fdb249f"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-win32.whl", hash = "sha256:b9ea158775c7c2d3e54530a92da79496fb3fb577c876eec761c23e028f1e216c"},
|
||||
{file = "coverage-7.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:d1991f1dd95eba69d2cd7708ff6c2bbd2426160ffc73c2b81f617a053ebcb1a8"},
|
||||
{file = "coverage-7.0.1-pp37.pp38.pp39-none-any.whl", hash = "sha256:3dd4ee135e08037f458425b8842d24a95a0961831a33f89685ff86b77d378f89"},
|
||||
{file = "coverage-7.0.1.tar.gz", hash = "sha256:a4a574a19eeb67575a5328a5760bbbb737faa685616586a9f9da4281f940109c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
|
||||
|
||||
[package.extras]
|
||||
toml = ["tomli"]
|
||||
|
||||
[[package]]
|
||||
name = "distlib"
|
||||
version = "0.3.6"
|
||||
description = "Distribution utilities"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
|
||||
{file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.0.4"
|
||||
description = "Backport of PEP 654 (exception groups)"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"},
|
||||
{file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
test = ["pytest (>=6)"]
|
||||
|
||||
[[package]]
|
||||
name = "faker"
|
||||
version = "15.3.4"
|
||||
description = "Faker is a Python package that generates fake data for you."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "Faker-15.3.4-py3-none-any.whl", hash = "sha256:c2a2ff9dd8dfd991109b517ab98d5cb465e857acb45f6b643a0e284a9eb2cc76"},
|
||||
{file = "Faker-15.3.4.tar.gz", hash = "sha256:2d5443724f640ce07658ca8ca8bbd40d26b58914e63eec6549727869aa67e2cc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
python-dateutil = ">=2.4"
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.88.0"
|
||||
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "fastapi-0.88.0-py3-none-any.whl", hash = "sha256:263b718bb384422fe3d042ffc9a0c8dece5e034ab6586ff034f6b4b1667c3eee"},
|
||||
{file = "fastapi-0.88.0.tar.gz", hash = "sha256:915bf304180a0e7c5605ec81097b7d4cd8826ff87a02bb198e336fb9f3b5ff02"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
|
||||
starlette = "0.22.0"
|
||||
|
||||
[package.extras]
|
||||
all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
|
||||
dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>=0.12.0,<0.19.0)"]
|
||||
doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"]
|
||||
test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.10.0)", "coverage[toml] (>=6.5.0,<7.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.8.2"
|
||||
description = "A platform independent file lock."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"},
|
||||
{file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
|
||||
testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "2.0.1"
|
||||
description = "Lightweight in-process concurrent programming"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
|
||||
files = [
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"},
|
||||
{file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"},
|
||||
{file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"},
|
||||
{file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"},
|
||||
{file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"},
|
||||
{file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
|
||||
{file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
|
||||
{file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
|
||||
{file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
|
||||
{file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["Sphinx", "docutils (<0.18)"]
|
||||
test = ["faulthandler", "objgraph", "psutil"]
|
||||
|
||||
[[package]]
|
||||
name = "h11"
|
||||
version = "0.14.0"
|
||||
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
|
||||
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
version = "2.5.11"
|
||||
description = "File identification library for Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "identify-2.5.11-py2.py3-none-any.whl", hash = "sha256:e7db36b772b188099616aaf2accbee122949d1c6a1bac4f38196720d6f9f06db"},
|
||||
{file = "identify-2.5.11.tar.gz", hash = "sha256:14b7076b29c99b1b0b8b08e96d448c7b877a9b07683cd8cfda2ea06af85ffa1c"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
license = ["ukkonen"]
|
||||
|
||||
[[package]]
|
||||
name = "idna"
|
||||
version = "3.4"
|
||||
description = "Internationalized Domain Names in Applications (IDNA)"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
files = [
|
||||
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
|
||||
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "1.1.1"
|
||||
description = "iniconfig: brain-dead simple config-ini parsing"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
|
||||
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nodeenv"
|
||||
version = "1.7.0"
|
||||
description = "Node.js virtual environment builder"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
|
||||
files = [
|
||||
{file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
|
||||
{file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
setuptools = "*"
|
||||
|
||||
[[package]]
|
||||
name = "packaging"
|
||||
version = "22.0"
|
||||
description = "Core utilities for Python packages"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"},
|
||||
{file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "platformdirs"
|
||||
version = "2.6.0"
|
||||
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"},
|
||||
{file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
|
||||
test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
|
||||
|
||||
[[package]]
|
||||
name = "pluggy"
|
||||
version = "1.0.0"
|
||||
description = "plugin and hook calling mechanisms for python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
|
||||
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["pre-commit", "tox"]
|
||||
testing = ["pytest", "pytest-benchmark"]
|
||||
|
||||
[[package]]
|
||||
name = "pre-commit"
|
||||
version = "2.20.0"
|
||||
description = "A framework for managing and maintaining multi-language pre-commit hooks."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
|
||||
{file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cfgv = ">=2.0.0"
|
||||
identify = ">=1.0.0"
|
||||
nodeenv = ">=0.11.1"
|
||||
pyyaml = ">=5.1"
|
||||
toml = "*"
|
||||
virtualenv = ">=20.0.8"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "1.10.2"
|
||||
description = "Data validation and settings management using python type hints"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"},
|
||||
{file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"},
|
||||
{file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"},
|
||||
{file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"},
|
||||
{file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"},
|
||||
{file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"},
|
||||
{file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"},
|
||||
{file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"},
|
||||
{file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"},
|
||||
{file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"},
|
||||
{file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"},
|
||||
{file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"},
|
||||
{file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"},
|
||||
{file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"},
|
||||
{file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"},
|
||||
{file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"},
|
||||
{file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"},
|
||||
{file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.1.0"
|
||||
|
||||
[package.extras]
|
||||
dotenv = ["python-dotenv (>=0.10.4)"]
|
||||
email = ["email-validator (>=1.0.3)"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "7.2.0"
|
||||
description = "pytest: simple powerful testing with Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"},
|
||||
{file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
attrs = ">=19.2.0"
|
||||
colorama = {version = "*", markers = "sys_platform == \"win32\""}
|
||||
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
|
||||
iniconfig = "*"
|
||||
packaging = "*"
|
||||
pluggy = ">=0.12,<2.0"
|
||||
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-cov"
|
||||
version = "4.0.0"
|
||||
description = "Pytest plugin for measuring coverage."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"},
|
||||
{file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
coverage = {version = ">=5.2.1", extras = ["toml"]}
|
||||
pytest = ">=4.6"
|
||||
|
||||
[package.extras]
|
||||
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
|
||||
|
||||
[[package]]
|
||||
name = "python-dateutil"
|
||||
version = "2.8.2"
|
||||
description = "Extensions to the standard Python datetime module"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||
files = [
|
||||
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
|
||||
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.5"
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0"
|
||||
description = "YAML parser and emitter for Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
|
||||
{file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
|
||||
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
|
||||
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
|
||||
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
|
||||
{file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
|
||||
{file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
|
||||
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
|
||||
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
|
||||
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
|
||||
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
|
||||
{file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
|
||||
{file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
|
||||
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
|
||||
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "requests"
|
||||
version = "2.28.1"
|
||||
description = "Python HTTP for Humans."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7, <4"
|
||||
files = [
|
||||
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
|
||||
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2017.4.17"
|
||||
charset-normalizer = ">=2,<3"
|
||||
idna = ">=2.5,<4"
|
||||
urllib3 = ">=1.21.1,<1.27"
|
||||
|
||||
[package.extras]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
|
||||
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "65.6.3"
|
||||
description = "Easily download, build, install, upgrade, and uninstall Python packages"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
|
||||
{file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
|
||||
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
|
||||
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
version = "1.16.0"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
files = [
|
||||
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.0"
|
||||
description = "Sniff out which async library your code is running under"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
|
||||
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sqlalchemy"
|
||||
version = "1.4.45"
|
||||
description = "Database Abstraction Library"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
|
||||
files = [
|
||||
{file = "SQLAlchemy-1.4.45-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f1d3fb02a4d0b07d1351a4a52f159e5e7b3045c903468b7e9349ebf0020ffdb9"},
|
||||
{file = "SQLAlchemy-1.4.45-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b7025d46aba946272f6b6b357a22f3787473ef27451f342df1a2a6de23743e3"},
|
||||
{file = "SQLAlchemy-1.4.45-cp27-cp27m-win32.whl", hash = "sha256:26b8424b32eeefa4faad21decd7bdd4aade58640b39407bf43e7d0a7c1bc0453"},
|
||||
{file = "SQLAlchemy-1.4.45-cp27-cp27m-win_amd64.whl", hash = "sha256:13578d1cda69bc5e76c59fec9180d6db7ceb71c1360a4d7861c37d87ea6ca0b1"},
|
||||
{file = "SQLAlchemy-1.4.45-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6cd53b4c756a6f9c6518a3dc9c05a38840f9ae442c91fe1abde50d73651b6922"},
|
||||
{file = "SQLAlchemy-1.4.45-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:ca152ffc7f0aa069c95fba46165030267ec5e4bb0107aba45e5e9e86fe4d9363"},
|
||||
{file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06055476d38ed7915eeed22b78580556d446d175c3574a01b9eb04d91f3a8b2e"},
|
||||
{file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:081e2a2d75466353c738ca2ee71c0cfb08229b4f9909b5fa085f75c48d021471"},
|
||||
{file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96821d806c0c90c68ce3f2ce6dd529c10e5d7587961f31dd5c30e3bfddc4545d"},
|
||||
{file = "SQLAlchemy-1.4.45-cp310-cp310-win32.whl", hash = "sha256:c8051bff4ce48cbc98f11e95ac46bfd1e36272401070c010248a3230d099663f"},
|
||||
{file = "SQLAlchemy-1.4.45-cp310-cp310-win_amd64.whl", hash = "sha256:16ad798fc121cad5ea019eb2297127b08c54e1aa95fe17b3fea9fdbc5c34fe62"},
|
||||
{file = "SQLAlchemy-1.4.45-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099efef0de9fbda4c2d7cb129e4e7f812007901942259d4e6c6e19bd69de1088"},
|
||||
{file = "SQLAlchemy-1.4.45-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a29d02c9e6f6b105580c5ed7afb722b97bc2e2fdb85e1d45d7ddd8440cfbca"},
|
||||
{file = "SQLAlchemy-1.4.45-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc10423b59d6d032d6dff0bb42aa06dc6a8824eb6029d70c7d1b6981a2e7f4d8"},
|
||||
{file = "SQLAlchemy-1.4.45-cp311-cp311-win32.whl", hash = "sha256:1a92685db3b0682776a5abcb5f9e9addb3d7d9a6d841a452a17ec2d8d457bea7"},
|
||||
{file = "SQLAlchemy-1.4.45-cp311-cp311-win_amd64.whl", hash = "sha256:db3ccbce4a861bf4338b254f95916fc68dd8b7aa50eea838ecdaf3a52810e9c0"},
|
||||
{file = "SQLAlchemy-1.4.45-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a62ae2ea3b940ce9c9cbd675489c2047921ce0a79f971d3082978be91bd58117"},
|
||||
{file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a87f8595390764db333a1705591d0934973d132af607f4fa8b792b366eacbb3c"},
|
||||
{file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a21c1fb71c69c8ec65430160cd3eee44bbcea15b5a4e556f29d03f246f425ec"},
|
||||
{file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7944b04e6fcf8d733964dd9ee36b6a587251a1a4049af3a9b846f6e64eb349a"},
|
||||
{file = "SQLAlchemy-1.4.45-cp36-cp36m-win32.whl", hash = "sha256:a3bcd5e2049ceb97e8c273e6a84ff4abcfa1dc47b6d8bbd36e07cce7176610d3"},
|
||||
{file = "SQLAlchemy-1.4.45-cp36-cp36m-win_amd64.whl", hash = "sha256:5953e225be47d80410ae519f865b5c341f541d8e383fb6d11f67fb71a45bf890"},
|
||||
{file = "SQLAlchemy-1.4.45-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:6a91b7883cb7855a27bc0637166eed622fdf1bb94a4d1630165e5dd88c7e64d3"},
|
||||
{file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d458fd0566bc9e10b8be857f089e96b5ca1b1ef033226f24512f9ffdf485a8c0"},
|
||||
{file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f4ad3b081c0dbb738886f8d425a5d983328670ee83b38192687d78fc82bd1e"},
|
||||
{file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd95a3e6ab46da2c5b0703e797a772f3fab44d085b3919a4f27339aa3b1f51d3"},
|
||||
{file = "SQLAlchemy-1.4.45-cp37-cp37m-win32.whl", hash = "sha256:715f5859daa3bee6ecbad64501637fa4640ca6734e8cda6135e3898d5f8ccadd"},
|
||||
{file = "SQLAlchemy-1.4.45-cp37-cp37m-win_amd64.whl", hash = "sha256:2d1539fbc82d2206380a86d6d7d0453764fdca5d042d78161bbfb8dd047c80ec"},
|
||||
{file = "SQLAlchemy-1.4.45-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:01aa76f324c9bbc0dcb2bc3d9e2a9d7ede4808afa1c38d40d5e2007e3163b206"},
|
||||
{file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:416fe7d228937bd37990b5a429fd00ad0e49eabcea3455af7beed7955f192edd"},
|
||||
{file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7e32ce2584564d9e068bb7e0ccd1810cbb0a824c0687f8016fe67e97c345a637"},
|
||||
{file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:561605cfc26273825ed2fb8484428faf36e853c13e4c90c61c58988aeccb34ed"},
|
||||
{file = "SQLAlchemy-1.4.45-cp38-cp38-win32.whl", hash = "sha256:55ddb5585129c5d964a537c9e32a8a68a8c6293b747f3fa164e1c034e1657a98"},
|
||||
{file = "SQLAlchemy-1.4.45-cp38-cp38-win_amd64.whl", hash = "sha256:445914dcadc0b623bd9851260ee54915ecf4e3041a62d57709b18a0eed19f33b"},
|
||||
{file = "SQLAlchemy-1.4.45-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:2db887dbf05bcc3151de1c4b506b14764c6240a42e844b4269132a7584de1e5f"},
|
||||
{file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52b90c9487e4449ad954624d01dea34c90cd8c104bce46b322c83654f37a23c5"},
|
||||
{file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f61e54b8c2b389de1a8ad52394729c478c67712dbdcdadb52c2575e41dae94a5"},
|
||||
{file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e91a5e45a2ea083fe344b3503405978dff14d60ef3aa836432c9ca8cd47806b6"},
|
||||
{file = "SQLAlchemy-1.4.45-cp39-cp39-win32.whl", hash = "sha256:0e068b8414d60dd35d43c693555fc3d2e1d822cef07960bb8ca3f1ee6c4ff762"},
|
||||
{file = "SQLAlchemy-1.4.45-cp39-cp39-win_amd64.whl", hash = "sha256:2d6f178ff2923730da271c8aa317f70cf0df11a4d1812f1d7a704b1cf29c5fe3"},
|
||||
{file = "SQLAlchemy-1.4.45.tar.gz", hash = "sha256:fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
|
||||
|
||||
[package.extras]
|
||||
aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
|
||||
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
|
||||
asyncio = ["greenlet (!=0.4.17)"]
|
||||
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
|
||||
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
|
||||
mssql = ["pyodbc"]
|
||||
mssql-pymssql = ["pymssql"]
|
||||
mssql-pyodbc = ["pyodbc"]
|
||||
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
|
||||
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
|
||||
mysql-connector = ["mysql-connector-python"]
|
||||
oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
|
||||
postgresql = ["psycopg2 (>=2.7)"]
|
||||
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
|
||||
postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
|
||||
postgresql-psycopg2binary = ["psycopg2-binary"]
|
||||
postgresql-psycopg2cffi = ["psycopg2cffi"]
|
||||
pymysql = ["pymysql", "pymysql (<1)"]
|
||||
sqlcipher = ["sqlcipher3-binary"]
|
||||
|
||||
[[package]]
|
||||
name = "starlette"
|
||||
version = "0.22.0"
|
||||
description = "The little ASGI library that shines."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "starlette-0.22.0-py3-none-any.whl", hash = "sha256:b5eda991ad5f0ee5d8ce4c4540202a573bb6691ecd0c712262d0bc85cf8f2c50"},
|
||||
{file = "starlette-0.22.0.tar.gz", hash = "sha256:b092cbc365bea34dd6840b42861bdabb2f507f8671e642e8272d2442e08ea4ff"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
anyio = ">=3.4.0,<5"
|
||||
|
||||
[package.extras]
|
||||
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
|
||||
|
||||
[[package]]
|
||||
name = "toml"
|
||||
version = "0.10.2"
|
||||
description = "Python Library for Tom's Obvious, Minimal Language"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
files = [
|
||||
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
|
||||
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.0.1"
|
||||
description = "A lil' TOML parser"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
|
||||
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.4.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.7+"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
|
||||
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "urllib3"
|
||||
version = "1.26.13"
|
||||
description = "HTTP library with thread-safe connection pooling, file post, and more."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
|
||||
files = [
|
||||
{file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"},
|
||||
{file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
|
||||
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
|
||||
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.20.0"
|
||||
description = "The lightning-fast ASGI server."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "uvicorn-0.20.0-py3-none-any.whl", hash = "sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd"},
|
||||
{file = "uvicorn-0.20.0.tar.gz", hash = "sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=7.0"
|
||||
h11 = ">=0.8"
|
||||
|
||||
[package.extras]
|
||||
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
|
||||
|
||||
[[package]]
|
||||
name = "virtualenv"
|
||||
version = "20.17.1"
|
||||
description = "Virtual Python Environment builder"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"},
|
||||
{file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
distlib = ">=0.3.6,<1"
|
||||
filelock = ">=3.4.1,<4"
|
||||
platformdirs = ">=2.4,<3"
|
||||
|
||||
[package.extras]
|
||||
docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
|
||||
testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
|
||||
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "dc9cca0226b975a91650ffdc42c0572f04707fc0de80fbcfee9809ebaf34a0bf"
|
24
pyproject.toml
Normal file
24
pyproject.toml
Normal file
@@ -0,0 +1,24 @@
|
||||
[tool.poetry]
|
||||
name = "backend"
|
||||
version = "0.1.0"
|
||||
description = ""
|
||||
authors = ["Bertrand Benjamin <benjamin.bertrand@opytex.org>"]
|
||||
readme = "README.md"
|
||||
|
||||
[tool.poetry.dependencies]
|
||||
python = "^3.10"
|
||||
sqlalchemy = "^1.4.45"
|
||||
fastapi = "^0.88.0"
|
||||
uvicorn = "^0.20.0"
|
||||
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
pre-commit = "^2.20.0"
|
||||
pytest = "^7.2.0"
|
||||
faker = "^15.3.4"
|
||||
requests = "^2.28.1"
|
||||
pytest-cov = "^4.0.0"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
build-backend = "poetry.core.masonry.api"
|
@@ -1,4 +0,0 @@
|
||||
---
|
||||
source: sheets/
|
||||
output: reports/
|
||||
templates: templates/
|
@@ -1,5 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
from .csv_extraction import flat_df_students, flat_df_for
|
||||
from .df_marks_manip import pp_q_scores
|
@@ -1,30 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
NO_ST_COLUMNS = {
|
||||
"assessment": "Nom",
|
||||
"term": "Trimestre",
|
||||
"date": "Date",
|
||||
"exercise": "Exercice",
|
||||
"question": "Question",
|
||||
"competence": "Competence",
|
||||
"theme": "Domaine",
|
||||
"comment": "Commentaire",
|
||||
"is_leveled": "Est_nivele",
|
||||
"score_rate": "Bareme",
|
||||
}
|
||||
|
||||
COLUMNS = {
|
||||
**NO_ST_COLUMNS,
|
||||
"student": "Eleve",
|
||||
"score": "Score",
|
||||
"mark": "Note",
|
||||
"level": "Niveau",
|
||||
"normalized": "Normalise",
|
||||
}
|
||||
|
||||
VALIDSCORE = {
|
||||
"NOTFILLED": "", # The item is not scored yet
|
||||
"NOANSWER": ".", # Student gives no answer (this score will impact the fianl mark)
|
||||
"ABS": "a", # Student has absent (this score won't be impact the final mark)
|
||||
}
|
@@ -1,119 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
""" Extracting data from xlsx files """
|
||||
|
||||
import pandas as pd
|
||||
from .config import NO_ST_COLUMNS, COLUMNS, VALIDSCORE
|
||||
|
||||
pd.set_option("Precision", 2)
|
||||
|
||||
|
||||
def try_replace(x, old, new):
|
||||
try:
|
||||
return str(x).replace(old, new)
|
||||
except ValueError:
|
||||
return x
|
||||
|
||||
|
||||
def extract_students(df, no_student_columns=NO_ST_COLUMNS.values()):
|
||||
"""Extract the list of students from df
|
||||
|
||||
:param df: the dataframe
|
||||
:param no_student_columns: columns that are not students
|
||||
:return: list of students
|
||||
"""
|
||||
students = df.columns.difference(no_student_columns)
|
||||
return students
|
||||
|
||||
|
||||
def flat_df_students(
|
||||
df, no_student_columns=NO_ST_COLUMNS.values(), postprocessing=True
|
||||
):
|
||||
"""Flat the dataframe by returning a dataframe with on student on each line
|
||||
|
||||
:param df: the dataframe (one row per questions)
|
||||
:param no_student_columns: columns that are not students
|
||||
:return: dataframe with one row per questions and students
|
||||
|
||||
Columns of csv files:
|
||||
|
||||
- NO_ST_COLUMNS meta data on questions
|
||||
- one for each students
|
||||
|
||||
This function flat student's columns to "student" and "score"
|
||||
"""
|
||||
students = extract_students(df, no_student_columns)
|
||||
scores = []
|
||||
for st in students:
|
||||
scores.append(
|
||||
pd.melt(
|
||||
df,
|
||||
id_vars=no_student_columns,
|
||||
value_vars=st,
|
||||
var_name=COLUMNS["student"],
|
||||
value_name=COLUMNS["score"],
|
||||
).dropna(subset=[COLUMNS["score"]])
|
||||
)
|
||||
if postprocessing:
|
||||
return postprocess(pd.concat(scores))
|
||||
return pd.concat(scores)
|
||||
|
||||
|
||||
def flat_df_for(
|
||||
df, student, no_student_columns=NO_ST_COLUMNS.values(), postprocessing=True
|
||||
):
|
||||
"""Extract the data only for one student
|
||||
|
||||
:param df: the dataframe (one row per questions)
|
||||
:param no_student_columns: columns that are not students
|
||||
:return: dataframe with one row per questions and students
|
||||
|
||||
Columns of csv files:
|
||||
|
||||
- NO_ST_COLUMNS meta data on questions
|
||||
- one for each students
|
||||
|
||||
"""
|
||||
students = extract_students(df, no_student_columns)
|
||||
if student not in students:
|
||||
raise KeyError("This student is not in the table")
|
||||
st_df = df[list(no_student_columns) + [student]]
|
||||
st_df = st_df.rename(columns={student: COLUMNS["score"]}).dropna(
|
||||
subset=[COLUMNS["score"]]
|
||||
)
|
||||
if postprocessing:
|
||||
return postprocess(st_df)
|
||||
return st_df
|
||||
|
||||
|
||||
def postprocess(df):
|
||||
"""Postprocessing score dataframe
|
||||
|
||||
- Replace na with an empty string
|
||||
- Replace "NOANSWER" with -1
|
||||
- Turn commas number to dot numbers
|
||||
"""
|
||||
|
||||
df[COLUMNS["question"]].fillna("", inplace=True)
|
||||
df[COLUMNS["exercise"]].fillna("", inplace=True)
|
||||
df[COLUMNS["comment"]].fillna("", inplace=True)
|
||||
df[COLUMNS["competence"]].fillna("", inplace=True)
|
||||
|
||||
df[COLUMNS["score"]] = pd.to_numeric(
|
||||
df[COLUMNS["score"]]
|
||||
.replace(VALIDSCORE["NOANSWER"], -1)
|
||||
.apply(lambda x: try_replace(x, ",", "."))
|
||||
)
|
||||
df[COLUMNS["score_rate"]] = pd.to_numeric(
|
||||
df[COLUMNS["score_rate"]].apply(lambda x: try_replace(x, ",", ".")),
|
||||
errors="coerce",
|
||||
)
|
||||
|
||||
return df
|
||||
|
||||
|
||||
# -----------------------------
|
||||
# Reglages pour 'vim'
|
||||
# vim:set autoindent expandtab tabstop=4 shiftwidth=4:
|
||||
# cursor: 16 del
|
@@ -1,5 +0,0 @@
|
||||
import dash
|
||||
|
||||
app = dash.Dash(__name__, suppress_callback_exceptions=True)
|
||||
# app = dash.Dash(__name__)
|
||||
server = app.server
|
@@ -1,66 +0,0 @@
|
||||
body {
|
||||
margin: 0px;
|
||||
font-family: 'Source Sans Pro','Roboto','Open Sans','Liberation Sans','DejaVu Sans','Verdana','Helvetica','Arial',sans-serif;
|
||||
}
|
||||
|
||||
header {
|
||||
margin: 0px 0px 20px 0px;
|
||||
background-color: #333333;
|
||||
color: #ffffff;
|
||||
padding: 20px;
|
||||
}
|
||||
|
||||
header > h1 {
|
||||
margin: 0px;
|
||||
}
|
||||
|
||||
main {
|
||||
width: 95vw;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
section {
|
||||
margin-top: 20px;
|
||||
margin-bottom: 20px;
|
||||
|
||||
}
|
||||
|
||||
/* Exam analysis */
|
||||
|
||||
#select {
|
||||
margin-bottom: 20px;
|
||||
}
|
||||
|
||||
#select > div {
|
||||
width: 40vw;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
#analysis {
|
||||
display: flex;
|
||||
flex-flow: row wrap;
|
||||
}
|
||||
|
||||
#analysis > * {
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
width: 45vw;
|
||||
margin: auto;
|
||||
}
|
||||
|
||||
/* Create new exam */
|
||||
|
||||
#new-exam {
|
||||
display: flex;
|
||||
flex-flow: row;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
#new-exam label {
|
||||
width: 20%;
|
||||
display: flex;
|
||||
flex-flow: column;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
|
@@ -1,355 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import dash
|
||||
import dash_html_components as html
|
||||
import dash_core_components as dcc
|
||||
import dash_table
|
||||
import plotly.graph_objects as go
|
||||
from datetime import date, datetime
|
||||
import uuid
|
||||
import pandas as pd
|
||||
import yaml
|
||||
|
||||
from ...scripts.getconfig import config
|
||||
from ...config import NO_ST_COLUMNS
|
||||
from ..app import app
|
||||
from ...scripts.exam import Exam
|
||||
|
||||
QUESTION_COLUMNS = [
|
||||
{"id": "id", "name": "Question"},
|
||||
{
|
||||
"id": "competence",
|
||||
"name": "Competence",
|
||||
"presentation": "dropdown",
|
||||
},
|
||||
{"id": "theme", "name": "Domaine"},
|
||||
{"id": "comment", "name": "Commentaire"},
|
||||
{"id": "score_rate", "name": "Bareme"},
|
||||
{"id": "is_leveled", "name": "Est_nivele"},
|
||||
]
|
||||
|
||||
|
||||
def get_current_year_limit():
|
||||
today = date.today()
|
||||
if today.month > 8:
|
||||
return {
|
||||
"min_date_allowed": date(today.year, 9, 1),
|
||||
"max_date_allowed": date(today.year + 1, 7, 15),
|
||||
"initial_visible_month": today,
|
||||
}
|
||||
|
||||
return {
|
||||
"min_date_allowed": date(today.year - 1, 9, 1),
|
||||
"max_date_allowed": date(today.year, 7, 15),
|
||||
"initial_visible_month": today,
|
||||
}
|
||||
|
||||
|
||||
layout = html.Div(
|
||||
[
|
||||
html.Header(
|
||||
children=[
|
||||
html.H1("Création d'une évaluation"),
|
||||
html.P("Pas encore de sauvegarde", id="is-saved"),
|
||||
html.Button("Enregistrer dans csv", id="save-csv"),
|
||||
],
|
||||
),
|
||||
html.Main(
|
||||
children=[
|
||||
html.Section(
|
||||
children=[
|
||||
html.Form(
|
||||
id="new-exam",
|
||||
children=[
|
||||
html.Label(
|
||||
children=[
|
||||
"Classe",
|
||||
dcc.Dropdown(
|
||||
id="tribe",
|
||||
options=[
|
||||
{"label": t["name"], "value": t["name"]}
|
||||
for t in config["tribes"]
|
||||
],
|
||||
value=config["tribes"][0]["name"],
|
||||
),
|
||||
]
|
||||
),
|
||||
html.Label(
|
||||
children=[
|
||||
"Nom de l'évaluation",
|
||||
dcc.Input(
|
||||
id="exam_name",
|
||||
type="text",
|
||||
placeholder="Nom de l'évaluation",
|
||||
),
|
||||
]
|
||||
),
|
||||
html.Label(
|
||||
children=[
|
||||
"Date",
|
||||
dcc.DatePickerSingle(
|
||||
id="date",
|
||||
date=date.today(),
|
||||
**get_current_year_limit(),
|
||||
),
|
||||
]
|
||||
),
|
||||
html.Label(
|
||||
children=[
|
||||
"Trimestre",
|
||||
dcc.Dropdown(
|
||||
id="term",
|
||||
options=[
|
||||
{"label": i + 1, "value": i + 1}
|
||||
for i in range(3)
|
||||
],
|
||||
value=1,
|
||||
),
|
||||
]
|
||||
),
|
||||
],
|
||||
),
|
||||
],
|
||||
id="form",
|
||||
),
|
||||
html.Section(
|
||||
children=[
|
||||
html.Div(
|
||||
id="exercises",
|
||||
children=[],
|
||||
),
|
||||
html.Button(
|
||||
"Ajouter un exercice",
|
||||
id="add-exercise",
|
||||
className="add-exercise",
|
||||
),
|
||||
html.Div(
|
||||
id="summary",
|
||||
),
|
||||
],
|
||||
id="exercises",
|
||||
),
|
||||
html.Section(
|
||||
children=[
|
||||
html.Div(
|
||||
id="score_rate",
|
||||
),
|
||||
html.Div(
|
||||
id="exercises-viz",
|
||||
),
|
||||
html.Div(
|
||||
id="competences-viz",
|
||||
),
|
||||
html.Div(
|
||||
id="themes-viz",
|
||||
),
|
||||
],
|
||||
id="visualisation",
|
||||
),
|
||||
]
|
||||
),
|
||||
dcc.Store(id="exam_store"),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output("exercises", "children"),
|
||||
dash.dependencies.Input("add-exercise", "n_clicks"),
|
||||
dash.dependencies.State("exercises", "children"),
|
||||
)
|
||||
def add_exercise(n_clicks, children):
|
||||
if n_clicks is None:
|
||||
return children
|
||||
element_table = pd.DataFrame(columns=[c["id"] for c in QUESTION_COLUMNS])
|
||||
element_table = element_table.append(
|
||||
pd.Series(
|
||||
data={
|
||||
"id": 1,
|
||||
"competence": "Rechercher",
|
||||
"theme": "",
|
||||
"comment": "",
|
||||
"score_rate": 1,
|
||||
"is_leveled": 1,
|
||||
},
|
||||
name=0,
|
||||
)
|
||||
)
|
||||
new_exercise = html.Div(
|
||||
children=[
|
||||
html.Div(
|
||||
children=[
|
||||
dcc.Input(
|
||||
id={"type": "exercice", "index": str(n_clicks)},
|
||||
type="text",
|
||||
value=f"Exercice {len(children)+1}",
|
||||
placeholder="Nom de l'exercice",
|
||||
className="exercise-name",
|
||||
),
|
||||
html.Button(
|
||||
"X",
|
||||
id={"type": "rm_exercice", "index": str(n_clicks)},
|
||||
className="delete-exercise",
|
||||
),
|
||||
],
|
||||
className="exercise-head",
|
||||
),
|
||||
dash_table.DataTable(
|
||||
id={"type": "elements", "index": str(n_clicks)},
|
||||
columns=QUESTION_COLUMNS,
|
||||
data=element_table.to_dict("records"),
|
||||
editable=True,
|
||||
row_deletable=True,
|
||||
dropdown={
|
||||
"competence": {
|
||||
"options": [
|
||||
{"label": i, "value": i} for i in config["competences"]
|
||||
]
|
||||
},
|
||||
},
|
||||
style_cell={
|
||||
"whiteSpace": "normal",
|
||||
"height": "auto",
|
||||
},
|
||||
),
|
||||
html.Button(
|
||||
"Ajouter un élément de notation",
|
||||
id={"type": "add-element", "index": str(n_clicks)},
|
||||
className="add-element",
|
||||
),
|
||||
],
|
||||
className="exercise",
|
||||
id=f"exercise-{n_clicks}",
|
||||
)
|
||||
children.append(new_exercise)
|
||||
return children
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output(
|
||||
{"type": "elements", "index": dash.dependencies.MATCH}, "data"
|
||||
),
|
||||
dash.dependencies.Input(
|
||||
{"type": "add-element", "index": dash.dependencies.MATCH}, "n_clicks"
|
||||
),
|
||||
[
|
||||
dash.dependencies.State(
|
||||
{"type": "elements", "index": dash.dependencies.MATCH}, "data"
|
||||
),
|
||||
],
|
||||
prevent_initial_call=True,
|
||||
)
|
||||
def add_element(n_clicks, elements):
|
||||
if n_clicks is None or n_clicks < len(elements):
|
||||
return elements
|
||||
|
||||
df = pd.DataFrame.from_records(elements)
|
||||
df = df.append(
|
||||
pd.Series(
|
||||
data={
|
||||
"id": len(df) + 1,
|
||||
"competence": "",
|
||||
"theme": "",
|
||||
"comment": "",
|
||||
"score_rate": 1,
|
||||
"is_leveled": 1,
|
||||
},
|
||||
name=n_clicks,
|
||||
)
|
||||
)
|
||||
return df.to_dict("records")
|
||||
|
||||
|
||||
def exam_generalities(tribe, exam_name, date, term, exercices=[], elements=[]):
|
||||
return [
|
||||
html.H1(f"{exam_name} pour les {tribe}"),
|
||||
html.P(f"Fait le {date} (Trimestre {term})"),
|
||||
]
|
||||
|
||||
|
||||
def exercise_summary(identifier, name, elements=[]):
|
||||
df = pd.DataFrame.from_records(elements)
|
||||
return html.Div(
|
||||
[
|
||||
html.H2(name),
|
||||
dash_table.DataTable(
|
||||
columns=[{"id": c, "name": c} for c in df], data=elements
|
||||
),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output("exam_store", "data"),
|
||||
[
|
||||
dash.dependencies.Input("tribe", "value"),
|
||||
dash.dependencies.Input("exam_name", "value"),
|
||||
dash.dependencies.Input("date", "date"),
|
||||
dash.dependencies.Input("term", "value"),
|
||||
dash.dependencies.Input(
|
||||
{"type": "exercice", "index": dash.dependencies.ALL}, "value"
|
||||
),
|
||||
dash.dependencies.Input(
|
||||
{"type": "elements", "index": dash.dependencies.ALL}, "data"
|
||||
),
|
||||
],
|
||||
dash.dependencies.State({"type": "elements", "index": dash.dependencies.ALL}, "id"),
|
||||
)
|
||||
def store_exam(tribe, exam_name, date, term, exercices, elements, elements_id):
|
||||
exam = Exam(exam_name, tribe, date, term)
|
||||
for (i, name) in enumerate(exercices):
|
||||
ex_elements_id = [el for el in elements_id if el["index"] == str(i + 1)][0]
|
||||
index = elements_id.index(ex_elements_id)
|
||||
ex_elements = elements[index]
|
||||
exam.add_exercise(name, ex_elements)
|
||||
|
||||
return exam.to_dict()
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output("score_rate", "children"),
|
||||
dash.dependencies.Input("exam_store", "data"),
|
||||
prevent_initial_call=True,
|
||||
)
|
||||
def score_rate(data):
|
||||
exam = Exam(**data)
|
||||
return [html.P(f"Barème /{exam.score_rate}")]
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output("competences-viz", "figure"),
|
||||
dash.dependencies.Input("exam_store", "data"),
|
||||
prevent_initial_call=True,
|
||||
)
|
||||
def competences_viz(data):
|
||||
exam = Exam(**data)
|
||||
return [html.P(str(exam.competences_rate))]
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output("themes-viz", "children"),
|
||||
dash.dependencies.Input("exam_store", "data"),
|
||||
prevent_initial_call=True,
|
||||
)
|
||||
def themes_viz(data):
|
||||
exam = Exam(**data)
|
||||
themes_rate = exam.themes_rate
|
||||
fig = go.Figure()
|
||||
if themes_rate:
|
||||
fig.add_trace(go.Pie(labels=list(themes_rate.keys()), values=list(themes_rate.values())))
|
||||
return [dcc.Graph(figure=fig)]
|
||||
return []
|
||||
|
||||
|
||||
@app.callback(
|
||||
dash.dependencies.Output("is-saved", "children"),
|
||||
dash.dependencies.Input("save-csv", "n_clicks"),
|
||||
dash.dependencies.State("exam_store", "data"),
|
||||
prevent_initial_call=True,
|
||||
)
|
||||
def save_to_csv(n_clicks, data):
|
||||
exam = Exam(**data)
|
||||
csv = exam.path(".csv")
|
||||
exam.write_csv()
|
||||
return [f"Dernière sauvegarde {datetime.today()} dans {csv}"]
|
@@ -1,406 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import dash
|
||||
import dash_html_components as html
|
||||
import dash_core_components as dcc
|
||||
import dash_table
|
||||
from dash.exceptions import PreventUpdate
|
||||
import plotly.graph_objects as go
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
|
||||
|
||||
from ... import flat_df_students, pp_q_scores
|
||||
from ...config import NO_ST_COLUMNS
|
||||
from ...scripts.getconfig import config
|
||||
from ..app import app
|
||||
|
||||
COLORS = {
|
||||
".": "black",
|
||||
0: "#E7472B",
|
||||
1: "#FF712B",
|
||||
2: "#F2EC4C",
|
||||
3: "#68D42F",
|
||||
}
|
||||
|
||||
layout = html.Div(
|
||||
children=[
|
||||
html.Header(
|
||||
children=[
|
||||
html.H1("Analyse des notes"),
|
||||
html.P("Dernière sauvegarde", id="lastsave"),
|
||||
],
|
||||
),
|
||||
html.Main(
|
||||
[
|
||||
html.Section(
|
||||
[
|
||||
html.Div(
|
||||
[
|
||||
"Classe: ",
|
||||
dcc.Dropdown(
|
||||
id="tribe",
|
||||
options=[
|
||||
{"label": t["name"], "value": t["name"]}
|
||||
for t in config["tribes"]
|
||||
],
|
||||
value=config["tribes"][0]["name"],
|
||||
),
|
||||
],
|
||||
style={
|
||||
"display": "flex",
|
||||
"flex-flow": "column",
|
||||
},
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
"Evaluation: ",
|
||||
dcc.Dropdown(id="csv"),
|
||||
],
|
||||
style={
|
||||
"display": "flex",
|
||||
"flex-flow": "column",
|
||||
},
|
||||
),
|
||||
],
|
||||
id="select",
|
||||
style={
|
||||
"display": "flex",
|
||||
"flex-flow": "row wrap",
|
||||
},
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
html.Div(
|
||||
dash_table.DataTable(
|
||||
id="final_score_table",
|
||||
columns=[
|
||||
{"id": "Eleve", "name": "Élève"},
|
||||
{"id": "Note", "name": "Note"},
|
||||
{"id": "Bareme", "name": "Barème"},
|
||||
],
|
||||
data=[],
|
||||
style_data_conditional=[
|
||||
{
|
||||
"if": {"row_index": "odd"},
|
||||
"backgroundColor": "rgb(248, 248, 248)",
|
||||
}
|
||||
],
|
||||
style_data={
|
||||
"width": "100px",
|
||||
"maxWidth": "100px",
|
||||
"minWidth": "100px",
|
||||
},
|
||||
),
|
||||
id="final_score_table_container",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
dash_table.DataTable(
|
||||
id="final_score_describe",
|
||||
columns=[
|
||||
{"id": "count", "name": "count"},
|
||||
{"id": "mean", "name": "mean"},
|
||||
{"id": "std", "name": "std"},
|
||||
{"id": "min", "name": "min"},
|
||||
{"id": "25%", "name": "25%"},
|
||||
{"id": "50%", "name": "50%"},
|
||||
{"id": "75%", "name": "75%"},
|
||||
{"id": "max", "name": "max"},
|
||||
],
|
||||
),
|
||||
dcc.Graph(
|
||||
id="fig_assessment_hist",
|
||||
),
|
||||
dcc.Graph(id="fig_competences"),
|
||||
],
|
||||
id="desc_plots",
|
||||
),
|
||||
],
|
||||
id="analysis",
|
||||
),
|
||||
html.Div(
|
||||
[
|
||||
dash_table.DataTable(
|
||||
id="scores_table",
|
||||
columns=[
|
||||
{"id": "id", "name": "Question"},
|
||||
{
|
||||
"id": "competence",
|
||||
"name": "Competence",
|
||||
},
|
||||
{"id": "theme", "name": "Domaine"},
|
||||
{"id": "comment", "name": "Commentaire"},
|
||||
{"id": "score_rate", "name": "Bareme"},
|
||||
{"id": "is_leveled", "name": "Est_nivele"},
|
||||
],
|
||||
style_cell={
|
||||
"whiteSpace": "normal",
|
||||
"height": "auto",
|
||||
},
|
||||
fixed_columns={"headers": True, "data": 7},
|
||||
style_table={"minWidth": "100%"},
|
||||
style_data_conditional=[],
|
||||
editable=True,
|
||||
),
|
||||
html.Button("Ajouter un élément", id="btn_add_element"),
|
||||
],
|
||||
id="big_table",
|
||||
),
|
||||
dcc.Store(id="final_score"),
|
||||
],
|
||||
className="content",
|
||||
style={
|
||||
"width": "95vw",
|
||||
"margin": "auto",
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("csv", "options"),
|
||||
dash.dependencies.Output("csv", "value"),
|
||||
],
|
||||
[dash.dependencies.Input("tribe", "value")],
|
||||
)
|
||||
def update_csvs(value):
|
||||
if not value:
|
||||
raise PreventUpdate
|
||||
p = Path(value)
|
||||
csvs = list(p.glob("*.csv"))
|
||||
try:
|
||||
return [{"label": str(c), "value": str(c)} for c in csvs], str(csvs[0])
|
||||
except IndexError:
|
||||
return []
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("final_score", "data"),
|
||||
],
|
||||
[dash.dependencies.Input("scores_table", "data")],
|
||||
)
|
||||
def update_final_scores(data):
|
||||
if not data:
|
||||
raise PreventUpdate
|
||||
|
||||
scores = pd.DataFrame.from_records(data)
|
||||
try:
|
||||
if scores.iloc[0]["Commentaire"] == "commentaire" or scores.iloc[0].str.contains("PPRE").any():
|
||||
scores.drop([0], inplace=True)
|
||||
except KeyError:
|
||||
pass
|
||||
scores = flat_df_students(scores).dropna(subset=["Score"])
|
||||
if scores.empty:
|
||||
return [{}]
|
||||
|
||||
scores = pp_q_scores(scores)
|
||||
assessment_scores = scores.groupby(["Eleve"]).agg({"Note": "sum", "Bareme": "sum"})
|
||||
return [assessment_scores.reset_index().to_dict("records")]
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("final_score_table", "data"),
|
||||
],
|
||||
[dash.dependencies.Input("final_score", "data")],
|
||||
)
|
||||
def update_final_scores_table(data):
|
||||
assessment_scores = pd.DataFrame.from_records(data)
|
||||
return [assessment_scores.to_dict("records")]
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("final_score_describe", "data"),
|
||||
],
|
||||
[dash.dependencies.Input("final_score", "data")],
|
||||
)
|
||||
def update_final_scores_descr(data):
|
||||
scores = pd.DataFrame.from_records(data)
|
||||
if scores.empty:
|
||||
return [[{}]]
|
||||
desc = scores["Note"].describe().T.round(2)
|
||||
return [[desc.to_dict()]]
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("fig_assessment_hist", "figure"),
|
||||
],
|
||||
[dash.dependencies.Input("final_score", "data")],
|
||||
)
|
||||
def update_final_scores_hist(data):
|
||||
assessment_scores = pd.DataFrame.from_records(data)
|
||||
|
||||
if assessment_scores.empty:
|
||||
return [go.Figure(data=[go.Scatter(x=[], y=[])])]
|
||||
|
||||
ranges = np.linspace(
|
||||
-0.5,
|
||||
assessment_scores.Bareme.max(),
|
||||
int(assessment_scores.Bareme.max() * 2 + 2),
|
||||
)
|
||||
bins = pd.cut(assessment_scores["Note"], ranges)
|
||||
assessment_scores["Bin"] = bins
|
||||
assessment_grouped = (
|
||||
assessment_scores.reset_index()
|
||||
.groupby("Bin")
|
||||
.agg({"Bareme": "count", "Eleve": lambda x: "\n".join(x)})
|
||||
)
|
||||
assessment_grouped.index = assessment_grouped.index.map(lambda i: i.right)
|
||||
fig = go.Figure()
|
||||
fig.add_bar(
|
||||
x=assessment_grouped.index,
|
||||
y=assessment_grouped.Bareme,
|
||||
text=assessment_grouped.Eleve,
|
||||
textposition="auto",
|
||||
hovertemplate="",
|
||||
marker_color="#4E89DE",
|
||||
)
|
||||
fig.update_layout(
|
||||
height=300,
|
||||
margin=dict(l=5, r=5, b=5, t=5),
|
||||
)
|
||||
return [fig]
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("fig_competences", "figure"),
|
||||
],
|
||||
[dash.dependencies.Input("scores_table", "data")],
|
||||
)
|
||||
def update_competence_fig(data):
|
||||
scores = pd.DataFrame.from_records(data)
|
||||
try:
|
||||
if scores.iloc[0]["Commentaire"] == "commentaire" or scores.iloc[0].str.contains("PPRE").any():
|
||||
scores.drop([0], inplace=True)
|
||||
except KeyError:
|
||||
pass
|
||||
scores = flat_df_students(scores).dropna(subset=["Score"])
|
||||
|
||||
if scores.empty:
|
||||
return [go.Figure(data=[go.Scatter(x=[], y=[])])]
|
||||
|
||||
scores = pp_q_scores(scores)
|
||||
pt = pd.pivot_table(
|
||||
scores,
|
||||
index=["Exercice", "Question", "Commentaire"],
|
||||
columns="Score",
|
||||
aggfunc="size",
|
||||
fill_value=0,
|
||||
)
|
||||
for i in {i for i in pt.index.get_level_values(0)}:
|
||||
pt.loc[(str(i), "", ""), :] = ""
|
||||
pt.sort_index(inplace=True)
|
||||
index = (
|
||||
pt.index.get_level_values(0).map(str)
|
||||
+ ":"
|
||||
+ pt.index.get_level_values(1).map(str)
|
||||
+ " "
|
||||
+ pt.index.get_level_values(2).map(str)
|
||||
)
|
||||
|
||||
fig = go.Figure()
|
||||
bars = [
|
||||
{"score": -1, "name": "Pas de réponse", "color": COLORS["."]},
|
||||
{"score": 0, "name": "Faux", "color": COLORS[0]},
|
||||
{"score": 1, "name": "Peu juste", "color": COLORS[1]},
|
||||
{"score": 2, "name": "Presque juste", "color": COLORS[2]},
|
||||
{"score": 3, "name": "Juste", "color": COLORS[3]},
|
||||
]
|
||||
for b in bars:
|
||||
try:
|
||||
fig.add_bar(
|
||||
x=index, y=pt[b["score"]], name=b["name"], marker_color=b["color"]
|
||||
)
|
||||
except KeyError:
|
||||
pass
|
||||
fig.update_layout(barmode="relative")
|
||||
fig.update_layout(
|
||||
height=500,
|
||||
margin=dict(l=5, r=5, b=5, t=5),
|
||||
)
|
||||
return [fig]
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("lastsave", "children"),
|
||||
],
|
||||
[
|
||||
dash.dependencies.Input("scores_table", "data"),
|
||||
dash.dependencies.State("csv", "value"),
|
||||
],
|
||||
)
|
||||
def save_scores(data, csv):
|
||||
try:
|
||||
scores = pd.DataFrame.from_records(data)
|
||||
scores = scores_table_column_order(scores)
|
||||
scores.to_csv(csv, index=False)
|
||||
except:
|
||||
return [f"Soucis pour sauvegarder à {datetime.today()} dans {csv}"]
|
||||
else:
|
||||
return [f"Dernière sauvegarde {datetime.today()} dans {csv}"]
|
||||
|
||||
|
||||
def highlight_value(df):
|
||||
""" Cells style """
|
||||
hight = []
|
||||
for v, color in COLORS.items():
|
||||
hight += [
|
||||
{
|
||||
"if": {"filter_query": "{{{}}} = {}".format(col, v), "column_id": col},
|
||||
"backgroundColor": color,
|
||||
"color": "white",
|
||||
}
|
||||
for col in df.columns
|
||||
if col not in NO_ST_COLUMNS.values()
|
||||
]
|
||||
return hight
|
||||
|
||||
def scores_table_column_order(df):
|
||||
df_student_columns = [c for c in df.columns if c not in NO_ST_COLUMNS.values()]
|
||||
order = list(NO_ST_COLUMNS.values())+df_student_columns
|
||||
return df.loc[:, order]
|
||||
|
||||
|
||||
@app.callback(
|
||||
[
|
||||
dash.dependencies.Output("scores_table", "columns"),
|
||||
dash.dependencies.Output("scores_table", "data"),
|
||||
dash.dependencies.Output("scores_table", "style_data_conditional"),
|
||||
],
|
||||
[
|
||||
dash.dependencies.Input("csv", "value"),
|
||||
dash.dependencies.Input("btn_add_element", "n_clicks"),
|
||||
dash.dependencies.State("scores_table", "data"),
|
||||
],
|
||||
)
|
||||
def update_scores_table(csv, add_element, data):
|
||||
ctx = dash.callback_context
|
||||
if ctx.triggered[0]["prop_id"] == "csv.value":
|
||||
stack = pd.read_csv(csv, encoding="UTF8")
|
||||
elif ctx.triggered[0]["prop_id"] == "btn_add_element.n_clicks":
|
||||
stack = pd.DataFrame.from_records(data)
|
||||
infos = pd.DataFrame.from_records(
|
||||
[{k: stack.iloc[-1][k] for k in NO_ST_COLUMNS.values()}]
|
||||
)
|
||||
stack = stack.append(infos)
|
||||
stack = scores_table_column_order(stack)
|
||||
return (
|
||||
[
|
||||
{"id": c, "name": c}
|
||||
for c in stack.columns
|
||||
if c not in ["Trimestre", "Nom", "Date"]
|
||||
],
|
||||
stack.to_dict("records"),
|
||||
highlight_value(stack),
|
||||
)
|
@@ -1,29 +0,0 @@
|
||||
import dash_core_components as dcc
|
||||
import dash_html_components as html
|
||||
from dash.dependencies import Input, Output
|
||||
|
||||
from .app import app
|
||||
from .exam_analysis import app as exam_analysis
|
||||
from .create_exam import app as create_exam
|
||||
from .student_analysis import app as student_analysis
|
||||
|
||||
|
||||
app.layout = html.Div(
|
||||
[dcc.Location(id="url", refresh=False), html.Div(id="page-content")]
|
||||
)
|
||||
|
||||
|
||||
@app.callback(Output("page-content", "children"), Input("url", "pathname"))
|
||||
def display_page(pathname):
|
||||
if pathname == "/":
|
||||
return exam_analysis.layout
|
||||
elif pathname == "/create-exam":
|
||||
return create_exam.layout
|
||||
elif pathname == "/students":
|
||||
return student_analysis.layout
|
||||
else:
|
||||
return "404"
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
app.run_server(debug=True)
|
@@ -1,300 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import dash
|
||||
import dash_html_components as html
|
||||
import dash_core_components as dcc
|
||||
import dash_table
|
||||
import plotly.graph_objects as go
|
||||
from datetime import date, datetime
|
||||
import uuid
|
||||
import pandas as pd
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
|
||||
from ...scripts.getconfig import config
|
||||
from ... import flat_df_students, pp_q_scores
|
||||
from ...config import NO_ST_COLUMNS
|
||||
from ..app import app
|
||||
from ...scripts.exam import Exam
|
||||
|
||||
|
||||
def get_students(csv):
|
||||
return list(pd.read_csv(csv).T.to_dict().values())
|
||||
|
||||
|
||||
COLORS = {
|
||||
".": "black",
|
||||
0: "#E7472B",
|
||||
1: "#FF712B",
|
||||
2: "#F2EC4C",
|
||||
3: "#68D42F",
|
||||
}
|
||||
|
||||
QUESTION_COLUMNS = [
|
||||
{"id": "id", "name": "Question"},
|
||||
{
|
||||
"id": "competence",
|
||||
"name": "Competence",
|
||||
"presentation": "dropdown",
|
||||
},
|
||||
{"id": "theme", "name": "Domaine"},
|
||||
{"id": "comment", "name": "Commentaire"},
|
||||
{"id": "score_rate", "name": "Bareme"},
|
||||
{"id": "is_leveled", "name": "Est_nivele"},
|
||||
]
|
||||
|
||||
layout = html.Div(
|
||||
[
|
||||
html.Header(
|
||||
children=[
|
||||
html.H1("Bilan des élèves"),
|
||||
],
|
||||
),
|
||||
html.Main(
|
||||
children=[
|
||||
html.Section(
|
||||
children=[
|
||||
html.Form(
|
||||
id="select-student",
|
||||
children=[
|
||||
html.Label(
|
||||
children=[
|
||||
"Classe",
|
||||
dcc.Dropdown(
|
||||
id="tribe",
|
||||
options=[
|
||||
{"label": t["name"], "value": t["name"]}
|
||||
for t in config["tribes"]
|
||||
],
|
||||
value=config["tribes"][0]["name"],
|
||||
),
|
||||
]
|
||||
),
|
||||
html.Label(
|
||||
children=[
|
||||
"Élève",
|
||||
dcc.Dropdown(
|
||||
id="student",
|
||||
options=[
|
||||
{"label": t["Nom"], "value": t["Nom"]}
|
||||
for t in get_students(config["tribes"][0]["students"])
|
||||
],
|
||||
value=get_students(config["tribes"][0]["students"])[0]["Nom"],
|
||||
),
|
||||
]
|
||||
),
|
||||
html.Label(
|
||||
children=[
|
||||
"Trimestre",
|
||||
dcc.Dropdown(
|
||||
id="term",
|
||||
options=[
|
||||
{"label": i + 1, "value": i + 1}
|
||||
for i in range(3)
|
||||
],
|
||||
value=1,
|
||||
),
|
||||
]
|
||||
),
|
||||
],
|
||||
),
|
||||
],
|
||||
id="form",
|
||||
),
|
||||
html.Section(
|
||||
children=[
|
||||
html.H2("Évaluations"),
|
||||
html.Div(
|
||||
dash_table.DataTable(
|
||||
id="exam_scores",
|
||||
columns=[
|
||||
{"id": "Nom", "name": "Évaluations"},
|
||||
{"id": "Note", "name": "Note"},
|
||||
{"id": "Bareme", "name": "Barème"},
|
||||
],
|
||||
data=[],
|
||||
style_data_conditional=[
|
||||
{
|
||||
"if": {"row_index": "odd"},
|
||||
"backgroundColor": "rgb(248, 248, 248)",
|
||||
}
|
||||
],
|
||||
style_data={
|
||||
"width": "100px",
|
||||
"maxWidth": "100px",
|
||||
"minWidth": "100px",
|
||||
},
|
||||
),
|
||||
id="eval-table",
|
||||
),
|
||||
],
|
||||
id="Évaluations",
|
||||
),
|
||||
html.Section(
|
||||
children=[
|
||||
html.Div(
|
||||
id="competences-viz",
|
||||
),
|
||||
html.Div(
|
||||
id="themes-vizz",
|
||||
),
|
||||
],
|
||||
id="visualisation",
|
||||
),
|
||||
]
|
||||
),
|
||||
dcc.Store(id="student-scores"),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@app.callback(
    [
        dash.dependencies.Output("student", "options"),
        dash.dependencies.Output("student", "value"),
    ],
    [
        dash.dependencies.Input("tribe", "value")
    ],)
def update_students_list(tribe):
    """Refresh the student dropdown when the selected tribe changes.

    Returns the dropdown options (one entry per student of the tribe) and
    pre-selects the first student.
    """
    selected = [t for t in config["tribes"] if t["name"] == tribe][0]
    students = get_students(selected["students"])
    options = [{"label": s["Nom"], "value": s["Nom"]} for s in students]
    return options, students[0]["Nom"]
|
||||
|
||||
|
||||
@app.callback(
    [
        dash.dependencies.Output("student-scores", "data"),
    ],
    [
        dash.dependencies.Input("tribe", "value"),
        dash.dependencies.Input("student", "value"),
        dash.dependencies.Input("term", "value"),
    ],
)
def update_student_scores(tribe, student, term):
    """Load every csv of the selected tribe and keep the rows matching one
    student and one term; the result feeds the "student-scores" dcc.Store.
    """
    tribe_config = [t for t in config["tribes"] if t["name"] == tribe][0]

    # Assessment csv files live in a directory named after the tribe,
    # relative to the current working directory.
    p = Path(tribe_config["name"])
    csvs = list(p.glob("*.csv"))

    dfs = []
    for csv in csvs:
        try:
            scores = pd.read_csv(csv)
        except pd.errors.ParserError:
            # Best effort: unparsable csv files in the directory are skipped.
            pass
        else:
            # Heuristic: a first row carrying "commentaire" or "PPRE" is a
            # header/placeholder row, not real scores — drop it.
            # NOTE(review): assumes a "Commentaire" column exists — TODO confirm.
            if scores.iloc[0]["Commentaire"] == "commentaire" or scores.iloc[0].str.contains("PPRE").any():
                scores.drop([0], inplace=True)
            scores = flat_df_students(scores).dropna(subset=["Score"])
            scores = scores[scores["Eleve"] == student]
            scores = scores[scores["Trimestre"] == term]
            dfs.append(scores)

    # NOTE(review): pd.concat raises ValueError when dfs is empty (no csv
    # parsed) — confirm whether that is the intended failure mode.
    df = pd.concat(dfs)

    return [df.to_dict("records")]
|
||||
|
||||
|
||||
@app.callback(
    [
        dash.dependencies.Output("exam_scores", "data"),
    ],
    [
        dash.dependencies.Input("student-scores", "data"),
    ],
)
def update_exam_scores(data):
    """Aggregate per-question scores into one Note/Bareme total per assessment
    for the evaluation table.
    """
    scores = pp_q_scores(pd.DataFrame.from_records(data))
    totals = scores.groupby(["Nom"]).agg({"Note": "sum", "Bareme": "sum"})
    return [totals.reset_index().to_dict("records")]
|
||||
|
||||
|
||||
@app.callback(
    [
        dash.dependencies.Output("competences-viz", "children"),
    ],
    [
        dash.dependencies.Input("student-scores", "data"),
    ],
)
def update_competences_viz(data):
    """Render a stacked bar chart counting score levels per competence."""
    scores = pd.DataFrame.from_records(data)
    scores = pp_q_scores(scores)
    # Count how many questions reached each score level, per competence.
    pt = pd.pivot_table(
        scores,
        index=["Competence"],
        columns="Score",
        aggfunc="size",
        fill_value=0,
    )
    fig = go.Figure()
    # One bar segment per score level, from "no answer" up to "correct".
    bars = [
        {"score": -1, "name": "Pas de réponse", "color": COLORS["."]},
        {"score": 0, "name": "Faux", "color": COLORS[0]},
        {"score": 1, "name": "Peu juste", "color": COLORS[1]},
        {"score": 2, "name": "Presque juste", "color": COLORS[2]},
        {"score": 3, "name": "Juste", "color": COLORS[3]},
    ]
    for b in bars:
        try:
            fig.add_bar(
                x=list(config["competences"].keys()), y=pt[b["score"]], name=b["name"], marker_color=b["color"]
            )
        except KeyError:
            # A level absent from the pivot simply gets no bar.
            pass
    # "relative" stacks positive values on top of each other.
    fig.update_layout(barmode="relative")
    fig.update_layout(
        height=500,
        margin=dict(l=5, r=5, b=5, t=5),
    )
    return [dcc.Graph(figure=fig)]
|
||||
|
||||
@app.callback(
    [
        dash.dependencies.Output("themes-vizz", "children"),
    ],
    [
        dash.dependencies.Input("student-scores", "data"),
    ],
)
def update_themes_viz(data):
    """Render a stacked bar chart counting score levels per theme (Domaine).

    NOTE(review): near-duplicate of update_competences_viz (only the pivot
    index and x values differ) — a shared helper would remove the duplication.
    """
    scores = pd.DataFrame.from_records(data)
    scores = pp_q_scores(scores)
    # Count how many questions reached each score level, per theme.
    pt = pd.pivot_table(
        scores,
        index=["Domaine"],
        columns="Score",
        aggfunc="size",
        fill_value=0,
    )
    fig = go.Figure()
    # One bar segment per score level, from "no answer" up to "correct".
    bars = [
        {"score": -1, "name": "Pas de réponse", "color": COLORS["."]},
        {"score": 0, "name": "Faux", "color": COLORS[0]},
        {"score": 1, "name": "Peu juste", "color": COLORS[1]},
        {"score": 2, "name": "Presque juste", "color": COLORS[2]},
        {"score": 3, "name": "Juste", "color": COLORS[3]},
    ]
    for b in bars:
        try:
            fig.add_bar(
                x=list(pt.index), y=pt[b["score"]], name=b["name"], marker_color=b["color"]
            )
        except KeyError:
            # A level absent from the pivot simply gets no bar.
            pass
    fig.update_layout(barmode="relative")
    fig.update_layout(
        height=500,
        margin=dict(l=5, r=5, b=5, t=5),
    )
    return [dcc.Graph(figure=fig)]
|
||||
|
@@ -1,219 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from math import ceil, floor
|
||||
from .config import COLUMNS
|
||||
|
||||
"""
|
||||
Functions for manipulate score dataframes
|
||||
"""
|
||||
|
||||
|
||||
def round_half_point(val):
    """Round *val* up to the nearest multiple of 0.5.

    Values that cannot be handled arithmetically (strings, None, NaN
    conversions, ...) are returned unchanged instead of raising.
    """
    try:
        doubled = 2.0 * val
    except TypeError:
        return val
    try:
        return 0.5 * ceil(doubled)
    except (ValueError, TypeError):
        return val
|
||||
|
||||
|
||||
def score_to_mark(x):
    """Compute the mark for one question row.

    If the item is leveled, the 0-3 score is scaled to the score_rate
    (rounded to 2 decimals); otherwise the raw score is the mark.

    :param x: dictionnary with COLUMNS["is_leveled"], COLUMNS["score"] and COLUMNS["score_rate"] keys
    :return: the mark
    :raises ValueError: when a leveled score is outside {0,1,2,3} or a raw
        score exceeds its rating scale

    >>> d = {"Eleve":["E1"]*6 + ["E2"]*6,
    ...       COLUMNS["score_rate"]:[1]*2+[2]*2+[2]*2 + [1]*2+[2]*2+[2]*2,
    ...       COLUMNS["is_leveled"]:[0]*4+[1]*2 + [0]*4+[1]*2,
    ...       COLUMNS["score"]:[1, 0.33, 2, 1.5, 1, 3, 0.666, 1, 1.5, 1, 2, 3],
    ...       }
    >>> df = pd.DataFrame(d)
    >>> score_to_mark(df.loc[0])
    1.0
    >>> score_to_mark(df.loc[10])
    1.33
    """
    # -1 encodes "no answer": worth zero points.
    if x[COLUMNS["score"]] == -1:
        return 0

    if x[COLUMNS["is_leveled"]]:
        if x[COLUMNS["score"]] not in [0, 1, 2, 3]:
            raise ValueError(
                f"The evaluation is out of range: {x[COLUMNS['score']]} at {x}"
            )
        # Fixed stale doctest above: this rounds to 2 decimals (1.33, not
        # 1.3333333333333333).
        return round(x[COLUMNS["score"]] * x[COLUMNS["score_rate"]] / 3, 2)

    if x[COLUMNS["score"]] > x[COLUMNS["score_rate"]]:
        # Fix: the message previously read x["score"], which raises KeyError
        # whenever COLUMNS["score"] is not literally "score".
        raise ValueError(
            f"The score ({x[COLUMNS['score']]}) is greated than the rating scale ({x[COLUMNS['score_rate']]}) at {x}"
        )
    return x[COLUMNS["score"]]
|
||||
|
||||
|
||||
def score_to_level(x):
    """Compute the level (".",0,1,2,3).

    :param x: dictionnary with COLUMNS["is_leveled"], COLUMNS["score"] and COLUMNS["score_rate"] keys
    :return: the level

    >>> d = {"Eleve":["E1"]*6 + ["E2"]*6,
    ...       COLUMNS["score_rate"]:[1]*2+[2]*2+[2]*2 + [1]*2+[2]*2+[2]*2,
    ...       COLUMNS["is_leveled"]:[0]*4+[1]*2 + [0]*4+[1]*2,
    ...       COLUMNS["score"]:[1, 0.33, np.nan, 1.5, 1, 3, 0.666, 1, 1.5, 1, 2, 3],
    ...       }
    >>> df = pd.DataFrame(d)
    >>> score_to_level(df.loc[0])
    3
    >>> score_to_level(df.loc[1])
    1
    >>> score_to_level(df.loc[2])
    'na'
    >>> score_to_level(df.loc[3])
    3
    >>> score_to_level(df.loc[5])
    3
    >>> score_to_level(df.loc[10])
    2
    """
    # negatives are no answer or negatives points
    # NOTE(review): np.nan <= -1 evaluates to False, so a NaN score falls
    # through to the arithmetic below (int(ceil(nan)) raises); the 'na'
    # doctest above looks stale — TODO confirm intended NaN behavior.
    if x[COLUMNS["score"]] <= -1:
        return np.nan

    if x[COLUMNS["is_leveled"]]:
        # Leveled items already carry the level directly.
        return int(x[COLUMNS["score"]])

    # Raw scores are mapped onto 0-3 by their fraction of the rating scale.
    return int(ceil(x[COLUMNS["score"]] / x[COLUMNS["score_rate"]] * 3))
|
||||
|
||||
|
||||
# DataFrame columns manipulations
|
||||
|
||||
|
||||
def compute_mark(df):
    """Compute the mark for the dataframe.

    Applies score_to_mark to every row.

    :param df: DataFrame with COLUMNS["score"], COLUMNS["is_leveled"] and COLUMNS["score_rate"] columns.

    >>> d = {"Eleve":["E1"]*6 + ["E2"]*6,
    ...       COLUMNS["score_rate"]:[1]*2+[2]*2+[2]*2 + [1]*2+[2]*2+[2]*2,
    ...       COLUMNS["is_leveled"]:[0]*4+[1]*2 + [0]*4+[1]*2,
    ...       COLUMNS["score"]:[1, 0.33, 2, 1.5, 1, 3, 0.666, 1, 1.5, 1, 2, 3],
    ...       }
    >>> df = pd.DataFrame(d)
    >>> compute_mark(df)
    0     1.00
    1     0.33
    2     2.00
    3     1.50
    4     0.67
    5     2.00
    6     0.67
    7     1.00
    8     1.50
    9     1.00
    10    1.33
    11    2.00
    dtype: float64
    """
    relevant = df[[COLUMNS["score"], COLUMNS["is_leveled"], COLUMNS["score_rate"]]]
    return relevant.apply(score_to_mark, axis=1)
|
||||
|
||||
|
||||
def compute_level(df):
    """Compute the level for the dataframe.

    Applies score_to_level to every row.

    :param df: DataFrame with COLUMNS["score"], COLUMNS["is_leveled"] and COLUMNS["score_rate"] columns.
    :return: Columns with level

    >>> d = {"Eleve":["E1"]*6 + ["E2"]*6,
    ...       COLUMNS["score_rate"]:[1]*2+[2]*2+[2]*2 + [1]*2+[2]*2+[2]*2,
    ...       COLUMNS["is_leveled"]:[0]*4+[1]*2 + [0]*4+[1]*2,
    ...       COLUMNS["score"]:[np.nan, 0.33, 2, 1.5, 1, 3, 0.666, 1, 1.5, 1, 2, 3],
    ...       }
    >>> df = pd.DataFrame(d)
    >>> compute_level(df)
    0     na
    1      1
    2      3
    3      3
    4      1
    5      3
    6      2
    7      3
    8      3
    9      2
    10     2
    11     3
    dtype: object
    """
    relevant = df[[COLUMNS["score"], COLUMNS["is_leveled"], COLUMNS["score_rate"]]]
    return relevant.apply(score_to_level, axis=1)
|
||||
|
||||
|
||||
def compute_normalized(df):
    """Compute the normalized mark (Mark / score_rate).

    :param df: DataFrame with COLUMNS["mark"] and COLUMNS["score_rate"] columns
    :return: column with normalized mark

    >>> d = {"Eleve":["E1"]*6 + ["E2"]*6,
    ...       COLUMNS["score_rate"]:[1]*2+[2]*2+[2]*2 + [1]*2+[2]*2+[2]*2,
    ...       COLUMNS["is_leveled"]:[0]*4+[1]*2 + [0]*4+[1]*2,
    ...       COLUMNS["score"]:[1, 0.33, 2, 1.5, 1, 3, 0.666, 1, 1.5, 1, 2, 3],
    ...       }
    >>> df = pd.DataFrame(d)
    >>> df["Mark"] = compute_mark(df)
    >>> compute_normalized(df)
    0     1.00
    1     0.33
    2     1.00
    3     0.75
    4     0.33
    5     1.00
    6     0.67
    7     1.00
    8     0.75
    9     0.50
    10    0.67
    11    1.00
    dtype: float64
    """
    # NOTE: the doctest previously called the nonexistent `compute_marks`;
    # the function is `compute_mark`.
    return df[COLUMNS["mark"]] / df[COLUMNS["score_rate"]]
|
||||
|
||||
|
||||
# Postprocessing question scores
|
||||
|
||||
|
||||
def pp_q_scores(df):
    """Postprocess a questions-scores dataframe.

    Adds three computed columns: mark, level and normalized.

    :param df: questions-scores dataframe
    :return: the same dataframe with mark, level and normalized columns added
    """
    return df.assign(
        **{
            COLUMNS["mark"]: compute_mark,
            COLUMNS["level"]: compute_level,
            COLUMNS["normalized"]: compute_normalized,
        }
    )
|
||||
|
||||
|
||||
# -----------------------------
|
||||
# Reglages pour 'vim'
|
||||
# vim:set autoindent expandtab tabstop=4 shiftwidth=4:
|
||||
# cursor: 16 del
|
@@ -1,10 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import yaml

# Path of the user-editable configuration file, resolved relative to the
# current working directory.
CONFIGPATH = "recoconfig.yml"

# Parsed configuration, loaded once at import time.
# NOTE(review): FullLoader still instantiates some Python objects — fine for
# a trusted local file, do not point it at untrusted input.
with open(CONFIGPATH, "r") as configfile:
    config = yaml.load(configfile, Loader=yaml.FullLoader)
|
||||
|
@@ -1,211 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
# from prompt_toolkit import HTML
|
||||
from ..config import NO_ST_COLUMNS
|
||||
import pandas as pd
|
||||
import yaml
|
||||
from .getconfig import config
|
||||
|
||||
|
||||
def try_parsing_date(text, formats=["%Y-%m-%d", "%Y.%m.%d", "%Y/%m/%d"]):
    """Parse the first 10 characters of *text* as a date.

    Each candidate format is tried in order; the first successful parse wins.

    :param text: string starting with a date (extra characters are ignored)
    :param formats: candidate strptime formats (read-only)
    :return: the parsed datetime
    :raises ValueError: when no format matches
    """
    prefix = text[:10]
    for candidate in formats:
        try:
            return datetime.strptime(prefix, candidate)
        except ValueError:
            continue
    raise ValueError("no valid date format found")
|
||||
|
||||
|
||||
def format_question(question):
    """Normalize a question dict in place: coerce score_rate to float.

    :param question: question dict with a "score_rate" key
    :return: the same dict, mutated
    """
    rate = question["score_rate"]
    question["score_rate"] = float(rate)
    return question
|
||||
|
||||
|
||||
class Exam:
    """An assessment: metadata (name, tribe, date, term) and its exercises.

    Exercises are stored as a dict mapping exercise name to a list of
    question dicts; the serialized key keeps the French spelling
    "exercices".
    """

    def __init__(self, name, tribename, date, term, **kwrds):
        """
        :param name: assessment name
        :param tribename: name of the class/tribe
        :param date: date string accepted by try_parsing_date
        :param term: school term (trimestre)
        :param kwrds: may carry "exercices" with pre-built exercises
        """
        self._name = name
        self._tribename = tribename
        self._date = try_parsing_date(date)
        self._term = term
        # Idiom: dict.get replaces the previous try/except KeyError dance.
        self._exercises = kwrds.get("exercices", {})

    @property
    def name(self):
        """Assessment name."""
        return self._name

    @property
    def tribename(self):
        """Name of the tribe (class) this assessment belongs to."""
        return self._tribename

    @property
    def date(self):
        """Assessment date as a datetime."""
        return self._date

    @property
    def term(self):
        """School term (trimestre)."""
        return self._term

    def add_exercise(self, name, questions):
        """Add key with questions in ._exercises.

        :raises KeyError: when the exercise already exists
        """
        if name in self._exercises:
            raise KeyError("The exercise already exsists. Use modify_exercise")
        self._exercises[name] = [
            format_question(question) for question in questions
        ]

    def modify_exercise(self, name, questions, append=False):
        """Modify questions of an exercise.

        If append==True, add questions to the exercise questions.

        :raises KeyError: when the exercise does not exist yet
        """
        if name not in self._exercises:
            # Fix: the previous message wrongly claimed the exercise already
            # existed and pointed back at modify_exercise itself.
            raise KeyError("The exercise does not exist. Use add_exercise")
        # Fix: format_question takes a single question dict; the previous
        # code passed the whole list, raising TypeError at runtime.
        formatted = [format_question(question) for question in questions]
        if append:
            self._exercises[name] += formatted
        else:
            self._exercises[name] = formatted

    @property
    def exercices(self):
        """Exercises dict (name -> list of question dicts)."""
        return self._exercises

    @property
    def tribe_path(self):
        """Directory where this tribe's files are stored."""
        return Path(config["source"]) / self.tribename

    @property
    def tribe_student_path(self):
        """Path of the csv listing this tribe's students."""
        return (
            Path(config["source"])
            / [t["students"] for t in config["tribes"] if t["name"] == self.tribename][
                0
            ]
        )

    @property
    def long_name(self):
        """Get exam name with date inside"""
        return f"{self.date.strftime('%y%m%d')}_{self.name}"

    def path(self, extention=""):
        """Full path of the exam file with the given extension."""
        return self.tribe_path / (self.long_name + extention)

    def to_dict(self):
        """Serializable dict representation of the exam."""
        return {
            "name": self.name,
            "tribename": self.tribename,
            "date": self.date,
            "term": self.term,
            "exercices": self.exercices,
        }

    def to_row(self):
        """Flatten the exam into one row dict per question."""
        rows = []
        for ex, questions in self.exercices.items():
            for q in questions:
                rows.append(
                    {
                        "term": self.term,
                        "assessment": self.name,
                        "date": self.date.strftime("%d/%m/%Y"),
                        "exercise": ex,
                        "question": q["id"],
                        **q,
                    }
                )
        return rows

    @property
    def themes(self):
        """Set of themes appearing in the questions."""
        themes = set()
        for questions in self._exercises.values():
            themes.update([q["theme"] for q in questions])
        return themes

    def display_exercise(self, name):
        # TODO: not implemented yet.
        pass

    def display(self, name):
        # TODO: not implemented yet.
        pass

    def write_yaml(self):
        """Dump the exam to <tribe>/<long_name>.yml."""
        print(f"Sauvegarde temporaire dans {self.path('.yml')}")
        self.tribe_path.mkdir(exist_ok=True)
        with open(self.path(".yml"), "w") as f:
            f.write(yaml.dump(self.to_dict()))

    def write_csv(self):
        """Write the scoring csv: one row per question, one empty column per
        student of the tribe.
        """
        rows = self.to_row()

        # Fix: removed a leftover `print(rows)` debug statement.
        base_df = pd.DataFrame.from_dict(rows)[NO_ST_COLUMNS.keys()]
        base_df.rename(columns=NO_ST_COLUMNS, inplace=True)

        students = pd.read_csv(self.tribe_student_path)["Nom"]
        for student in students:
            base_df[student] = ""

        self.tribe_path.mkdir(exist_ok=True)
        base_df.to_csv(self.path(".csv"), index=False)

    @property
    def score_rate(self):
        """Total rating scale of the exam (sum over all questions)."""
        total = 0
        for ex, questions in self._exercises.items():
            total += sum([q["score_rate"] for q in questions])

        return total

    @property
    def competences_rate(self):
        """Dictionnary with competences as key and total rate as value"""
        rates = {}
        for ex, questions in self._exercises.items():
            for q in questions:
                try:
                    competence = q["competence"]
                except KeyError:
                    # Questions without a competence are simply skipped.
                    continue
                rates[competence] = rates.get(competence, 0) + q["score_rate"]
        return rates

    @property
    def themes_rate(self):
        """Dictionnary with themes as key and total rate as value"""
        rates = {}
        for ex, questions in self._exercises.items():
            for q in questions:
                # Questions without a (truthy) theme are skipped.
                theme = q.get("theme")
                if theme:
                    rates[theme] = rates.get(theme, 0) + q["score_rate"]
        return rates
|
@@ -1,9 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
import yaml

# Path of the user-editable configuration file, resolved relative to the
# current working directory.
CONFIGPATH = "recoconfig.yml"

# Fix: the file handle was previously named `config` and then immediately
# rebound to the parsed dict, shadowing the name it was about to define.
with open(CONFIGPATH, "r") as configfile:
    config = yaml.load(configfile, Loader=yaml.FullLoader)
|
||||
|
@@ -1,160 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import click
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from PyInquirer import prompt, print_json
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
|
||||
from .config import config
|
||||
from ..config import NO_ST_COLUMNS
|
||||
|
||||
|
||||
class PromptAbortException(EOFError):
    """Raised when the user aborts an interactive questionnary."""

    def __init__(self, message, errors=None):
        # The stored message is fixed; *message* is accepted only for API
        # compatibility with callers that pass a description.
        super().__init__("Abort questionnary", errors)
|
||||
|
||||
|
||||
def get_tribes(answers):
    """List tribes: subdirectories of config["source"] containing an
    "eleves.csv" file.

    :param answers: unused; present because PyInquirer passes current answers
        to dynamic choice callables
    :return: list of tribe directory names
    """
    source = Path(config["source"])
    return [child.name for child in source.iterdir() if (child / "eleves.csv").exists()]
|
||||
|
||||
|
||||
def prepare_csv():
    """Interactively build a new assessment and write its csv scaffold.

    Prompts for the assessment metadata and items, then writes one csv with
    one row per item and one empty score column per student of the tribe.
    """
    items = new_eval()

    # All items share the same tribe/date/assessment; read them off the first.
    item = items[0]
    # item = {"tribe": "308", "date": datetime.today(), "assessment": "plop"}
    csv_output = (
        Path(config["source"])
        / item["tribe"]
        / f"{item['date']:%y%m%d}_{item['assessment']}.csv"
    )

    students = pd.read_csv(Path(config["source"]) / item["tribe"] / "eleves.csv")["Nom"]

    # Reorder item fields to the canonical order, then rename columns to the
    # display names declared in NO_ST_COLUMNS.
    columns = list(NO_ST_COLUMNS.keys())
    items = [[it[c] for c in columns] for it in items]
    columns = list(NO_ST_COLUMNS.values())
    items_df = pd.DataFrame.from_records(items, columns=columns)
    # One empty (NaN) score column per student.
    for s in students:
        items_df[s] = np.nan

    items_df.to_csv(csv_output, index=False, date_format="%d/%m/%Y")
    click.echo(f"Saving csv file to {csv_output}")
|
||||
|
||||
|
||||
def new_eval(answers=None):
    """Interactively build a new assessment: metadata then exercises.

    :param answers: unused; kept for backward compatibility. Fix: the
        previous signature used a mutable default dict (Python anti-pattern).
    :return: list of item dicts, one per scoring element
    """
    click.echo(f"Préparation d'un nouveau devoir")

    eval_questions = [
        {"type": "input", "name": "assessment", "message": "Nom de l'évaluation",},
        {
            "type": "list",
            "name": "tribe",
            "message": "Classe concernée",
            "choices": get_tribes,
        },
        {
            "type": "input",
            "name": "date",
            "message": "Date du devoir (%y%m%d)",
            "default": datetime.today().strftime("%y%m%d"),
            # Parse the 6-digit answer into a datetime right away.
            "filter": lambda val: datetime.strptime(val, "%y%m%d"),
        },
        {
            "type": "list",
            "name": "term",
            "message": "Trimestre",
            "choices": ["1", "2", "3"],
        },
    ]

    eval_ans = prompt(eval_questions)

    # Keep adding exercises until the user declines.
    items = []
    add_exo = True
    while add_exo:
        ex_items = new_exercice(eval_ans)
        items += ex_items
        add_exo = prompt(
            [
                {
                    "type": "confirm",
                    "name": "add_exo",
                    "message": "Ajouter un autre exercice",
                    "default": True,
                }
            ]
        )["add_exo"]
    return items
|
||||
|
||||
|
||||
def new_exercice(answers=None):
    """Interactively build one exercise and its scoring items.

    :param answers: pre-filled answers forwarded to PyInquirer. Fix: the
        previous signature used a mutable default dict (Python anti-pattern).
    :return: list of item answer dicts
    """
    answers = {} if answers is None else answers
    exercise_questions = [
        {"type": "input", "name": "exercise", "message": "Nom de l'exercice"},
    ]

    click.echo(f"Nouvel exercice")
    exercise_ans = prompt(exercise_questions, answers=answers)

    items = []

    # Keep adding items until the user declines; an aborted item is dropped.
    add_item = True
    while add_item:
        try:
            item_ans = new_item(exercise_ans)
        except PromptAbortException:
            click.echo("Création de l'item annulée")
        else:
            items.append(item_ans)
        add_item = prompt(
            [
                {
                    "type": "confirm",
                    "name": "add_item",
                    "message": f"Ajouter un autre item pour l'exercice {exercise_ans['exercise']}",
                    "default": True,
                }
            ]
        )["add_item"]

    return items
|
||||
|
||||
|
||||
def new_item(answers=None):
    """Prompt for a single scoring item of an exercise.

    :param answers: pre-filled answers forwarded to PyInquirer; must carry
        an "exercise" key. Fix: the previous signature used a mutable default
        dict (Python anti-pattern).
    :return: the answers dict when the user confirms the recap
    :raises PromptAbortException: when the user rejects the recap
    """
    answers = {} if answers is None else answers
    item_questions = [
        {"type": "input", "name": "question", "message": "Nom de l'item",},
        {"type": "input", "name": "comment", "message": "Commentaire",},
        {
            "type": "list",
            "name": "competence",
            "message": "Competence",
            "choices": ["Cher", "Rep", "Mod", "Rai", "Cal", "Com"],
        },
        {"type": "input", "name": "theme", "message": "Domaine",},
        {
            "type": "confirm",
            "name": "is_leveled",
            "message": "Évaluation par niveau",
            "default": True,
        },
        {"type": "input", "name": "score_rate", "message": "Bareme"},
        {
            "type": "confirm",
            "name": "correct",
            "message": "Tout est correct?",
            "default": True,
        },
    ]
    click.echo(f"Nouvelle question pour l'exercice {answers['exercise']}")
    item_ans = prompt(item_questions, answers=answers)
    if item_ans["correct"]:
        return item_ans
    raise PromptAbortException("Abort item creation")
|
@@ -1,233 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
|
||||
from prompt_toolkit import prompt, HTML, ANSI
|
||||
from prompt_toolkit import print_formatted_text as print
|
||||
from prompt_toolkit.styles import Style
|
||||
from prompt_toolkit.validation import Validator
|
||||
from prompt_toolkit.completion import WordCompleter
|
||||
from unidecode import unidecode
|
||||
from datetime import datetime
|
||||
from functools import wraps
|
||||
import sys
|
||||
|
||||
from .getconfig import config
|
||||
|
||||
|
||||
# Answers accepted as "yes" by prompt_validate.
VALIDATE = [
    "o",
    "ok",
    "OK",
    "oui",
    "OUI",
    "yes",
    "YES",
]
# Answers accepted as "no".
REFUSE = ["n", "non", "NON", "no", "NO"]
# Answers accepted as a cancellation (only when cancelable is enabled).
CANCEL = ["a", "annuler"]

# prompt_toolkit style classes used by the prompts in this module.
STYLE = Style.from_dict(
    {
        "": "#93A1A1",
        "validation": "#884444",
        "appending": "#448844",
    }
)
|
||||
|
||||
|
||||
class CancelError(Exception):
    """Raised when the user cancels a prompt sequence."""
|
||||
|
||||
|
||||
def prompt_validate(question, cancelable=False, empty_means=1, style="validation"):
    """Prompt for validation

    :param question: Text to print to ask the question.
    :param cancelable: enable cancel answer
    :param empty_means: result for no answer
    :return:
        0 -> Refuse
        1 -> Validate
        -1 -> cancel
    """
    text = question
    accepted = VALIDATE + REFUSE

    if cancelable:
        text += "(a ou annuler pour sortir)"
        accepted += CANCEL

    ans = prompt(
        [
            (f"class:{style}", text),
        ],
        completer=WordCompleter(accepted),
        style=STYLE,
    ).lower()

    if ans == "":
        return empty_means
    if ans in VALIDATE:
        return 1
    if cancelable and ans in CANCEL:
        return -1
    return 0
|
||||
|
||||
|
||||
def prompt_until_validate(question="C'est ok? ", cancelable=False):
    """Decorator factory: re-run the wrapped prompt function until the user
    validates its result.

    The wrapped function is called, then *question* is asked; on refusal the
    function is called again with the previous answers fed back as keyword
    arguments (so defaults are pre-filled), until validation.

    :param question: confirmation text shown after each round
    :param cancelable: when True, the user may cancel
    :raises CancelError: when the user cancels (propagated from the wrapper)
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwrd):
            ans = func(*args, **kwrd)

            confirm = prompt_validate(question, cancelable)

            if confirm == -1:
                raise CancelError

            while not confirm:
                sys.stdout.flush()
                # Feed the previous answers back so the user only edits
                # what was wrong.
                ans = func(*args, **ans, **kwrd)
                confirm = prompt_validate(question, cancelable)
                if confirm == -1:
                    raise CancelError
            return ans

        return wrapper

    return decorator
|
||||
|
||||
|
||||
@prompt_until_validate()
def prompt_exam(**kwrd):
    """ Prompt questions to edit an exam """
    print(HTML("<b>Nouvelle évaluation</b>"))
    exam = {}
    exam["name"] = prompt("Nom de l'évaluation: ", default=kwrd.get("name", "DS"))

    tribes_name = [t["name"] for t in config["tribes"]]

    # The tribe name must match one of the configured tribes.
    exam["tribename"] = prompt(
        "Nom de la classe: ",
        default=kwrd.get("tribename", ""),
        completer=WordCompleter(tribes_name),
        validator=Validator.from_callable(lambda x: x in tribes_name),
    )
    exam["tribe"] = [t for t in config["tribes"] if t["name"] == exam["tribename"]][0]

    # The date is asked as a 6-digit string, then parsed to a datetime.
    exam["date"] = prompt(
        "Date de l'évaluation (%y%m%d): ",
        default=kwrd.get("date", datetime.today()).strftime("%y%m%d"),
        validator=Validator.from_callable(lambda x: (len(x) == 6) and x.isdigit()),
    )
    exam["date"] = datetime.strptime(exam["date"], "%y%m%d")

    exam["term"] = prompt(
        "Trimestre: ",
        validator=Validator.from_callable(lambda x: x.isdigit()),
        default=kwrd.get("term", "1"),
    )

    return exam
|
||||
|
||||
|
||||
@prompt_until_validate()
def prompt_exercise(number=1, completer={}, **kwrd):
    """Prompt questions to create or edit an exercise and its questions.

    :param number: ordinal used in the default exercise name
    :param completer: completion word lists keyed by field (e.g. "theme")
    :param kwrd: previous answers ("name", "questions") when re-editing
    """
    # NOTE(review): `completer={}` is a mutable default argument; it is only
    # read here, but consider `completer=None` — TODO confirm.
    exercise = {}
    try:
        kwrd["name"]
    except KeyError:
        # No previous name: this is a brand-new exercise.
        print(HTML("<b>Nouvel exercice</b>"))
        exercise["name"] = prompt(
            "Nom de l'exercice: ", default=kwrd.get("name", f"Exercice {number}")
        )
    else:
        print(HTML(f"<b>Modification de l'exercice: {kwrd['name']}</b>"))
        exercise["name"] = kwrd["name"]

    exercise["questions"] = []

    try:
        kwrd["questions"][0]
    except KeyError:
        last_question_id = "1a"
    except IndexError:
        last_question_id = "1a"
    else:
        # Re-edit each existing question; a cancelled one is dropped.
        for ques in kwrd["questions"]:
            try:
                exercise["questions"].append(
                    prompt_question(completer=completer, **ques)
                )
            except CancelError:
                print("Cette question a été supprimée")
        # NOTE(review): raises IndexError when every question was cancelled
        # (empty list indexed with [-1]) — TODO confirm intended.
        last_question_id = exercise["questions"][-1]["id"]

    appending = prompt_validate(
        question="Ajouter un élément de notation? ", style="appending"
    )
    while appending:
        try:
            exercise["questions"].append(
                prompt_question(last_question_id, completer=completer)
            )
        except CancelError:
            print("Cette question a été supprimée")
        else:
            last_question_id = exercise["questions"][-1]["id"]
        appending = prompt_validate(
            question="Ajouter un élément de notation? ", style="appending"
        )

    return exercise
|
||||
|
||||
|
||||
@prompt_until_validate(cancelable=True)
def prompt_question(last_question_id="1a", completer={}, **kwrd):
    """Prompt the fields of one scoring element (question).

    :param last_question_id: id of the previous question, shown as context
        (currently unused in the defaults — see the "id" prompt below)
    :param completer: completion word lists keyed by field (e.g. "theme")
    :param kwrd: previous answers when re-editing an existing question
    :raises CancelError: when the user cancels (via the decorator)
    """
    # NOTE(review): `completer={}` is a mutable default argument; only read
    # here, but consider `completer=None` — TODO confirm.
    try:
        kwrd["id"]
    except KeyError:
        print(HTML("<b>Nouvel élément de notation</b>"))
    else:
        print(
            HTML(f"<b>Modification de l'élément {kwrd['id']} ({kwrd['comment']})</b>")
        )

    question = {}
    question["id"] = prompt(
        "Identifiant de la question: ",
        default=kwrd.get("id", "1a"),
    )

    # Competence must be one of the configured competence keys.
    question["competence"] = prompt(
        "Competence: ",
        default=kwrd.get("competence", list(config["competences"].keys())[0]),
        completer=WordCompleter(config["competences"].keys()),
        validator=Validator.from_callable(lambda x: x in config["competences"].keys()),
    )

    question["theme"] = prompt(
        "Domaine: ",
        default=kwrd.get("theme", ""),
        completer=WordCompleter(completer.get("theme", [])),
    )

    question["comment"] = prompt(
        "Commentaire: ",
        default=kwrd.get("comment", ""),
    )

    # NOTE(review): stored as the raw string the user typed ("1"/"0"), not a
    # bool, and unvalidated — TODO confirm downstream expectations.
    question["is_leveled"] = prompt(
        "Évaluation par niveau: ",
        default=kwrd.get("is_leveled", "1"),
        # validate
    )

    question["score_rate"] = prompt(
        "Barème: ",
        default=kwrd.get("score_rate", "1"),
        # validate
    )

    return question
|
@@ -1,84 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import click
|
||||
from pathlib import Path
|
||||
import sys
|
||||
import papermill as pm
|
||||
import pandas as pd
|
||||
from datetime import datetime
|
||||
import yaml
|
||||
|
||||
from .getconfig import config, CONFIGPATH
|
||||
from ..config import NO_ST_COLUMNS
|
||||
from .exam import Exam
|
||||
from ..dashboard.index import app as dash
|
||||
|
||||
|
||||
# Root click group: every subcommand below attaches to it.
# (No docstring on purpose: click would surface it as help text.)
@click.group()
def cli():
    pass
|
||||
|
||||
|
||||
@cli.command()
def print_config():
    # Show where the configuration file lives and its parsed content.
    click.echo(f"Config file is {CONFIGPATH}")
    click.echo("It contains")
    click.echo(config)
|
||||
|
||||
|
||||
@cli.command()
def setup():
    """Setup the environnement using recoconfig.yml"""
    # Create one working directory per configured tribe and warn about
    # missing student list files (non-fatal).
    for tribe in config["tribes"]:
        Path(tribe["name"]).mkdir(exist_ok=True)
        if not Path(tribe["students"]).exists():
            print(f"The file {tribe['students']} does not exists")
|
||||
|
||||
|
||||
@cli.command()
@click.option("--debug", default=0, help="Debug mode for dash")
def dashboard(debug):
    # Launch the dash server; --debug is an int option coerced to bool.
    dash.run_server(debug=bool(debug))
|
||||
|
||||
|
||||
@cli.command()
@click.argument("csv_file")
def report(csv_file):
    # Build the evaluation notebook report for one assessment csv file.
    # Expected filename layout: <tribe_dir>/<yymmdd>_<assessment>.csv
    csv = Path(csv_file)
    if not csv.exists():
        click.echo(f"{csv_file} does not exists")
        sys.exit(1)
    if csv.suffix != ".csv":
        click.echo(f"{csv_file} has to be a csv file")
        sys.exit(1)

    csv_file = Path(csv_file)
    tribe_dir = csv_file.parent
    csv_filename = csv_file.name.split(".")[0]

    assessment = str(csv_filename).split("_")[-1].capitalize()
    date = str(csv_filename).split("_")[0]
    try:
        date = datetime.strptime(date, "%y%m%d")
    except ValueError:
        # Fix: the previous fallback called strptime on a datetime instance
        # with the same arguments, re-raising the very ValueError it was
        # meant to handle. Fall back to today's date instead.
        date = datetime.today()

    tribe = str(tribe_dir).split("/")[-1]

    template = Path(config["templates"]) / "tpl_evaluation.ipynb"

    dest = Path(config["output"]) / tribe / csv_filename
    dest.mkdir(parents=True, exist_ok=True)

    click.echo(f"Building {assessment} ({date:%d/%m/%y}) report")
    pm.execute_notebook(
        str(template),
        str(dest / f"{assessment}.ipynb"),
        parameters=dict(
            tribe=tribe,
            assessment=assessment,
            date=f"{date:%d/%m/%y}",
            csv_file=str(csv_file.absolute()),
        ),
    )
|
@@ -1,77 +0,0 @@
|
||||
prompt_toolkit
|
||||
ansiwrap==0.8.4
|
||||
appdirs==1.4.3
|
||||
attrs==19.1.0
|
||||
backcall==0.1.0
|
||||
black==19.10b0
|
||||
bleach==3.1.0
|
||||
certifi==2019.6.16
|
||||
chardet==3.0.4
|
||||
Click==7.0
|
||||
colorama==0.4.1
|
||||
cycler==0.10.0
|
||||
decorator==4.4.0
|
||||
defusedxml==0.6.0
|
||||
entrypoints==0.3
|
||||
future==0.17.1
|
||||
idna==2.8
|
||||
importlib-resources==1.0.2
|
||||
ipykernel==5.1.3
|
||||
ipython==7.11.1
|
||||
ipython-genutils==0.2.0
|
||||
ipywidgets==7.5.1
|
||||
jedi==0.15.2
|
||||
Jinja2==2.10.3
|
||||
jsonschema==3.2.0
|
||||
jupyter==1.0.0
|
||||
jupyter-client==5.3.4
|
||||
jupyter-console==6.1.0
|
||||
jupyter-core==4.6.1
|
||||
jupytex==0.0.3
|
||||
kiwisolver==1.1.0
|
||||
Markdown==3.1.1
|
||||
MarkupSafe==1.1.1
|
||||
matplotlib==3.1.2
|
||||
mistune==0.8.4
|
||||
nbconvert==5.6.1
|
||||
nbformat==5.0.3
|
||||
notebook==6.0.3
|
||||
numpy==1.18.1
|
||||
pandas==0.25.3
|
||||
pandocfilters==1.4.2
|
||||
papermill==1.2.1
|
||||
parso==0.5.2
|
||||
pathspec==0.7.0
|
||||
pexpect==4.8.0
|
||||
pickleshare==0.7.5
|
||||
prometheus-client==0.7.1
|
||||
prompt-toolkit==1.0.14
|
||||
ptyprocess==0.6.0
|
||||
Pygments==2.5.2
|
||||
PyInquirer==1.0.3
|
||||
pyparsing==2.4.6
|
||||
pyrsistent==0.15.7
|
||||
python-dateutil==2.8.0
|
||||
pytz==2019.3
|
||||
PyYAML==5.3
|
||||
pyzmq==18.1.1
|
||||
qtconsole==4.6.0
|
||||
-e git+git_opytex:/lafrite/recopytex.git@7e026bedb24c1ca8bef3b71b3d63f8b0d6916e81#egg=Recopytex
|
||||
regex==2020.1.8
|
||||
requests==2.22.0
|
||||
scipy==1.4.1
|
||||
Send2Trash==1.5.0
|
||||
six==1.12.0
|
||||
tenacity==6.0.0
|
||||
terminado==0.8.3
|
||||
testpath==0.4.4
|
||||
textwrap3==0.9.2
|
||||
toml==0.10.0
|
||||
tornado==6.0.3
|
||||
tqdm==4.41.1
|
||||
traitlets==4.3.2
|
||||
typed-ast==1.4.1
|
||||
urllib3==1.25.8
|
||||
wcwidth==0.1.8
|
||||
webencodings==0.5.1
|
||||
widgetsnbextension==3.5.1
|
@@ -1,69 +0,0 @@
|
||||
ansiwrap
|
||||
attrs
|
||||
backcall
|
||||
bleach
|
||||
certifi
|
||||
chardet
|
||||
Click
|
||||
colorama
|
||||
cycler
|
||||
decorator
|
||||
defusedxml
|
||||
entrypoints
|
||||
future
|
||||
idna
|
||||
importlib-resources
|
||||
ipykernel
|
||||
ipython
|
||||
ipython-genutils
|
||||
ipywidgets
|
||||
jedi
|
||||
Jinja2
|
||||
jsonschema
|
||||
jupyter
|
||||
jupyter-client
|
||||
jupyter-console
|
||||
jupyter-core
|
||||
jupytex
|
||||
kiwisolver
|
||||
MarkupSafe
|
||||
matplotlib
|
||||
mistune
|
||||
nbconvert
|
||||
nbformat
|
||||
notebook
|
||||
numpy
|
||||
pandas
|
||||
pandocfilters
|
||||
papermill
|
||||
parso
|
||||
pexpect
|
||||
pickleshare
|
||||
prometheus-client
|
||||
prompt-toolkit
|
||||
ptyprocess
|
||||
Pygments
|
||||
pyparsing
|
||||
pyrsistent
|
||||
python-dateutil
|
||||
pytz
|
||||
PyYAML
|
||||
pyzmq
|
||||
qtconsole
|
||||
-e git+git_opytex:/lafrite/recopytex.git@e9a8310f151ead60434ae944d726a2fd22b23d06#egg=Recopytex
|
||||
requests
|
||||
scipy
|
||||
seaborn
|
||||
Send2Trash
|
||||
six
|
||||
tenacity
|
||||
terminado
|
||||
testpath
|
||||
textwrap3
|
||||
tornado
|
||||
tqdm
|
||||
traitlets
|
||||
urllib3
|
||||
wcwidth
|
||||
webencodings
|
||||
widgetsnbextension
|
30
setup.py
30
setup.py
@@ -1,30 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
from setuptools import setup, find_packages
|
||||
|
||||
setup(
|
||||
name='Recopytex',
|
||||
version='1.1.1',
|
||||
description='Assessment analysis',
|
||||
author='Benjamin Bertrand',
|
||||
author_email='',
|
||||
packages=find_packages(),
|
||||
include_package_data=True,
|
||||
install_requires=[
|
||||
'Click',
|
||||
'pandas',
|
||||
'numpy',
|
||||
'papermill',
|
||||
'pyyaml',
|
||||
],
|
||||
entry_points='''
|
||||
[console_scripts]
|
||||
recopytex=recopytex.scripts.recopytex:cli
|
||||
''',
|
||||
)
|
||||
|
||||
# -----------------------------
|
||||
# Reglages pour 'vim'
|
||||
# vim:set autoindent expandtab tabstop=4 shiftwidth=4:
|
||||
# cursor: 16 del
|
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
134
tests/conftest.py
Normal file
134
tests/conftest.py
Normal file
@@ -0,0 +1,134 @@
|
||||
import sqlite3
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
from sqlalchemy import create_engine
|
||||
from sqlalchemy.orm import clear_mappers, sessionmaker
|
||||
|
||||
from backend import config
|
||||
from backend.adapters.orm import metadata, start_mappers
|
||||
from backend.adapters.sqlite import create_db
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from tests.model.fakes import build_student, build_tribes
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def in_memory_db():
|
||||
engine = create_engine("sqlite:///:memory:")
|
||||
metadata.create_all(engine)
|
||||
return engine
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def session(in_memory_db):
|
||||
start_mappers()
|
||||
yield sessionmaker(bind=in_memory_db)()
|
||||
clear_mappers()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def memory_sqlite_conn():
|
||||
sqlite_db = ":memory:"
|
||||
conn = sqlite3.connect(sqlite_db)
|
||||
create_db(conn)
|
||||
yield conn
|
||||
conn.close()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def clean_db():
|
||||
sqlite_db = "sqlite.db"
|
||||
conn = sqlite3.connect(sqlite_db)
|
||||
create_db(conn)
|
||||
yield
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("""DROP TABLE tribes""")
|
||||
cursor.execute("""DROP TABLE students""")
|
||||
conn.commit()
|
||||
|
||||
|
||||
def populate_tribes(conn) -> list[Tribe]:
|
||||
cursor = conn.cursor()
|
||||
tribes = build_tribes(3)
|
||||
cursor.executemany(
|
||||
"""
|
||||
INSERT INTO tribes(name, level) VALUES (?, ?)
|
||||
""",
|
||||
[t.to_tuple() for t in tribes],
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
return tribes
|
||||
|
||||
|
||||
def populate_students(conn, tribes: list[Tribe]) -> list[Student]:
|
||||
cursor = conn.cursor()
|
||||
prebuild_students = build_student(tribes, 2)
|
||||
cursor.executemany(
|
||||
"""
|
||||
INSERT INTO students(id, name, tribe_name) VALUES (:id, :name, :tribe_name)
|
||||
""",
|
||||
[s.to_dict() for s in prebuild_students],
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
return prebuild_students
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def populate_db():
|
||||
class Student_tribe_context:
|
||||
_tribes = []
|
||||
_students = []
|
||||
|
||||
def __init__(self, conn):
|
||||
self.conn = conn
|
||||
|
||||
def __enter__(self):
|
||||
self._tribes += populate_tribes(self.conn)
|
||||
self._students += populate_students(self.conn, self._tribes)
|
||||
return self._tribes, self._students
|
||||
|
||||
def __exit__(self, *args):
|
||||
|
||||
for student in self._students:
|
||||
self.conn.execute(
|
||||
"""
|
||||
DELETE FROM students WHERE id=:id
|
||||
""",
|
||||
{"id": student.id},
|
||||
)
|
||||
for tribe in self._tribes:
|
||||
self.conn.execute(
|
||||
"""
|
||||
DELETE FROM tribes WHERE name=:name
|
||||
""",
|
||||
{"name": tribe.name},
|
||||
)
|
||||
self.conn.commit()
|
||||
|
||||
def fixture(conn):
|
||||
return Student_tribe_context(conn)
|
||||
|
||||
yield fixture
|
||||
|
||||
|
||||
def wait_for_webapp_to_come_up():
|
||||
deadline = time.time() + 10
|
||||
url = config.get_api_url()
|
||||
while time.time() < deadline:
|
||||
try:
|
||||
return requests.get(url)
|
||||
except ConnectionError:
|
||||
time.sleep(0.5)
|
||||
pytest.fail("API never came up")
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def restart_api():
|
||||
(Path(__file__).parent.parent / "backend" / "api" / "main.py").touch()
|
||||
time.sleep(0.5)
|
||||
wait_for_webapp_to_come_up()
|
104
tests/e2e/test_api_students.py
Normal file
104
tests/e2e/test_api_students.py
Normal file
@@ -0,0 +1,104 @@
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from backend import config
|
||||
from tests.model.fakes import build_student, build_tribes
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_post_student():
|
||||
url = config.get_api_url()
|
||||
tribe = build_tribes(1)[0]
|
||||
requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
data = {"name": "zart", "tribe_name": tribe.name}
|
||||
r = requests.post(f"{url}/students", json=data)
|
||||
|
||||
post_request = r.history[0]
|
||||
assert post_request.status_code == 302
|
||||
|
||||
assert r.status_code == 200
|
||||
assert r.json()["name"] == "zart"
|
||||
assert r.json()["tribe_name"] == tribe.name
|
||||
assert r.json()["id"]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_post_student_with_id():
|
||||
url = config.get_api_url()
|
||||
tribe = build_tribes(1)[0]
|
||||
requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
data = {"id": "1234", "name": "zart", "tribe_name": tribe.name}
|
||||
requests.post(f"{url}/students", json=data)
|
||||
r = requests.post(f"{url}/students", json=data)
|
||||
|
||||
assert r.status_code == 409
|
||||
assert (
|
||||
r.json()
|
||||
== f"You can't post a student with an id. It is already registrered. Use PUT to modify it."
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_post_student_in_non_existant_tribe():
|
||||
url = config.get_api_url()
|
||||
tribe = build_tribes(1)[0]
|
||||
requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
data = {"name": "zart", "tribe_name": tribe.name + "_"}
|
||||
requests.post(f"{url}/students", json=data)
|
||||
r = requests.post(f"{url}/students", json=data)
|
||||
|
||||
assert r.status_code == 409
|
||||
assert (
|
||||
r.json()
|
||||
== f"The tribe {tribe.name+'_'} does not exists. You can't add a student in it."
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_put_student():
|
||||
url = config.get_api_url()
|
||||
tribe = build_tribes(1)[0]
|
||||
requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
data = {"name": "zart", "tribe_name": tribe.name}
|
||||
r = requests.post(f"{url}/students", json=data)
|
||||
|
||||
student = r.json()
|
||||
student["name"] = "Choupinou"
|
||||
|
||||
r2 = requests.put(f"{url}/students/{student['id']}", json=student)
|
||||
|
||||
post_request = r2.history[0]
|
||||
assert post_request.status_code == 302
|
||||
|
||||
assert r2.status_code == 200
|
||||
assert r2.json()["name"] == "Choupinou"
|
||||
assert r2.json()["tribe_name"] == tribe.name
|
||||
assert r2.json()["id"] == r.json()["id"]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_delete_student():
|
||||
url = config.get_api_url()
|
||||
tribe = build_tribes(1)[0]
|
||||
requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
student = build_student([tribe], 1)[0]
|
||||
r = requests.post(
|
||||
f"{url}/students", json={"name": student.name, "tribe_name": student.tribe.name}
|
||||
)
|
||||
student_id = r.json()["id"]
|
||||
|
||||
r = requests.delete(f"{url}/students/{student_id}")
|
||||
assert r.status_code == 204
|
||||
|
||||
r = requests.get(f"{url}/students/")
|
||||
assert r.json() == []
|
119
tests/e2e/test_api_tribes.py
Normal file
119
tests/e2e/test_api_tribes.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import pytest
|
||||
import requests
|
||||
|
||||
from backend import config
|
||||
from tests.model.fakes import build_tribes
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_post_tribe():
|
||||
data = {"name": "tribe", "level": "2nd"}
|
||||
|
||||
url = config.get_api_url()
|
||||
r = requests.post(f"{url}/tribes", json=data)
|
||||
|
||||
post_request = r.history[0]
|
||||
assert post_request.status_code == 302
|
||||
|
||||
assert r.status_code == 200
|
||||
assert r.json() == {
|
||||
"assessments": [],
|
||||
"level": "2nd",
|
||||
"name": "tribe",
|
||||
"students": [],
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_post_tribe_already_exists():
|
||||
data = {"name": "Pioupiou", "level": "2nd"}
|
||||
|
||||
url = config.get_api_url()
|
||||
requests.post(f"{url}/tribes", json=data)
|
||||
r = requests.post(f"{url}/tribes", json=data)
|
||||
|
||||
assert r.status_code == 409
|
||||
assert r.json() == f"The tribe {data['name']} already exists"
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_put_tribe():
|
||||
tribe = build_tribes(1)[0]
|
||||
|
||||
url = config.get_api_url()
|
||||
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
mod_tribe = tribe
|
||||
mod_tribe.level = "other level"
|
||||
r = requests.put(f"{url}/tribes/{tribe.name}", json=mod_tribe.to_dict())
|
||||
post_request = r.history[0]
|
||||
assert post_request.status_code == 302
|
||||
|
||||
assert r.status_code == 200
|
||||
|
||||
r = requests.get(f"{url}/tribes")
|
||||
assert [t["name"] for t in r.json()] == [mod_tribe.name]
|
||||
assert [t["level"] for t in r.json()] == [mod_tribe.level]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_put_tribe_doesnt_exists():
|
||||
tribe = build_tribes(1)[0]
|
||||
|
||||
url = config.get_api_url()
|
||||
r = requests.put(f"{url}/tribes/{tribe.name}", json=tribe.to_dict())
|
||||
assert r.status_code == 409
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_delete_tribe():
|
||||
tribe = build_tribes(1)[0]
|
||||
|
||||
url = config.get_api_url()
|
||||
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
r = requests.delete(f"{url}/tribes/{tribe.name}")
|
||||
assert r.status_code == 204
|
||||
|
||||
r = requests.get(f"{url}/tribes")
|
||||
assert r.json() == []
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_delete_tribe_doesnt_exists():
|
||||
tribe = build_tribes(1)[0]
|
||||
|
||||
url = config.get_api_url()
|
||||
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
r = requests.delete(f"{url}/tribes/notexisting")
|
||||
assert r.status_code == 409
|
||||
|
||||
r = requests.get(f"{url}/tribes")
|
||||
assert [t["name"] for t in r.json()] == [tribe.name]
|
||||
assert [t["level"] for t in r.json()] == [tribe.level]
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("restart_api")
|
||||
@pytest.mark.usefixtures("clean_db")
|
||||
def test_api_post_list_tribe():
|
||||
tribe = build_tribes(1)[0]
|
||||
|
||||
url = config.get_api_url()
|
||||
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
|
||||
|
||||
r = requests.get(f"{url}/tribes")
|
||||
assert r.json() == [
|
||||
{
|
||||
"assessments": [],
|
||||
"level": tribe.level,
|
||||
"name": tribe.name,
|
||||
"students": [],
|
||||
}
|
||||
]
|
43
tests/integration/test_orm.py
Normal file
43
tests/integration/test_orm.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from backend.adapters.orm import metadata, start_mappers
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
|
||||
|
||||
def test_tribes_mapper_can_load_tribe(session):
|
||||
session.execute(
|
||||
"INSERT INTO tribes (name, level) VALUES "
|
||||
"('tribe1', '2nd'),"
|
||||
"('tribe2', '1ST')"
|
||||
)
|
||||
expected = [
|
||||
(Tribe("tribe1", "2nd")),
|
||||
(Tribe("tribe2", "1ST")),
|
||||
]
|
||||
assert session.query(Tribe).all() == expected
|
||||
|
||||
|
||||
def test_tribe_mapper_can_save_tribe(session):
|
||||
tribe_infos = ("tribe1", "2nd")
|
||||
tribe = Tribe(*tribe_infos)
|
||||
session.add(tribe)
|
||||
|
||||
rows = list(session.execute("SELECT name, level FROM 'tribes'"))
|
||||
assert rows == []
|
||||
|
||||
session.commit()
|
||||
|
||||
rows = list(session.execute("SELECT name, level FROM 'tribes'"))
|
||||
assert rows == [tribe_infos]
|
||||
|
||||
|
||||
def test_tribe_mapper_can_save_and_load_tribe(session):
|
||||
tribe_infos = ("tribe1", "2nd")
|
||||
tribe = Tribe(*tribe_infos)
|
||||
|
||||
assert session.query(Tribe).all() == []
|
||||
session.add(tribe)
|
||||
|
||||
assert session.query(Tribe).all() == [tribe]
|
||||
session.commit()
|
||||
|
||||
assert session.query(Tribe).all() == [tribe]
|
107
tests/integration/test_repository_student_sqlite.py
Normal file
107
tests/integration/test_repository_student_sqlite.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import sqlite3
|
||||
|
||||
import pytest
|
||||
|
||||
from backend.model.student import Student
|
||||
from backend.repository.student_sqlite_repository import (
|
||||
StudentRepositoryError,
|
||||
StudentSQLiteRepository,
|
||||
)
|
||||
|
||||
|
||||
def test_get_student(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
student_id = prebuild_students[0].id
|
||||
student = student_repo.get(student_id, prebuild_tribes)
|
||||
|
||||
assert prebuild_students[0] == student
|
||||
|
||||
|
||||
def test_get_student_not_exists(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
with pytest.raises(ValueError):
|
||||
student_repo.get("student0", prebuild_tribes)
|
||||
|
||||
|
||||
def test_list_students(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
students = student_repo.list(prebuild_tribes)
|
||||
|
||||
assert prebuild_students == students
|
||||
|
||||
|
||||
def test_add_student(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
student_infos = {"name": "student1", "tribe": prebuild_tribes[0]}
|
||||
student = Student(**student_infos)
|
||||
student_repo.add(student)
|
||||
memory_sqlite_conn.commit()
|
||||
|
||||
cursor = memory_sqlite_conn.cursor()
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT id, name, tribe_name FROM students WHERE id=?
|
||||
""",
|
||||
(student.id,),
|
||||
)
|
||||
|
||||
row = cursor.fetchone()
|
||||
assert row == student.to_tuple()
|
||||
|
||||
|
||||
def test_add_student_fail_exists(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
student_infos = {"name": "student1", "tribe": prebuild_tribes[0]}
|
||||
student = Student(**student_infos)
|
||||
student_repo.add(student)
|
||||
memory_sqlite_conn.commit()
|
||||
|
||||
with pytest.raises(sqlite3.IntegrityError):
|
||||
student_repo.add(student)
|
||||
|
||||
|
||||
def test_update_student(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
student = prebuild_students[0]
|
||||
student.name = "Boby"
|
||||
student.tribe = prebuild_tribes[-1]
|
||||
|
||||
student_repo.update(student)
|
||||
memory_sqlite_conn.commit()
|
||||
|
||||
student_list = student_repo.list(prebuild_tribes)
|
||||
assert set(student_list) == set(prebuild_students)
|
||||
|
||||
moded_student = next(filter(lambda s: s.id == student.id, student_list))
|
||||
assert moded_student == student
|
||||
|
||||
|
||||
def test_update_student_does_not_exists(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
student = Student(name="jkl", tribe=prebuild_tribes[0])
|
||||
|
||||
with pytest.raises(StudentRepositoryError):
|
||||
student_repo.update(student)
|
||||
|
||||
|
||||
def test_delete_student(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
|
||||
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
deleted_student = prebuild_students.pop()
|
||||
student_repo.delete(deleted_student.id)
|
||||
memory_sqlite_conn.commit()
|
||||
|
||||
assert student_repo.list(prebuild_tribes) == prebuild_students
|
93
tests/integration/test_repository_tribe_sqlite.py
Normal file
93
tests/integration/test_repository_tribe_sqlite.py
Normal file
@@ -0,0 +1,93 @@
|
||||
import pytest
|
||||
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.tribe_sqlite_repository import (
|
||||
TribeRepositoryError,
|
||||
TribeSQLiteRepository,
|
||||
)
|
||||
|
||||
|
||||
def test_get_tribe(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
name = prebuild_tribes[0].name
|
||||
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
tribes = tribe_repo.get(name)
|
||||
|
||||
assert prebuild_tribes[0] == tribes
|
||||
|
||||
|
||||
def test_get_tribe_not_exists(memory_sqlite_conn):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
with pytest.raises(TribeRepositoryError):
|
||||
tribe_repo.get("Tribe0")
|
||||
|
||||
|
||||
def test_list_tribes(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
listed_tribes = tribe_repo.list()
|
||||
|
||||
assert prebuild_tribes == listed_tribes
|
||||
|
||||
|
||||
def test_add_tribe(memory_sqlite_conn):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
tribe_infos = ("tribe1", "2nd")
|
||||
tribe = Tribe(*tribe_infos)
|
||||
tribe_repo.add(tribe)
|
||||
memory_sqlite_conn.commit()
|
||||
|
||||
cursor = memory_sqlite_conn.cursor()
|
||||
cursor.execute(
|
||||
"""
|
||||
SELECT * FROM tribes WHERE name=?
|
||||
""",
|
||||
("tribe1",),
|
||||
)
|
||||
|
||||
row = cursor.fetchone()
|
||||
assert row == tribe_infos
|
||||
|
||||
|
||||
def test_add_tribe_fail_exists(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
existing_tribe = prebuild_tribes[0]
|
||||
with pytest.raises(TribeRepositoryError):
|
||||
tribe_repo.add(existing_tribe)
|
||||
|
||||
|
||||
def test_update_tribe(memory_sqlite_conn, populate_db):
|
||||
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
name = prebuild_tribes[0].name
|
||||
new_tribe = Tribe("Tribe0", "Term")
|
||||
tribe_repo.update(name, new_tribe)
|
||||
memory_sqlite_conn.commit()
|
||||
|
||||
prebuild_tribes[0] = new_tribe
|
||||
assert tribe_repo.list() == prebuild_tribes
|
||||
|
||||
|
||||
def test_update_tribe_not_exists(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
|
||||
name = prebuild_tribes[0].name
|
||||
new_tribe = Tribe("Tribe0", "Term")
|
||||
with pytest.raises(TribeRepositoryError):
|
||||
tribe_repo.update("iouiou", new_tribe)
|
||||
|
||||
|
||||
def test_delete_tribe(memory_sqlite_conn, populate_db):
|
||||
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
|
||||
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
|
||||
deleted_tribe = prebuild_tribes.pop()
|
||||
deleted_tribe.name = "iouiou"
|
||||
with pytest.raises(TribeRepositoryError):
|
||||
tribe_repo.delete(deleted_tribe)
|
93
tests/model/fakes.py
Normal file
93
tests/model/fakes.py
Normal file
@@ -0,0 +1,93 @@
|
||||
from random import choice, randint
|
||||
|
||||
from faker import Faker
|
||||
from faker.providers import DynamicProvider
|
||||
|
||||
from backend.model.assessment import Assessment, Domain, Exercise, Question, Skill
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.abstract_repository import AbstractRepository
|
||||
|
||||
level_provider = DynamicProvider(
|
||||
provider_name="level",
|
||||
elements=["2nd", "1ST", "SNT", "1G", "TG", "EnsSci"],
|
||||
)
|
||||
|
||||
faker = Faker("fr_FR")
|
||||
faker.add_provider(level_provider)
|
||||
|
||||
|
||||
def build_tribes(quantity: int = 1) -> list[Tribe]:
|
||||
return [Tribe(name=faker.word(), level=faker.level()) for _ in range(quantity)]
|
||||
|
||||
|
||||
def build_assessments(
|
||||
tribes: list[Tribe], assessment_per_tribe: int = 1
|
||||
) -> list[Assessment]:
|
||||
|
||||
assessments = []
|
||||
for t in tribes:
|
||||
assessments += [
|
||||
Assessment("faker.word()", t, randint(1, 3))
|
||||
for _ in range(assessment_per_tribe)
|
||||
]
|
||||
|
||||
return assessments
|
||||
|
||||
|
||||
def build_exercises(
|
||||
assessments: list[Assessment], exercise_per_assessment=1
|
||||
) -> list[Exercise]:
|
||||
exercises = []
|
||||
for assessment in assessments:
|
||||
exercises += [
|
||||
Exercise("faker.word()", assessment, "today")
|
||||
for _ in range(exercise_per_assessment)
|
||||
]
|
||||
|
||||
return exercises
|
||||
|
||||
|
||||
def build_skills(quantity=1) -> list[Skill]:
|
||||
return [Skill(faker.word(), faker.text()) for _ in range(quantity)]
|
||||
|
||||
|
||||
def build_domains(quantity=1) -> list[Domain]:
|
||||
return [Domain(faker.word(), faker.text()) for _ in range(quantity)]
|
||||
|
||||
|
||||
def build_questions(
|
||||
exercises: list[Exercise],
|
||||
question_per_exercise=1,
|
||||
) -> list[Question]:
|
||||
skills = build_skills()
|
||||
domains = build_domains()
|
||||
|
||||
questions = []
|
||||
for exercise in exercises:
|
||||
questions += [
|
||||
Question(
|
||||
faker.word(),
|
||||
exercise,
|
||||
description="desc",
|
||||
skill=choice(skills),
|
||||
domain=choice(domains),
|
||||
is_leveled=choice([True, False]),
|
||||
scale=randint(1, 20),
|
||||
)
|
||||
for _ in range(question_per_exercise)
|
||||
]
|
||||
|
||||
return questions
|
||||
|
||||
|
||||
def build_student(
|
||||
tribes: list[Tribe],
|
||||
students_per_tribe=1,
|
||||
) -> list[Student]:
|
||||
students = []
|
||||
for tribe in tribes:
|
||||
students += [
|
||||
Student(name=faker.name(), tribe=tribe) for _ in range(students_per_tribe)
|
||||
]
|
||||
return students
|
26
tests/model/test_assessment.py
Normal file
26
tests/model/test_assessment.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from random import choice, randint
|
||||
|
||||
from tests.model.fakes import build_questions
|
||||
|
||||
from .fakes import build_assessments, build_exercises, build_tribes
|
||||
|
||||
|
||||
def test_assessement_register_exercise():
|
||||
exercise_qty = randint(1, 10)
|
||||
tribes = build_tribes(1)
|
||||
assessments = build_assessments(tribes, 1)
|
||||
exercises = build_exercises(assessments, exercise_qty)
|
||||
|
||||
assert len(assessments[0].exercises) == exercise_qty
|
||||
assert assessments[0].exercises == exercises
|
||||
|
||||
|
||||
def test_exercise_register_question():
|
||||
question_qty = randint(1, 10)
|
||||
tribes = build_tribes(1)
|
||||
assessments = build_assessments(tribes, 1)
|
||||
exercises = build_exercises(assessments, 1)
|
||||
questions = build_questions(exercises, question_qty)
|
||||
|
||||
assert len(exercises[0].questions) == question_qty
|
||||
assert exercises[0].questions == questions
|
16
tests/model/test_student.py
Normal file
16
tests/model/test_student.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from tests.model.fakes import build_tribes
|
||||
|
||||
|
||||
def test_init_student():
|
||||
tribe = build_tribes(1)[0]
|
||||
student = Student(name="Bob", tribe=tribe)
|
||||
print(student)
|
||||
|
||||
assert type(student.name) == str
|
||||
assert type(student.tribe) == Tribe
|
||||
assert type(student.id) == str
|
||||
|
||||
student2 = Student(name="Hop", tribe=tribe)
|
||||
assert student.id != student2.id
|
33
tests/model/test_tribe.py
Normal file
33
tests/model/test_tribe.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from random import randint
|
||||
|
||||
import pytest
|
||||
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe, TribeError
|
||||
from tests.model.fakes import build_assessments, build_tribes
|
||||
|
||||
|
||||
def test_tribe_register_assessment():
|
||||
assessments_qty = randint(1, 10)
|
||||
tribes = build_tribes(1)
|
||||
assessments = build_assessments(tribes, assessments_qty)
|
||||
|
||||
assert len(tribes[0].assessments) == assessments_qty
|
||||
assert tribes[0].assessments == assessments
|
||||
|
||||
|
||||
def test_tribe_register_student():
|
||||
tribe = Tribe("foo", "2nd")
|
||||
student = Student(id="1", name="Bob", tribe=tribe)
|
||||
|
||||
assert len(tribe.students) == 1
|
||||
assert tribe.students[0] == student
|
||||
|
||||
|
||||
def test_tribe_register_student_already_exists_overwrite():
|
||||
tribe = Tribe("foo", "2nd")
|
||||
student = Student(id="1", name="Bob", tribe=tribe)
|
||||
changed_student = Student(id="1", name="Choupy", tribe=tribe)
|
||||
|
||||
assert tribe.students[0] == changed_student
|
||||
assert student.name not in [s.name for s in tribe.students]
|
350
tests/unit/test_service.py
Normal file
350
tests/unit/test_service.py
Normal file
@@ -0,0 +1,350 @@
|
||||
import pytest
|
||||
|
||||
from backend.model.student import Student
|
||||
from backend.model.tribe import Tribe
|
||||
from backend.repository.abstract_repository import AbstractRepository
|
||||
from backend.repository.student_sqlite_repository import StudentRepositoryError
|
||||
from backend.repository.tribe_sqlite_repository import TribeRepositoryError
|
||||
from backend.service import services
|
||||
from backend.service.services import (
|
||||
StudentDoesExist,
|
||||
StudentExists,
|
||||
TribeDoesNotExist,
|
||||
TribeExists,
|
||||
)
|
||||
from tests.model.fakes import build_student, build_tribes
|
||||
|
||||
|
||||
class FakeTribeRepository(AbstractRepository):
|
||||
def __init__(self, tribes: list[Tribe] = []) -> None:
|
||||
self._tribes = {t.name: t for t in tribes}
|
||||
|
||||
def add(self, tribe: Tribe) -> None:
|
||||
if tribe.name not in self._tribes.keys():
|
||||
self._tribes[tribe.name] = tribe
|
||||
else:
|
||||
raise TribeRepositoryError(f"{tribe} already exists")
|
||||
|
||||
def update(self, name: str, tribe: Tribe) -> None:
|
||||
try:
|
||||
self._tribes.pop(name)
|
||||
self._tribes[tribe.name] = tribe
|
||||
except KeyError:
|
||||
raise TribeRepositoryError(f"The tribe {tribe} does not exists")
|
||||
|
||||
def list(self) -> list[Tribe]:
|
||||
return list(self._tribes.values())
|
||||
|
||||
def get(self, name: str) -> Tribe:
|
||||
try:
|
||||
return self._tribes[name]
|
||||
except KeyError:
|
||||
raise TribeRepositoryError(f"The tribe {name} does not exists")
|
||||
|
||||
def delete(self, name: str) -> None:
|
||||
try:
|
||||
self._tribes.pop(name)
|
||||
except KeyError:
|
||||
raise TribeRepositoryError(f"The tribe {name} does not exists")
|
||||
|
||||
|
||||
class FakeStudentRepository(AbstractRepository):
|
||||
def __init__(self, students: list[Student] = []) -> None:
|
||||
self._students = {s.id: s for s in students}
|
||||
|
||||
def add(self, student: Student) -> None:
|
||||
if student.id not in self._students.keys():
|
||||
self._students[student.id] = student
|
||||
else:
|
||||
raise StudentRepositoryError(f"{student} already exists")
|
||||
|
||||
def update(self, student: Student) -> None:
|
||||
if student.id not in self._students.keys():
|
||||
raise StudentRepositoryError(f"The student {student} does not exists")
|
||||
|
||||
self._students[student.id] = student
|
||||
|
||||
def list(self) -> list[Student]:
|
||||
return list(self._students.values())
|
||||
|
||||
def get(self, id: str) -> Student:
|
||||
try:
|
||||
return self._students[id]
|
||||
except KeyError:
|
||||
raise KeyError(f"The student ({id=}) does not exists")
|
||||
|
||||
def delete(self, id: str) -> None:
|
||||
try:
|
||||
self._students.pop(id)
|
||||
except KeyError:
|
||||
raise StudentRepositoryError(f"The student with id {id} does not exists")
|
||||
|
||||
|
||||
class FakeConn:
|
||||
committed = False
|
||||
|
||||
def commit(self):
|
||||
self.committed = True
|
||||
|
||||
def reset_commit(self):
|
||||
self.committed = False
|
||||
|
||||
|
||||
def test_add_tribe():
|
||||
tribe_repo = FakeTribeRepository()
|
||||
tribe = build_tribes(1)[0]
|
||||
conn = FakeConn()
|
||||
services.add_tribe(
|
||||
name=tribe.name, level=tribe.level, tribe_repo=tribe_repo, conn=conn
|
||||
)
|
||||
|
||||
assert conn.committed is True
|
||||
assert tribe_repo.list() == [tribe]
|
||||
|
||||
|
||||
def test_add_tribe_fail_exists():
|
||||
tribe_repo = FakeTribeRepository()
|
||||
tribe = build_tribes(1)[0]
|
||||
conn = FakeConn()
|
||||
services.add_tribe(
|
||||
name=tribe.name, level=tribe.level, tribe_repo=tribe_repo, conn=conn
|
||||
)
|
||||
conn.reset_commit()
|
||||
|
||||
with pytest.raises(TribeExists):
|
||||
services.add_tribe(
|
||||
name=tribe.name, level=tribe.level, tribe_repo=tribe_repo, conn=conn
|
||||
)
|
||||
|
||||
assert conn.committed == False
|
||||
|
||||
|
||||
def test_update_tribe():
    """Updating an existing tribe commits and keeps the repo consistent."""
    tribe_list = build_tribes(3)
    repo = FakeTribeRepository(tribe_list)
    fake_conn = FakeConn()

    new_level = "iouiouiou"
    tribe_list[0].level = new_level
    services.update_tribe(
        name=tribe_list[0].name,
        level=new_level,
        tribe_repo=repo,
        conn=fake_conn,
    )

    assert fake_conn.committed is True
    assert set(repo.list()) == set(tribe_list)
|
||||
|
||||
|
||||
def test_update_tribe_fail_not_exists():
    """Updating an unknown tribe raises TribeDoesNotExist and does not commit."""
    tribes = build_tribes(3)
    tribe_repo = FakeTribeRepository(tribes)
    conn = FakeConn()

    with pytest.raises(TribeDoesNotExist):
        services.update_tribe(
            name="azerty", level="jkl", tribe_repo=tribe_repo, conn=conn
        )

    # ``is`` rather than ``==`` for the boolean flag.
    assert conn.committed is False
|
||||
|
||||
|
||||
def test_delete_tribe():
    """Deleting an existing tribe commits and removes exactly that tribe."""
    tribe_list = build_tribes(3)
    repo = FakeTribeRepository(tribe_list)
    fake_conn = FakeConn()

    removed = tribe_list.pop()
    services.delete_tribe(name=removed.name, tribe_repo=repo, conn=fake_conn)

    assert fake_conn.committed is True
    assert set(repo.list()) == set(tribe_list)
|
||||
|
||||
|
||||
def test_delete_tribe_fail_not_exists():
    """Deleting an unknown tribe raises TribeDoesNotExist and does not commit."""
    tribes = build_tribes(3)
    tribe_repo = FakeTribeRepository(tribes)
    conn = FakeConn()

    with pytest.raises(TribeDoesNotExist):
        services.delete_tribe(name="azerty", tribe_repo=tribe_repo, conn=conn)

    # ``is`` rather than ``==`` for the boolean flag.
    assert conn.committed is False
|
||||
|
||||
|
||||
def test_add_student():
    """A student added through the service is committed and stored."""
    tribe_list = build_tribes(1)
    tribes_repo = FakeTribeRepository(tribe_list)
    template = build_student(tribe_list, 1)[0]
    students_repo = FakeStudentRepository()
    fake_conn = FakeConn()

    returned = services.add_student(
        name=template.name,
        tribe=template.tribe.name,
        student_repo=students_repo,
        tribe_repo=tribes_repo,
        conn=fake_conn,
    )

    assert fake_conn.committed is True

    stored = students_repo.list()[0]
    assert stored.name == template.name
    assert stored.tribe.name == template.tribe.name
    # The id is not passed to the service, they can't have the same.
    assert stored.id != template.id

    assert returned == stored
|
||||
|
||||
|
||||
def test_add_student_tribe_doesnt_exist():
    """Adding a student to an unknown tribe raises and does not commit."""
    tribe_list = build_tribes(1)
    tribes_repo = FakeTribeRepository(tribe_list)
    candidates = build_student(tribe_list, 1)
    students_repo = FakeStudentRepository()
    fake_conn = FakeConn()

    candidate = candidates[0]
    with pytest.raises(TribeDoesNotExist):
        services.add_student(
            name=candidate.name,
            tribe="iuouiouiouio",
            student_repo=students_repo,
            tribe_repo=tribes_repo,
            conn=fake_conn,
        )

    assert fake_conn.committed is False
|
||||
|
||||
|
||||
def test_update_student():
    """Updating name and tribe of an existing student commits the change."""
    tribes = build_tribes(2)
    tribe_repo = FakeTribeRepository(tribes)
    students = build_student(tribes, 1)
    student_repo = FakeStudentRepository(students)
    conn = FakeConn()

    # Renamed from ``id`` to avoid shadowing the builtin.
    student_id = students[0].id
    new_name = "new name"
    new_tribe_name = tribes[1].name

    # Return value was previously bound but never used.
    services.update_student(
        id=student_id,
        name=new_name,
        tribe=new_tribe_name,
        student_repo=student_repo,
        tribe_repo=tribe_repo,
        conn=conn,
    )

    assert conn.committed is True

    mod_student = student_repo.get(student_id)
    assert mod_student.name == new_name
    assert mod_student.tribe.name == new_tribe_name

    listed_student = student_repo.list()
    assert len(listed_student) == 2
|
||||
|
||||
|
||||
def test_update_student_tribe_doesnt_exist():
    """Updating a student toward an unknown tribe raises and leaves data intact."""
    tribes = build_tribes(2)
    tribe_repo = FakeTribeRepository(tribes)
    students = build_student(tribes, 1)
    student_repo = FakeStudentRepository(students)
    conn = FakeConn()

    # Renamed from ``id`` to avoid shadowing the builtin.
    student_id = students[0].id
    new_name = "new name"
    new_tribe_name = "not existing tribe"

    with pytest.raises(TribeDoesNotExist):
        services.update_student(
            id=student_id,
            name=new_name,
            tribe=new_tribe_name,
            student_repo=student_repo,
            tribe_repo=tribe_repo,
            conn=conn,
        )

    assert conn.committed is False

    # The stored student must be unchanged.
    mod_student = student_repo.get(student_id)
    assert mod_student.name == students[0].name
    assert mod_student.tribe.name == students[0].tribe.name

    listed_student = student_repo.list()
    assert len(listed_student) == 2
|
||||
|
||||
|
||||
def test_update_student_doesnt_exist():
    """Updating an unknown student id raises StudentDoesExist without commit."""
    tribes = build_tribes(2)
    tribe_repo = FakeTribeRepository(tribes)
    students = build_student(tribes, 1)
    student_repo = FakeStudentRepository(students)
    conn = FakeConn()

    # Renamed from ``id`` to avoid shadowing the builtin.
    unknown_id = "not existing id"
    new_name = students[0].name
    new_tribe_name = students[0].tribe.name

    with pytest.raises(StudentDoesExist):
        services.update_student(
            id=unknown_id,
            name=new_name,
            tribe=new_tribe_name,
            student_repo=student_repo,
            tribe_repo=tribe_repo,
            conn=conn,
        )

    assert conn.committed is False

    # Existing students are untouched.
    original_student = student_repo.get(students[0].id)
    assert original_student.name == students[0].name
    assert original_student.tribe.name == students[0].tribe.name

    listed_student = student_repo.list()
    assert len(listed_student) == 2
|
||||
|
||||
|
||||
def test_delete_student():
    """Deleting an existing student commits and removes only that student."""
    tribe_list = build_tribes(2)
    tribes_repo = FakeTribeRepository(tribe_list)
    remaining = build_student(tribe_list, 1)
    students_repo = FakeStudentRepository(remaining)
    fake_conn = FakeConn()

    victim = remaining.pop()

    services.delete_student(
        id=victim.id,
        student_repo=students_repo,
        conn=fake_conn,
    )

    assert fake_conn.committed is True

    assert students_repo.list() == remaining
|
||||
|
||||
|
||||
def test_delete_student_doesnt_exist():
    """Deleting an unknown id raises StudentDoesExist and leaves data intact."""
    tribe_list = build_tribes(2)
    tribes_repo = FakeTribeRepository(tribe_list)
    students = build_student(tribe_list, 1)
    students_repo = FakeStudentRepository(students)
    fake_conn = FakeConn()

    with pytest.raises(StudentDoesExist):
        services.delete_student(
            id="not existing id",
            student_repo=students_repo,
            conn=fake_conn,
        )

    assert fake_conn.committed is False

    assert set(students_repo.list()) == set(students)
|
Reference in New Issue
Block a user