109 Commits
main ... refact

Author SHA1 Message Date
83eb9c327b Feat: import html for 404 layout 2021-11-24 06:04:02 +01:00
ff1ecfef25 Feat: import app from good place (index instead of app)
see https://dash.plotly.com/urls
2021-11-24 06:03:16 +01:00
921292a0d2 Feat: activate cli 2021-11-24 05:48:22 +01:00
2d08671247 Feat: add "." in csv example and fix issues 2021-04-22 08:00:25 +02:00
a16211cbd4 Feat: legend formatting and remove toolbox 2021-04-22 08:00:02 +02:00
876f583d51 Feat: format_score 2021-04-22 07:49:51 +02:00
97b97af2de Fix: replace np.isnan with pd.isnull 2021-04-21 07:09:05 +02:00
d8d84690c6 Feat: remove a print 2021-04-21 07:08:58 +02:00
18f855ab83 Feat: question levels bar figure 2021-04-21 07:03:45 +02:00
36425e587e Feat: add comment to score_config 2021-04-21 07:03:31 +02:00
8cdeecfc53 Feat: Score histogram 2021-04-20 19:13:14 +02:00
1a7c97d869 Feat: statistics table 2021-04-20 19:04:06 +02:00
ab5de2711e Feat: score_table style 2021-04-20 18:48:52 +02:00
235019102b Feat: Final mark for students 2021-04-20 18:30:12 +02:00
8ec24a24b3 Feat: delete functions on dataframe and move it to functions on rows 2021-04-19 21:54:44 +02:00
2e86b3a0a2 Feat: filter non-numeric scores 2021-04-19 14:30:37 +02:00
7e6b24aaea Feat: rename valid_scores to scores in config 2021-04-19 14:29:57 +02:00
6889ddd97c Feat: relative import to absolute 2021-04-18 22:44:41 +02:00
10b9954c05 Feat: import score dataframe functions 2021-04-18 22:43:46 +02:00
7553628306 Feat: score table color formatting based on score 2021-04-18 20:11:54 +02:00
effc049578 Fix: default_config color to string 2021-04-18 20:11:35 +02:00
411f910ce6 Feat: add colors to default_config 2021-04-18 18:00:17 +02:00
00d81d694a Feat: write column_values_to_column 2021-04-18 17:59:46 +02:00
a8b2ac455d Fix: column check in get_exam_questions 2021-04-18 17:23:37 +02:00
9e0ea14d05 Feat: remove relative imports and simplify get_unstack_scores 2021-04-18 17:20:54 +02:00
2031ade1ab Feat: basic score_table 2021-04-18 16:44:40 +02:00
6ed55c07d4 Feat: formatting 2021-04-18 16:44:23 +02:00
1d234ea5fc Feat: add column description 2021-04-18 11:42:13 +02:00
646314ad88 Feat: store exams_scores in store 2021-04-18 10:50:26 +02:00
0739cfdae7 Feat: add get_exam_scores to loader 2021-04-18 10:48:12 +02:00
a50901556e Feat: start exams scores pages 2021-04-18 10:02:10 +02:00
abc5513268 Feat: origin store as string 2021-04-18 10:01:26 +02:00
598086ddb0 Feat: dash setup and start home page 2021-04-13 06:26:45 +02:00
94f8080acd Feat: empty dataframe in get_exams 2021-04-13 06:26:21 +02:00
6331573940 Feat: rename store to database and fix exam and question ids 2021-04-12 11:42:24 +02:00
a292fe23e0 Feat: remove get_student_scores 2021-04-07 20:50:55 +02:00
3005d49a1d Feat: wrote get_students 2021-04-06 19:51:15 +02:00
9fce390304 Feat: remove hard coded field in get_questions_scores 2021-04-06 19:41:56 +02:00
94f94dae84 Feat: write get_questions_scores 2021-04-06 19:37:12 +02:00
596a52b1d0 Feat: add id to exam and questions 2021-04-06 18:57:41 +02:00
37c95d75de Feat: write get_exam_questions 2021-04-06 18:18:25 +02:00
bd91bf51d6 Feat: get_exams for CSVLoader 2021-04-05 21:21:45 +02:00
c1fd060707 Feat: Start filesystem loader 2021-04-05 19:02:10 +02:00
a0e94f52b1 Feat: formatting questions 2021-04-05 08:31:05 +02:00
c84f9845b2 Feat: display of competences and themes in students 2021-02-27 10:31:52 +01:00
d9e95f2186 Feat: return empty fig 2021-02-27 10:03:24 +01:00
581b0f4f2f Feat: assessments table 2021-02-23 17:55:43 +01:00
3dbfc85447 Feat: filter in store scores 2021-02-23 17:40:18 +01:00
b5bf1ac137 Feat: add students to paths 2021-02-23 17:07:05 +01:00
74d751a586 Feat: update student list 2021-02-23 17:06:55 +01:00
1855d4016d Feat: start student_analysis 2021-02-23 16:53:59 +01:00
ff94470fb4 Feat: Start feedback on eval 2021-02-23 16:14:05 +01:00
d322452a6e Feat: rename exam-analysis to dashboard 2021-02-23 16:10:16 +01:00
e1d3940e9d Feat: add total score_rate 2021-02-08 15:45:50 +01:00
7dba11996a Feat: formatting and split into sections 2021-02-08 15:19:09 +01:00
3250a600c9 Feat: start the layout for create_exam 2021-01-27 16:17:44 +01:00
589d63ff29 Feat: don't show all columns in bigtable and fix first columns 2021-01-27 16:16:54 +01:00
429fed6a1e Feat: default values for elements 2021-01-24 06:53:06 +01:00
1255bf4b9e Fix: remove useless print 2021-01-23 06:54:19 +01:00
1fe7665753 Merge branch 'dev' of git_opytex:/lafrite/recopytex into dev 2021-01-22 11:14:34 +01:00
e08e4a32a8 Feat: exam creation page 2021-01-22 11:13:35 +01:00
b737612adb Feat: Start display summary 2021-01-22 05:39:14 +01:00
9c19e2ac56 Feat: New page with input fields 2021-01-21 22:17:49 +01:00
eb60734c26 Fix: remove useless import 2021-01-21 22:17:33 +01:00
329bcc460c Fix: calculer -> chercher 2021-01-21 22:17:02 +01:00
95fc842c1d Feat: 2nd page to create exam 2021-01-21 15:12:24 +01:00
e0ca1a458b Fix: column id to see student and score_rate 2021-01-21 14:11:39 +01:00
eb1abbe868 Fix: get back exam graphs 2021-01-21 14:01:57 +01:00
412e624791 Merge remote-tracking branch 'origin/dev' into dev 2021-01-21 09:57:33 +01:00
e8bf0b3f0a Fix: name and bareme in final_score_table and describe rounding 2021-01-21 09:52:49 +01:00
c057fa11e7 Feat: stop rounding score at 0.5 2021-01-21 09:52:49 +01:00
e15119605f Merge branch 'dev' of git_opytex:/lafrite/recopytex into dev 2021-01-21 09:38:58 +01:00
494567cdb5 Merge branch 'dev' of git_opytex:/lafrite/recopytex into dev 2021-01-21 09:25:58 +01:00
84fcee625d Feat: split dashboard 2021-01-20 20:54:59 +01:00
f62c898162 Fix: remove unnecessary import 2021-01-20 20:51:22 +01:00
7955b989b4 Fix: missing category (0) in final_score plot 2021-01-17 22:26:16 +01:00
4f14e3518c Fix: concatenate index for competence plot 2021-01-17 22:21:58 +01:00
4bf8f4003e Feat: remove bootstrap and replace it with css 2021-01-17 22:04:52 +01:00
a14d47b15c Feat: Clean empty fig 2021-01-15 17:49:30 +01:00
09ac9f01f8 Feat: add competence fig and better error management 2021-01-15 13:48:57 +01:00
0a5a931d01 Feat: add row to scores_table!! 2021-01-14 21:53:38 +01:00
21397272c9 Feat: move dashboard to its own directory 2021-01-14 20:09:25 +01:00
894ebc4ec8 Feat: add competence bar plot 2021-01-13 08:28:54 +01:00
f6bfac4144 Feat: Hist graph and describe 2021-01-12 22:32:26 +01:00
cfd5928853 Feat: autosave while editing scores 2021-01-12 17:25:58 +01:00
8fcad94df4 Feat: start analysis dash board 2021-01-10 20:46:14 +01:00
27d7c45980 Feat: add temporary save 2021-01-10 07:21:28 +01:00
159e7a9f2e Feat: move exam to Exam class 2021-01-10 06:53:16 +01:00
72afb26e2a Fix: indentation 2021-01-10 06:52:56 +01:00
6eb918e0f5 Feat: can read exam config from yaml 2021-01-06 09:09:35 +01:00
56a669b2be Feat: remove exQty in prompt 2021-01-06 08:53:06 +01:00
a5f22fc8cd Fix: commentaire -> comment 2021-01-06 07:59:42 +01:00
5177df06d7 Fix: element -> row 2021-01-05 09:15:41 +01:00
d78fcbc281 Feat: add competences 2021-01-05 09:15:24 +01:00
98fa768541 format: black formatting 2021-01-05 09:14:52 +01:00
00c2681823 Fix: element -> row 2021-01-05 09:14:37 +01:00
52f2f3f4cf Feat: incorporate competences config 2021-01-01 18:04:28 +01:00
4ea7f8db14 Feat: replace references to PyInquirer with prompt_toolkit 2021-01-01 17:47:13 +01:00
04a2506d86 Feat: rewrite new_exam prompt without PyInquirer 2020-12-31 18:00:42 +01:00
77c358b0c1 Feat: write the csv file 2020-10-04 18:49:44 +02:00
1886deb430 Feat: question prompts 2020-10-04 18:10:43 +02:00
5e0f2d92ef Feat: prompt for exercises 2020-10-04 16:38:36 +02:00
49cc52f7d1 Feat: prompts and write prompt_exam 2020-10-04 16:11:55 +02:00
6d93ef62d7 Feat: split requirements 2020-10-04 16:11:41 +02:00
488df4cb0c Feat: start example folder 2020-10-04 15:07:11 +02:00
9136f359e0 Feat: add .vim in gitignore 2020-10-04 07:30:21 +02:00
1dfee17990 Doc: some explanations 2020-10-04 07:29:37 +02:00
400fb0a690 Feat: add comments 2020-10-04 07:20:08 +02:00
04a1ed9378 Feat: remove versions in requirements 2020-10-04 07:09:18 +02:00
70 changed files with 3729 additions and 2866 deletions

5
.gitignore vendored

@@ -123,5 +123,6 @@ dmypy.json
# Pyre type checker
.pyre/
# temporary database
sqlite.db
# vim
.vim

.pre-commit-config.yaml

@@ -1,19 +0,0 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.0.1
    hooks:
      - id: check-toml
      - id: check-yaml
      - id: end-of-file-fixer
      - id: mixed-line-ending
  - repo: https://github.com/psf/black
    rev: 22.3.0
    hooks:
      - id: black
  - repo: https://github.com/PyCQA/isort
    rev: 5.10.1
    hooks:
      - id: isort
        args: ["--profile", "black"]

README.md

@@ -1,19 +1,34 @@
# Recopytex
-# Yet another way to record and analyse my grades
-This time, we use:
-- CSV files to store the grades
-- YAML files for the students' information
-- Notebooks for the analysis
-- Papermill to produce the notebooks from templates
-## The CSV files
-The parameters are described in ./recopytex/config.py
-### Description of the questions
-- Trimestre
-- Nom
-- Date
-- Exercice
-- Question
-- Competence
-- Domaine
-- Commentaire
-- Bareme
-- Est_nivele
-### Values used to grade the students
-- Score: 0, 1, 2, 3
-- No answer: .
-- Absent: a
-- Exempted: (empty)
+One more rewrite of Opytex. This time, it is more a learning project than an operational project.
+I am following the book *Architecture Patterns with Python* by Harry J.W. Percival and Bob Gregory. The project will follow the TDD method using DDD.
+## Backend API
+It uses **fastapi** and **sqlalchemy**.
+### Installing
+    poetry install
+### Fire up
+    uvicorn backend.api.main:app --reload
+## Frontend

backend/adapters/orm.py

@@ -1,49 +0,0 @@
from sqlalchemy import Column, ForeignKey, MetaData, String, Table
from sqlalchemy.orm import backref, registry, relationship

from backend.model.assessment import Assessment
from backend.model.student import Student
from backend.model.tribe import Tribe

metadata = MetaData()
mapper_registry = registry()

tribes_table = Table(
    "tribes",
    metadata,
    Column("name", String(255), primary_key=True),
    Column("level", String(255)),
)

assessments_table = Table(
    "assessments",
    metadata,
    Column("id", String(255), primary_key=True),
    Column("name", String(255)),
    Column("tribe_name", String(255), ForeignKey("tribes.name")),
)

students_table = Table(
    "students",
    metadata,
    Column("id", String(255), primary_key=True),
    Column("name", String(255)),
    Column("tribe_name", String(255), ForeignKey("tribes.name")),
)


def start_mappers():
    tribes_mapper = mapper_registry.map_imperatively(
        Tribe,
        tribes_table,
        properties={
            "students": relationship(
                Student, backref="tribes", order_by=students_table.c.id
            ),
            "assessments": relationship(
                Assessment, backref="tribes", order_by=assessments_table.c.id
            ),
        },
    )
    students_mapper = mapper_registry.map_imperatively(Student, students_table)
    assessments_mapper = mapper_registry.map_imperatively(Assessment, assessments_table)

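For reference, a minimal sketch (not part of the diff) of wiring this imperative mapping to an in-memory engine, mirroring the commented-out block in backend/api/main.py further down; the tribe name and level are made up for illustration:

from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

import backend.adapters.orm as orm
from backend.model.tribe import Tribe

orm.start_mappers()  # map the domain dataclasses onto the tables above
engine = create_engine("sqlite:///:memory:")
orm.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add(Tribe(name="Tribe1", level="2nde"))  # hypothetical sample data
session.commit()
print(session.query(Tribe).all())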
backend/adapters/sqlite.py

@@ -1,33 +0,0 @@
import sqlite3


def create_tribe_table(conn) -> None:
    cursor = conn.cursor()
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS tribes(
            name VARCHAR PRIMARY KEY UNIQUE,
            level VARCHAR
        )
        """
    )
    conn.commit()


def create_student_table(conn) -> None:
    cursor = conn.cursor()
    cursor.execute(
        """
        CREATE TABLE IF NOT EXISTS students(
            id VARCHAR(500) PRIMARY KEY UNIQUE,
            name VARCHAR,
            tribe_name VARCHAR
        )
        """
    )
    conn.commit()


def create_db(conn) -> None:
    create_tribe_table(conn)
    create_student_table(conn)

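A quick usage sketch, assuming an in-memory database as in the test helper further down; CREATE TABLE IF NOT EXISTS makes create_db safe to call repeatedly:

import sqlite3

from backend.adapters.sqlite import create_db

conn = sqlite3.connect(":memory:")
create_db(conn)  # creates the tribes and students tables if they are missing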
backend/api/main.py

@@ -1,194 +0,0 @@
import sqlite3

from fastapi import FastAPI, status
from fastapi.responses import JSONResponse, RedirectResponse, Response

from backend.adapters.sqlite import create_db
from backend.api.model import StudentModel, TribeModel
from backend.model.student import Student
from backend.model.tribe import Tribe
from backend.repository.student_sqlite_repository import StudentSQLiteRepository
from backend.repository.tribe_sqlite_repository import (
    TribeRepositoryError,
    TribeSQLiteRepository,
)
from backend.service import services
from backend.service.services import StudentDoesExist, TribeDoesNotExist, TribeExists

# from sqlalchemy import create_engine
# from sqlalchemy.orm import clear_mappers, sessionmaker
# import backend.adapters.orm as orm
# from backend.repository.tribe_sqlalchemy_repository import TribeSQLAlchemyRepository
# orm.start_mappers()
# engine = create_engine("sqlite:///:memory:")
# orm.metadata.create_all(engine)
# session = sessionmaker(bind=engine)()
# tribe_repo = TribeSQLAlchemyRepository(session)

conn = sqlite3.connect("sqlite.db")
create_db(conn)

tribe_repo = TribeSQLiteRepository(conn)
student_repo = StudentSQLiteRepository(conn)

app = FastAPI()


@app.post("/tribes", response_class=RedirectResponse, status_code=status.HTTP_302_FOUND)
async def post_tribe(item: TribeModel):
    try:
        tribe = services.add_tribe(
            name=item.name, level=item.level, tribe_repo=tribe_repo, conn=conn
        )
    except TribeExists:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {item.name} already exists",
        )
    return f"/tribes/{tribe.name}"


@app.put(
    "/tribes/{name}", response_class=RedirectResponse, status_code=status.HTTP_302_FOUND
)
async def put_tribe(name: str, item: TribeModel):
    try:
        tribe = services.update_tribe(
            name=item.name, level=item.level, tribe_repo=tribe_repo, conn=conn
        )
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {name} does not exist",
        )
    return f"/tribes/{tribe.name}"


@app.delete("/tribes/{name}")
async def delete_tribe(name: str):
    try:
        services.delete_tribe(name=name, tribe_repo=tribe_repo, conn=conn)
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {name} does not exist",
        )
    return Response(
        status_code=status.HTTP_204_NO_CONTENT,
    )


@app.get("/tribes", response_model=list[TribeModel])
async def list_tribes():
    tribes = tribe_repo.list()
    return [t.to_dict() for t in tribes]


@app.get("/tribes/{name}", response_model=TribeModel)
async def get_tribe(name: str):
    tribe = tribe_repo.get(name)
    return tribe.to_dict()


@app.post(
    "/students", response_class=RedirectResponse, status_code=status.HTTP_302_FOUND
)
async def post_student(item: StudentModel):
    if item.id is not None:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content="You can't post a student with an id. It is already registered. Use PUT to modify it.",
        )
    try:
        student = services.add_student(
            name=item.name,
            tribe=item.tribe_name,
            tribe_repo=tribe_repo,
            student_repo=student_repo,
            conn=conn,
        )
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {item.tribe_name} does not exist. You can't add a student in it.",
        )
    return f"/students/{student.id}"


@app.get("/students/{id}", status_code=status.HTTP_200_OK, response_model=StudentModel)
async def get_student(id: str):
    tribes = tribe_repo.list()
    student = student_repo.get(id, tribes)
    return student.to_dict()


@app.get("/students", status_code=status.HTTP_200_OK, response_model=list[StudentModel])
async def list_students():
    tribes = tribe_repo.list()
    students = student_repo.list(tribes)
    return [t.to_dict() for t in students]


@app.put(
    "/students/{student_id}",
    response_class=RedirectResponse,
    status_code=status.HTTP_302_FOUND,
)
async def put_student(student_id, item: StudentModel):
    if student_id != item.id:
        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content="The url id and the student id are different",
        )
    try:
        student = services.update_student(
            id=item.id,
            name=item.name,
            tribe=item.tribe_name,
            tribe_repo=tribe_repo,
            student_repo=student_repo,
            conn=conn,
        )
    except TribeDoesNotExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The tribe {item.tribe_name} does not exist. You can't add a student in it.",
        )
    except StudentDoesExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The student {item.name} ({item.id=}) does not exist. You can't modify it.",
        )
    return f"/students/{student.id}"


@app.delete(
    "/students/{student_id}",
)
async def delete_student(student_id):
    try:
        services.delete_student(
            id=student_id,
            student_repo=student_repo,
            conn=conn,
        )
    except StudentDoesExist:
        return JSONResponse(
            status_code=status.HTTP_409_CONFLICT,
            content=f"The student ({student_id=}) does not exist. You can't delete it.",
        )
    return Response(
        status_code=status.HTTP_204_NO_CONTENT,
    )

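A minimal sketch of exercising these routes with FastAPI's test client (an assumption: httpx is available, as the starlette test client requires it; the payload values are made up). POST /tribes answers with a 302 redirect to the new resource:

from fastapi.testclient import TestClient

from backend.api.main import app

client = TestClient(app)
response = client.post(
    "/tribes",
    json={"name": "Tribe1", "level": "2nde"},
    follow_redirects=False,
)
assert response.status_code == 302
assert response.headers["location"] == "/tribes/Tribe1"
assert client.get("/tribes/Tribe1").json()["name"] == "Tribe1"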
backend/api/model.py

@@ -1,16 +0,0 @@
from typing import Optional

from pydantic import BaseModel


class StudentModel(BaseModel):
    name: str
    tribe_name: str
    id: Optional[str]


class TribeModel(BaseModel):
    name: str
    level: str
    students: list[StudentModel] | None = []
    assessments: list | None = []

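Note that the `list[StudentModel] | None` annotations require Python 3.10+. A short sketch of how these pydantic models are built; the values are made up:

from backend.api.model import StudentModel, TribeModel

tribe = TribeModel(name="Tribe1", level="2nde")
student = StudentModel(name="Star Tice", tribe_name=tribe.name)
print(student.id)  # None until the backend assigns a uuid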

@@ -1,16 +0,0 @@
import os
import sqlite3

from backend.adapters.sqlite import create_db


def sqlite_conn(sqlite_file: str = ":memory:"):
    # ":memory:" (with the trailing colon) is sqlite's in-memory database
    conn = sqlite3.connect(sqlite_file)
    create_db(conn)
    return conn


def get_api_url():
    host = os.environ.get("API_HOST", "localhost")
    port = 8000 if host == "localhost" else 80
    return f"http://{host}:{port}"

backend/model/assessment.py

@@ -1,69 +0,0 @@
from __future__ import annotations

from dataclasses import dataclass, field
from datetime import date
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    from backend.model.tribe import Tribe


@dataclass
class Assessment:
    name: str
    tribe: Tribe
    term: int
    exercises: list[Exercise] = field(default_factory=list)

    def __post_init__(self) -> None:
        self.tribe.register_assessment(self)

    def register_exercise(self, exercise: Exercise):
        self.exercises.append(exercise)

    @property
    def questions(self):
        # sum needs an empty list as start value to concatenate lists
        return sum([exercise.questions for exercise in self.exercises], [])


@dataclass
class Exercise:
    name: str
    assessment: Assessment
    date: Optional[date]
    questions: list[Question] = field(default_factory=list)

    def __post_init__(self) -> None:
        self.assessment.register_exercise(self)

    def register_question(self, question: Question):
        self.questions.append(question)


@dataclass(frozen=True)
class Domain:
    name: str
    description: str


@dataclass(frozen=True)
class Skill:
    name: str
    description: str


@dataclass
class Question:
    name: str
    exercise: Exercise
    description: str
    skill: Skill
    domain: Domain
    is_leveled: bool
    scale: int

    def __post_init__(
        self,
    ) -> None:
        self.exercise.register_question(self)

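A sketch of the registration cascade these __post_init__ hooks implement; the values are borrowed from the example CSV further down, the level is made up, and the sum(..., []) fix above is assumed:

from backend.model.assessment import Assessment, Domain, Exercise, Question, Skill
from backend.model.tribe import Tribe

tribe = Tribe(name="Tribe1", level="2nde")
ds = Assessment(name="DS", tribe=tribe, term=1)  # registers itself on the tribe
ex1 = Exercise(name="Exercice 1", assessment=ds, date=None)
q1 = Question(
    name="1",
    exercise=ex1,
    description="Coucou",
    skill=Skill(name="Calculer", description=""),
    domain=Domain(name="Plop", description=""),
    is_leveled=True,
    scale=1,
)
assert tribe.assessments == [ds]
assert ds.questions == [q1]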
backend/model/student.py

@@ -1,33 +0,0 @@
from __future__ import annotations

from dataclasses import dataclass, field
from datetime import datetime
from typing import TYPE_CHECKING, Optional
from uuid import UUID, uuid4

if TYPE_CHECKING:
    from backend.model.tribe import Tribe


@dataclass
class Student:
    name: str
    tribe: Tribe
    id: str = field(default_factory=lambda: str(uuid4()))

    def __post_init__(self) -> None:
        self.tribe.register_student(self)

    def __eq__(self, other: object) -> bool:
        if isinstance(other, Student):
            return self.id == other.id
        return False

    def __hash__(self) -> int:
        return hash(self.id)

    def to_tuple(self) -> tuple:
        return (self.id, self.name, self.tribe.name)

    def to_dict(self, full_tribe=False) -> dict:
        return {"id": self.id, "name": self.name, "tribe_name": self.tribe.name}

backend/model/tribe.py

@@ -1,55 +0,0 @@
from __future__ import annotations

from dataclasses import dataclass, field
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from backend.model.assessment import Assessment
    from backend.model.student import Student


class TribeError(Exception):
    pass


@dataclass
class Tribe:
    name: str
    level: str
    students: list[Student] = field(default_factory=list)
    assessments: list[Assessment] = field(default_factory=list)

    def register_assessment(self, assessment: Assessment) -> None:
        self.assessments.append(assessment)

    @property
    def students_id(self) -> list[str]:
        return [s.id for s in self.students]

    def register_student(self, student: Student) -> None:
        """Register a student

        If the student is already registered, it is modified.
        """
        try:
            old_student = next(filter(lambda s: s.id == student.id, self.students))
        except StopIteration:
            pass
        else:
            self.students.remove(old_student)
        self.students.append(student)

    def __eq__(self, other: object) -> bool:
        if isinstance(other, Tribe):
            return self.name == other.name
        return False

    def __hash__(self) -> int:
        return hash(self.name)

    def to_dict(self) -> dict:
        return {"name": self.name, "level": self.level}

    def to_tuple(self) -> tuple:
        return (self.name, self.level)

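The register_student behaviour above (replace on matching id) in a short sketch with made-up names:

from backend.model.student import Student
from backend.model.tribe import Tribe

tribe = Tribe(name="Tribe1", level="2nde")
s = Student(name="Star Tice", tribe=tribe)     # auto-registered by __post_init__
Student(name="Star T.", tribe=tribe, id=s.id)  # same id, so the old record is replaced
assert [st.name for st in tribe.students] == ["Star T."]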
backend/repository/abstract_repository.py

@@ -1,23 +0,0 @@
import abc


class AbstractRepository(abc.ABC):
    @abc.abstractmethod
    def add(self, element):
        raise NotImplementedError

    @abc.abstractmethod
    def update(self, reference, element):
        raise NotImplementedError

    @abc.abstractmethod
    def list(self):
        raise NotImplementedError

    @abc.abstractmethod
    def get(self, reference):
        raise NotImplementedError

    @abc.abstractmethod
    def delete(self, element):
        raise NotImplementedError

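This port has two adapters in the diff (SQLite and SQLAlchemy). For unit tests, a hypothetical in-memory adapter is enough to satisfy the interface; a sketch:

from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository


class InMemoryTribeRepository(AbstractRepository):
    def __init__(self) -> None:
        self._tribes: dict[str, Tribe] = {}

    def add(self, tribe: Tribe) -> None:
        self._tribes[tribe.name] = tribe

    def update(self, reference: str, tribe: Tribe) -> None:
        self._tribes.pop(reference)
        self._tribes[tribe.name] = tribe

    def list(self) -> list[Tribe]:
        return list(self._tribes.values())

    def get(self, reference: str) -> Tribe:
        return self._tribes[reference]

    def delete(self, name: str) -> None:
        del self._tribes[name]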
backend/repository/student_sqlite_repository.py

@@ -1,105 +0,0 @@
from backend.model.student import Student
from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository


class StudentRepositoryError(Exception):
    pass


class StudentSQLiteRepository(AbstractRepository):
    def __init__(self, conn) -> None:
        self.conn = conn

    def add(self, student: Student) -> None:
        self.conn.execute(
            """
            INSERT INTO students(id, name, tribe_name) VALUES (?, ?, ?)
            """,
            (
                student.id,
                student.name,
                student.tribe.name,
            ),
        )

    def update(self, student: Student) -> None:
        search_student = self.conn.execute(
            """
            SELECT id FROM students WHERE id=:id
            """,
            {"id": student.id},
        ).fetchone()
        if search_student is None:
            raise StudentRepositoryError(f"The student ({student.id=}) does not exist")
        self.conn.execute(
            """
            UPDATE students SET name=:newname, tribe_name=:newtribe WHERE id=:id
            """,
            {
                "newname": student.name,
                "newtribe": student.tribe.name,
                "id": student.id,
            },
        )

    def _rebuild_student(self, row: tuple, tribes: list[Tribe]) -> Student:
        tribe = next(filter(lambda t: t.name == row[2], tribes))
        return Student(id=row[0], name=row[1], tribe=tribe)

    def get(self, id: str, tribes: list[Tribe]) -> Student:
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT id, name, tribe_name FROM students WHERE id=?
            """,
            (id,),
        )
        row = cursor.fetchone()
        if row:
            return self._rebuild_student(row, tribes)
        raise ValueError(f"The student ({id=}) does not exist")

    def list(self, tribes: list[Tribe]) -> list[Student]:
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT * FROM students
            """
        )
        rows = cursor.fetchall()
        return [self._rebuild_student(r, tribes) for r in rows]

    def list_id(self):
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT id FROM students
            """
        )
        rows = cursor.fetchall()
        return [r[0] for r in rows]

    def delete(self, id: str) -> None:
        students_id = self.list_id()
        if id not in students_id:
            raise StudentRepositoryError(
                f"The student {id} doesn't exist. Can't delete it."
            )
        self.conn.execute(
            """
            DELETE FROM students WHERE id=:id
            """,
            {
                "id": id,
            },
        )

backend/repository/tribe_sqlalchemy_repository.py

@@ -1,23 +0,0 @@
from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository


class TribeSQLAlchemyRepository(AbstractRepository):
    def __init__(self, session) -> None:
        self.session = session

    def add(self, tribe: Tribe) -> None:
        self.session.add(tribe)

    def update(self, name: str, tribe: Tribe) -> None:
        self.session.query(Tribe).filter_by(name=name).update(tribe.to_dict())

    def get(self, name: str) -> Tribe:
        return self.session.query(Tribe).filter_by(name=name).one()

    def list(self) -> list[Tribe]:
        return self.session.query(Tribe).all()

    def delete(self, tribe: Tribe) -> None:
        the_tribe = self.get(tribe.name)
        self.session.delete(the_tribe)

backend/repository/tribe_sqlite_repository.py

@@ -1,89 +0,0 @@
from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository


class TribeRepositoryError(Exception):
    pass


class TribeSQLiteRepository(AbstractRepository):
    def __init__(self, conn) -> None:
        self.conn = conn

    def add(self, tribe: Tribe) -> None:
        tribes = self.list()
        if tribe.name in map(lambda x: x.name, tribes):
            raise TribeRepositoryError(
                f"The tribe {tribe.name} already exists. Can't add it"
            )
        self.conn.execute(
            """
            INSERT INTO tribes(name, level) VALUES (?, ?)
            """,
            (
                tribe.name,
                tribe.level,
            ),
        )

    def update(self, name: str, tribe: Tribe) -> None:
        tribes = self.list()
        if name not in map(lambda x: x.name, tribes):
            raise TribeRepositoryError(
                f"The tribe {name} doesn't exist. Can't update it"
            )
        self.conn.execute(
            """
            UPDATE tribes SET name=:newname, level=:newlevel WHERE name=:name
            """,
            {
                "newname": tribe.name,
                "newlevel": tribe.level,
                "name": name,
            },
        )

    def get(self, name: str) -> Tribe:
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT * FROM tribes WHERE name=?
            """,
            (name,),
        )
        row = cursor.fetchone()
        if row:
            return Tribe(*row)
        raise TribeRepositoryError(f"The tribe {name} does not exist")

    def list(self) -> list[Tribe]:
        cursor = self.conn.cursor()
        cursor.execute(
            """
            SELECT * FROM tribes
            """
        )
        rows = cursor.fetchall()
        return [Tribe(*r) for r in rows]

    def delete(self, name: str) -> None:
        tribes = self.list()
        if name not in map(lambda x: x.name, tribes):
            raise TribeRepositoryError(
                f"The tribe {name} doesn't exist. Can't delete it."
            )
        self.conn.execute(
            """
            DELETE FROM tribes WHERE name=:name
            """,
            {
                "name": name,
            },
        )

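A usage sketch for this repository against an in-memory database (the sample tribe is made up); note that commits are left to the caller, which is what the service layer below does:

import sqlite3

from backend.adapters.sqlite import create_db
from backend.model.tribe import Tribe
from backend.repository.tribe_sqlite_repository import (
    TribeRepositoryError,
    TribeSQLiteRepository,
)

conn = sqlite3.connect(":memory:")
create_db(conn)
repo = TribeSQLiteRepository(conn)

repo.add(Tribe(name="Tribe1", level="2nde"))
conn.commit()
assert repo.get("Tribe1").level == "2nde"
try:
    repo.add(Tribe(name="Tribe1", level="2nde"))  # duplicate name
except TribeRepositoryError as err:
    print(err)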
backend/service/services.py

@@ -1,116 +0,0 @@
from backend.model.student import Student
from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository
from backend.repository.student_sqlite_repository import StudentRepositoryError
from backend.repository.tribe_sqlite_repository import TribeRepositoryError


class TribeExists(Exception):
    pass


class TribeDoesNotExist(Exception):
    pass


class StudentExists(Exception):
    pass


class StudentDoesExist(Exception):
    pass


def add_tribe(name: str, level: str, tribe_repo: AbstractRepository, conn) -> Tribe:
    tribe = Tribe(name=name, level=level)
    try:
        tribe_repo.add(tribe)
    except TribeRepositoryError:
        raise TribeExists(f"The tribe {tribe.name} already exists")
    conn.commit()
    return tribe


def update_tribe(name: str, level: str, tribe_repo: AbstractRepository, conn) -> Tribe:
    tribe = Tribe(name=name, level=level)
    try:
        tribe_repo.update(name=name, tribe=tribe)
    except TribeRepositoryError:
        raise TribeDoesNotExist(f"The tribe {name} does not exist; you can't update it")
    conn.commit()
    return tribe


def delete_tribe(name: str, tribe_repo: AbstractRepository, conn) -> None:
    try:
        tribe_repo.delete(name=name)
    except TribeRepositoryError:
        raise TribeDoesNotExist(f"The tribe {name} does not exist; you can't delete it")
    conn.commit()


def add_student(
    name: str,
    tribe: str,
    student_repo: AbstractRepository,
    tribe_repo: AbstractRepository,
    conn,
) -> Student:
    try:
        _tribe = tribe_repo.get(tribe)
    except TribeRepositoryError:
        raise TribeDoesNotExist(
            f"The tribe {tribe} does not exist. Can't add a student in it"
        )
    student = Student(name=name, tribe=_tribe)
    try:
        student_repo.add(student)
    except StudentRepositoryError:
        raise StudentExists(f"The student {student.name} already exists. Can't add it.")
    conn.commit()
    return student


def update_student(
    id: str,
    name: str,
    tribe: str,
    student_repo: AbstractRepository,
    tribe_repo: AbstractRepository,
    conn,
) -> Student:
    try:
        _tribe = tribe_repo.get(tribe)
    except TribeRepositoryError:
        raise TribeDoesNotExist(
            f"The tribe {tribe} does not exist. Can't update a student with it"
        )
    student = Student(id=id, name=name, tribe=_tribe)
    try:
        student_repo.update(student)
    except StudentRepositoryError:
        raise StudentDoesExist(
            f"The student {student.name} ({student.id=}) does not exist. Can't update it."
        )
    conn.commit()
    return student


def delete_student(
    id: str,
    student_repo: AbstractRepository,
    conn,
) -> None:
    try:
        student_repo.delete(id=id)
    except StudentRepositoryError:
        raise StudentDoesExist(f"The student with id {id} does not exist")
    conn.commit()

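Wiring the service layer end to end, as the API module does, in a minimal sketch with made-up data:

import sqlite3

from backend.adapters.sqlite import create_db
from backend.repository.student_sqlite_repository import StudentSQLiteRepository
from backend.repository.tribe_sqlite_repository import TribeSQLiteRepository
from backend.service import services

conn = sqlite3.connect(":memory:")
create_db(conn)
tribe_repo = TribeSQLiteRepository(conn)
student_repo = StudentSQLiteRepository(conn)

services.add_tribe(name="Tribe1", level="2nde", tribe_repo=tribe_repo, conn=conn)
student = services.add_student(
    name="Star Tice",
    tribe="Tribe1",
    student_repo=student_repo,
    tribe_repo=tribe_repo,
    conn=conn,
)
print(student.id)  # a fresh uuid4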

@@ -0,0 +1,5 @@
Trimestre,Nom,Date,Exercice,Question,Competence,Domaine,Commentaire,Bareme,Est_nivele,Star Tice,Umberto Dingate,Starlin Crangle,Humbert Bourcq,Gabriella Handyside,Stewart Eaves,Erick Going,Ase Praton,Rollins Planks,Dunstan Sarjant,Stacy Guiton,Ange Stanes,Amabelle Elleton,Darn Broomhall,Dyan Chatto,Keane Rennebach,Nari Paulton,Brandy Wase,Jaclyn Firidolfi,Violette Lockney
1,DS,12/01/2021,Exercice 1,1,Calculer,Plop,Coucou,1,1,,,1,0,1,2,3,0,3,3,2,,1,,,,,,,
1,DS,12/01/2021,Exercice 1,2,Calculer,C'est trop chouette!,Coucou,1,1,,,1,2,,,3,3,,,,,2,,,,,,,
1,DS,12/01/2021,Exercice 1,3,Calculer,Null,Coucou,1,1,,,,3,2,,,,,,,,3,,,,,,,
1,DS,12/01/2021,Exercice 1,3,Calculer,Nié,DChic,1,1,,,,2,.,,,,,,,,,,,,,,,

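The commit log above ("filter non-numeric scores", "replace np.isnan with pd.isnull") suggests how such a file is consumed. A sketch assuming pandas; the file name is hypothetical, and "." / "a" / blanks are the no-answer, absent and exempted markers from the README:

import pandas as pd

scores = pd.read_csv("example.csv", dtype=str)  # hypothetical file name
meta = ["Trimestre", "Nom", "Date", "Exercice", "Question",
        "Competence", "Domaine", "Commentaire", "Bareme", "Est_nivele"]
long = scores.melt(id_vars=meta, var_name="Eleve", value_name="Score")
long["Score"] = pd.to_numeric(long["Score"], errors="coerce")
long = long[~pd.isnull(long["Score"])]  # drops ".", "a" and empty cells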

@@ -0,0 +1,5 @@
Trimestre,Nom,Date,Exercice,Question,Competence,Domaine,Commentaire,Bareme,Est_nivele,Star Tice,Umberto Dingate,Starlin Crangle,Humbert Bourcq,Gabriella Handyside,Stewart Eaves,Erick Going,Ase Praton,Rollins Planks,Dunstan Sarjant,Stacy Guiton,Ange Stanes,Amabelle Elleton,Darn Broomhall,Dyan Chatto,Keane Rennebach,Nari Paulton,Brandy Wase,Jaclyn Firidolfi,Violette Lockney
1,DS6,22/01/2021,Exercice 1,Sait pas,,,,,,,,,,,,,,,,,,,,,,,,,
1,DS6,22/01/2021,Exercice 1,Ha,,,,,,,,,,,,,,,,,,,,,,,,,
1,DS6,22/01/2021,Exercice 1,,,,,,,,,,,,,,,,,,,,,,,,,,
1,DS6,22/01/2021,Exercice 2,grr,,,,,,,,,,,,,,,,,,,,,,,,,

13
example/recoconfig.yml Normal file

@@ -0,0 +1,13 @@
---
source: ./
output: ./
templates: templates/
tribes:
  Tribe1:
    name: Tribe1
    type: Type1
    students: tribe1.csv
  Tribe2:
    name: Tribe2
    students: tribe2.csv

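A sketch (assuming PyYAML) of loading this config:

import yaml

with open("example/recoconfig.yml") as config_file:
    config = yaml.safe_load(config_file)

print(config["tribes"]["Tribe1"]["students"])  # tribe1.csv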
21
example/tribe1.csv Normal file

@@ -0,0 +1,21 @@
Nom,email
Star Tice,stice0@jalbum.net
Umberto Dingate,udingate1@tumblr.com
Starlin Crangle,scrangle2@wufoo.com
Humbert Bourcq,hbourcq3@g.co
Gabriella Handyside,ghandyside4@patch.com
Stewart Eaves,seaves5@ycombinator.com
Erick Going,egoing6@va.gov
Ase Praton,apraton7@va.gov
Rollins Planks,rplanks8@delicious.com
Dunstan Sarjant,dsarjant9@naver.com
Stacy Guiton,sguitona@themeforest.net
Ange Stanes,astanesb@marriott.com
Amabelle Elleton,aelletonc@squidoo.com
Darn Broomhall,dbroomhalld@cisco.com
Dyan Chatto,dchattoe@npr.org
Keane Rennebach,krennebachf@dot.gov
Nari Paulton,npaultong@gov.uk
Brandy Wase,bwaseh@ftc.gov
Jaclyn Firidolfi,jfiridolfii@reuters.com
Violette Lockney,vlockneyj@chron.com

21
example/tribe2.csv Normal file

@@ -0,0 +1,21 @@
Nom,email
Elle McKintosh,emckintosh0@1und1.de
Ty Megany,tmegany1@reuters.com
Pippa Borrows,pborrows2@a8.net
Sonny Eskrick,seskrick3@123-reg.co.uk
Mollee Britch,mbritch4@usda.gov
Ingram Plaistowe,iplaistowe5@purevolume.com
Fay Vanyard,fvanyard6@sbwire.com
Nancy Rase,nrase7@omniture.com
Rachael Ruxton,rruxton8@bravesites.com
Tallie Rushmer,trushmer9@home.pl
Seward MacIlhagga,smacilhaggaa@hatena.ne.jp
Lizette Searl,lsearlb@list-manage.com
Talya Mannagh,tmannaghc@webnode.com
Jordan Witherbed,jwitherbedd@unesco.org
Reagan Botcherby,rbotcherbye@scientificamerican.com
Libbie Shoulder,lshoulderf@desdev.cn
Abner Khomich,akhomichg@youtube.com
Zollie Kitman,zkitmanh@forbes.com
Fiorenze Durden,fdurdeni@feedburner.com
Kevyn Race,kracej@seattletimes.com

872
poetry.lock generated

@@ -1,872 +0,0 @@
# This file is automatically @generated by Poetry and should not be changed by hand.
[[package]]
name = "anyio"
version = "3.6.2"
description = "High level compatibility layer for multiple asynchronous event loop implementations"
category = "main"
optional = false
python-versions = ">=3.6.2"
files = [
{file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"},
{file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"},
]
[package.dependencies]
idna = ">=2.8"
sniffio = ">=1.1"
[package.extras]
doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
trio = ["trio (>=0.16,<0.22)"]
[[package]]
name = "attrs"
version = "22.1.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
python-versions = ">=3.5"
files = [
{file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"},
{file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"},
]
[package.extras]
dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
[[package]]
name = "certifi"
version = "2022.12.7"
description = "Python package for providing Mozilla's CA Bundle."
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
{file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
]
[[package]]
name = "cfgv"
version = "3.3.1"
description = "Validate configuration and produce human readable error messages."
category = "dev"
optional = false
python-versions = ">=3.6.1"
files = [
{file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"},
{file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"},
]
[[package]]
name = "charset-normalizer"
version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
optional = false
python-versions = ">=3.6.0"
files = [
{file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
{file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
]
[package.extras]
unicode-backport = ["unicodedata2"]
[[package]]
name = "click"
version = "8.1.3"
description = "Composable command line interface toolkit"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
{file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
]
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "7.0.1"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "coverage-7.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3695c4f4750bca943b3e1f74ad4be8d29e4aeab927d50772c41359107bd5d5c"},
{file = "coverage-7.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa6a5a224b7f4cfb226f4fc55a57e8537fcc096f42219128c2c74c0e7d0953e1"},
{file = "coverage-7.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74f70cd92669394eaf8d7756d1b195c8032cf7bbbdfce3bc489d4e15b3b8cf73"},
{file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b66bb21a23680dee0be66557dc6b02a3152ddb55edf9f6723fa4a93368f7158d"},
{file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87717959d4d0ee9db08a0f1d80d21eb585aafe30f9b0a54ecf779a69cb015f6"},
{file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:854f22fa361d1ff914c7efa347398374cc7d567bdafa48ac3aa22334650dfba2"},
{file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e414dc32ee5c3f36544ea466b6f52f28a7af788653744b8570d0bf12ff34bc0"},
{file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6c5ad996c6fa4d8ed669cfa1e8551348729d008a2caf81489ab9ea67cfbc7498"},
{file = "coverage-7.0.1-cp310-cp310-win32.whl", hash = "sha256:691571f31ace1837838b7e421d3a09a8c00b4aac32efacb4fc9bd0a5c647d25a"},
{file = "coverage-7.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:89caf4425fe88889e2973a8e9a3f6f5f9bbe5dd411d7d521e86428c08a873a4a"},
{file = "coverage-7.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:63d56165a7c76265468d7e0c5548215a5ba515fc2cba5232d17df97bffa10f6c"},
{file = "coverage-7.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f943a3b2bc520102dd3e0bb465e1286e12c9a54f58accd71b9e65324d9c7c01"},
{file = "coverage-7.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:830525361249dc4cd013652b0efad645a385707a5ae49350c894b67d23fbb07c"},
{file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd1b9c5adc066db699ccf7fa839189a649afcdd9e02cb5dc9d24e67e7922737d"},
{file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00c14720b8b3b6c23b487e70bd406abafc976ddc50490f645166f111c419c39"},
{file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d55d840e1b8c0002fce66443e124e8581f30f9ead2e54fbf6709fb593181f2c"},
{file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66b18c3cf8bbab0cce0d7b9e4262dc830e93588986865a8c78ab2ae324b3ed56"},
{file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:12a5aa77783d49e05439fbe6e6b427484f8a0f9f456b46a51d8aac022cfd024d"},
{file = "coverage-7.0.1-cp311-cp311-win32.whl", hash = "sha256:b77015d1cb8fe941be1222a5a8b4e3fbca88180cfa7e2d4a4e58aeabadef0ab7"},
{file = "coverage-7.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb992c47cb1e5bd6a01e97182400bcc2ba2077080a17fcd7be23aaa6e572e390"},
{file = "coverage-7.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e78e9dcbf4f3853d3ae18a8f9272111242531535ec9e1009fa8ec4a2b74557dc"},
{file = "coverage-7.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60bef2e2416f15fdc05772bf87db06c6a6f9870d1db08fdd019fbec98ae24a9"},
{file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9823e4789ab70f3ec88724bba1a203f2856331986cd893dedbe3e23a6cfc1e4e"},
{file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9158f8fb06747ac17bd237930c4372336edc85b6e13bdc778e60f9d685c3ca37"},
{file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:486ee81fa694b4b796fc5617e376326a088f7b9729c74d9defa211813f3861e4"},
{file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1285648428a6101b5f41a18991c84f1c3959cee359e51b8375c5882fc364a13f"},
{file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2c44fcfb3781b41409d0f060a4ed748537557de9362a8a9282182fafb7a76ab4"},
{file = "coverage-7.0.1-cp37-cp37m-win32.whl", hash = "sha256:d6814854c02cbcd9c873c0f3286a02e3ac1250625cca822ca6bc1018c5b19f1c"},
{file = "coverage-7.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66460f17c9319ea4f91c165d46840314f0a7c004720b20be58594d162a441d8"},
{file = "coverage-7.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b373c9345c584bb4b5f5b8840df7f4ab48c4cbb7934b58d52c57020d911b856"},
{file = "coverage-7.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d3022c3007d3267a880b5adcf18c2a9bf1fc64469b394a804886b401959b8742"},
{file = "coverage-7.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92651580bd46519067e36493acb394ea0607b55b45bd81dd4e26379ed1871f55"},
{file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cfc595d2af13856505631be072835c59f1acf30028d1c860b435c5fc9c15b69"},
{file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b4b3a4d9915b2be879aff6299c0a6129f3d08a775d5a061f503cf79571f73e4"},
{file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b6f22bb64cc39bcb883e5910f99a27b200fdc14cdd79df8696fa96b0005c9444"},
{file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72d1507f152abacea81f65fee38e4ef3ac3c02ff8bc16f21d935fd3a8a4ad910"},
{file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a79137fc99815fff6a852c233628e735ec15903cfd16da0f229d9c4d45926ab"},
{file = "coverage-7.0.1-cp38-cp38-win32.whl", hash = "sha256:b3763e7fcade2ff6c8e62340af9277f54336920489ceb6a8cd6cc96da52fcc62"},
{file = "coverage-7.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:09f6b5a8415b6b3e136d5fec62b552972187265cb705097bf030eb9d4ffb9b60"},
{file = "coverage-7.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:978258fec36c154b5e250d356c59af7d4c3ba02bef4b99cda90b6029441d797d"},
{file = "coverage-7.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19ec666533f0f70a0993f88b8273057b96c07b9d26457b41863ccd021a043b9a"},
{file = "coverage-7.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfded268092a84605f1cc19e5c737f9ce630a8900a3589e9289622db161967e9"},
{file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bcfb1d8ac94af886b54e18a88b393f6a73d5959bb31e46644a02453c36e475"},
{file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b4a923cc7566bbc7ae2dfd0ba5a039b61d19c740f1373791f2ebd11caea59"},
{file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aec2d1515d9d39ff270059fd3afbb3b44e6ec5758af73caf18991807138c7118"},
{file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c20cfebcc149a4c212f6491a5f9ff56f41829cd4f607b5be71bb2d530ef243b1"},
{file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd556ff16a57a070ce4f31c635953cc44e25244f91a0378c6e9bdfd40fdb249f"},
{file = "coverage-7.0.1-cp39-cp39-win32.whl", hash = "sha256:b9ea158775c7c2d3e54530a92da79496fb3fb577c876eec761c23e028f1e216c"},
{file = "coverage-7.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:d1991f1dd95eba69d2cd7708ff6c2bbd2426160ffc73c2b81f617a053ebcb1a8"},
{file = "coverage-7.0.1-pp37.pp38.pp39-none-any.whl", hash = "sha256:3dd4ee135e08037f458425b8842d24a95a0961831a33f89685ff86b77d378f89"},
{file = "coverage-7.0.1.tar.gz", hash = "sha256:a4a574a19eeb67575a5328a5760bbbb737faa685616586a9f9da4281f940109c"},
]
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
toml = ["tomli"]
[[package]]
name = "distlib"
version = "0.3.6"
description = "Distribution utilities"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"},
{file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
]
[[package]]
name = "exceptiongroup"
version = "1.0.4"
description = "Backport of PEP 654 (exception groups)"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"},
{file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "faker"
version = "15.3.4"
description = "Faker is a Python package that generates fake data for you."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "Faker-15.3.4-py3-none-any.whl", hash = "sha256:c2a2ff9dd8dfd991109b517ab98d5cb465e857acb45f6b643a0e284a9eb2cc76"},
{file = "Faker-15.3.4.tar.gz", hash = "sha256:2d5443724f640ce07658ca8ca8bbd40d26b58914e63eec6549727869aa67e2cc"},
]
[package.dependencies]
python-dateutil = ">=2.4"
[[package]]
name = "fastapi"
version = "0.88.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "fastapi-0.88.0-py3-none-any.whl", hash = "sha256:263b718bb384422fe3d042ffc9a0c8dece5e034ab6586ff034f6b4b1667c3eee"},
{file = "fastapi-0.88.0.tar.gz", hash = "sha256:915bf304180a0e7c5605ec81097b7d4cd8826ff87a02bb198e336fb9f3b5ff02"},
]
[package.dependencies]
pydantic = ">=1.6.2,<1.7 || >1.7,<1.7.1 || >1.7.1,<1.7.2 || >1.7.2,<1.7.3 || >1.7.3,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0"
starlette = "0.22.0"
[package.extras]
all = ["email-validator (>=1.1.1)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
dev = ["pre-commit (>=2.17.0,<3.0.0)", "ruff (==0.0.138)", "uvicorn[standard] (>=0.12.0,<0.19.0)"]
doc = ["mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-markdownextradata-plugin (>=0.1.7,<0.3.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pyyaml (>=5.3.1,<7.0.0)", "typer[all] (>=0.6.1,<0.7.0)"]
test = ["anyio[trio] (>=3.2.1,<4.0.0)", "black (==22.10.0)", "coverage[toml] (>=6.5.0,<7.0)", "databases[sqlite] (>=0.3.2,<0.7.0)", "email-validator (>=1.1.1,<2.0.0)", "flask (>=1.1.2,<3.0.0)", "httpx (>=0.23.0,<0.24.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.982)", "orjson (>=3.2.1,<4.0.0)", "passlib[bcrypt] (>=1.7.2,<2.0.0)", "peewee (>=3.13.3,<4.0.0)", "pytest (>=7.1.3,<8.0.0)", "python-jose[cryptography] (>=3.3.0,<4.0.0)", "python-multipart (>=0.0.5,<0.0.6)", "pyyaml (>=5.3.1,<7.0.0)", "ruff (==0.0.138)", "sqlalchemy (>=1.3.18,<=1.4.41)", "types-orjson (==3.6.2)", "types-ujson (==5.5.0)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0,<6.0.0)"]
[[package]]
name = "filelock"
version = "3.8.2"
description = "A platform independent file lock."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"},
{file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"},
]
[package.extras]
docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
[[package]]
name = "greenlet"
version = "2.0.1"
description = "Lightweight in-process concurrent programming"
category = "main"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*"
files = [
{file = "greenlet-2.0.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:9ed358312e63bf683b9ef22c8e442ef6c5c02973f0c2a939ec1d7b50c974015c"},
{file = "greenlet-2.0.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:4f09b0010e55bec3239278f642a8a506b91034f03a4fb28289a7d448a67f1515"},
{file = "greenlet-2.0.1-cp27-cp27m-win32.whl", hash = "sha256:1407fe45246632d0ffb7a3f4a520ba4e6051fc2cbd61ba1f806900c27f47706a"},
{file = "greenlet-2.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:3001d00eba6bbf084ae60ec7f4bb8ed375748f53aeaefaf2a37d9f0370558524"},
{file = "greenlet-2.0.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d566b82e92ff2e09dd6342df7e0eb4ff6275a3f08db284888dcd98134dbd4243"},
{file = "greenlet-2.0.1-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:0722c9be0797f544a3ed212569ca3fe3d9d1a1b13942d10dd6f0e8601e484d26"},
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d37990425b4687ade27810e3b1a1c37825d242ebc275066cfee8cb6b8829ccd"},
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be35822f35f99dcc48152c9839d0171a06186f2d71ef76dc57fa556cc9bf6b45"},
{file = "greenlet-2.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c140e7eb5ce47249668056edf3b7e9900c6a2e22fb0eaf0513f18a1b2c14e1da"},
{file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d21681f09e297a5adaa73060737e3aa1279a13ecdcfcc6ef66c292cb25125b2d"},
{file = "greenlet-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fb412b7db83fe56847df9c47b6fe3f13911b06339c2aa02dcc09dce8bbf582cd"},
{file = "greenlet-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c6a08799e9e88052221adca55741bf106ec7ea0710bca635c208b751f0d5b617"},
{file = "greenlet-2.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9e112e03d37987d7b90c1e98ba5e1b59e1645226d78d73282f45b326f7bddcb9"},
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56961cfca7da2fdd178f95ca407fa330c64f33289e1804b592a77d5593d9bd94"},
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13ba6e8e326e2116c954074c994da14954982ba2795aebb881c07ac5d093a58a"},
{file = "greenlet-2.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bf633a50cc93ed17e494015897361010fc08700d92676c87931d3ea464123ce"},
{file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9f2c221eecb7ead00b8e3ddb913c67f75cba078fd1d326053225a3f59d850d72"},
{file = "greenlet-2.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:13ebf93c343dd8bd010cd98e617cb4c1c1f352a0cf2524c82d3814154116aa82"},
{file = "greenlet-2.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:6f61d71bbc9b4a3de768371b210d906726535d6ca43506737682caa754b956cd"},
{file = "greenlet-2.0.1-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:2d0bac0385d2b43a7bd1d651621a4e0f1380abc63d6fb1012213a401cbd5bf8f"},
{file = "greenlet-2.0.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:f6327b6907b4cb72f650a5b7b1be23a2aab395017aa6f1adb13069d66360eb3f"},
{file = "greenlet-2.0.1-cp35-cp35m-win32.whl", hash = "sha256:81b0ea3715bf6a848d6f7149d25bf018fd24554a4be01fcbbe3fdc78e890b955"},
{file = "greenlet-2.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:38255a3f1e8942573b067510f9611fc9e38196077b0c8eb7a8c795e105f9ce77"},
{file = "greenlet-2.0.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:04957dc96669be041e0c260964cfef4c77287f07c40452e61abe19d647505581"},
{file = "greenlet-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:4aeaebcd91d9fee9aa768c1b39cb12214b30bf36d2b7370505a9f2165fedd8d9"},
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974a39bdb8c90a85982cdb78a103a32e0b1be986d411303064b28a80611f6e51"},
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dca09dedf1bd8684767bc736cc20c97c29bc0c04c413e3276e0962cd7aeb148"},
{file = "greenlet-2.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4c0757db9bd08470ff8277791795e70d0bf035a011a528ee9a5ce9454b6cba2"},
{file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5067920de254f1a2dee8d3d9d7e4e03718e8fd2d2d9db962c8c9fa781ae82a39"},
{file = "greenlet-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:5a8e05057fab2a365c81abc696cb753da7549d20266e8511eb6c9d9f72fe3e92"},
{file = "greenlet-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:3d75b8d013086b08e801fbbb896f7d5c9e6ccd44f13a9241d2bf7c0df9eda928"},
{file = "greenlet-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:097e3dae69321e9100202fc62977f687454cd0ea147d0fd5a766e57450c569fd"},
{file = "greenlet-2.0.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cb242fc2cda5a307a7698c93173d3627a2a90d00507bccf5bc228851e8304963"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:72b00a8e7c25dcea5946692a2485b1a0c0661ed93ecfedfa9b6687bd89a24ef5"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5b0ff9878333823226d270417f24f4d06f235cb3e54d1103b71ea537a6a86ce"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be9e0fb2ada7e5124f5282d6381903183ecc73ea019568d6d63d33f25b2a9000"},
{file = "greenlet-2.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b493db84d124805865adc587532ebad30efa68f79ad68f11b336e0a51ec86c2"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0459d94f73265744fee4c2d5ec44c6f34aa8a31017e6e9de770f7bcf29710be9"},
{file = "greenlet-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a20d33124935d27b80e6fdacbd34205732660e0a1d35d8b10b3328179a2b51a1"},
{file = "greenlet-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:ea688d11707d30e212e0110a1aac7f7f3f542a259235d396f88be68b649e47d1"},
{file = "greenlet-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:afe07421c969e259e9403c3bb658968702bc3b78ec0b6fde3ae1e73440529c23"},
{file = "greenlet-2.0.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:cd4ccc364cf75d1422e66e247e52a93da6a9b73cefa8cad696f3cbbb75af179d"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c8b1c43e75c42a6cafcc71defa9e01ead39ae80bd733a2608b297412beede68"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:659f167f419a4609bc0516fb18ea69ed39dbb25594934bd2dd4d0401660e8a1e"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:356e4519d4dfa766d50ecc498544b44c0249b6de66426041d7f8b751de4d6b48"},
{file = "greenlet-2.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:811e1d37d60b47cb8126e0a929b58c046251f28117cb16fcd371eed61f66b764"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d38ffd0e81ba8ef347d2be0772e899c289b59ff150ebbbbe05dc61b1246eb4e0"},
{file = "greenlet-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0109af1138afbfb8ae647e31a2b1ab030f58b21dd8528c27beaeb0093b7938a9"},
{file = "greenlet-2.0.1-cp38-cp38-win32.whl", hash = "sha256:88c8d517e78acdf7df8a2134a3c4b964415b575d2840a2746ddb1cc6175f8608"},
{file = "greenlet-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:d6ee1aa7ab36475035eb48c01efae87d37936a8173fc4d7b10bb02c2d75dd8f6"},
{file = "greenlet-2.0.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:b1992ba9d4780d9af9726bbcef6a1db12d9ab1ccc35e5773685a24b7fb2758eb"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:b5e83e4de81dcc9425598d9469a624826a0b1211380ac444c7c791d4a2137c19"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:505138d4fa69462447a562a7c2ef723c6025ba12ac04478bc1ce2fcc279a2db5"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cce1e90dd302f45716a7715517c6aa0468af0bf38e814ad4eab58e88fc09f7f7"},
{file = "greenlet-2.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e9744c657d896c7b580455e739899e492a4a452e2dd4d2b3e459f6b244a638d"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:662e8f7cad915ba75d8017b3e601afc01ef20deeeabf281bd00369de196d7726"},
{file = "greenlet-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:41b825d65f31e394b523c84db84f9383a2f7eefc13d987f308f4663794d2687e"},
{file = "greenlet-2.0.1-cp39-cp39-win32.whl", hash = "sha256:db38f80540083ea33bdab614a9d28bcec4b54daa5aff1668d7827a9fc769ae0a"},
{file = "greenlet-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b23d2a46d53210b498e5b701a1913697671988f4bf8e10f935433f6e7c332fb6"},
{file = "greenlet-2.0.1.tar.gz", hash = "sha256:42e602564460da0e8ee67cb6d7236363ee5e131aa15943b6670e44e5c2ed0f67"},
]
[package.extras]
docs = ["Sphinx", "docutils (<0.18)"]
test = ["faulthandler", "objgraph", "psutil"]
[[package]]
name = "h11"
version = "0.14.0"
description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
]
[[package]]
name = "identify"
version = "2.5.11"
description = "File identification library for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "identify-2.5.11-py2.py3-none-any.whl", hash = "sha256:e7db36b772b188099616aaf2accbee122949d1c6a1bac4f38196720d6f9f06db"},
{file = "identify-2.5.11.tar.gz", hash = "sha256:14b7076b29c99b1b0b8b08e96d448c7b877a9b07683cd8cfda2ea06af85ffa1c"},
]
[package.extras]
license = ["ukkonen"]
[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
[[package]]
name = "iniconfig"
version = "1.1.1"
description = "iniconfig: brain-dead simple config-ini parsing"
category = "dev"
optional = false
python-versions = "*"
files = [
{file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"},
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
[[package]]
name = "nodeenv"
version = "1.7.0"
description = "Node.js virtual environment builder"
category = "dev"
optional = false
python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*"
files = [
{file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"},
{file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"},
]
[package.dependencies]
setuptools = "*"
[[package]]
name = "packaging"
version = "22.0"
description = "Core utilities for Python packages"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"},
{file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"},
]
[[package]]
name = "platformdirs"
version = "2.6.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"},
{file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"},
]
[package.extras]
docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"]
test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
[[package]]
name = "pluggy"
version = "1.0.0"
description = "plugin and hook calling mechanisms for python"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"},
{file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
version = "2.20.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"},
{file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"},
]
[package.dependencies]
cfgv = ">=2.0.0"
identify = ">=1.0.0"
nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
toml = "*"
virtualenv = ">=20.0.8"
[[package]]
name = "pydantic"
version = "1.10.2"
description = "Data validation and settings management using python type hints"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "pydantic-1.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb6ad4489af1bac6955d38ebcb95079a836af31e4c4f74aba1ca05bb9f6027bd"},
{file = "pydantic-1.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1f5a63a6dfe19d719b1b6e6106561869d2efaca6167f84f5ab9347887d78b98"},
{file = "pydantic-1.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352aedb1d71b8b0736c6d56ad2bd34c6982720644b0624462059ab29bd6e5912"},
{file = "pydantic-1.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19b3b9ccf97af2b7519c42032441a891a5e05c68368f40865a90eb88833c2559"},
{file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e9069e1b01525a96e6ff49e25876d90d5a563bc31c658289a8772ae186552236"},
{file = "pydantic-1.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:355639d9afc76bcb9b0c3000ddcd08472ae75318a6eb67a15866b87e2efa168c"},
{file = "pydantic-1.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:ae544c47bec47a86bc7d350f965d8b15540e27e5aa4f55170ac6a75e5f73b644"},
{file = "pydantic-1.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4c805731c33a8db4b6ace45ce440c4ef5336e712508b4d9e1aafa617dc9907f"},
{file = "pydantic-1.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d49f3db871575e0426b12e2f32fdb25e579dea16486a26e5a0474af87cb1ab0a"},
{file = "pydantic-1.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c90345ec7dd2f1bcef82ce49b6235b40f282b94d3eec47e801baf864d15525"},
{file = "pydantic-1.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b5ba54d026c2bd2cb769d3468885f23f43710f651688e91f5fb1edcf0ee9283"},
{file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e00dbebbe810b33c7a7362f231893183bcc4251f3f2ff991c31d5c08240c42"},
{file = "pydantic-1.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2d0567e60eb01bccda3a4df01df677adf6b437958d35c12a3ac3e0f078b0ee52"},
{file = "pydantic-1.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:c6f981882aea41e021f72779ce2a4e87267458cc4d39ea990729e21ef18f0f8c"},
{file = "pydantic-1.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4aac8e7103bf598373208f6299fa9a5cfd1fc571f2d40bf1dd1955a63d6eeb5"},
{file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81a7b66c3f499108b448f3f004801fcd7d7165fb4200acb03f1c2402da73ce4c"},
{file = "pydantic-1.10.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedf309630209e78582ffacda64a21f96f3ed2e51fbf3962d4d488e503420254"},
{file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9300fcbebf85f6339a02c6994b2eb3ff1b9c8c14f502058b5bf349d42447dcf5"},
{file = "pydantic-1.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:216f3bcbf19c726b1cc22b099dd409aa371f55c08800bcea4c44c8f74b73478d"},
{file = "pydantic-1.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dd3f9a40c16daf323cf913593083698caee97df2804aa36c4b3175d5ac1b92a2"},
{file = "pydantic-1.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b97890e56a694486f772d36efd2ba31612739bc6f3caeee50e9e7e3ebd2fdd13"},
{file = "pydantic-1.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9cabf4a7f05a776e7793e72793cd92cc865ea0e83a819f9ae4ecccb1b8aa6116"},
{file = "pydantic-1.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06094d18dd5e6f2bbf93efa54991c3240964bb663b87729ac340eb5014310624"},
{file = "pydantic-1.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc78cc83110d2f275ec1970e7a831f4e371ee92405332ebfe9860a715f8336e1"},
{file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ee433e274268a4b0c8fde7ad9d58ecba12b069a033ecc4645bb6303c062d2e9"},
{file = "pydantic-1.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c2abc4393dea97a4ccbb4ec7d8658d4e22c4765b7b9b9445588f16c71ad9965"},
{file = "pydantic-1.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:0b959f4d8211fc964772b595ebb25f7652da3f22322c007b6fed26846a40685e"},
{file = "pydantic-1.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c33602f93bfb67779f9c507e4d69451664524389546bacfe1bee13cae6dc7488"},
{file = "pydantic-1.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5760e164b807a48a8f25f8aa1a6d857e6ce62e7ec83ea5d5c5a802eac81bad41"},
{file = "pydantic-1.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6eb843dcc411b6a2237a694f5e1d649fc66c6064d02b204a7e9d194dff81eb4b"},
{file = "pydantic-1.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b8795290deaae348c4eba0cebb196e1c6b98bdbe7f50b2d0d9a4a99716342fe"},
{file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e0bedafe4bc165ad0a56ac0bd7695df25c50f76961da29c050712596cf092d6d"},
{file = "pydantic-1.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e05aed07fa02231dbf03d0adb1be1d79cabb09025dd45aa094aa8b4e7b9dcda"},
{file = "pydantic-1.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:c1ba1afb396148bbc70e9eaa8c06c1716fdddabaf86e7027c5988bae2a829ab6"},
{file = "pydantic-1.10.2-py3-none-any.whl", hash = "sha256:1b6ee725bd6e83ec78b1aa32c5b1fa67a3a65badddde3976bca5fe4568f27709"},
{file = "pydantic-1.10.2.tar.gz", hash = "sha256:91b8e218852ef6007c2b98cd861601c6a09f1aa32bbbb74fab5b1c33d4a1e410"},
]
[package.dependencies]
typing-extensions = ">=4.1.0"
[package.extras]
dotenv = ["python-dotenv (>=0.10.4)"]
email = ["email-validator (>=1.0.3)"]
[[package]]
name = "pytest"
version = "7.2.0"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"},
{file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"},
]
[package.dependencies]
attrs = ">=19.2.0"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"]
[[package]]
name = "pytest-cov"
version = "4.0.0"
description = "Pytest plugin for measuring coverage."
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"},
{file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "dev"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
files = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
[package.dependencies]
six = ">=1.5"
[[package]]
name = "pyyaml"
version = "6.0"
description = "YAML parser and emitter for Python"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"},
{file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"},
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
{file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
{file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
{file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
{file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"},
{file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"},
{file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"},
{file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"},
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"},
{file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"},
{file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"},
{file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"},
{file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"},
{file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"},
{file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"},
{file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"},
{file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"},
{file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"},
{file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"},
{file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"},
{file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"},
]
[[package]]
name = "requests"
version = "2.28.1"
description = "Python HTTP for Humans."
category = "dev"
optional = false
python-versions = ">=3.7, <4"
files = [
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<3"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "setuptools"
version = "65.6.3"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"},
{file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"},
]
[package.extras]
docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "sniffio"
version = "1.3.0"
description = "Sniff out which async library your code is running under"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
{file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
]
[[package]]
name = "sqlalchemy"
version = "1.4.45"
description = "Database Abstraction Library"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
files = [
{file = "SQLAlchemy-1.4.45-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f1d3fb02a4d0b07d1351a4a52f159e5e7b3045c903468b7e9349ebf0020ffdb9"},
{file = "SQLAlchemy-1.4.45-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9b7025d46aba946272f6b6b357a22f3787473ef27451f342df1a2a6de23743e3"},
{file = "SQLAlchemy-1.4.45-cp27-cp27m-win32.whl", hash = "sha256:26b8424b32eeefa4faad21decd7bdd4aade58640b39407bf43e7d0a7c1bc0453"},
{file = "SQLAlchemy-1.4.45-cp27-cp27m-win_amd64.whl", hash = "sha256:13578d1cda69bc5e76c59fec9180d6db7ceb71c1360a4d7861c37d87ea6ca0b1"},
{file = "SQLAlchemy-1.4.45-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6cd53b4c756a6f9c6518a3dc9c05a38840f9ae442c91fe1abde50d73651b6922"},
{file = "SQLAlchemy-1.4.45-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:ca152ffc7f0aa069c95fba46165030267ec5e4bb0107aba45e5e9e86fe4d9363"},
{file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06055476d38ed7915eeed22b78580556d446d175c3574a01b9eb04d91f3a8b2e"},
{file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:081e2a2d75466353c738ca2ee71c0cfb08229b4f9909b5fa085f75c48d021471"},
{file = "SQLAlchemy-1.4.45-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96821d806c0c90c68ce3f2ce6dd529c10e5d7587961f31dd5c30e3bfddc4545d"},
{file = "SQLAlchemy-1.4.45-cp310-cp310-win32.whl", hash = "sha256:c8051bff4ce48cbc98f11e95ac46bfd1e36272401070c010248a3230d099663f"},
{file = "SQLAlchemy-1.4.45-cp310-cp310-win_amd64.whl", hash = "sha256:16ad798fc121cad5ea019eb2297127b08c54e1aa95fe17b3fea9fdbc5c34fe62"},
{file = "SQLAlchemy-1.4.45-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099efef0de9fbda4c2d7cb129e4e7f812007901942259d4e6c6e19bd69de1088"},
{file = "SQLAlchemy-1.4.45-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a29d02c9e6f6b105580c5ed7afb722b97bc2e2fdb85e1d45d7ddd8440cfbca"},
{file = "SQLAlchemy-1.4.45-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc10423b59d6d032d6dff0bb42aa06dc6a8824eb6029d70c7d1b6981a2e7f4d8"},
{file = "SQLAlchemy-1.4.45-cp311-cp311-win32.whl", hash = "sha256:1a92685db3b0682776a5abcb5f9e9addb3d7d9a6d841a452a17ec2d8d457bea7"},
{file = "SQLAlchemy-1.4.45-cp311-cp311-win_amd64.whl", hash = "sha256:db3ccbce4a861bf4338b254f95916fc68dd8b7aa50eea838ecdaf3a52810e9c0"},
{file = "SQLAlchemy-1.4.45-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:a62ae2ea3b940ce9c9cbd675489c2047921ce0a79f971d3082978be91bd58117"},
{file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a87f8595390764db333a1705591d0934973d132af607f4fa8b792b366eacbb3c"},
{file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9a21c1fb71c69c8ec65430160cd3eee44bbcea15b5a4e556f29d03f246f425ec"},
{file = "SQLAlchemy-1.4.45-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7944b04e6fcf8d733964dd9ee36b6a587251a1a4049af3a9b846f6e64eb349a"},
{file = "SQLAlchemy-1.4.45-cp36-cp36m-win32.whl", hash = "sha256:a3bcd5e2049ceb97e8c273e6a84ff4abcfa1dc47b6d8bbd36e07cce7176610d3"},
{file = "SQLAlchemy-1.4.45-cp36-cp36m-win_amd64.whl", hash = "sha256:5953e225be47d80410ae519f865b5c341f541d8e383fb6d11f67fb71a45bf890"},
{file = "SQLAlchemy-1.4.45-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:6a91b7883cb7855a27bc0637166eed622fdf1bb94a4d1630165e5dd88c7e64d3"},
{file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d458fd0566bc9e10b8be857f089e96b5ca1b1ef033226f24512f9ffdf485a8c0"},
{file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f4ad3b081c0dbb738886f8d425a5d983328670ee83b38192687d78fc82bd1e"},
{file = "SQLAlchemy-1.4.45-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd95a3e6ab46da2c5b0703e797a772f3fab44d085b3919a4f27339aa3b1f51d3"},
{file = "SQLAlchemy-1.4.45-cp37-cp37m-win32.whl", hash = "sha256:715f5859daa3bee6ecbad64501637fa4640ca6734e8cda6135e3898d5f8ccadd"},
{file = "SQLAlchemy-1.4.45-cp37-cp37m-win_amd64.whl", hash = "sha256:2d1539fbc82d2206380a86d6d7d0453764fdca5d042d78161bbfb8dd047c80ec"},
{file = "SQLAlchemy-1.4.45-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:01aa76f324c9bbc0dcb2bc3d9e2a9d7ede4808afa1c38d40d5e2007e3163b206"},
{file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:416fe7d228937bd37990b5a429fd00ad0e49eabcea3455af7beed7955f192edd"},
{file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7e32ce2584564d9e068bb7e0ccd1810cbb0a824c0687f8016fe67e97c345a637"},
{file = "SQLAlchemy-1.4.45-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:561605cfc26273825ed2fb8484428faf36e853c13e4c90c61c58988aeccb34ed"},
{file = "SQLAlchemy-1.4.45-cp38-cp38-win32.whl", hash = "sha256:55ddb5585129c5d964a537c9e32a8a68a8c6293b747f3fa164e1c034e1657a98"},
{file = "SQLAlchemy-1.4.45-cp38-cp38-win_amd64.whl", hash = "sha256:445914dcadc0b623bd9851260ee54915ecf4e3041a62d57709b18a0eed19f33b"},
{file = "SQLAlchemy-1.4.45-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:2db887dbf05bcc3151de1c4b506b14764c6240a42e844b4269132a7584de1e5f"},
{file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52b90c9487e4449ad954624d01dea34c90cd8c104bce46b322c83654f37a23c5"},
{file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f61e54b8c2b389de1a8ad52394729c478c67712dbdcdadb52c2575e41dae94a5"},
{file = "SQLAlchemy-1.4.45-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e91a5e45a2ea083fe344b3503405978dff14d60ef3aa836432c9ca8cd47806b6"},
{file = "SQLAlchemy-1.4.45-cp39-cp39-win32.whl", hash = "sha256:0e068b8414d60dd35d43c693555fc3d2e1d822cef07960bb8ca3f1ee6c4ff762"},
{file = "SQLAlchemy-1.4.45-cp39-cp39-win_amd64.whl", hash = "sha256:2d6f178ff2923730da271c8aa317f70cf0df11a4d1812f1d7a704b1cf29c5fe3"},
{file = "SQLAlchemy-1.4.45.tar.gz", hash = "sha256:fd69850860093a3f69fefe0ab56d041edfdfe18510b53d9a2eaecba2f15fa795"},
]
[package.dependencies]
greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"}
[package.extras]
aiomysql = ["aiomysql", "greenlet (!=0.4.17)"]
aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"]
asyncio = ["greenlet (!=0.4.17)"]
asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"]
mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"]
mssql = ["pyodbc"]
mssql-pymssql = ["pymssql"]
mssql-pyodbc = ["pyodbc"]
mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"]
mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"]
mysql-connector = ["mysql-connector-python"]
oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"]
postgresql = ["psycopg2 (>=2.7)"]
postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"]
postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"]
postgresql-psycopg2binary = ["psycopg2-binary"]
postgresql-psycopg2cffi = ["psycopg2cffi"]
pymysql = ["pymysql", "pymysql (<1)"]
sqlcipher = ["sqlcipher3-binary"]
[[package]]
name = "starlette"
version = "0.22.0"
description = "The little ASGI library that shines."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "starlette-0.22.0-py3-none-any.whl", hash = "sha256:b5eda991ad5f0ee5d8ce4c4540202a573bb6691ecd0c712262d0bc85cf8f2c50"},
{file = "starlette-0.22.0.tar.gz", hash = "sha256:b092cbc365bea34dd6840b42861bdabb2f507f8671e642e8272d2442e08ea4ff"},
]
[package.dependencies]
anyio = ">=3.4.0,<5"
[package.extras]
full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"]
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
category = "dev"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "typing-extensions"
version = "4.4.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
]
[[package]]
name = "urllib3"
version = "1.26.13"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
files = [
{file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"},
{file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "uvicorn"
version = "0.20.0"
description = "The lightning-fast ASGI server."
category = "main"
optional = false
python-versions = ">=3.7"
files = [
{file = "uvicorn-0.20.0-py3-none-any.whl", hash = "sha256:c3ed1598a5668208723f2bb49336f4509424ad198d6ab2615b7783db58d919fd"},
{file = "uvicorn-0.20.0.tar.gz", hash = "sha256:a4e12017b940247f836bc90b72e725d7dfd0c8ed1c51eb365f5ba30d9f5127d8"},
]
[package.dependencies]
click = ">=7.0"
h11 = ">=0.8"
[package.extras]
standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"]
[[package]]
name = "virtualenv"
version = "20.17.1"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
{file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"},
{file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"},
]
[package.dependencies]
distlib = ">=0.3.6,<1"
filelock = ">=3.4.1,<4"
platformdirs = ">=2.4,<3"
[package.extras]
docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "dc9cca0226b975a91650ffdc42c0572f04707fc0de80fbcfee9809ebaf34a0bf"


@@ -1,24 +0,0 @@
[tool.poetry]
name = "backend"
version = "0.1.0"
description = ""
authors = ["Bertrand Benjamin <benjamin.bertrand@opytex.org>"]
readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
sqlalchemy = "^1.4.45"
fastapi = "^0.88.0"
uvicorn = "^0.20.0"
[tool.poetry.group.dev.dependencies]
pre-commit = "^2.20.0"
pytest = "^7.2.0"
faker = "^15.3.4"
requests = "^2.28.1"
pytest-cov = "^4.0.0"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"


@@ -0,0 +1,20 @@
#!/usr/bin/env python
# encoding: utf-8
import dash
import flask
from .layout.layout import layout
server = flask.Flask(__name__)
app = dash.Dash(
__name__,
server=server,
suppress_callback_exceptions=True,
meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}],
)
app.layout = layout
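# app.server is the Flask instance passed above; the name is re-bound here for WSGI deployment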
server = app.server


@@ -0,0 +1,23 @@
#!/usr/bin/env python
# encoding: utf-8
def highlight_scores(highlight_columns, score_color):
""" Cells style in a datatable for scores
:param highlight_columns: columns to highlight
:param value_color: dictionnary {"score": "color"}
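    :example: (with a hypothetical student column "Alice")
    >>> highlight_scores(["Alice"], {1: "#FF712B"})
    [{'if': {'filter_query': '{Alice} = 1', 'column_id': 'Alice'}, 'backgroundColor': '#FF712B', 'color': 'white'}]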
"""
    styles = []
    for value, color in score_color.items():
        # falsy values ("" and 0) are skipped and keep the default cell style
        if value:
            styles += [
                {
                    "if": {
                        "filter_query": "{{{}}} = {}".format(col, value),
                        "column_id": col,
                    },
                    "backgroundColor": color,
                    "color": "white",
                }
                for col in highlight_columns
            ]
    return styles


@@ -0,0 +1,8 @@
#!/usr/bin/env python
# encoding: utf-8
from .app import app, server
from .routes import render_page_content
if __name__ == "__main__":
app.run_server(debug=True)


@@ -0,0 +1,9 @@
#!/usr/bin/env python
# encoding: utf-8
import dash_html_components as html
import dash_core_components as dcc
content = html.Div(id="page-content")
layout = html.Div([dcc.Location(id="url"), content])


@@ -0,0 +1,112 @@
#!/usr/bin/env python
# encoding: utf-8
import dash_html_components as html
import dash_core_components as dcc
import dash_table
from .models import get_tribes, get_exams
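# the star import registers this page's callbacks with the Dash app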
from .callbacks import *
layout = html.Div(
children=[
html.Header(
children=[
html.H1("Analyse des notes"),
html.P("Dernière sauvegarde", id="lastsave"),
],
),
html.Main(
children=[
html.Section(
children=[
html.Div(
children=[
"Classe: ",
dcc.Dropdown(
id="tribe",
options=[
{"label": t["name"], "value": t["name"]}
for t in get_tribes().values()
],
value=next(iter(get_tribes().values()))["name"],
),
],
),
html.Div(
children=[
"Evaluation: ",
dcc.Dropdown(id="exam_select"),
],
),
],
id="selects",
),
html.Section(
children=[
html.Div(
children=[
dash_table.DataTable(
id="final_score_table",
columns=[
{"name": "Étudiant", "id": "student_name"},
{"name": "Note", "id": "mark"},
{"name": "Barème", "id": "score_rate"},
],
)
],
id="final_score_table_container",
),
html.Div(
children=[
dash_table.DataTable(
id="score_statistics_table",
columns=[],
)
],
id="score_statistics_table_container",
),
html.Div(
children=[
dcc.Graph(
id="fig_exam_histo",
config={"displayModeBar": False},
)
],
id="fig_exam_histo_container",
),
html.Div(
children=[
dcc.Graph(
id="fig_questions_bar",
config={"displayModeBar": False},
)
],
id="fig_questions_bar_container",
),
],
id="analysis",
),
html.Section(
children=[
dash_table.DataTable(
id="scores_table",
columns=[],
style_data_conditional=[],
fixed_columns={},
editable=True,
style_table={"minWidth": "100%"},
style_cell={
"minWidth": "100px",
"width": "100px",
"maxWidth": "100px",
"overflow": "hidden",
"textOverflow": "ellipsis",
},
)
],
id="edit",
),
],
),
dcc.Store(id="scores"),
],
)


@@ -0,0 +1,216 @@
#!/usr/bin/env python
# encoding: utf-8
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import plotly.graph_objects as go
import dash_table
import json
import pandas as pd
import numpy as np
from recopytex.dashboard.app import app
from recopytex.dashboard.common.formating import highlight_scores
from .models import (
get_tribes,
get_exams,
get_unstack_scores,
get_students_from_exam,
get_score_colors,
get_level_color_bar,
score_to_final_mark,
stack_scores,
pivot_score_on,
)
@app.callback(
[
Output("exam_select", "options"),
Output("exam_select", "value"),
],
[Input("tribe", "value")],
)
def update_exams_choices(tribe):
if not tribe:
raise PreventUpdate
exams = get_exams(tribe)
exams.reset_index(inplace=True)
if not exams.empty:
return [
{"label": e["name"], "value": e.to_json()} for i, e in exams.iterrows()
], exams.loc[0].to_json()
return [], None
@app.callback(
[
Output("scores_table", "columns"),
Output("scores_table", "data"),
Output("scores_table", "style_data_conditional"),
Output("scores_table", "fixed_columns"),
],
[
Input("exam_select", "value"),
],
)
def update_scores_store(exam):
if not exam:
return [[], [], [], {}]
exam = pd.DataFrame.from_dict([json.loads(exam)])
scores = get_unstack_scores(exam)
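    # metadata columns come first; they are frozen on the left through fixed_columns below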
fixed_columns = [
"exercise",
"question",
"competence",
"theme",
"comment",
"score_rate",
"is_leveled",
]
students = list(get_students_from_exam(exam))
columns = fixed_columns + students
score_color = get_score_colors()
return [
[{"id": c, "name": c} for c in columns],
scores.to_dict("records"),
highlight_scores(students, score_color),
{"headers": True, "data": len(fixed_columns)},
]
@app.callback(
[
Output("final_score_table", "data"),
],
[
Input("scores_table", "data"),
],
)
def update_final_score_table(scores):
scores_df = pd.DataFrame.from_records(scores)
stacked_scores = stack_scores(scores_df)
return score_to_final_mark(stacked_scores)
@app.callback(
[
Output("score_statistics_table", "columns"),
Output("score_statistics_table", "data"),
],
[
Input("final_score_table", "data"),
],
)
def update_statistics_table(final_score):
    df = pd.DataFrame.from_records(final_score)
statistics = df["mark"].describe().to_frame().T
return [
[{"id": c, "name": c} for c in statistics.columns],
statistics.to_dict("records"),
]
@app.callback(
[
Output("fig_exam_histo", "figure"),
],
[
Input("final_score_table", "data"),
],
)
def update_exam_histo(final_scores):
    scores = pd.DataFrame.from_records(final_scores)
if scores.empty:
return [go.Figure(data=[go.Scatter(x=[], y=[])])]
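    # bin marks into 0.5-wide intervals, from -0.5 up to the maximal total score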
ranges = np.linspace(
-0.5,
scores["score_rate"].max(),
int(scores["score_rate"].max() * 2 + 2),
)
bins = pd.cut(scores["mark"], ranges)
scores["Bin"] = bins
grouped = (
scores.reset_index()
.groupby("Bin")
.agg({"score_rate": "count", "student_name": lambda x: "\n".join(x)})
)
grouped.index = grouped.index.map(lambda i: i.right)
fig = go.Figure()
fig.add_bar(
x=grouped.index,
y=grouped["score_rate"],
text=grouped["student_name"],
textposition="auto",
hovertemplate="",
marker_color="#4E89DE",
)
fig.update_layout(
height=300,
margin=dict(l=5, r=5, b=5, t=5),
)
return [fig]
@app.callback(
[
Output("fig_questions_bar", "figure"),
],
[
Input("scores_table", "data"),
],
)
def update_questions_bar(scores_records):
    scores = pd.DataFrame.from_records(scores_records)
scores = stack_scores(scores)
if scores.empty:
return [go.Figure(data=[go.Scatter(x=[], y=[])])]
pt = pivot_score_on(scores, ["exercise", "question", "comment"], "score")
    # insert an empty row per exercise to visually separate exercises in the bar chart
    for exercise in set(pt.index.get_level_values(0)):
        pt.loc[(str(exercise), "", ""), :] = ""
pt.sort_index(inplace=True)
# Bar label
index = (
pt.index.get_level_values(0).map(str)
+ ":"
+ pt.index.get_level_values(1).map(str)
+ " "
+ pt.index.get_level_values(2).map(str)
)
fig = go.Figure()
bars = get_level_color_bar()
for b in bars:
try:
fig.add_bar(
x=index, y=pt[b["score"]], name=b["name"], marker_color=b["color"]
)
except KeyError:
pass
fig.update_layout(barmode="relative")
fig.update_layout(
height=500,
margin=dict(l=5, r=5, b=5, t=5),
legend=dict(
orientation="h",
yanchor="bottom",
y=1.02,
xanchor="right",
x=1
)
)
return [fig]


@@ -0,0 +1,128 @@
#!/usr/bin/env python
# encoding: utf-8
from recopytex.database.filesystem.loader import CSVLoader
from recopytex.datalib.dataframe import column_values_to_column
import recopytex.datalib.on_score_column as on_column
import pandas as pd
LOADER = CSVLoader("./test_confia.ml")
SCORES_CONFIG = LOADER.get_config()["scores"]
def unstack_scores(scores):
"""Put student_name values to columns
:param scores: Score dataframe with one line per score
:returns: Scrore dataframe with student_name in columns
"""
kept_columns = [col for col in LOADER.score_columns if col != "score"]
return column_values_to_column("student_name", "score", kept_columns, scores)
def stack_scores(scores):
"""Student columns are melt to rows with student_name column
:param scores: Score dataframe with student_name in columns
:returns: Scrore dataframe with one line per score
"""
kept_columns = [
c for c in LOADER.score_columns if c not in ["score", "student_name"]
]
student_names = [c for c in scores.columns if c not in kept_columns]
return pd.melt(
scores,
id_vars=kept_columns,
value_vars=student_names,
var_name="student_name",
value_name="score",
)
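# Shape note: unstack_scores turns one row per (question, student) into one row
# per question with a column per student; stack_scores melts it back with pd.melt.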
def get_tribes():
return LOADER.get_tribes()
def get_exams(tribe):
return LOADER.get_exams([tribe])
def get_record_scores(exam):
return LOADER.get_exam_scores(exam)
def get_unstack_scores(exam):
flat_scores = LOADER.get_exam_scores(exam)
return unstack_scores(flat_scores)
def get_students_from_exam(exam):
flat_scores = LOADER.get_exam_scores(exam)
return flat_scores["student_name"].unique()
def get_score_colors():
score_color = {}
for key, score in SCORES_CONFIG.items():
score_color[score["value"]] = score["color"]
return score_color
def get_level_color_bar():
return [
{"score": str(s["value"]), "name": s["comment"], "color": s["color"]}
for s in SCORES_CONFIG.values()
]
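# Per-row helpers: they bind the loaded score config into the generic
# on_score_column functions and are meant for DataFrame.apply(..., axis=1).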
is_none_score = lambda x: on_column.is_none_score(x, SCORES_CONFIG)
format_score = lambda x: on_column.format_score(x, SCORES_CONFIG)
score_to_numeric_score = lambda x: on_column.score_to_numeric_score(x, SCORES_CONFIG)
score_to_mark = lambda x: on_column.score_to_mark(
x, max([v["value"] for v in SCORES_CONFIG.values() if isinstance(v["value"], int)])
)
def filter_clean_score(scores):
filtered_scores = scores[~scores.apply(is_none_score, axis=1)]
filtered_scores = filtered_scores.assign(
score=filtered_scores.apply(format_score, axis=1)
)
return filtered_scores
def score_to_final_mark(scores):
""" Compute marks then reduce to final mark per student """
filtered_scores = filter_clean_score(scores)
filtered_scores = filtered_scores.assign(
score=filtered_scores.apply(score_to_numeric_score, axis=1)
)
filtered_scores = filtered_scores.assign(
mark=filtered_scores.apply(score_to_mark, axis=1)
)
final_score = filtered_scores.groupby(["student_name"])[
["mark", "score_rate"]
].sum()
return [final_score.reset_index().to_dict("records")]
def pivot_score_on(scores, index, columns, aggfunc="size"):
"""Pivot scores on index, columns with aggfunc
    It assumes that scores are levels
"""
filtered_scores = filter_clean_score(scores)
filtered_scores["score"] = filtered_scores["score"].astype(str)
pt = pd.pivot_table(
filtered_scores,
index=index,
columns=columns,
aggfunc=aggfunc,
fill_value=0,
)
return pt
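# For instance, pivot_score_on(scores, ["exercise", "question", "comment"], "score")
# yields one row per question and one column per score level, counting
# occurrences (aggfunc="size", missing levels filled with 0).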


@@ -0,0 +1,50 @@
#!/usr/bin/env python
# encoding: utf-8
import dash_html_components as html
from recopytex.database.filesystem.loader import CSVLoader
from .models import get_tribes, get_exams, get_students
loader = CSVLoader("./test_config.yml")
def listing(elements, formatting=lambda x: x):
    return html.Ul(
        children=[html.Li(children=formatting(element)) for element in elements]
)
def format_tribe(tribe):
children = [html.H3(tribe["name"])]
exams = loader.get_exams([tribe["name"]])
if exams.empty:
children.append(html.P("Pas d'évaluation"))
else:
exams_html = listing([exam for id, exam in exams.iterrows()], format_exam)
children.append(exams_html)
return children
def format_exam(exam):
children = [html.P(exam["name"])]
return children
layout = html.Div(
children=[
html.H1("Recopytex"),
html.H2("Tribes"),
html.Div(
children=[listing(loader.get_tribes().values(), format_tribe)],
id="tribes",
),
html.H2("Config"),
html.Div(
children=[
html.P(str(loader.get_config())),
],
id="config",
),
]
)


@@ -0,0 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
from dash.dependencies import Input, Output
from recopytex.dashboard.app import app


@@ -0,0 +1,14 @@
#!/usr/bin/env python
# encoding: utf-8
def get_tribes(loader):
return loader.get_tribes()
def get_exams(loader, tribe):
return loader.get_exams([tribe])
def get_students(loader, tribe):
return loader.get_students([tribe])


@@ -0,0 +1,27 @@
#!/usr/bin/env python
# encoding: utf-8
from dash.dependencies import Input, Output
from .app import app
from .pages.home import app as home
from .pages.exams_scores import app as exams_scores
import dash_html_components as html
@app.callback(Output("page-content", "children"), [Input("url", "pathname")])
def render_page_content(pathname):
if pathname == "/":
return home.layout
elif pathname == "/exams/scores/":
return exams_scores.layout
# elif pathname == iris_page_location:
# return iris.layout
# # If the user tries to reach a different page, return a 404 message
return html.Div(
[
html.H1("404: Not found", className="text-danger"),
html.Hr(),
html.P(f"The pathname {pathname} was not recognised..."),
]
)


@@ -0,0 +1,88 @@
#!/usr/bin/env python
# encoding: utf-8
from abc import ABC, abstractmethod
import yaml
"""
Adapter to pull data from the filesystem
# Loader
# Writer
"""
class Loader(ABC):
"""Load data from source"""
CONFIG = {}
def __init__(self, configfile=""):
"""Init loader
        :param configfile: yaml file with information about the data source
"""
        self._config = dict(self.CONFIG)  # copy so updates don't mutate the shared class-level CONFIG
if configfile.endswith(".yml"):
with open(configfile, "r") as config:
self._config.update(yaml.load(config, Loader=yaml.FullLoader))
def get_config(self):
""" Get config"""
return self._config
@abstractmethod
def get_tribes(self):
""" Get tribes list """
pass
@abstractmethod
def get_exams(self, tribes=[]):
"""Get exams list
:param tribes: get only exams for those tribes
"""
pass
@abstractmethod
def get_students(self, tribes=[]):
"""Get student list
        :param tribes: get only students of those tribes
"""
pass
@abstractmethod
def get_exam_questions(self, exams=[]):
"""Get questions for the exam
:param exams: questions for those exams only
"""
pass
@abstractmethod
def get_questions_scores(self, questions=[]):
"""Get scores of those questions
:param questions: score for those questions
"""
pass
# @abstractmethod
# def get_student_scores(self, student):
# """Get scores of the student
# :param student:
# """
# pass
class Writer(ABC):
""" Write datas to the source """
def __init__(self):
pass


@@ -0,0 +1,15 @@
#!/usr/bin/env python
# encoding: utf-8
"""
Store data using the filesystem for organisation and csv files for scores
## Organisation
- tribe1.csv # list of students for the tribe
- tribe1/
- exam1.csv # questions and scores for exam1
- exam1.yml # Extra information about exam1
- exam2.csv # questions and scores for exam2
"""


@@ -0,0 +1,75 @@
---
source: ./ # basepath where to start
competences: # Competences
Chercher:
name: Chercher
abrv: Cher
Représenter:
name: Représenter
abrv: Rep
Modéliser:
name: Modéliser
abrv: Mod
Raisonner:
name: Raisonner
abrv: Rai
Calculer:
name: Calculer
abrv: Cal
Communiquer:
name: Communiquer
abrv: Com
scores: # score levels used to grade each question
BAD: # Everything is bad
value: 0
numeric_value: 0
color: "#E7472B"
comment: Faux
FEW: # Few good things
value: 1
numeric_value: 1
color: "#FF712B"
comment: Peu juste
NEARLY: # Nearly good but things are missing
value: 2
numeric_value: 2
color: "#F2EC4C"
comment: Presque juste
GOOD: # Everything is good
value: 3
numeric_value: 3
color: "#68D42F"
comment: Juste
NOTFILLED: # The item is not scored yet
value: ""
numeric_value: None
color: white
comment: En attente
  NOANSWER: # Student gives no answer (counts as 0)
value: "."
numeric_value: 0
color: black
comment: Pas de réponse
  ABS: # Student was absent (this score won't impact the final mark)
value: a
numeric_value: None
color: lightgray
comment: Non noté
csv_fields: # dataframe_field: csv_field
term: Trimestre
exam: Nom
date: Date
exercise: Exercice
question: Question
competence: Competence
theme: Domaine
comment: Commentaire
score_rate: Bareme
is_leveled: Est_nivele
id_templates:
exam: "{name}_{tribe}"
question: "{exam_id}_{exercise}_{question}_{comment}"


@@ -0,0 +1,52 @@
#!/usr/bin/env python
# encoding: utf-8
import pandas as pd
from pathlib import Path
from unidecode import unidecode
__all__ = ["list_csvs", "extract_fields"]
def list_csvs(path):
"""list csv files in path
:example:
>>> list_csvs("./example/Tribe1/")
[PosixPath('example/Tribe1/210112_DS.csv'), PosixPath('example/Tribe1/210122_DS6.csv')]
>>> list_csvs("./example/Tribe1")
[PosixPath('example/Tribe1/210112_DS.csv'), PosixPath('example/Tribe1/210122_DS6.csv')]
"""
return list(Path(path).glob("*.csv"))
def extract_fields(csv_filename, fields=[], remove_duplicates=True):
"""Extract fields in csv
:param csv_filename: csv filename (with header)
:param fields: list of fields to extract (all fields if empty list - default)
:param remove_duplicates: keep uniques rows (default True)
:example:
>>> extract_fields("./example/Tribe1/210122_DS6.csv", ["Trimestre", "Nom", "Date"])
Trimestre Nom Date
0 1 DS6 22/01/2021
"""
df = pd.read_csv(csv_filename)
if fields:
df = df[fields]
if remove_duplicates:
return df.drop_duplicates()
return df
def build_id(template, element):
"""Build an id from template to the element
:example:
>>> element = {"name": "pléà", "place": "here", "foo":"bar"}
>>> build_id("{name} {place}", element)
'plea_here'
"""
return unidecode(template.format(**element)).replace(" ", "_")


@@ -0,0 +1,298 @@
#!/usr/bin/env python
# encoding: utf-8
import yaml
import os
import uuid
from pathlib import Path
import pandas as pd
from .. import Loader
from .lib import list_csvs, extract_fields, build_id
DEFAULT_CONFIG_FILE = os.path.join(os.path.dirname(__file__), "default_config.yml")
with open(DEFAULT_CONFIG_FILE, "r") as config:
DEFAULT_CONFIG = yaml.load(config, Loader=yaml.FullLoader)
def maybe_dataframe(datas):
try:
return [e[1] for e in datas.iterrows()]
except AttributeError:
return datas
class CSVLoader(Loader):
"""Loader when scores and metadatas are stored in csv files
:config:
:example:
>>> loader = CSVLoader()
>>> loader.get_config()
{'source': './', 'competences': {'Chercher': {'name': 'Chercher', 'abrv': 'Cher'}, 'Représenter': {'name': 'Représenter', 'abrv': 'Rep'}, 'Modéliser': {'name': 'Modéliser', 'abrv': 'Mod'}, 'Raisonner': {'name': 'Raisonner', 'abrv': 'Rai'}, 'Calculer': {'name': 'Calculer', 'abrv': 'Cal'}, 'Communiquer': {'name': 'Communiquer', 'abrv': 'Com'}}, 'scores': {'BAD': {'value': 0, 'numeric_value': 0, 'color': '#E7472B', 'comment': 'Faux'}, 'FEW': {'value': 1, 'numeric_value': 1, 'color': '#FF712B', 'comment': 'Peu juste'}, 'NEARLY': {'value': 2, 'numeric_value': 2, 'color': '#F2EC4C', 'comment': 'Presque juste'}, 'GOOD': {'value': 3, 'numeric_value': 3, 'color': '#68D42F', 'comment': 'Juste'}, 'NOTFILLED': {'value': '', 'numeric_value': 'None', 'color': 'white', 'comment': 'En attente'}, 'NOANSWER': {'value': '.', 'numeric_value': 0, 'color': 'black', 'comment': 'Pas de réponse'}, 'ABS': {'value': 'a', 'numeric_value': 'None', 'color': 'lightgray', 'comment': 'Non noté'}}, 'csv_fields': {'term': 'Trimestre', 'exam': 'Nom', 'date': 'Date', 'exercise': 'Exercice', 'question': 'Question', 'competence': 'Competence', 'theme': 'Domaine', 'comment': 'Commentaire', 'score_rate': 'Bareme', 'is_leveled': 'Est_nivele'}, 'id_templates': {'exam': '{name}_{tribe}', 'question': '{exam_id}_{exercise}_{question}_{comment}'}}
>>> loader = CSVLoader("./test_config.yml")
>>> loader.get_config()
{'source': './example', 'competences': {'Chercher': {'name': 'Chercher', 'abrv': 'Cher'}, 'Représenter': {'name': 'Représenter', 'abrv': 'Rep'}, 'Modéliser': {'name': 'Modéliser', 'abrv': 'Mod'}, 'Raisonner': {'name': 'Raisonner', 'abrv': 'Rai'}, 'Calculer': {'name': 'Calculer', 'abrv': 'Cal'}, 'Communiquer': {'name': 'Communiquer', 'abrv': 'Com'}}, 'scores': {'BAD': {'value': 0, 'numeric_value': 0, 'color': '#E7472B', 'comment': 'Faux'}, 'FEW': {'value': 1, 'numeric_value': 1, 'color': '#FF712B', 'comment': 'Peu juste'}, 'NEARLY': {'value': 2, 'numeric_value': 2, 'color': '#F2EC4C', 'comment': 'Presque juste'}, 'GOOD': {'value': 3, 'numeric_value': 3, 'color': '#68D42F', 'comment': 'Juste'}, 'NOTFILLED': {'value': '', 'numeric_value': 'None', 'color': 'white', 'comment': 'En attente'}, 'NOANSWER': {'value': '.', 'numeric_value': 0, 'color': 'black', 'comment': 'Pas de réponse'}, 'ABS': {'value': 'a', 'numeric_value': 'None', 'color': 'lightgray', 'comment': 'Non noté'}}, 'csv_fields': {'term': 'Trimestre', 'exam': 'Nom', 'date': 'Date', 'exercise': 'Exercice', 'question': 'Question', 'competence': 'Competence', 'theme': 'Domaine', 'comment': 'Commentaire', 'score_rate': 'Bareme', 'is_leveled': 'Est_nivele'}, 'id_templates': {'exam': '{name}_{tribe}', 'question': '{exam_id}_{exercise}_{question}_{comment}'}, 'output': './output', 'templates': 'templates/', 'tribes': {'Tribe1': {'name': 'Tribe1', 'type': 'Type1', 'students': 'tribe1.csv'}, 'Tribe2': {'name': 'Tribe2', 'students': 'tribe2.csv'}}}
"""
CONFIG = DEFAULT_CONFIG
def get_config(self):
""" Get config """
return self._config
@property
def exam_columns(self):
return pd.Index(["name", "date", "term", "origin", "tribe", "id"])
@property
def question_columns(self):
return pd.Index(
[
"exercise",
"question",
"competence",
"theme",
"comment",
"score_rate",
"is_leveled",
"origin",
"exam_id",
"id",
]
)
@property
def score_columns(self):
return pd.Index(
[
"term",
"exam",
"date",
"exercise",
"question",
"competence",
"theme",
"comment",
"score_rate",
"is_leveled",
"origin",
"exam_id",
"question_id",
"student_name",
"score",
]
)
def rename_columns(self, dataframe):
"""Rename dataframe column to match with `csv_fields`
:param dataframe: the dataframe
:example:
>>> loader = CSVLoader()
>>>
"""
return dataframe.rename(
columns={v: k for k, v in self._config["csv_fields"].items()}
)
def reverse_csv_field(self, keys):
""" Reverse csv field from keys """
return [self._config["csv_fields"][k] for k in keys]
def get_tribes(self, only_names=False):
"""Get tribes list
:example:
>>> loader = CSVLoader("./test_config.yml")
>>> loader.get_tribes()
{'Tribe1': {'name': 'Tribe1', 'type': 'Type1', 'students': 'tribe1.csv'}, 'Tribe2': {'name': 'Tribe2', 'students': 'tribe2.csv'}}
>>> loader.get_tribes(only_names=True)
['Tribe1', 'Tribe2']
"""
if only_names:
return list(self._config["tribes"].keys())
return self._config["tribes"]
def get_exams(self, tribes=[]):
"""Get exams list
:param tribes: get only exams for those tribes
:return: dataframe of exams
:example:
>>> loader = CSVLoader("./test_config.yml")
>>> exams = loader.get_exams(["Tribe1"])
>>> all(exams.columns == loader.exam_columns)
True
>>> exams
name date term origin tribe id
0 DS 12/01/2021 1 example/Tribe1/210112_DS.csv Tribe1 DS_Tribe1
0 DS6 22/01/2021 1 example/Tribe1/210122_DS6.csv Tribe1 DS6_Tribe1
"""
exams = []
for tribe in tribes:
tribe_path = Path(self._config["source"]) / tribe
csvs = list_csvs(tribe_path)
for csv in csvs:
fields = self.reverse_csv_field(["exam", "date", "term"])
exam = extract_fields(csv, fields)
exam = self.rename_columns(exam)
exam = exam.rename(columns={"exam": "name"})
exam["origin"] = str(csv)
exam["tribe"] = tribe
exam["id"] = build_id(
self._config["id_templates"]["exam"], exam.iloc[0]
)
exams.append(exam)
if exams:
return pd.concat(exams)
return pd.DataFrame(columns=["name", "date", "term", "origin", "tribe", "id"])
def get_exam_questions(self, exams=[]):
"""Get questions for exams stored in score_files
:param exams: list or dataframe of exams metadatas (need origin field to find the csv)
:example:
>>> loader = CSVLoader("./test_config.yml")
>>> exams = loader.get_exams(["Tribe1"])
>>> all(loader.get_exam_questions([exams.iloc[0]]).columns == loader.question_columns)
True
>>> questions = loader.get_exam_questions(exams)
>>> questions.iloc[0]
exercise Exercice 1
question 1
competence Calculer
theme Plop
comment Coucou
score_rate 1.0
is_leveled 1.0
origin example/Tribe1/210112_DS.csv
exam_id DS_Tribe1
id DS_Tribe1_Exercice_1_1_Coucou
Name: 0, dtype: object
"""
_exams = maybe_dataframe(exams)
questions = []
for exam in _exams:
fields = self.reverse_csv_field(
[
"exercise",
"question",
"competence",
"theme",
"comment",
"score_rate",
"is_leveled",
]
)
question = extract_fields(exam["origin"], fields)
question = self.rename_columns(question)
question["origin"] = exam["origin"]
question["exam_id"] = exam["id"]
question["id"] = build_id(
self._config["id_templates"]["question"], question.iloc[0]
)
questions.append(question)
return pd.concat(questions)
def get_questions_scores(self, questions=[]):
"""Get scores of those questions
        :param questions: list or dataframe of question metadata (the origin field is needed to find the csv)
:example:
>>> loader = CSVLoader("./test_config.yml")
>>> exams = loader.get_exams(["Tribe1"])
>>> questions = loader.get_exam_questions(exams)
>>> scores = loader.get_questions_scores(questions)
>>> all(scores.columns == loader.score_columns)
True
>>> scores["student_name"].unique()
array(['Star Tice', 'Umberto Dingate', 'Starlin Crangle',
'Humbert Bourcq', 'Gabriella Handyside', 'Stewart Eaves',
'Erick Going', 'Ase Praton', 'Rollins Planks', 'Dunstan Sarjant',
'Stacy Guiton', 'Ange Stanes', 'Amabelle Elleton',
'Darn Broomhall', 'Dyan Chatto', 'Keane Rennebach', 'Nari Paulton',
'Brandy Wase', 'Jaclyn Firidolfi', 'Violette Lockney'],
dtype=object)
"""
scores = []
group_questions = questions.groupby("origin")
for origin, questions_df in group_questions:
scores_df = pd.read_csv(origin)
scores_df = self.rename_columns(scores_df)
student_names = [
c
for c in scores_df.columns
if c not in self._config["csv_fields"].keys()
]
common_columns = [c for c in questions_df.columns if c in scores_df.columns]
scores_df = pd.merge(scores_df, questions_df, on=common_columns)
kept_columns = [c for c in scores_df if c not in student_names]
scores_df = pd.melt(
scores_df,
id_vars=kept_columns,
value_vars=student_names,
var_name="student_name",
value_name="score",
)
scores_df = scores_df.rename(columns={"id": "question_id"})
scores.append(scores_df)
return pd.concat(scores)
def get_exam_scores(self, exams=[]):
"""Get scores for all question of the exam
:param exams: list or dataframe of exams metadatas (need origin field to find the csv)
:example:
>>> loader = CSVLoader("./test_config.yml")
>>> exams = loader.get_exams(["Tribe1"])
>>> scores = loader.get_exam_scores(exams)
>>> scores.columns
Index(['term', 'exam', 'date', 'exercise', 'question', 'competence', 'theme',
'comment', 'score_rate', 'is_leveled', 'origin', 'exam_id',
'question_id', 'student_name', 'score'],
dtype='object')
"""
questions = self.get_exam_questions(exams)
return self.get_questions_scores(questions)
def get_students(self, tribes=[]):
"""Get student list
:param tribes: concerned tribes
:example:
>>> loader = CSVLoader("./test_config.yml")
>>> tribes = loader.get_tribes()
>>> students = loader.get_students([tribes["Tribe1"]])
>>> students.columns
Index(['Nom', 'email', 'origin', 'tribe'], dtype='object')
"""
students = []
for tribe in tribes:
students_csv = Path(self._config["source"]) / tribe["students"]
students_df = pd.read_csv(students_csv)
students_df["origin"] = students_csv
students_df["tribe"] = tribe["name"]
students.append(students_df)
return pd.concat(students)
    def get_student_scores(self, student=[]):
        """Get all scores for a student (not implemented yet)"""
        pass
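
The doctests above exercise each method in isolation; chained together they form the whole read path. A minimal sketch of that chain (the module path is an assumption, and it expects the example/ data referenced by test_config.yml):

from recopytex.database.filesystem.loader import CSVLoader  # module path assumed

loader = CSVLoader("./test_config.yml")
tribes = loader.get_tribes(only_names=True)
exams = loader.get_exams(tribes)
scores = loader.get_exam_scores(exams)
# One row per (question, student) pair, ready for mark computations.
print(scores[["student_name", "question_id", "score"]].head())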


@@ -0,0 +1,7 @@
#!/usr/bin/env python
# encoding: utf-8
"""
"""


@@ -0,0 +1,21 @@
#!/usr/bin/env python
# encoding: utf-8
def column_values_to_column(pivot_column, value_column, kept_columns, df):
"""Pivot_column's values go to column with value_column under it, keeping kept_columns
:param pivot_column: column name where value will become columns
:param value_column: column name where value will be under pivot_column
:param kept_columns: unchanged columns
:param df: DataFrame to work with
:return: Stack dataframe
"""
if pivot_column in kept_columns:
pivot_columns = kept_columns
else:
pivot_columns = kept_columns + [pivot_column]
return df.set_index(pivot_columns).unstack(pivot_column)[value_column].reset_index()
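
A small usage sketch with made-up data, to make the unstack concrete: each value of the pivot column becomes its own column.

import pandas as pd

df = pd.DataFrame({
    "student": ["E1", "E1", "E2", "E2"],
    "question": ["q1", "q2", "q1", "q2"],
    "score": [1, 2, 3, 0],
})
# "question" values become columns filled with "score"; "student" is kept.
wide = column_values_to_column("question", "score", ["student"], df)
print(wide)  # one row per student, one column per question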


@@ -0,0 +1,257 @@
#!/usr/bin/env python
# encoding: utf-8
from math import ceil
import pandas as pd
def is_none_score(x, score_config):
"""Is a score correspond to a None numeric_value which
>>> import pandas as pd
>>> d = {"Eleve":["E1"]*7,
... "score_rate": [1]*7,
... "is_leveled":[0]+[1]*6,
... "score":[0.33, "", ".", "a", 1, 2, 3],
... }
>>> score_config = {
... 'BAD': {'value': 0, 'numeric_value': 0},
... 'FEW': {'value': 1, 'numeric_value': 1},
... 'NEARLY': {'value': 2, 'numeric_value': 2},
... 'GOOD': {'value': 3, 'numeric_value': 3},
... 'NOTFILLED': {'value': '', 'numeric_value': 'None'},
... 'NOANSWER': {'value': '.', 'numeric_value': 0},
... 'ABS': {'value': 'a', 'numeric_value': 'None'}
... }
>>> df = pd.DataFrame(d)
>>> df.apply(lambda x:is_none_score(x, score_config), axis=1)
0 False
1 True
2 False
3 True
4 False
5 False
6 False
dtype: bool
"""
none_values = [
v["value"]
for v in score_config.values()
if str(v["numeric_value"]).lower() == "none"
]
return x["score"] in none_values or pd.isnull(x["score"])
def format_score(x, score_config):
"""Make sure that score have the appropriate format
>>> import pandas as pd
>>> d = {"Eleve":["E1"]*6,
... "score_rate": [1]*6,
... "is_leveled":[0]+[1]*5,
... "score":[0.33, ".", "a", 1, 2, 3],
... }
>>> score_config = {
... 'BAD': {'value': 0, 'numeric_value': 0},
... 'FEW': {'value': 1, 'numeric_value': 1},
... 'NEARLY': {'value': 2, 'numeric_value': 2},
... 'GOOD': {'value': 3, 'numeric_value': 3},
... 'NOTFILLED': {'value': '', 'numeric_value': 'None'},
... 'NOANSWER': {'value': '.', 'numeric_value': 0},
... 'ABS': {'value': 'a', 'numeric_value': 'None'}
... }
>>> df = pd.DataFrame(d)
>>> df.apply(lambda x:format_score(x, score_config), axis=1)
0 0.33
1 .
2 a
3 1
4 2
5 3
dtype: object
>>> format_score({"score": "1.0", "is_leveled": 1}, score_config)
1
>>> format_score({"score": "3.0", "is_leveled": 1}, score_config)
3
>>> format_score({"score": 4, "is_leveled": 1}, score_config)
Traceback (most recent call last):
...
ValueError: 4 (<class 'int'>) can't be a score
"""
if not x["is_leveled"]:
return float(x["score"])
try:
score = int(float(x["score"]))
except ValueError:
score = str(x["score"])
if score in [v["value"] for v in score_config.values()]:
return score
raise ValueError(f"{x['score']} ({type(x['score'])}) can't be a score")
def score_to_numeric_score(x, score_config):
"""Convert a score to the corresponding numeric value
>>> import pandas as pd
>>> d = {"Eleve":["E1"]*7,
... "score_rate": [1]*7,
... "is_leveled":[0]+[1]*6,
... "score":[0.33, "", ".", "a", 1, 2, 3],
... }
>>> score_config = {
... 'BAD': {'value': 0, 'numeric_value': 0},
... 'FEW': {'value': 1, 'numeric_value': 1},
... 'NEARLY': {'value': 2, 'numeric_value': 2},
... 'GOOD': {'value': 3, 'numeric_value': 3},
... 'NOTFILLED': {'value': '', 'numeric_value': 'None'},
... 'NOANSWER': {'value': '.', 'numeric_value': 0},
... 'ABS': {'value': 'a', 'numeric_value': 'None'}
... }
>>> df = pd.DataFrame(d)
>>> df.apply(lambda x:score_to_numeric_score(x, score_config), axis=1)
0 0.33
1 None
2 0
3 None
4 1
5 2
6 3
dtype: object
"""
if x["is_leveled"]:
replacements = {v["value"]: v["numeric_value"] for v in score_config.values()}
return replacements[x["score"]]
return x["score"]
def score_to_mark(x, score_max, rounding=lambda x: round(x, 2)):
"""Compute the mark from "score" which have to be filtered and in numeric form
if the item is leveled then the score is multiply by the score_rate
otherwise it copies the score
:param x: dictionnary with "is_leveled", "score" (need to be number) and "score_rate" keys
:param score_max:
:param rounding: rounding mark function
:return: the mark
>>> import pandas as pd
>>> d = {"Eleve":["E1"]*7,
... "score_rate": [1]*7,
... "is_leveled":[0]+[1]*6,
... "score":[0.33, "", ".", "a", 1, 2, 3],
... }
>>> score_config = {
... 'BAD': {'value': 0, 'numeric_value': 0},
... 'FEW': {'value': 1, 'numeric_value': 1},
... 'NEARLY': {'value': 2, 'numeric_value': 2},
... 'GOOD': {'value': 3, 'numeric_value': 3},
... 'NOTFILLED': {'value': '', 'numeric_value': 'None'},
... 'NOANSWER': {'value': '.', 'numeric_value': 0},
... 'ABS': {'value': 'a', 'numeric_value': 'None'}
... }
>>> df = pd.DataFrame(d)
>>> df = df[~df.apply(lambda x:is_none_score(x, score_config), axis=1)]
>>> df["score"] = df.apply(lambda x:score_to_numeric_score(x, score_config), axis=1)
>>> df.apply(lambda x:score_to_mark(x, 3), axis=1)
0 0.33
2 0.00
4 0.33
5 0.67
6 1.00
dtype: float64
>>> from .on_value import round_half_point
>>> df.apply(lambda x:score_to_mark(x, 3, round_half_point), axis=1)
0 0.5
2 0.0
4 0.5
5 0.5
6 1.0
dtype: float64
"""
if x["is_leveled"]:
if x["score"] not in list(range(score_max + 1)):
raise ValueError(f"The evaluation is out of range: {x['score']} at {x}")
return rounding(x["score"] * x["score_rate"] / score_max)
return rounding(x["score"])
def score_to_level(x, level_max=3):
"""Compute the level (".",0,1,2,3).
:param x: dictionnary with "is_leveled", "score" and "score_rate" keys
:return: the level
>>> import pandas as pd
>>> d = {"Eleve":["E1"]*6 + ["E2"]*6,
... "score_rate":[1]*2+[2]*2+[2]*2 + [1]*2+[2]*2+[2]*2,
... "is_leveled":[0]*4+[1]*2 + [0]*4+[1]*2,
... "score":[1, 0.33, 0, 1.5, 1, 3, 0.666, 1, 1.5, 1, 2, 3],
... }
>>> df = pd.DataFrame(d)
>>> df
Eleve score_rate is_leveled score
0 E1 1 0 1.000
1 E1 1 0 0.330
2 E1 2 0 0.000
3 E1 2 0 1.500
4 E1 2 1 1.000
5 E1 2 1 3.000
6 E2 1 0 0.666
7 E2 1 0 1.000
8 E2 2 0 1.500
9 E2 2 0 1.000
10 E2 2 1 2.000
11 E2 2 1 3.000
>>> df.apply(score_to_level, axis=1)
0 3
1 1
2 0
3 3
4 1
5 3
6 2
7 3
8 3
9 2
10 2
11 3
dtype: int64
>>> df.apply(lambda x: score_to_level(x, 5), axis=1)
0 5
1 2
2 0
3 4
4 1
5 3
6 4
7 5
8 4
9 3
10 2
11 3
dtype: int64
"""
if x["is_leveled"]:
return int(x["score"])
if x["score"] > x["score_rate"]:
raise ValueError(
f"score is higher than score_rate ({x['score']} > {x['score_rate']}) for {x}"
)
return int(ceil(x["score"] / x["score_rate"] * level_max))
# -----------------------------
# Reglages pour 'vim'
# vim:set autoindent expandtab tabstop=4 shiftwidth=4:
# cursor: 16 del
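
Chained together, these row functions form the score pipeline: drop unfilled/absent scores, normalize the format, convert to numeric, then derive marks and levels. A condensed sketch with a trimmed-down score_config (same shape as in the doctests above):

import pandas as pd

score_config = {
    "BAD": {"value": 0, "numeric_value": 0},
    "GOOD": {"value": 3, "numeric_value": 3},
    "NOANSWER": {"value": ".", "numeric_value": 0},
    "ABS": {"value": "a", "numeric_value": "None"},
}
df = pd.DataFrame({
    "score_rate": [2, 2, 2, 2],
    "is_leveled": [1, 1, 1, 0],
    "score": [3, ".", "a", 1.5],
})
df = df[~df.apply(lambda x: is_none_score(x, score_config), axis=1)]  # drops the "a" row
df["score"] = df.apply(lambda x: format_score(x, score_config), axis=1)
df["score"] = df.apply(lambda x: score_to_numeric_score(x, score_config), axis=1)
df["mark"] = df.apply(lambda x: score_to_mark(x, 3), axis=1)   # 2.0, 0.0, 1.5
df["level"] = df.apply(lambda x: score_to_level(x), axis=1)    # 3, 0, 3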


@@ -0,0 +1,40 @@
#!/usr/bin/env python
# encoding: utf-8
from math import ceil, floor
def round_with_base(x, base=0.5):
"""Round to a multiple of base
:example:
>>> round_with_base(1.33, 0.1)
1.3
>>> round_with_base(1.33, 0.2)
1.4
>>> round_with_base(1.33, 1)
1
>>> round_with_base(1.33, 2)
2
"""
try:
prec = len(str(base).split(".")[1])
except IndexError:
prec = 0
return round(base * round(float(x) / base), prec)
def round_half_point(x):
"""Round to nearest half point
:example:
>>> round_half_point(1.33)
1.5
>>> round_half_point(1.1)
1.0
>>> round_half_point(1.66)
1.5
>>> round_half_point(1.76)
2.0
"""
return round_with_base(x, base=0.5)


@@ -0,0 +1,18 @@
#!/usr/bin/env python
# encoding: utf-8
import click
from recopytex.dashboard.index import app as dash
@click.group()
def cli():
pass
@cli.command()
@click.option("--debug", default=0, help="Debug mode for dash")
def dashboard(debug):
dash.run_server(debug=bool(debug))
if __name__ == "__main__":
cli()
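
Since the group only wires dash.run_server behind the dashboard command, it can be smoke-tested without launching a server through click's test runner (a sketch; importing the module does load the dash app):

from click.testing import CliRunner
from recopytex.scripts.recopytex import cli

runner = CliRunner()
result = runner.invoke(cli, ["dashboard", "--help"])
assert "--debug" in result.output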

requirements.txt Normal file

@@ -0,0 +1,4 @@
pandas
click
papermill
prompt_toolkit

requirements_dev.txt Normal file

@@ -0,0 +1,69 @@
ansiwrap
attrs
backcall
bleach
certifi
chardet
Click
colorama
cycler
decorator
defusedxml
entrypoints
future
idna
importlib-resources
ipykernel
ipython
ipython-genutils
ipywidgets
jedi
Jinja2
jsonschema
jupyter
jupyter-client
jupyter-console
jupyter-core
jupytex
kiwisolver
MarkupSafe
matplotlib
mistune
nbconvert
nbformat
notebook
numpy
pandas
pandocfilters
papermill
parso
pexpect
pickleshare
prometheus-client
prompt-toolkit
ptyprocess
Pygments
pyparsing
pyrsistent
python-dateutil
pytz
PyYAML
pyzmq
qtconsole
-e git+git_opytex:/lafrite/recopytex.git@e9a8310f151ead60434ae944d726a2fd22b23d06#egg=Recopytex
requests
scipy
seaborn
Send2Trash
six
tenacity
terminado
testpath
textwrap3
tornado
tqdm
traitlets
urllib3
wcwidth
webencodings
widgetsnbextension

setup.py Normal file

@@ -0,0 +1,26 @@
#!/usr/bin/env python
# encoding: utf-8
from setuptools import setup, find_packages
setup(
name='Recopytex',
version='0.1',
description='Assessment analysis',
author='Benjamin Bertrand',
author_email='',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Click',
],
entry_points='''
[console_scripts]
recopytex=recopytex.scripts.recopytex:cli
''',
)
# -----------------------------
# Reglages pour 'vim'
# vim:set autoindent expandtab tabstop=4 shiftwidth=4:
# cursor: 16 del
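
With this console_scripts entry point, an editable install (pip install -e .) exposes a recopytex command, so the dashboard defined above can be started with: recopytex dashboard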

File diff suppressed because one or more lines are too long

templates/tpl_student.ipynb Normal file

File diff suppressed because it is too large

test_config.yml Normal file

@@ -0,0 +1,13 @@
---
source: ./example
output: ./output
templates: templates/
tribes:
Tribe1:
name: Tribe1
type: Type1
students: tribe1.csv
Tribe2:
name: Tribe2
students: tribe2.csv

View File

@@ -1,134 +0,0 @@
import sqlite3
import time
from pathlib import Path
import pytest
import requests
from sqlalchemy import create_engine
from sqlalchemy.orm import clear_mappers, sessionmaker
from backend import config
from backend.adapters.orm import metadata, start_mappers
from backend.adapters.sqlite import create_db
from backend.model.student import Student
from backend.model.tribe import Tribe
from tests.model.fakes import build_student, build_tribes
@pytest.fixture
def in_memory_db():
engine = create_engine("sqlite:///:memory:")
metadata.create_all(engine)
return engine
@pytest.fixture
def session(in_memory_db):
start_mappers()
yield sessionmaker(bind=in_memory_db)()
clear_mappers()
@pytest.fixture
def memory_sqlite_conn():
sqlite_db = ":memory:"
conn = sqlite3.connect(sqlite_db)
create_db(conn)
yield conn
conn.close()
@pytest.fixture
def clean_db():
sqlite_db = "sqlite.db"
conn = sqlite3.connect(sqlite_db)
create_db(conn)
yield
cursor = conn.cursor()
cursor.execute("""DROP TABLE tribes""")
cursor.execute("""DROP TABLE students""")
conn.commit()
def populate_tribes(conn) -> list[Tribe]:
cursor = conn.cursor()
tribes = build_tribes(3)
cursor.executemany(
"""
INSERT INTO tribes(name, level) VALUES (?, ?)
""",
[t.to_tuple() for t in tribes],
)
conn.commit()
return tribes
def populate_students(conn, tribes: list[Tribe]) -> list[Student]:
cursor = conn.cursor()
prebuild_students = build_student(tribes, 2)
cursor.executemany(
"""
INSERT INTO students(id, name, tribe_name) VALUES (:id, :name, :tribe_name)
""",
[s.to_dict() for s in prebuild_students],
)
conn.commit()
return prebuild_students
@pytest.fixture
def populate_db():
class Student_tribe_context:
_tribes = []
_students = []
def __init__(self, conn):
self.conn = conn
def __enter__(self):
self._tribes += populate_tribes(self.conn)
self._students += populate_students(self.conn, self._tribes)
return self._tribes, self._students
def __exit__(self, *args):
for student in self._students:
self.conn.execute(
"""
DELETE FROM students WHERE id=:id
""",
{"id": student.id},
)
for tribe in self._tribes:
self.conn.execute(
"""
DELETE FROM tribes WHERE name=:name
""",
{"name": tribe.name},
)
self.conn.commit()
def fixture(conn):
return Student_tribe_context(conn)
yield fixture
def wait_for_webapp_to_come_up():
deadline = time.time() + 10
url = config.get_api_url()
while time.time() < deadline:
try:
return requests.get(url)
except ConnectionError:
time.sleep(0.5)
pytest.fail("API never came up")
@pytest.fixture
def restart_api():
(Path(__file__).parent.parent / "backend" / "api" / "main.py").touch()
time.sleep(0.5)
wait_for_webapp_to_come_up()


@@ -1,104 +0,0 @@
import pytest
import requests
from backend import config
from tests.model.fakes import build_student, build_tribes
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_post_student():
url = config.get_api_url()
tribe = build_tribes(1)[0]
requests.post(f"{url}/tribes", json=tribe.to_dict())
data = {"name": "zart", "tribe_name": tribe.name}
r = requests.post(f"{url}/students", json=data)
post_request = r.history[0]
assert post_request.status_code == 302
assert r.status_code == 200
assert r.json()["name"] == "zart"
assert r.json()["tribe_name"] == tribe.name
assert r.json()["id"]
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_post_student_with_id():
url = config.get_api_url()
tribe = build_tribes(1)[0]
requests.post(f"{url}/tribes", json=tribe.to_dict())
data = {"id": "1234", "name": "zart", "tribe_name": tribe.name}
requests.post(f"{url}/students", json=data)
r = requests.post(f"{url}/students", json=data)
assert r.status_code == 409
assert (
r.json()
== f"You can't post a student with an id. It is already registrered. Use PUT to modify it."
)
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_post_student_in_non_existant_tribe():
url = config.get_api_url()
tribe = build_tribes(1)[0]
requests.post(f"{url}/tribes", json=tribe.to_dict())
data = {"name": "zart", "tribe_name": tribe.name + "_"}
requests.post(f"{url}/students", json=data)
r = requests.post(f"{url}/students", json=data)
assert r.status_code == 409
assert (
r.json()
== f"The tribe {tribe.name+'_'} does not exists. You can't add a student in it."
)
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_put_student():
url = config.get_api_url()
tribe = build_tribes(1)[0]
requests.post(f"{url}/tribes", json=tribe.to_dict())
data = {"name": "zart", "tribe_name": tribe.name}
r = requests.post(f"{url}/students", json=data)
student = r.json()
student["name"] = "Choupinou"
r2 = requests.put(f"{url}/students/{student['id']}", json=student)
post_request = r2.history[0]
assert post_request.status_code == 302
assert r2.status_code == 200
assert r2.json()["name"] == "Choupinou"
assert r2.json()["tribe_name"] == tribe.name
assert r2.json()["id"] == r.json()["id"]
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_delete_student():
url = config.get_api_url()
tribe = build_tribes(1)[0]
requests.post(f"{url}/tribes", json=tribe.to_dict())
student = build_student([tribe], 1)[0]
r = requests.post(
f"{url}/students", json={"name": student.name, "tribe_name": student.tribe.name}
)
student_id = r.json()["id"]
r = requests.delete(f"{url}/students/{student_id}")
assert r.status_code == 204
r = requests.get(f"{url}/students/")
assert r.json() == []


@@ -1,119 +0,0 @@
import pytest
import requests
from backend import config
from tests.model.fakes import build_tribes
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_post_tribe():
data = {"name": "tribe", "level": "2nd"}
url = config.get_api_url()
r = requests.post(f"{url}/tribes", json=data)
post_request = r.history[0]
assert post_request.status_code == 302
assert r.status_code == 200
assert r.json() == {
"assessments": [],
"level": "2nd",
"name": "tribe",
"students": [],
}
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_post_tribe_already_exists():
data = {"name": "Pioupiou", "level": "2nd"}
url = config.get_api_url()
requests.post(f"{url}/tribes", json=data)
r = requests.post(f"{url}/tribes", json=data)
assert r.status_code == 409
assert r.json() == f"The tribe {data['name']} already exists"
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_put_tribe():
tribe = build_tribes(1)[0]
url = config.get_api_url()
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
mod_tribe = tribe
mod_tribe.level = "other level"
r = requests.put(f"{url}/tribes/{tribe.name}", json=mod_tribe.to_dict())
post_request = r.history[0]
assert post_request.status_code == 302
assert r.status_code == 200
r = requests.get(f"{url}/tribes")
assert [t["name"] for t in r.json()] == [mod_tribe.name]
assert [t["level"] for t in r.json()] == [mod_tribe.level]
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_put_tribe_doesnt_exists():
tribe = build_tribes(1)[0]
url = config.get_api_url()
r = requests.put(f"{url}/tribes/{tribe.name}", json=tribe.to_dict())
assert r.status_code == 409
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_delete_tribe():
tribe = build_tribes(1)[0]
url = config.get_api_url()
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
r = requests.delete(f"{url}/tribes/{tribe.name}")
assert r.status_code == 204
r = requests.get(f"{url}/tribes")
assert r.json() == []
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_delete_tribe_doesnt_exists():
tribe = build_tribes(1)[0]
url = config.get_api_url()
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
r = requests.delete(f"{url}/tribes/notexisting")
assert r.status_code == 409
r = requests.get(f"{url}/tribes")
assert [t["name"] for t in r.json()] == [tribe.name]
assert [t["level"] for t in r.json()] == [tribe.level]
@pytest.mark.usefixtures("restart_api")
@pytest.mark.usefixtures("clean_db")
def test_api_post_list_tribe():
tribe = build_tribes(1)[0]
url = config.get_api_url()
r = requests.post(f"{url}/tribes", json=tribe.to_dict())
r = requests.get(f"{url}/tribes")
assert r.json() == [
{
"assessments": [],
"level": tribe.level,
"name": tribe.name,
"students": [],
}
]


@@ -1,43 +0,0 @@
from backend.adapters.orm import metadata, start_mappers
from backend.model.student import Student
from backend.model.tribe import Tribe
def test_tribes_mapper_can_load_tribe(session):
session.execute(
"INSERT INTO tribes (name, level) VALUES "
"('tribe1', '2nd'),"
"('tribe2', '1ST')"
)
expected = [
(Tribe("tribe1", "2nd")),
(Tribe("tribe2", "1ST")),
]
assert session.query(Tribe).all() == expected
def test_tribe_mapper_can_save_tribe(session):
tribe_infos = ("tribe1", "2nd")
tribe = Tribe(*tribe_infos)
session.add(tribe)
rows = list(session.execute("SELECT name, level FROM 'tribes'"))
assert rows == []
session.commit()
rows = list(session.execute("SELECT name, level FROM 'tribes'"))
assert rows == [tribe_infos]
def test_tribe_mapper_can_save_and_load_tribe(session):
tribe_infos = ("tribe1", "2nd")
tribe = Tribe(*tribe_infos)
assert session.query(Tribe).all() == []
session.add(tribe)
assert session.query(Tribe).all() == [tribe]
session.commit()
assert session.query(Tribe).all() == [tribe]


@@ -1,107 +0,0 @@
import sqlite3
import pytest
from backend.model.student import Student
from backend.repository.student_sqlite_repository import (
StudentRepositoryError,
StudentSQLiteRepository,
)
def test_get_student(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
student_id = prebuild_students[0].id
student = student_repo.get(student_id, prebuild_tribes)
assert prebuild_students[0] == student
def test_get_student_not_exists(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
with pytest.raises(ValueError):
student_repo.get("student0", prebuild_tribes)
def test_list_students(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
students = student_repo.list(prebuild_tribes)
assert prebuild_students == students
def test_add_student(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
student_infos = {"name": "student1", "tribe": prebuild_tribes[0]}
student = Student(**student_infos)
student_repo.add(student)
memory_sqlite_conn.commit()
cursor = memory_sqlite_conn.cursor()
cursor.execute(
"""
SELECT id, name, tribe_name FROM students WHERE id=?
""",
(student.id,),
)
row = cursor.fetchone()
assert row == student.to_tuple()
def test_add_student_fail_exists(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
student_infos = {"name": "student1", "tribe": prebuild_tribes[0]}
student = Student(**student_infos)
student_repo.add(student)
memory_sqlite_conn.commit()
with pytest.raises(sqlite3.IntegrityError):
student_repo.add(student)
def test_update_student(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
student = prebuild_students[0]
student.name = "Boby"
student.tribe = prebuild_tribes[-1]
student_repo.update(student)
memory_sqlite_conn.commit()
student_list = student_repo.list(prebuild_tribes)
assert set(student_list) == set(prebuild_students)
moded_student = next(filter(lambda s: s.id == student.id, student_list))
assert moded_student == student
def test_update_student_does_not_exists(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
student = Student(name="jkl", tribe=prebuild_tribes[0])
with pytest.raises(StudentRepositoryError):
student_repo.update(student)
def test_delete_student(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, prebuild_students):
student_repo = StudentSQLiteRepository(memory_sqlite_conn)
deleted_student = prebuild_students.pop()
student_repo.delete(deleted_student.id)
memory_sqlite_conn.commit()
assert student_repo.list(prebuild_tribes) == prebuild_students


@@ -1,93 +0,0 @@
import pytest
from backend.model.tribe import Tribe
from backend.repository.tribe_sqlite_repository import (
TribeRepositoryError,
TribeSQLiteRepository,
)
def test_get_tribe(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
name = prebuild_tribes[0].name
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
tribes = tribe_repo.get(name)
assert prebuild_tribes[0] == tribes
def test_get_tribe_not_exists(memory_sqlite_conn):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
with pytest.raises(TribeRepositoryError):
tribe_repo.get("Tribe0")
def test_list_tribes(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
listed_tribes = tribe_repo.list()
assert prebuild_tribes == listed_tribes
def test_add_tribe(memory_sqlite_conn):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
tribe_infos = ("tribe1", "2nd")
tribe = Tribe(*tribe_infos)
tribe_repo.add(tribe)
memory_sqlite_conn.commit()
cursor = memory_sqlite_conn.cursor()
cursor.execute(
"""
SELECT * FROM tribes WHERE name=?
""",
("tribe1",),
)
row = cursor.fetchone()
assert row == tribe_infos
def test_add_tribe_fail_exists(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
existing_tribe = prebuild_tribes[0]
with pytest.raises(TribeRepositoryError):
tribe_repo.add(existing_tribe)
def test_update_tribe(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
name = prebuild_tribes[0].name
new_tribe = Tribe("Tribe0", "Term")
tribe_repo.update(name, new_tribe)
memory_sqlite_conn.commit()
prebuild_tribes[0] = new_tribe
assert tribe_repo.list() == prebuild_tribes
def test_update_tribe_not_exists(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
name = prebuild_tribes[0].name
new_tribe = Tribe("Tribe0", "Term")
with pytest.raises(TribeRepositoryError):
tribe_repo.update("iouiou", new_tribe)
def test_delete_tribe(memory_sqlite_conn, populate_db):
with populate_db(memory_sqlite_conn) as (prebuild_tribes, _):
tribe_repo = TribeSQLiteRepository(memory_sqlite_conn)
deleted_tribe = prebuild_tribes.pop()
deleted_tribe.name = "iouiou"
with pytest.raises(TribeRepositoryError):
tribe_repo.delete(deleted_tribe)


@@ -1,93 +0,0 @@
from random import choice, randint
from faker import Faker
from faker.providers import DynamicProvider
from backend.model.assessment import Assessment, Domain, Exercise, Question, Skill
from backend.model.student import Student
from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository
level_provider = DynamicProvider(
provider_name="level",
elements=["2nd", "1ST", "SNT", "1G", "TG", "EnsSci"],
)
faker = Faker("fr_FR")
faker.add_provider(level_provider)
def build_tribes(quantity: int = 1) -> list[Tribe]:
return [Tribe(name=faker.word(), level=faker.level()) for _ in range(quantity)]
def build_assessments(
tribes: list[Tribe], assessment_per_tribe: int = 1
) -> list[Assessment]:
assessments = []
for t in tribes:
assessments += [
Assessment("faker.word()", t, randint(1, 3))
for _ in range(assessment_per_tribe)
]
return assessments
def build_exercises(
assessments: list[Assessment], exercise_per_assessment=1
) -> list[Exercise]:
exercises = []
for assessment in assessments:
exercises += [
Exercise("faker.word()", assessment, "today")
for _ in range(exercise_per_assessment)
]
return exercises
def build_skills(quantity=1) -> list[Skill]:
return [Skill(faker.word(), faker.text()) for _ in range(quantity)]
def build_domains(quantity=1) -> list[Domain]:
return [Domain(faker.word(), faker.text()) for _ in range(quantity)]
def build_questions(
exercises: list[Exercise],
question_per_exercise=1,
) -> list[Question]:
skills = build_skills()
domains = build_domains()
questions = []
for exercise in exercises:
questions += [
Question(
faker.word(),
exercise,
description="desc",
skill=choice(skills),
domain=choice(domains),
is_leveled=choice([True, False]),
scale=randint(1, 20),
)
for _ in range(question_per_exercise)
]
return questions
def build_student(
tribes: list[Tribe],
students_per_tribe=1,
) -> list[Student]:
students = []
for tribe in tribes:
students += [
Student(name=faker.name(), tribe=tribe) for _ in range(students_per_tribe)
]
return students


@@ -1,26 +0,0 @@
from random import choice, randint
from tests.model.fakes import build_questions
from .fakes import build_assessments, build_exercises, build_tribes
def test_assessement_register_exercise():
exercise_qty = randint(1, 10)
tribes = build_tribes(1)
assessments = build_assessments(tribes, 1)
exercises = build_exercises(assessments, exercise_qty)
assert len(assessments[0].exercises) == exercise_qty
assert assessments[0].exercises == exercises
def test_exercise_register_question():
question_qty = randint(1, 10)
tribes = build_tribes(1)
assessments = build_assessments(tribes, 1)
exercises = build_exercises(assessments, 1)
questions = build_questions(exercises, question_qty)
assert len(exercises[0].questions) == question_qty
assert exercises[0].questions == questions


@@ -1,16 +0,0 @@
from backend.model.student import Student
from backend.model.tribe import Tribe
from tests.model.fakes import build_tribes
def test_init_student():
tribe = build_tribes(1)[0]
student = Student(name="Bob", tribe=tribe)
print(student)
assert type(student.name) == str
assert type(student.tribe) == Tribe
assert type(student.id) == str
student2 = Student(name="Hop", tribe=tribe)
assert student.id != student2.id


@@ -1,33 +0,0 @@
from random import randint
import pytest
from backend.model.student import Student
from backend.model.tribe import Tribe, TribeError
from tests.model.fakes import build_assessments, build_tribes
def test_tribe_register_assessment():
assessments_qty = randint(1, 10)
tribes = build_tribes(1)
assessments = build_assessments(tribes, assessments_qty)
assert len(tribes[0].assessments) == assessments_qty
assert tribes[0].assessments == assessments
def test_tribe_register_student():
tribe = Tribe("foo", "2nd")
student = Student(id="1", name="Bob", tribe=tribe)
assert len(tribe.students) == 1
assert tribe.students[0] == student
def test_tribe_register_student_already_exists_overwrite():
tribe = Tribe("foo", "2nd")
student = Student(id="1", name="Bob", tribe=tribe)
changed_student = Student(id="1", name="Choupy", tribe=tribe)
assert tribe.students[0] == changed_student
assert student.name not in [s.name for s in tribe.students]


@@ -1,350 +0,0 @@
import pytest
from backend.model.student import Student
from backend.model.tribe import Tribe
from backend.repository.abstract_repository import AbstractRepository
from backend.repository.student_sqlite_repository import StudentRepositoryError
from backend.repository.tribe_sqlite_repository import TribeRepositoryError
from backend.service import services
from backend.service.services import (
StudentDoesExist,
StudentExists,
TribeDoesNotExist,
TribeExists,
)
from tests.model.fakes import build_student, build_tribes
class FakeTribeRepository(AbstractRepository):
def __init__(self, tribes: list[Tribe] = []) -> None:
self._tribes = {t.name: t for t in tribes}
def add(self, tribe: Tribe) -> None:
if tribe.name not in self._tribes.keys():
self._tribes[tribe.name] = tribe
else:
raise TribeRepositoryError(f"{tribe} already exists")
def update(self, name: str, tribe: Tribe) -> None:
try:
self._tribes.pop(name)
self._tribes[tribe.name] = tribe
except KeyError:
raise TribeRepositoryError(f"The tribe {tribe} does not exists")
def list(self) -> list[Tribe]:
return list(self._tribes.values())
def get(self, name: str) -> Tribe:
try:
return self._tribes[name]
except KeyError:
raise TribeRepositoryError(f"The tribe {name} does not exists")
def delete(self, name: str) -> None:
try:
self._tribes.pop(name)
except KeyError:
raise TribeRepositoryError(f"The tribe {name} does not exists")
class FakeStudentRepository(AbstractRepository):
def __init__(self, students: list[Student] = []) -> None:
self._students = {s.id: s for s in students}
def add(self, student: Student) -> None:
if student.id not in self._students.keys():
self._students[student.id] = student
else:
raise StudentRepositoryError(f"{student} already exists")
def update(self, student: Student) -> None:
if student.id not in self._students.keys():
raise StudentRepositoryError(f"The student {student} does not exists")
self._students[student.id] = student
def list(self) -> list[Student]:
return list(self._students.values())
def get(self, id: str) -> Student:
try:
return self._students[id]
except KeyError:
raise KeyError(f"The student ({id=}) does not exists")
def delete(self, id: str) -> None:
try:
self._students.pop(id)
except KeyError:
raise StudentRepositoryError(f"The student with id {id} does not exists")
class FakeConn:
committed = False
def commit(self):
self.committed = True
def reset_commit(self):
self.committed = False
def test_add_tribe():
tribe_repo = FakeTribeRepository()
tribe = build_tribes(1)[0]
conn = FakeConn()
services.add_tribe(
name=tribe.name, level=tribe.level, tribe_repo=tribe_repo, conn=conn
)
assert conn.committed is True
assert tribe_repo.list() == [tribe]
def test_add_tribe_fail_exists():
tribe_repo = FakeTribeRepository()
tribe = build_tribes(1)[0]
conn = FakeConn()
services.add_tribe(
name=tribe.name, level=tribe.level, tribe_repo=tribe_repo, conn=conn
)
conn.reset_commit()
with pytest.raises(TribeExists):
services.add_tribe(
name=tribe.name, level=tribe.level, tribe_repo=tribe_repo, conn=conn
)
assert conn.committed == False
def test_update_tribe():
tribes = build_tribes(3)
tribe_repo = FakeTribeRepository(tribes)
conn = FakeConn()
other_level = "iouiouiou"
tribes[0].level = other_level
services.update_tribe(
name=tribes[0].name, level=other_level, tribe_repo=tribe_repo, conn=conn
)
assert conn.committed is True
assert set(tribe_repo.list()) == set(tribes)
def test_update_tribe_fail_not_exists():
tribes = build_tribes(3)
tribe_repo = FakeTribeRepository(tribes)
conn = FakeConn()
with pytest.raises(TribeDoesNotExist):
services.update_tribe(
name="azerty", level="jkl", tribe_repo=tribe_repo, conn=conn
)
assert conn.committed == False
def test_delete_tribe():
tribes = build_tribes(3)
tribe_repo = FakeTribeRepository(tribes)
conn = FakeConn()
tribe = tribes.pop()
services.delete_tribe(name=tribe.name, tribe_repo=tribe_repo, conn=conn)
assert conn.committed is True
assert set(tribe_repo.list()) == set(tribes)
def test_delete_tribe_fail_not_exists():
tribes = build_tribes(3)
tribe_repo = FakeTribeRepository(tribes)
conn = FakeConn()
with pytest.raises(TribeDoesNotExist):
services.delete_tribe(name="azerty", tribe_repo=tribe_repo, conn=conn)
assert conn.committed == False
def test_add_student():
tribes = build_tribes(1)
tribe_repo = FakeTribeRepository(tribes)
student = build_student(tribes, 1)[0]
student_repo = FakeStudentRepository()
conn = FakeConn()
saved_student = services.add_student(
name=student.name,
tribe=student.tribe.name,
student_repo=student_repo,
tribe_repo=tribe_repo,
conn=conn,
)
assert conn.committed is True
listed_student = student_repo.list()[0]
assert student.name == listed_student.name
assert student.tribe.name == listed_student.tribe.name
# The id is not passed to the service, they can't have the same.
assert student.id != listed_student.id
assert saved_student == listed_student
def test_add_student_tribe_doesnt_exist():
tribes = build_tribes(1)
tribe_repo = FakeTribeRepository(tribes)
students = build_student(tribes, 1)
student_repo = FakeStudentRepository()
conn = FakeConn()
student = students[0]
with pytest.raises(TribeDoesNotExist):
services.add_student(
name=student.name,
tribe="iuouiouiouio",
student_repo=student_repo,
tribe_repo=tribe_repo,
conn=conn,
)
assert conn.committed is False
def test_update_student():
tribes = build_tribes(2)
tribe_repo = FakeTribeRepository(tribes)
students = build_student(tribes, 1)
student_repo = FakeStudentRepository(students)
conn = FakeConn()
id = students[0].id
new_name = "new name"
new_tribe_name = tribes[1].name
saved_student = services.update_student(
id=id,
name=new_name,
tribe=new_tribe_name,
student_repo=student_repo,
tribe_repo=tribe_repo,
conn=conn,
)
assert conn.committed is True
mod_student = student_repo.get(id)
assert mod_student.name == new_name
assert mod_student.tribe.name == new_tribe_name
listed_student = student_repo.list()
assert len(listed_student) == 2
def test_update_student_tribe_doesnt_exist():
tribes = build_tribes(2)
tribe_repo = FakeTribeRepository(tribes)
students = build_student(tribes, 1)
student_repo = FakeStudentRepository(students)
conn = FakeConn()
id = students[0].id
new_name = "new name"
new_tribe_name = "not existing tribe"
with pytest.raises(TribeDoesNotExist):
services.update_student(
id=id,
name=new_name,
tribe=new_tribe_name,
student_repo=student_repo,
tribe_repo=tribe_repo,
conn=conn,
)
assert conn.committed is False
mod_student = student_repo.get(id)
assert mod_student.name == students[0].name
assert mod_student.tribe.name == students[0].tribe.name
listed_student = student_repo.list()
assert len(listed_student) == 2
def test_update_student_doesnt_exist():
tribes = build_tribes(2)
tribe_repo = FakeTribeRepository(tribes)
students = build_student(tribes, 1)
student_repo = FakeStudentRepository(students)
conn = FakeConn()
id = "not existing id"
new_name = students[0].name
new_tribe_name = students[0].tribe.name
with pytest.raises(StudentDoesExist):
services.update_student(
id=id,
name=new_name,
tribe=new_tribe_name,
student_repo=student_repo,
tribe_repo=tribe_repo,
conn=conn,
)
assert conn.committed is False
original_student = student_repo.get(students[0].id)
assert original_student.name == students[0].name
assert original_student.tribe.name == students[0].tribe.name
listed_student = student_repo.list()
assert len(listed_student) == 2
def test_delete_student():
tribes = build_tribes(2)
tribe_repo = FakeTribeRepository(tribes)
students = build_student(tribes, 1)
student_repo = FakeStudentRepository(students)
conn = FakeConn()
student = students.pop()
services.delete_student(
id=student.id,
student_repo=student_repo,
conn=conn,
)
assert conn.committed is True
listed_student = student_repo.list()
assert listed_student == students
def test_delete_student_doesnt_exist():
tribes = build_tribes(2)
tribe_repo = FakeTribeRepository(tribes)
students = build_student(tribes, 1)
student_repo = FakeStudentRepository(students)
conn = FakeConn()
with pytest.raises(StudentDoesExist):
services.delete_student(
id="not existing id",
student_repo=student_repo,
conn=conn,
)
assert conn.committed is False
listed_student = student_repo.list()
assert set(listed_student) == set(students)