161 lines
4.5 KiB
Python
161 lines
4.5 KiB
Python
#!/usr/bin/env python
|
|
# encoding: utf-8
|
|
|
|
import click
|
|
from pathlib import Path
|
|
from datetime import datetime
|
|
from PyInquirer import prompt, print_json
|
|
import pandas as pd
|
|
import numpy as np
|
|
|
|
from .config import config
|
|
from ..config import NO_ST_COLUMNS
|
|
|
|
|
|
class PromptAbortException(EOFError):
    """Raised when the user aborts an item questionnaire mid-way.

    Subclasses EOFError so abandoning a prompt behaves like an
    interrupted input stream for any generic handler upstream.
    """

    def __init__(self, message, errors=None):
        # Delegate to EOFError with a fixed abort message; the optional
        # ``errors`` payload is carried along in ``args``.
        super().__init__("Abort questionnary", errors)
|
|
|
|
|
|
def get_tribes(answers):
    """Return the tribe names available under config["source"].

    A tribe is any subdirectory of the source folder that contains an
    "eleves.csv" file.  ``answers`` is unused; the parameter exists
    because PyInquirer passes current answers to ``choices`` callables.
    """
    source_dir = Path(config["source"])
    tribes = []
    for entry in source_dir.iterdir():
        if (entry / "eleves.csv").exists():
            tribes.append(entry.name)
    return tribes
|
|
|
|
|
|
def prepare_csv():
    """Interactively create an assessment and write its empty marks CSV.

    Runs the ``new_eval`` questionnaire, then writes a CSV named
    ``{date}_{assessment}.csv`` inside the tribe's folder, with one row
    per item and one empty column per student of the tribe.
    """
    items = new_eval()

    # If every item creation was aborted, there is nothing to write and
    # items[0] below would raise IndexError.
    if not items:
        click.echo("Aucun item défini, pas de fichier créé")
        return

    # All items share the same assessment metadata; use the first one.
    item = items[0]
    csv_output = (
        Path(config["source"])
        / item["tribe"]
        / f"{item['date']:%y%m%d}_{item['assessment']}.csv"
    )

    students = pd.read_csv(Path(config["source"]) / item["tribe"] / "eleves.csv")["Nom"]

    # Project each item dict onto the configured columns, in order, then
    # rename the columns to their display names.
    item_keys = list(NO_ST_COLUMNS.keys())
    records = [[it[key] for key in item_keys] for it in items]
    items_df = pd.DataFrame.from_records(records, columns=list(NO_ST_COLUMNS.values()))

    # One empty column per student, to be filled in during marking.
    for student in students:
        items_df[student] = np.nan

    items_df.to_csv(csv_output, index=False, date_format="%d/%m/%Y")
    click.echo(f"Saving csv file to {csv_output}")
|
|
|
|
|
|
def new_eval(answers=None):
    """Interactively create a new assessment.

    Asks for the assessment name, tribe, date and term, then repeatedly
    runs the exercise questionnaire until the user declines to add more.

    Parameters
    ----------
    answers : dict, optional
        Pre-filled answers.  Currently unused; kept for signature
        symmetry with the other prompt helpers.  The former ``answers={}``
        default was a shared mutable object across calls.

    Returns
    -------
    list
        All item dicts collected across every exercise.
    """
    click.echo("Préparation d'un nouveau devoir")

    eval_questions = [
        {"type": "input", "name": "assessment", "message": "Nom de l'évaluation"},
        {
            "type": "list",
            "name": "tribe",
            "message": "Classe concernée",
            # Callable: PyInquirer resolves choices lazily from the fs.
            "choices": get_tribes,
        },
        {
            "type": "input",
            "name": "date",
            "message": "Date du devoir (%y%m%d)",
            "default": datetime.today().strftime("%y%m%d"),
            # Convert the typed string into a datetime for later formatting.
            "filter": lambda val: datetime.strptime(val, "%y%m%d"),
        },
        {
            "type": "list",
            "name": "term",
            "message": "Trimestre",
            "choices": ["1", "2", "3"],
        },
    ]

    eval_ans = prompt(eval_questions)

    items = []
    add_exo = True
    while add_exo:
        items += new_exercice(eval_ans)
        add_exo = prompt(
            [
                {
                    "type": "confirm",
                    "name": "add_exo",
                    "message": "Ajouter un autre exercice",
                    "default": True,
                }
            ]
        )["add_exo"]
    return items
|
|
|
|
|
|
def new_exercice(answers=None):
    """Prompt for one exercise and all of its items.

    Parameters
    ----------
    answers : dict, optional
        Answers already collected (assessment metadata) that PyInquirer
        merges into the prompt results.  Defaults to a fresh dict; the
        former ``answers={}`` default was a shared mutable object that
        ``prompt(..., answers=...)`` could mutate across calls.

    Returns
    -------
    list
        Item dicts for this exercise; aborted items are skipped.
    """
    answers = {} if answers is None else answers

    exercise_questions = [
        {"type": "input", "name": "exercise", "message": "Nom de l'exercice"},
    ]

    click.echo("Nouvel exercice")
    exercise_ans = prompt(exercise_questions, answers=answers)

    items = []
    add_item = True
    while add_item:
        try:
            item_ans = new_item(exercise_ans)
        except PromptAbortException:
            # The user rejected the final confirmation: drop this item.
            click.echo("Création de l'item annulée")
        else:
            items.append(item_ans)
        add_item = prompt(
            [
                {
                    "type": "confirm",
                    "name": "add_item",
                    "message": f"Ajouter un autre item pour l'exercice {exercise_ans['exercise']}",
                    "default": True,
                }
            ]
        )["add_item"]

    return items
|
|
|
|
|
|
def new_item(answers=None):
    """Prompt for a single item (question) of an exercise.

    Parameters
    ----------
    answers : dict, optional
        Answers already collected; must contain the "exercise" key used
        in the intro message.  Defaults to a fresh dict; the former
        ``answers={}`` default was a shared mutable object across calls.

    Returns
    -------
    dict
        The item's answers, once the user confirms them.

    Raises
    ------
    PromptAbortException
        If the user answers "no" to the final confirmation.
    """
    answers = {} if answers is None else answers

    item_questions = [
        {"type": "input", "name": "question", "message": "Nom de l'item"},
        {"type": "input", "name": "comment", "message": "Commentaire"},
        {
            "type": "list",
            "name": "competence",
            "message": "Competence",
            "choices": ["Cher", "Rep", "Mod", "Rai", "Cal", "Com"],
        },
        {"type": "input", "name": "theme", "message": "Domaine"},
        {
            "type": "confirm",
            "name": "is_leveled",
            "message": "Évaluation par niveau",
            "default": True,
        },
        {"type": "input", "name": "score_rate", "message": "Bareme"},
        {
            "type": "confirm",
            "name": "correct",
            "message": "Tout est correct?",
            "default": True,
        },
    ]
    click.echo(f"Nouvelle question pour l'exercice {answers['exercise']}")
    item_ans = prompt(item_questions, answers=answers)
    if item_ans["correct"]:
        return item_ans
    raise PromptAbortException("Abort item creation")
|