diff --git a/recopytex/config.py b/recopytex/config.py
index c5eb096..75413d7 100644
--- a/recopytex/config.py
+++ b/recopytex/config.py
@@ -2,16 +2,16 @@
 # encoding: utf-8
 
 NO_ST_COLUMNS = {
-    "term": "Trimestre",
     "assessment": "Nom",
+    "term": "Trimestre",
     "date": "Date",
     "exercise": "Exercice",
     "question": "Question",
     "competence": "Competence",
     "theme": "Domaine",
     "comment": "Commentaire",
-    "score_rate": "Bareme",
     "is_leveled": "Est_nivele",
+    "score_rate": "Bareme",
 }
 
 COLUMNS = {
diff --git a/recopytex/scripts/prepare_csv.py b/recopytex/scripts/prepare_csv.py
index 535c1ff..38edc4c 100644
--- a/recopytex/scripts/prepare_csv.py
+++ b/recopytex/scripts/prepare_csv.py
@@ -5,8 +5,11 @@
 import click
 from pathlib import Path
 from datetime import datetime
 from PyInquirer import prompt, print_json
+import pandas as pd
+import numpy as np
 
 from .config import config
+from ..config import NO_ST_COLUMNS
 
 class PromptAbortException(EOFError):
@@ -26,12 +29,32 @@
 def prepare_csv():
     items = new_eval()
+    item = items[0]
+    # item = {"tribe": "308", "date": datetime.today(), "assessment": "plop"}
+    csv_output = (
+        Path(config["source"])
+        / item["tribe"]
+        / f"{item['date']:%y%m%d}_{item['assessment']}.csv"
+    )
+
+    students = pd.read_csv(Path(config["source"]) / item["tribe"] / "eleves.csv")["Nom"]
+
+    columns = list(NO_ST_COLUMNS.keys())
+    items = [[it[c] for c in columns] for it in items]
+    columns = list(NO_ST_COLUMNS.values())
+    items_df = pd.DataFrame.from_records(items, columns=columns)
+    for s in students:
+        items_df[s] = np.nan
+
+    items_df.to_csv(csv_output, index=False, date_format="%d/%m/%Y")
+    click.echo(f"Saving csv file to {csv_output}")
+
 
 
 
 def new_eval(answers={}):
     click.echo(f"Préparation d'un nouveau devoir")
     eval_questions = [
-        {"type": "input", "name": "evalname", "message": "Nom de l'évaluation",},
+        {"type": "input", "name": "assessment", "message": "Nom de l'évaluation",},
         {
             "type": "list",
             "name": "tribe",
@@ -75,7 +98,7 @@ def new_eval(answers={}):
 
 def new_exercice(answers={}):
     exercise_questions = [
-        {"type": "input", "name": "exercisename", "message": "Nom de l'exercice"},
+        {"type": "input", "name": "exercise", "message": "Nom de l'exercice"},
     ]
 
     click.echo(f"Nouvel exercice")
@@ -96,7 +119,7 @@ def new_exercice(answers={}):
         {
             "type": "confirm",
             "name": "add_item",
-            "message": f"Ajouter un autre item pour l'exercice {exercise_ans['exercisename']}",
+            "message": f"Ajouter un autre item pour l'exercice {exercise_ans['exercise']}",
             "default": True,
         }
     ]
@@ -107,22 +130,22 @@ def new_item(answers={}):
 
 def new_item(answers={}):
     item_questions = [
-        {"type": "input", "name": "itemname", "message": "Nom de l'item",},
-        {"type": "input", "name": "comment", "message": "Description",},
+        {"type": "input", "name": "question", "message": "Nom de l'item",},
+        {"type": "input", "name": "comment", "message": "Commentaire",},
         {
            "type": "list",
            "name": "competence",
            "message": "Competence",
            "choices": ["Cher", "Rep", "Mod", "Rai", "Cal", "Com"],
        },
-        {"type": "input", "name": "domain", "message": "Domaine",},
+        {"type": "input", "name": "theme", "message": "Domaine",},
         {
            "type": "confirm",
            "name": "is_leveled",
            "message": "Évaluation par niveau",
            "default": True,
        },
-        {"type": "input", "name": "scorerate", "message": "Bareme"},
+        {"type": "input", "name": "score_rate", "message": "Bareme"},
         {
            "type": "confirm",
            "name": "correct",
@@ -130,7 +153,7 @@ def new_item(answers={}):
            "default": True,
        },
     ]
-    click.echo(f"Nouvelle question pour l'exercice {answers['exercisename']}")
+    click.echo(f"Nouvelle question pour l'exercice {answers['exercise']}")
     item_ans = prompt(item_questions, answers=answers)
     if item_ans["correct"]:
         return item_ans