Merge ssh://git_opytex:/lafrite/recopytex into dev

commit 1ffdd8676b
Author: Bertrand Benjamin
Date: 2021-05-10 11:27:24 +02:00

7 changed files with 256 additions and 9 deletions

View File

@@ -2,16 +2,16 @@
 # encoding: utf-8
 
 NO_ST_COLUMNS = {
+    "term": "Trimestre",
     "assessment": "Nom",
-    "term": "Trimestre",
     "date": "Date",
     "exercise": "Exercice",
     "question": "Question",
     "competence": "Competence",
     "theme": "Domaine",
     "comment": "Commentaire",
+    "score_rate": "Bareme",
     "is_leveled": "Est_nivele",
-    "score_rate": "Bareme",
 }
 
 COLUMNS = {

View File

@ -50,7 +50,6 @@ def score_to_mark(x):
f"The evaluation is out of range: {x[COLUMNS['score']]} at {x}" f"The evaluation is out of range: {x[COLUMNS['score']]} at {x}"
) )
return round(x[COLUMNS["score"]] * x[COLUMNS["score_rate"]] / 3, 2) return round(x[COLUMNS["score"]] * x[COLUMNS["score_rate"]] / 3, 2)
#return round_half_point(x[COLUMNS["score"]] * x[COLUMNS["score_rate"]] / 3)
if x[COLUMNS["score"]] > x[COLUMNS["score_rate"]]: if x[COLUMNS["score"]] > x[COLUMNS["score_rate"]]:
raise ValueError( raise ValueError(

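For readers of this hunk, the kept return line is the whole marking rule: a leveled item is graded with a level that the formula scales to the item's score_rate (the division by 3 assumes levels run from 0 to 3) and rounds to two decimals. A minimal standalone sketch of that arithmetic, with hypothetical values (this helper is not the module's real score_to_mark, which operates on a DataFrame row):

def level_to_mark(level: int, score_rate: float) -> float:
    # a level in 0..3 is scaled to the item's score_rate and rounded to 2 decimals
    return round(level * score_rate / 3, 2)

print(level_to_mark(2, 5))  # 3.33 -- level 2 of 3 on an item graded out of 5
print(level_to_mark(3, 5))  # 5.0  -- the top level yields the full score_rate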
View File

@@ -0,0 +1,10 @@
#!/usr/bin/env python
# encoding: utf-8

import yaml

CONFIGPATH = "recoconfig.yml"

with open(CONFIGPATH, "r") as configfile:
    config = yaml.load(configfile, Loader=yaml.FullLoader)

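This new module simply parses recoconfig.yml from the working directory and exposes the resulting dictionary. Within this commit the only key read from it is config["source"] (used by prepare_csv.py below to locate tribe directories), so a minimal recoconfig.yml could be the single line below; the key name comes from that usage, the value is a made-up example:

source: ./sheets   # hypothetical: a directory with one subdirectory per tribe, each containing an eleves.csv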
View File

@@ -0,0 +1,160 @@
#!/usr/bin/env python
# encoding: utf-8

import click
from pathlib import Path
from datetime import datetime
from PyInquirer import prompt, print_json
import pandas as pd
import numpy as np

from .config import config
from ..config import NO_ST_COLUMNS


class PromptAbortException(EOFError):
    def __init__(self, message, errors=None):
        # Call the base class constructor with the parameters it needs
        super(PromptAbortException, self).__init__("Abort questionnary", errors)


def get_tribes(answers):
    """ List tribes based on subdirectory of config["source"] which have an "eleves.csv" file inside """
    return [
        p.name for p in Path(config["source"]).iterdir() if (p / "eleves.csv").exists()
    ]


def prepare_csv():
    items = new_eval()

    item = items[0]
    # item = {"tribe": "308", "date": datetime.today(), "assessment": "plop"}
    csv_output = (
        Path(config["source"])
        / item["tribe"]
        / f"{item['date']:%y%m%d}_{item['assessment']}.csv"
    )

    students = pd.read_csv(Path(config["source"]) / item["tribe"] / "eleves.csv")["Nom"]

    columns = list(NO_ST_COLUMNS.keys())
    items = [[it[c] for c in columns] for it in items]
    columns = list(NO_ST_COLUMNS.values())
    items_df = pd.DataFrame.from_records(items, columns=columns)
    for s in students:
        items_df[s] = np.nan

    items_df.to_csv(csv_output, index=False, date_format="%d/%m/%Y")
    click.echo(f"Saving csv file to {csv_output}")


def new_eval(answers={}):
    click.echo(f"Préparation d'un nouveau devoir")

    eval_questions = [
        {"type": "input", "name": "assessment", "message": "Nom de l'évaluation",},
        {
            "type": "list",
            "name": "tribe",
            "message": "Classe concernée",
            "choices": get_tribes,
        },
        {
            "type": "input",
            "name": "date",
            "message": "Date du devoir (%y%m%d)",
            "default": datetime.today().strftime("%y%m%d"),
            "filter": lambda val: datetime.strptime(val, "%y%m%d"),
        },
        {
            "type": "list",
            "name": "term",
            "message": "Trimestre",
            "choices": ["1", "2", "3"],
        },
    ]

    eval_ans = prompt(eval_questions)

    items = []
    add_exo = True
    while add_exo:
        ex_items = new_exercice(eval_ans)
        items += ex_items
        add_exo = prompt(
            [
                {
                    "type": "confirm",
                    "name": "add_exo",
                    "message": "Ajouter un autre exercice",
                    "default": True,
                }
            ]
        )["add_exo"]

    return items


def new_exercice(answers={}):
    exercise_questions = [
        {"type": "input", "name": "exercise", "message": "Nom de l'exercice"},
    ]

    click.echo(f"Nouvel exercice")
    exercise_ans = prompt(exercise_questions, answers=answers)

    items = []
    add_item = True
    while add_item:
        try:
            item_ans = new_item(exercise_ans)
        except PromptAbortException:
            click.echo("Création de l'item annulée")
        else:
            items.append(item_ans)
        add_item = prompt(
            [
                {
                    "type": "confirm",
                    "name": "add_item",
                    "message": f"Ajouter un autre item pour l'exercice {exercise_ans['exercise']}",
                    "default": True,
                }
            ]
        )["add_item"]

    return items


def new_item(answers={}):
    item_questions = [
        {"type": "input", "name": "question", "message": "Nom de l'item",},
        {"type": "input", "name": "comment", "message": "Commentaire",},
        {
            "type": "list",
            "name": "competence",
            "message": "Competence",
            "choices": ["Cher", "Rep", "Mod", "Rai", "Cal", "Com"],
        },
        {"type": "input", "name": "theme", "message": "Domaine",},
        {
            "type": "confirm",
            "name": "is_leveled",
            "message": "Évaluation par niveau",
            "default": True,
        },
        {"type": "input", "name": "score_rate", "message": "Bareme"},
        {
            "type": "confirm",
            "name": "correct",
            "message": "Tout est correct?",
            "default": True,
        },
    ]
    click.echo(f"Nouvelle question pour l'exercice {answers['exercise']}")
    item_ans = prompt(item_questions, answers=answers)
    if item_ans["correct"]:
        return item_ans

    raise PromptAbortException("Abort item creation")

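prepare_csv above writes a DataFrame whose columns are the NO_ST_COLUMNS labels (in the dictionary order set by the first file of this commit) followed by one empty column per student read from the tribe's eleves.csv. A hypothetical header row for a two-student tribe (student names invented for illustration) would therefore be:

Trimestre,Nom,Date,Exercice,Question,Competence,Domaine,Commentaire,Bareme,Est_nivele,Alice,Bob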
View File

@@ -39,7 +39,7 @@ def setup():
 
 @cli.command()
 def new_exam():
-    """ Create new exam csv file """
+    """Create new exam csv file"""
     exam = Exam(**prompt_exam())
 
     if exam.path(".yml").exists():
@@ -112,7 +112,7 @@ def report(csv_file):
 
     try:
         date = datetime.strptime(date, "%y%m%d")
     except ValueError:
-        date = None
+        date = datetime.today().strptime(date, "%y%m%d")
 
     tribe = str(tribe_dir).split("/")[-1]

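One detail worth noting when reading the second hunk: datetime.strptime is a classmethod, so calling it on datetime.today() does not substitute today's date, it just parses the same string again. A quick check of that equivalence (the date string is invented for illustration):

from datetime import datetime

# strptime ignores the instance it is called on; both calls parse the string itself
assert datetime.today().strptime("210510", "%y%m%d") == datetime.strptime("210510", "%y%m%d")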
View File

@@ -1,4 +1,77 @@
-pandas
-click
-papermill
 prompt_toolkit
+ansiwrap==0.8.4
+appdirs==1.4.3
+attrs==19.1.0
+backcall==0.1.0
+black==19.10b0
+bleach==3.1.0
+certifi==2019.6.16
+chardet==3.0.4
+Click==7.0
+colorama==0.4.1
+cycler==0.10.0
+decorator==4.4.0
+defusedxml==0.6.0
+entrypoints==0.3
+future==0.17.1
+idna==2.8
+importlib-resources==1.0.2
+ipykernel==5.1.3
+ipython==7.11.1
+ipython-genutils==0.2.0
+ipywidgets==7.5.1
+jedi==0.15.2
+Jinja2==2.10.3
+jsonschema==3.2.0
+jupyter==1.0.0
+jupyter-client==5.3.4
+jupyter-console==6.1.0
+jupyter-core==4.6.1
+jupytex==0.0.3
+kiwisolver==1.1.0
+Markdown==3.1.1
+MarkupSafe==1.1.1
+matplotlib==3.1.2
+mistune==0.8.4
+nbconvert==5.6.1
+nbformat==5.0.3
+notebook==6.0.3
+numpy==1.18.1
+pandas==0.25.3
+pandocfilters==1.4.2
+papermill==1.2.1
+parso==0.5.2
+pathspec==0.7.0
+pexpect==4.8.0
+pickleshare==0.7.5
+prometheus-client==0.7.1
+prompt-toolkit==1.0.14
+ptyprocess==0.6.0
+Pygments==2.5.2
+PyInquirer==1.0.3
+pyparsing==2.4.6
+pyrsistent==0.15.7
+python-dateutil==2.8.0
+pytz==2019.3
+PyYAML==5.3
+pyzmq==18.1.1
+qtconsole==4.6.0
+-e git+git_opytex:/lafrite/recopytex.git@7e026bedb24c1ca8bef3b71b3d63f8b0d6916e81#egg=Recopytex
+regex==2020.1.8
+requests==2.22.0
+scipy==1.4.1
+Send2Trash==1.5.0
+six==1.12.0
+tenacity==6.0.0
+terminado==0.8.3
+testpath==0.4.4
+textwrap3==0.9.2
+toml==0.10.0
+tornado==6.0.3
+tqdm==4.41.1
+traitlets==4.3.2
+typed-ast==1.4.1
+urllib3==1.25.8
+wcwidth==0.1.8
+webencodings==0.5.1
+widgetsnbextension==3.5.1

View File

@@ -5,7 +5,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='Recopytex',
-    version='0.1',
+    version='1.1.1',
     description='Assessment analysis',
     author='Benjamin Bertrand',
     author_email='',
@@ -13,6 +13,11 @@ setup(
     include_package_data=True,
     install_requires=[
         'Click',
+        'pandas',
+        'numpy',
+        'papermill',
+        'pyyaml',
+        'PyInquirer',
     ],
     entry_points='''
         [console_scripts]
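With install_requires now listing the runtime dependencies (pandas, numpy, papermill, pyyaml, PyInquirer) explicitly, a development checkout should only need the usual editable install to pull them in; the exact console script registered under [console_scripts] is not shown in this excerpt:

pip install -e .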