Merge branch 'dev' of git_opytex:/lafrite/recopytex into dev

This commit is contained in:
Bertrand Benjamin 2021-01-21 09:25:58 +01:00
commit 494567cdb5
3 changed files with 118 additions and 73 deletions

View File

@@ -0,0 +1,41 @@
/* Global layout for the notes-analysis dashboard page. */
body {
    margin: 0px;
    font-family: 'Source Sans Pro','Roboto','Open Sans','Liberation Sans','DejaVu Sans','Verdana','Helvetica','Arial',sans-serif;
}

/* Dark page banner with a flush title. */
header {
    margin: 0px 0px 20px 0px;
    padding: 20px;
    background-color: #333333;
    color: #ffffff;
}

header > h1 {
    margin: 0px;
}

/* Main content: centered column taking almost the full viewport width. */
main {
    width: 95vw;
    margin: auto;
}

/* Evaluation-selection area, spaced from the content below it. */
#select {
    margin-bottom: 20px;
}

#select > div {
    width: 40vw;
    margin: auto;
}

/* Analysis area: charts laid out side by side, wrapping on narrow screens. */
#analysis {
    display: flex;
    flex-flow: row wrap;
}

/* Each analysis panel is a vertical stack at ~half the viewport width. */
#analysis > * {
    display: flex;
    flex-flow: column;
    width: 45vw;
    margin: auto;
}

View File

@@ -26,39 +26,57 @@ COLORS = {
3: "#68D42F",
}
app = dash.Dash(external_stylesheets=[dbc.themes.SIMPLEX])
# external_stylesheets = ["https://codepen.io/chriddyp/pen/bWLwgP.css"]
# app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# app = dash.Dash(__name__)
app = dash.Dash(__name__)
app.layout = html.Div(
children=[
dbc.NavbarSimple(
html.Header(
children=[
dbc.Alert("Dernière sauvegarde", id="lastsave", color="success"),
html.H1("Analyse des notes"),
html.P("Dernière sauvegarde", id="lastsave"),
],
brand="Analyse des notes",
brand_href="#",
color="success",
dark=True,
),
html.H2("Choix de l'évaluation"),
dbc.Row(
html.Main(
[
dbc.Col(
html.Section(
[
"Classe: ",
dbc.Select(
id="tribe",
options=[
{"label": t["name"], "value": t["name"]}
for t in config["tribes"]
html.Div(
[
"Classe: ",
dcc.Dropdown(
id="tribe",
options=[
{"label": t["name"], "value": t["name"]}
for t in config["tribes"]
],
value=config["tribes"][0]["name"],
),
],
value=config["tribes"][0]["name"],
style={
"display": "flex",
"flex-flow": "column",
},
),
]
html.Div(
[
"Evaluation: ",
dcc.Dropdown(id="csv"),
],
style={
"display": "flex",
"flex-flow": "column",
},
),
],
id="select",
style={
"display": "flex",
"flex-flow": "row wrap",
},
),
dbc.Col(
html.Div(
[
"Evaluation: ",
dbc.Select(id="csv"),
@@ -95,48 +113,33 @@ app.layout = html.Div(
},
)
),
dbc.Col(
html.Div(
[
dash_table.DataTable(
id="final_score_describe",
columns=[{"id": "count", "name": "count"},
{"id": "mean", "name": "mean"},
{"id": "std", "name": "std"},
{"id": "min", "name": "min"},
{"id": "25%", "name": "25%"},
{"id": "50%", "name": "50%"},
{"id": "75%", "name": "75%"},
{"id": "max", "name": "max"},
]
id="scores_table",
columns=[
{"id": c, "name": c} for c in NO_ST_COLUMNS.values()
],
style_cell={
"whiteSpace": "normal",
"height": "auto",
},
style_data_conditional=[],
editable=True,
),
dcc.Graph(
id="fig_assessment_hist",
),
dcc.Graph(id="fig_competences"),
]
html.Button("Ajouter un élément", id="btn_add_element"),
],
id="big_table",
),
dcc.Store(id="final_score"),
],
className="content",
style={
"width": "95vw",
"margin": "auto",
},
),
html.H2("Édition des notes"),
dbc.Row(
[
dash_table.DataTable(
id="scores_table",
columns=[{"id": c, "name": c} for c in NO_ST_COLUMNS.values()],
style_cell={
"whiteSpace": "normal",
"height": "auto",
},
style_data_conditional=[],
editable=True,
),
dbc.Button("Ajouter un élément", id="btn_add_element"),
]
),
html.H2("Actions"),
dcc.Store(id="final_score"),
]
],
)
@@ -179,9 +182,7 @@ def update_final_scores(data):
return [{}]
scores = pp_q_scores(scores)
assessment_scores = scores.groupby(["Eleve"]).agg(
{"Note": "sum", "Bareme": "sum"}
)
assessment_scores = scores.groupby(["Eleve"]).agg({"Note": "sum", "Bareme": "sum"})
return [assessment_scores.reset_index().to_dict("records")]
@@ -203,7 +204,7 @@ def update_final_scores_table(data):
[dash.dependencies.Input("final_score", "data")],
)
def update_final_scores_descr(data):
scores = pd.DataFrame.from_records(data)
scores = pd.DataFrame.from_records(data)
if scores.empty:
return [[{}]]
desc = scores["Note"].describe().T
@@ -223,7 +224,9 @@ def update_final_scores_hist(data):
return [{'data': [], 'layout':[]}]
ranges = np.linspace(
0, assessment_scores.Bareme.max(), int(assessment_scores.Bareme.max() * 2 + 1)
-0.5,
assessment_scores.Bareme.max(),
int(assessment_scores.Bareme.max() * 2 + 2),
)
bins = pd.cut(assessment_scores["Note"], ranges)
assessment_scores["Bin"] = bins
@@ -279,11 +282,11 @@ def update_competence_fig(data):
pt.loc[(str(i), "", ""), :] = ""
pt.sort_index(inplace=True)
index = (
pt.index.get_level_values(0)
pt.index.get_level_values(0).map(str)
+ ":"
+ pt.index.get_level_values(1)
+ pt.index.get_level_values(1).map(str)
+ " "
+ pt.index.get_level_values(2)
+ pt.index.get_level_values(2).map(str)
)
fig = go.Figure()
@@ -312,7 +315,6 @@ def update_competence_fig(data):
@app.callback(
[
dash.dependencies.Output("lastsave", "children"),
dash.dependencies.Output("lastsave", "color"),
],
[
dash.dependencies.Input("scores_table", "data"),
@@ -324,9 +326,9 @@ def save_scores(data, csv):
scores = pd.DataFrame.from_records(data)
scores.to_csv(csv, index=False)
except:
return [f"Soucis pour sauvegarder à {datetime.today()} dans {csv}"], "warning"
return [f"Soucis pour sauvegarder à {datetime.today()} dans {csv}"]
else:
return [f"Dernière sauvegarde {datetime.today()} dans {csv}"], "success"
return [f"Dernière sauvegarde {datetime.today()} dans {csv}"]
def highlight_value(df):
@@ -359,15 +361,16 @@ def highlight_value(df):
)
def update_scores_table(csv, add_element, data):
ctx = dash.callback_context
if ctx.triggered[0]['prop_id'] == "csv.value":
if ctx.triggered[0]["prop_id"] == "csv.value":
stack = pd.read_csv(csv, encoding="UTF8")
elif ctx.triggered[0]['prop_id'] == "btn_add_element.n_clicks":
elif ctx.triggered[0]["prop_id"] == "btn_add_element.n_clicks":
stack = pd.DataFrame.from_records(data)
infos = pd.DataFrame.from_records([{k: stack.iloc[-1][k] for k in NO_ST_COLUMNS.values()}])
infos = pd.DataFrame.from_records(
[{k: stack.iloc[-1][k] for k in NO_ST_COLUMNS.values()}]
)
stack = stack.append(infos)
return (
[{"id": c, "name": c} for c in stack.columns],
stack.to_dict("records"),
highlight_value(stack),
)

View File

@@ -87,8 +87,9 @@ def new_exam():
@cli.command()
def exam_analysis():
exam_app.run_server(debug=True)
@click.option("--debug", default=0, help="Debug mode for dash")
def exam_analysis(debug):
exam_app.run_server(debug=bool(debug))
@cli.command()