Feat: use and test csv files

2019-12-23 16:14:56 +01:00
parent 9d5c231c9c
commit 1ba29c057b
3 changed files with 78 additions and 34 deletions


@@ -100,14 +100,33 @@ def pdfjoin(pdf_files, destname, working_dir=".", rm_pdfs=1):
def extract_student_csv(csv_filename):
""" Extract student list from csv_filename
Student identifier is got in the column "Élève".
"""
""" Extract student list from csv_filename """
with open(csv_filename, "r") as csvfile:
reader = csv.DictReader(csvfile)
return [r["Élève"] for r in reader]
return [r for r in reader]
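With this change, extract_student_csv returns the full row dictionaries produced by csv.DictReader rather than just the "Élève" values. A minimal sketch of the before/after behavior, assuming a hypothetical students.csv whose columns include Élève plus a made-up classe column:

# students.csv (hypothetical contents):
#   Élève,classe
#   Alice,2nde1
#   Bob,2nde2
extract_student_csv("students.csv")
# before: ["Alice", "Bob"]
# after:  [{"Élève": "Alice", "classe": "2nde1"},
#          {"Élève": "Bob", "classe": "2nde2"}]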
def subject_metadata(quantity=0, metacsv=None):
""" Return metadata on subject to produce
if csv is given it will based on is
otherwise it will be based on quantity
:example:
>>> subject_metadata(10)
"""
if metacsv:
metadata = []
for (i, s) in enumerate(extract_student_csv(metacsv)):
d = {"num": f"{i+1:02d}"}
d.update(s)
metadata.append(d)
elif quantity > 0:
metadata = [{"num": f"{i+1:02d}"} for i in range(quantity)]
else:
raise ValueError("Need metacsv or quantity to build subject metadata")
return metadata
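For reference, a sketch of what each branch of subject_metadata yields (the CSV-based rows carry whatever columns extract_student_csv returns, plus the zero-padded num):

subject_metadata(quantity=3)
# [{"num": "01"}, {"num": "02"}, {"num": "03"}]

subject_metadata(metacsv="students.csv")  # hypothetical file from above
# [{"num": "01", "Élève": "Alice", "classe": "2nde1"},
#  {"num": "02", "Élève": "Bob", "classe": "2nde2"}]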
def produce_and_compile(options):
@@ -122,15 +141,11 @@ def produce_and_compile(options):
template = Path(options["template"]).name
logger.debug(f"Template will be {template}")
if options["students_csv"]:
list_infos = [
{"num": f"{i+1:02d}", "name": s}
for (i, s) in enumerate(extract_student_csv(options["students_csv"]))
]
else:
list_infos = [
{"num": f"{i+1:02d}"} for i in range(options["number_subjects"])
]
list_infos = subject_metadata(
options["number_subjects"], options["students_csv"]
)
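Note the positional call: number_subjects binds to quantity and students_csv to metacsv. Because subject_metadata tests metacsv first, a non-empty students_csv takes precedence over the requested count, matching the removed if/else above. A sketch with hypothetical option values:

options = {"number_subjects": 3, "students_csv": None}  # hypothetical
subject_metadata(options["number_subjects"], options["students_csv"])
# -> [{"num": "01"}, {"num": "02"}, {"num": "03"}]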
logger.debug(f"Metadata {list_infos}")
tex_files = []
for infos in list_infos:
@@ -162,7 +177,7 @@ def produce_and_compile(options):
logger.debug(f"Start compiling {texfile}")
pytex.pdflatex(texfile)
logger.debug(f"End compiling {texfile}")
pdf_files.append(str(texfile).split('.')[0] + ".pdf")
pdf_files.append(str(texfile).split(".")[0] + ".pdf")
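Aside: str(texfile).split(".")[0] keeps only what precedes the first dot, so a name like exam.v2.tex would map to exam.pdf, and a dotted directory component would break the path. A more robust sketch, not part of this commit, assuming texfile is a path string or pathlib.Path:

from pathlib import Path
pdf_files.append(str(Path(texfile).with_suffix(".pdf")))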
logger.debug(f"Compiled files : {pdf_files}")
if not options["no_join"] and not options["no_compile"]: