#!/usr/bin/env python
# encoding: utf-8

"""
Producing then compiling templates
"""

import csv
import os
import logging
from pathlib import Path

import pytex
from mapytex import Expression, Integer, Decimal

import bopytex.filters as filters

formatter = logging.Formatter("%(name)s :: %(levelname)s :: %(message)s")
steam_handler = logging.StreamHandler()
steam_handler.setLevel(logging.DEBUG)
steam_handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
logger.addHandler(steam_handler)


def setup():
    Expression.set_render("tex")
    logger.debug(f"Render for Expression is {Expression.RENDER}")

    mapytex_tools = {
        "Expression": Expression,
        "Integer": Integer,
        "Decimal": Decimal,
        # "Polynom": mapytex.Polynom,
        # "Fraction": mapytex.Fraction,
        # "Equation": mapytex.Equation,
        # "random_str": mapytex.random_str,
        # "random_pythagore": mapytex.random_pythagore,
        # "Dataset": mapytex.Dataset,
        # "WeightedDataset": mapytex.WeightedDataset,
    }
    pytex.update_export_dict(mapytex_tools)

    pytex.add_filter("calculus", filters.do_calculus)
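
# Note (an assumption inferred from the calls above): setup() is meant to run
# once before any feed()/crazy_feed() call, so that the mapytex tools and the
# "calculus" filter are exported to pytex and usable inside the templates.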


def get_working_dir(options):
    """ Get the working directory """
    if options["working_dir"]:
        working_dir = Path(options["working_dir"])
    else:
        try:
            template = Path(options["template"])
        except TypeError:
            raise ValueError(
                "Need to set the working directory or to give a template"
            )
        else:
            working_dir = template.parent
    logger.debug(f"The output directory will be {working_dir}")
    return working_dir


def activate_printanswers(
    texfile, noans=r"solution/print = false", ans=r"solution/print = true"
):
    """ Activate the printanswers mode in texfile """
    output_fname = "corr_" + texfile
    with open(texfile, "r") as input_f:
        with open(output_fname, "w") as output_f:
            for line in input_f.readlines():
                output_f.write(line.replace(noans, ans))
    return output_fname


def deactivate_printanswers(corr_fname):
    """ Remove the corrected version of the texfile """
    Path(corr_fname).unlink()
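
# Usage sketch (hypothetical file name, assuming the texfile contains the
# "solution/print = false" flag): activate_printanswers writes a corrected
# copy with the flag switched on, and deactivate_printanswers removes it.
#
#     corr = activate_printanswers("01_exam.tex")  # -> "corr_01_exam.tex"
#     deactivate_printanswers(corr)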


def pdfjoin(pdf_files, destname, working_dir=".", rm_pdfs=1):
    """ Join pdf files into a single pdf with pdfjam

    :param pdf_files: list of pdf files to join
    :param destname: name for joined pdf
    :param working_dir: the working directory
    :param rm_pdfs: Remove pdf_files after joining them
    :returns: None
    """
    joined_pdfs = Path(working_dir) / Path(destname)
    pdf_files_str = " ".join(pdf_files)
    pdfjam = f"pdfjam {pdf_files_str} -o {joined_pdfs}"
    logger.debug(f"Run {pdfjam}")
    logger.info("Joining pdf files")
    os.system(pdfjam)
    if rm_pdfs:
        logger.info(f"Remove {pdf_files_str}")
        os.system(f"rm {pdf_files_str}")
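
# Usage sketch (hypothetical file names): this relies on the external
# `pdfjam` command being available on the PATH.
#
#     pdfjoin(["01_exam.pdf", "02_exam.pdf"], "all_exam.pdf", working_dir=".", rm_pdfs=0)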


def extract_student_csv(csv_filename):
    """ Extract student list from csv_filename """
    with open(csv_filename, "r") as csvfile:
        reader = csv.DictReader(csvfile)
        return [r for r in reader]
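
# Example with an assumed csv layout (actual column names depend on the file;
# csv.DictReader uses the first row as the header):
#
#     Name,Class
#     Alice,2nd1
#     Bob,2nd1
#
# extract_student_csv("students.csv") would then return
# [{"Name": "Alice", "Class": "2nd1"}, {"Name": "Bob", "Class": "2nd1"}]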


def subject_metadatas(options):
    """ Return metadata on the subjects to produce

    If a csv file is given, the metadata is based on it;
    otherwise it is based on the requested quantity.

    :example:
    >>> subject_metadatas(options)
    """
    if options["students_csv"]:
        metadatas = []
        for (i, s) in enumerate(extract_student_csv(options["students_csv"])):
            d = {"num": f"{i+1:02d}"}
            d.update(s)
            metadatas.append(d)
    elif options["number_subjects"] > 0:
        metadatas = [{"num": f"{i+1:02d}"} for i in range(options["number_subjects"])]
    else:
        raise ValueError("Need metacsv or quantity to build subject metadata")

    for meta in metadatas:
        meta.update(
            {
                "template": str(Path(options["template"]).name),
                "texfile": str(Path(options["template"]).name).replace(
                    "tpl", meta["num"]
                ),
                "directory": str(Path(options["template"]).parent),
            }
        )

    return metadatas
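
# Sketch of one produced metadata entry, assuming a hypothetical template
# "exam/tpl_exam.tex" and number_subjects=2 (the values are illustrative):
#
#     {"num": "01", "template": "tpl_exam.tex",
#      "texfile": "01_exam.tex", "directory": "exam"}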


def feed(*args, **kwrds):
    """ Nice and smooth pytex feed """
    pytex.feed(*args, **kwrds)


def crazy_feed(*args, **kwrds):
    """ Crazy mode for pytex feed: retry until the feed succeeds """
    while True:
        try:
            pytex.feed(*args, **kwrds)
        except Exception:
            logger.debug(f"Crazy feed is working hard...! {args} {kwrds}")
        else:
            break


def clean(directory):
    """ Clean the directory with pytex """
    pytex.clean(directory)


def texcompile(filename):
    """ Compile the texfile with pytex """
    logger.debug(f"Start compiling {filename}")
    pytex.pdflatex(Path(filename))
    logger.debug("End compiling")


def produce_and_compile(options):
    """ Produce and compile subjects """
    logger.debug(f"CLI parser gets {options}")

    template = Path(options["template"]).name
    directory = Path(options["template"]).parent
    metadatas = subject_metadatas(options)
    logger.debug(f"Metadata {metadatas}")

    for meta in metadatas:
        logger.debug(f"Feeding template toward {meta['texfile']}")
        if options["crazy"]:
            crazy_feed(
                template=Path(meta["directory"]) / meta["template"],
                data=meta,
                output=meta["texfile"],
                force=1,
            )
        else:
            feed(
                template=Path(meta["directory"]) / meta["template"],
                data=meta,
                output=meta["texfile"],
                force=1,
            )
        assert Path(meta["texfile"]).exists()
        logger.debug(f"{meta['texfile']} fed")

        if options["corr"]:
            logger.debug(f"Building correction for {meta['texfile']}")
            meta.update({
                "corr_texfile": activate_printanswers(meta["texfile"]),
            })

        if not options["no_compile"]:
            for prefix in ["", "corr_"]:
                key = prefix + "texfile"
                try:
                    meta[key]
                except KeyError:
                    pass
                else:
                    texcompile(meta[key])
                    meta.update({
                        prefix + 'pdffile': meta[key].replace('tex', 'pdf')
                    })

    if not options["no_join"]:
        for prefix in ["", "corr_"]:
            key = prefix + "pdffile"
            try:
                pdfs = [m[key] for m in metadatas]
            except KeyError:
                pass
            else:
                pdfjoin(
                    pdfs,
                    template.replace("tpl", prefix + "all").replace(".tex", ".pdf"),
                    directory,
                    rm_pdfs=1,
                )

    if not options["dirty"]:
        clean(directory)
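
# Sketch of the options mapping this module expects; the keys are the ones
# looked up above, the values here are only illustrative:
#
#     options = {
#         "template": "exam/tpl_exam.tex",
#         "working_dir": None,
#         "students_csv": "",
#         "number_subjects": 2,
#         "crazy": False,
#         "corr": True,
#         "no_compile": False,
#         "no_join": False,
#         "dirty": False,
#     }
#     produce_and_compile(options)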


# -----------------------------
# Settings for 'vim'
# vim:set autoindent expandtab tabstop=4 shiftwidth=4: