Compare commits

...

6 Commits

18 changed files with 269 additions and 111 deletions


@@ -1,66 +0,0 @@
DATA_BASE=./datas
PDF_BASE=$(DATA_BASE)/pdfs
PDF_YEARS=$(wildcard $(PDF_BASE)/*)
RAW_BASE=$(DATA_BASE)/raw
RAW_CRG=$(RAW_BASE)/CRG
RAW_CRG_YEARS=$(subst $(PDF_BASE), $(RAW_CRG), $(PDF_YEARS))

$(RAW_CRG)/%/: $(wildcard $(PDF_BASE)/%/*)
	echo $(wildcard $(PDF_BASE)/$*/*)
	@echo ----
	ls $(PDF_BASE)/$*/
	@echo ----
	echo $*
	@echo ----
	echo $^
	@echo ----
	echo $?

#./datas/raw/CRG/%:
#pdf-oralia extract all --src $$year --dest $$(subst $$PDF_BASE, $$RAW_CRG, $$year)

# $(RAW_CRG_YEARS): $(PDF_PATHS)
# for year in $(PDF_PATHS); do \
# echo $$year; \
# echo $$(subst $$PDF_BASE, $$RAW_CRG, $$year); \
# echo "----"; \
# done;

extract_pdfs:
	for year in 2021 2022 2023 2024; do \
		mkdir -p $(RAW_CRG)/$$year/extracted;\
		pdf-oralia extract all --src $(PDF_BASE)/$$year/ --dest $(RAW_CRG)/$$year/extracted; \
		pdf-oralia join --src $(RAW_CRG)/$$year/extracted/ --dest $(RAW_CRG)/$$year/; \
	done

clean_raw:
	rm -rf ./PLESNA Compta SYSTEM/raw/**/*.csv

clean_built:
	rm -rf $(DATA_BASE)/staging/**/*.csv
	rm -rf $(DATA_BASE)/gold/**/*.csv
	rm -rf $(DATA_BASE)/datamart/**/*.csv
	rm -rf $(DATA_BASE)/datamart/**/*.xlsx

run_ingest:
	python -m scripts ingest

run_feature:
	python -m scripts feature

run_datamart:
	python -m scripts datamart

build: clean_built run_ingest run_feature run_datamart

clean_all: clean_built clean_raw

import_nextcloud:
	rsync -av ~/Nextcloud/PLESNA\ Compta\ SYSTEM/Histoire/ ./datas/Histoire

push_nextcloud:
	rsync -av ./datas/datamart/ ~/Nextcloud/PLESNA\ Compta\ SYSTEM/DataMart


@@ -1,5 +1,15 @@
# E(T)LT for Plesna

## Installation

## Concepts

- `dataplatform`: aggregation of a data catalogue, compute engines and the DAG of transformations.
- `datacatalogue`: manages the content of the datastores.
- `datastore`: storage interface for the data.
- `compute`: processing engine for the flows.
- `graph/dag`: logical organisation of the flows and the data.

## Stages

- Raw: files kept as close to their original form as possible
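As a rough sketch of how these concepts map onto the classes added in this changeset (the directory layout below is purely illustrative):

from pathlib import Path

from plesna.dataplatform import DataPlateform
from plesna.datastore.fs_datacatalogue import FSDataCatalogue

# One file-system catalogue per stage; the paths are illustrative.
base = Path("/tmp/plesna-demo")
for stage in ["raw", "bronze", "silver"]:
    (base / stage).mkdir(parents=True, exist_ok=True)

platform = DataPlateform()
for stage in ["raw", "bronze", "silver"]:
    platform.add_datacatalague(stage, FSDataCatalogue(stage, base / stage))

print(platform.datacatalogues)  # expected: ['raw', 'bronze', 'silver']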

plesna/dataplatform.py Normal file

@@ -0,0 +1,27 @@
from plesna.datastore.datacatalogue import DataCatalogue
from plesna.graph.graph_set import GraphSet


class DataPlateformError(Exception):
    pass


class DataPlateform:
    def __init__(self):
        self._graphset = GraphSet()
        self._metadata_engine = ""
        self._transformations = {}
        self._datacatalogues = {}

    def add_datacatalague(self, name: str, datacatalogue: DataCatalogue):
        if name in self._datacatalogues:
            raise DataPlateformError(f"The datacatalogue {name} already exists")
        self._datacatalogues[name] = datacatalogue

    @property
    def datacatalogues(self):
        return list(self._datacatalogues)

    def get_datacatalogue(self, name: str):
        return self._datacatalogues[name]
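For illustration, a minimal sketch of how the class above behaves once a catalogue is registered; the bare object() stand-ins are only there to keep the example self-contained:

from plesna.dataplatform import DataPlateform, DataPlateformError

dp = DataPlateform()
dp.add_datacatalague("raw", object())  # any catalogue-like object can be registered
assert dp.datacatalogues == ["raw"]

# Registering the same name twice raises DataPlateformError.
try:
    dp.add_datacatalague("raw", object())
except DataPlateformError as err:
    print(err)  # The datacatalogue raw already exists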


@@ -1,18 +1,31 @@
import abc

from plesna.models.storage import Schema, Table


class DataCatalogue:
    def __init__(self):
        pass

    @property
    @abc.abstractmethod
    def schemas(self) -> dict[str:str]:
        """List schemas"""
    def schemas(self) -> list[str]:
        """List schema's names"""
        raise NotImplementedError

    @abc.abstractmethod
    def tables(self, schema) -> dict[str:str]:
        """List table in schema"""
    def schema(self, name: str) -> Schema:
        """Get the schema properties"""
        raise NotImplementedError

    @abc.abstractmethod
    def tables(self, schema:str) -> list[str]:
        """List table's name in schema"""
        raise NotImplementedError

    @abc.abstractmethod
    def table(self, schema:str, table:str) -> Table:
        """Get the table properties"""
        raise NotImplementedError

    @abc.abstractmethod
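For illustration, a short sketch of how calling code is expected to walk a catalogue through this abstract interface (the describe helper is hypothetical):

from plesna.datastore.datacatalogue import DataCatalogue


def describe(catalogue: DataCatalogue) -> None:
    """Walk a catalogue using only the abstract interface above."""
    for schema_name in catalogue.schemas:  # property -> list[str]
        print(schema_name, catalogue.schema(schema_name))  # -> Schema model
        for table_name in catalogue.tables(schema_name):  # -> list[str]
            print("  ", catalogue.table(schema_name, table_name))  # -> Table model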


@@ -0,0 +1,81 @@
from pathlib import Path

from pydantic import BaseModel, computed_field

from plesna.models.storage import Schema, Table

from .datacatalogue import DataCatalogue


class FakeSchema(BaseModel):
    name: str

    @computed_field
    @property
    def ref(self) -> Schema:
        return Schema(
            id=str(self.name),
            value=str(self.name),
        )


class FakeTable(BaseModel):
    name: str
    data: dict[str, list]

    @computed_field
    @property
    def ref(self) -> Table:
        return Table(
            id=str(self.name),
            value=str(self.name),
        )
class FakeDataCatalogue(DataCatalogue):
    """DataCatalogue based on dictionaries

    Data is kept in memory as {schema name: {table name: column data}}.
    The optional data argument is an assumption made here so the class is usable.
    """

    def __init__(self, name: str, data: dict[str, dict[str, dict[str, list]]] | None = None):
        self.name = name
        self._data = data or {}

    @property
    def schemas(self) -> list[str]:
        """List schema's names (the top-level keys of the data dictionary)"""
        return list(self._data)

    def schema(self, name: str) -> Schema:
        """Get the schema properties"""
        return FakeSchema(name=name).ref

    def tables(self, schema: str) -> list[str]:
        """List table's name in schema"""
        return list(self._data[schema])

    def table(self, schema: str, table: str) -> Table:
        """Get the table properties"""
        return FakeTable(name=table, data=self._data[schema][table]).ref


@@ -7,18 +7,6 @@ from plesna.models.storage import Schema, Table
from .datacatalogue import DataCatalogue


class FSSchema(BaseModel):
    path: Path

    @computed_field
    @property
    def ref(self) -> Schema:
        return Schema(
            id=str(self.path),
            value=str(self.path),
        )


class FSTable(BaseModel):
    path: Path

@@ -31,6 +19,20 @@ class FSTable(BaseModel):
        )


class FSSchema(BaseModel):
    path: Path
    tables: list[str]

    @computed_field
    @property
    def ref(self) -> Schema:
        return Schema(
            id=str(self.path),
            value=str(self.path),
        )


class FSDataCatalogue(DataCatalogue):
    """DataCatalogue based on files tree structure"""
@@ -72,12 +74,18 @@ class FSDataCatalogue(DataCatalogue):
            if not str(f).startswith(".")
        ]

    def schemas(self) -> dict[str, FSSchema]:
    @property
    def schemas(self) -> list[str]:
        """List schemas (sub directories within basepath)"""
        subdirectories = self.ls("", only_directories=True, recursive=True)
        return {str(path): FSSchema(path=path) for path in subdirectories}
        return [str(d) for d in subdirectories]

    def tables(self, schema_id=".") -> dict[str, FSTable]:
    def schema(self, schema: str) -> FSSchema:
        """Get one schema (a sub directory) and the tables it contains"""
        tables = self.ls(schema, only_files=True)
        return FSSchema(path=Path(schema), tables=tables)

    def table(self, schema: str, table:str) -> FSTable:
        """List table in schema (which are files in the directory)"""
        schema_path = schema_id
        return {path: FSTable(path=path) for path in self.ls(schema_path, only_files=True)}
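A small usage sketch of the reworked FSDataCatalogue API, mirroring the updated tests further down; the directory layout and the expected output are illustrative and assume the catalogue's basepath handling used in the test fixtures:

from pathlib import Path

from plesna.datastore.fs_datacatalogue import FSDataCatalogue

# Illustrative layout: one sub-directory per schema, one file per table.
base = Path("/tmp/plesna-example")
(base / "username").mkdir(parents=True, exist_ok=True)
(base / "username" / "username.csv").write_text("Username;Identifier\nbooker12;9012\n")

repo = FSDataCatalogue("example", base)
print(repo.schemas)                    # expected: ['.', 'username']
print(repo.schema("username").tables)  # expected: ['username.csv']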


@@ -0,0 +1,43 @@
from pathlib import Path

import pytest

from plesna.dataplatform import DataPlateform
from plesna.datastore.fs_datacatalogue import FSDataCatalogue

FIXTURE_DIR = Path(__file__).parent / Path("raw_data")


@pytest.fixture
def raw_catalogue(tmp_path):
    raw_path = Path(tmp_path) / "raw"
    raw_path.mkdir()
    return FSDataCatalogue("raw", raw_path)


@pytest.fixture
def bronze_catalogue(tmp_path):
    bronze_path = Path(tmp_path) / "bronze"
    bronze_path.mkdir()
    return FSDataCatalogue("bronze", bronze_path)


@pytest.fixture
def silver_catalogue(tmp_path):
    silver_path = Path(tmp_path) / "silver"
    silver_path.mkdir()
    return FSDataCatalogue("silver", silver_path)


def test_add_catalogue(
    raw_catalogue: FSDataCatalogue,
    bronze_catalogue: FSDataCatalogue,
    silver_catalogue: FSDataCatalogue,
):
    dp = DataPlateform()
    dp.add_datacatalague("raw", raw_catalogue)
    dp.add_datacatalague("bronze", bronze_catalogue)
    dp.add_datacatalague("silver", silver_catalogue)

    assert dp.datacatalogues == ["raw", "bronze", "silver"]
    assert dp.get_datacatalogue("raw") == raw_catalogue


@@ -4,8 +4,9 @@ from pathlib import Path
import pytest

from plesna.datastore.fs_datacatalogue import FSDataCatalogue
from plesna.models.storage import Schema

FIXTURE_DIR = Path(__file__).parent / Path("./fs_files/")
FIXTURE_DIR = Path(__file__).parent.parent / Path("./raw_datas/")


@pytest.fixture

@@ -42,31 +43,19 @@ def test_init(location):
def test_list_schema(location):
    repo = FSDataCatalogue("example", location)
    assert {id: s.model_dump()["ref"]["id"] for id, s in repo.schemas().items()} == {
        ".": ".",
        "username": "username",
        "salary": "salary",
    }
    assert {id: s.model_dump()["ref"]["value"] for id, s in repo.schemas().items()} == {
        ".": ".",
        "username": "username",
        "salary": "salary",
    }
    assert {id: s.model_dump()["path"] for id, s in repo.schemas().items()} == {
        ".": Path("."),
        "username": Path("username"),
        "salary": Path("salary"),
    }
    assert repo.schemas == [".", "username", "salary"]
    assert repo.schema(".").ref == Schema(id=".", value=".")
    assert repo.schema("username").ref == Schema(id="username", value="username")


def test_list_tables(location):
def test_list_tables_schema(location):
    repo = FSDataCatalogue("example", location)
    assert repo.tables() == {}
    assert {id: t.model_dump()["ref"]["value"] for id, t in repo.tables("username").items()} == {
        "username.csv": "username.csv",
        "username-password-recovery-code.xlsx": "username-password-recovery-code.xlsx",
        "username-password-recovery-code.xls": "username-password-recovery-code.xls",
    }
    assert {id: t.model_dump()["ref"]["value"] for id, t in repo.tables("salary").items()} == {
        "salary.pdf": "salary.pdf",
    }
    assert repo.schema(".").tables == []
    assert repo.schema("username").tables == [
        'username.csv',
        'username-password-recovery-code.xlsx',
        'username-password-recovery-code.xls',
    ]
    assert repo.schema("salary").tables == ["salary.pdf"]


@@ -0,0 +1,39 @@
from pathlib import Path

import pytest

from plesna.dataplatform import DataPlateform
from plesna.datastore.fs_datacatalogue import FSDataCatalogue

FIXTURE_DIR = Path(__file__).parent / Path("raw_data")


@pytest.fixture
def raw_catalogue(tmp_path):
    raw_path = Path(tmp_path) / "raw"
    raw_path.mkdir()
    return FSDataCatalogue("raw", raw_path)


@pytest.fixture
def bronze_catalogue(tmp_path):
    bronze_path = Path(tmp_path) / "bronze"
    bronze_path.mkdir()
    return FSDataCatalogue("bronze", bronze_path)


@pytest.fixture
def silver_catalogue(tmp_path):
    silver_path = Path(tmp_path) / "silver"
    silver_path.mkdir()
    return FSDataCatalogue("silver", silver_path)


@pytest.fixture
def dataplateform(
    raw_catalogue: FSDataCatalogue,
    bronze_catalogue: FSDataCatalogue,
    silver_catalogue: FSDataCatalogue,
):
    dp = DataPlateform()
    dp.add_datacatalague("raw", raw_catalogue)
    dp.add_datacatalague("bronze", bronze_catalogue)
    dp.add_datacatalague("silver", silver_catalogue)
    return dp
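The dataplateform fixture above is not yet exercised by any test; a hypothetical test using it could look like the sketch below (the test name and assertions are illustrative, and it relies on the fixture returning dp):

def test_fixture_registers_catalogues(
    dataplateform: DataPlateform,
    raw_catalogue: FSDataCatalogue,
):
    assert dataplateform.datacatalogues == ["raw", "bronze", "silver"]
    assert dataplateform.get_datacatalogue("raw") is raw_catalogue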

BIN tests/raw_datas/salary.pdf Normal file (binary content not shown)

Two further binary files changed (file names and content not shown).


@@ -0,0 +1,7 @@
Username;Identifier;First name;Last name
booker12;9012;Rachel;Booker
grey07;2070;Laura;Grey
johnson81;4081;Craig;Johnson
jenkins46;9346;Mary;Jenkins
smith79;5079;Jamie;Smith

uv.lock generated Normal file

@@ -0,0 +1,7 @@
version = 1
requires-python = ">=3.13"
[[package]]
name = "plesna"
version = "0.1.0"
source = { virtual = "." }