Compare commits

...

4 Commits

15 changed files with 374 additions and 203 deletions

View File

@@ -1,5 +1,5 @@
from plesna.datastore.datacatalogue import DataCatalogue
from plesna.graph.graph_set import GraphSet
from plesna.storage.repository.repository import Repository
class DataPlateformError(Exception):
@@ -11,17 +11,17 @@ class DataPlateform:
self._graphset = GraphSet()
self._metadata_engine = ""
self._transformations = {}
self._datacatalogues = {}
self._repositories = {}
def add_datacatalague(self, name: str, datacatalogue: DataCatalogue):
if name in self._datacatalogues:
raise DataPlateformError("The datacatalogue {name} already exists")
def add_repository(self, name: str, repository: Repository):
if name in self._repositories:
raise DataPlateformError("The repository {name} already exists")
self._datacatalogues[name] = datacatalogue
self._repositories[name] = repository
@property
def datacatalogues(self):
return list(self._datacatalogues)
def repositories(self) -> list[str]:
return list(self._repositories)
def get_datacatalogue(self, name: str):
return self._datacatalogues[name]
def repository(self, name: str) -> Repository:
return self._repositories[name]
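
A minimal usage sketch of the reworked DataPlateform API, assuming the FSRepository introduced later in this changeset and a hypothetical ./raw directory that already exists:

from plesna.dataplatform import DataPlateform
from plesna.storage.repository.fs_repository import FSRepository

repo = FSRepository("raw", "./raw", "raw")  # hypothetical directory, must exist

dp = DataPlateform()
dp.add_repository("raw", repo)

assert dp.repositories == ["raw"]     # names of the registered repositories
assert dp.repository("raw") is repo   # lookup by name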

View File

@@ -1,3 +0,0 @@
class DataStore:
def __init__(self, name):
self._name

View File

@@ -1,91 +0,0 @@
from pathlib import Path
from pydantic import BaseModel, computed_field
from plesna.models.storage import Schema, Table
from .datacatalogue import DataCatalogue
class FSTable(BaseModel):
path: Path
@computed_field
@property
def ref(self) -> Table:
return Table(
id=str(self.path),
value=str(self.path),
)
class FSSchema(BaseModel):
path: Path
tables: list[str]
@computed_field
@property
def ref(self) -> Schema:
return Schema(
id=str(self.path),
value=str(self.path),
)
class FSDataCatalogue(DataCatalogue):
"""DataCatalogue based on files tree structure"""
def __init__(self, name: str, basepath: str = "."):
self._basepath = Path(basepath)
self.name = name
assert self._basepath.exists()
def ls(
self, dir="", only_files=False, only_directories=False, recursive=False
) -> list[str]:
dirpath = self._basepath / dir
if only_files:
return [
str(f.relative_to(dirpath))
for f in dirpath.iterdir()
if not f.is_dir() and not str(f).startswith(".")
]
if only_directories:
if recursive:
return [
str(f[0].relative_to(dirpath))
for f in dirpath.walk()
if not str(f).startswith(".")
]
return [
str(f.relative_to(dirpath))
for f in dirpath.iterdir()
if f.is_dir() and not str(f).startswith(".")
]
return [
str(f.relative_to(dirpath))
for f in dirpath.iterdir()
if not str(f).startswith(".")
]
@property
def schemas(self) -> list[str]:
"""List schemas (sub directories within basepath)"""
subdirectories = self.ls("", only_directories=True, recursive=True)
return [str(d) for d in subdirectories]
def schema(self, schema: str) -> FSSchema:
"""List schemas (sub directories within basepath)"""
tables = self.ls(schema, only_files=True)
return FSSchema(path=Path(schema), tables=tables)
def table(self, schema: str, table:str) -> FSTable:
"""List table in schema (which are files in the directory)"""
schema_path = schema_id
return {path: FSTable(path=path) for path in self.ls(schema_path, only_files=True)}

View File

@@ -2,24 +2,54 @@ from pydantic import BaseModel
class Schema(BaseModel):
"""Logical agregation for Table
"""Where multiple tables are stored
id: uniq identifier for the schema
value: string which describe where to find the schema in the storage system
repo_id: id of the repo where the schema belongs to
name: name of the schema
value: string which describes where to find the schema in the repository
"""
id: str
repo_id: str
name: str
value: str
tables: list[str] = []
class Table(BaseModel):
"""Place where same structured data are stored
id: unique identifier for the table
repo_id: id of the repo where the table belongs to
schema_id: id of the schema where the table belongs to
name: the name of the table
value: string which describes where to find the table in the storage system
"""
id: str
repo_id: str
schema_id: str
name: str
value: str
partitions: list[str] = []
class Partition(BaseModel):
"""Place where data are stored
id: unique identifier for the partition
repo_id: id of the repo where the partition belongs to
schema_id: id of the schema where the partition belongs to
table_id: id of the table where the partition belongs to
name: the name of the partition
value: string which describes where to find the partition in the storage system
"""
id: str
repo_id: str
schema_id: str
table_id: str
name: str
value: str
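
For illustration, a sketch of how the extended storage models nest (the id/value strings below are made up; the models only require plain strings):

from plesna.models.storage import Schema, Table, Partition

schema = Schema(
    id="repo/username", repo_id="repo", name="username",
    value="repo/username", tables=["username.csv"],
)
table = Table(
    id="repo/username/username.csv", repo_id="repo", schema_id=schema.id,
    name="username.csv", value="repo/username/username.csv", partitions=["2024"],
)
partition = Partition(
    id="repo/username/username.csv/2024", repo_id="repo", schema_id=schema.id,
    table_id=table.id, name="2024", value="repo/username/username.csv/2024",
)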

View File

@@ -0,0 +1,24 @@
import abc
from plesna.models.storage import Schema
class DataCatalogue:
def __init__(self):
pass
@property
@abc.abstractmethod
def schemas(self) -> list[str]:
"""List schema's names"""
raise NotImplementedError
@abc.abstractmethod
def schema(self, name: str) -> Schema:
"""Get the schema properties"""
raise NotImplementedError
@abc.abstractmethod
def tables(self, schema: str) -> list[str]:
"""List table's name in schema"""
raise NotImplementedError

View File

@@ -0,0 +1,152 @@
from pathlib import Path
from pydantic import BaseModel, computed_field
from plesna.models.storage import Partition, Schema, Table
from plesna.storage.repository.repository import Repository
class FSPartition(BaseModel):
name: str
path: Path
@computed_field
@property
def ref(self) -> Partition:
return Partition(
id=str(self.path),
repo_id=str(self.path.parent.parent.parent),
schema_id=str(self.path.parent.parent),
table_id=str(self.path.parent),
name=self.name,
value=str(self.path.absolute()),
)
class FSTable(BaseModel):
name: str
path: Path
is_partitionned: bool
partitions: list[str] = []
@computed_field
@property
def ref(self) -> Table:
return Table(
id=str(self.path),
repo_id=str(self.path.parent.parent),
schema_id=str(self.path.parent),
name=self.name,
value=str(self.path.absolute()),
partitions=self.partitions,
)
class FSSchema(BaseModel):
name: str
path: Path
tables: list[str]
@computed_field
@property
def ref(self) -> Schema:
return Schema(
id=str(self.path),
repo_id=str(self.path.parent),
name=self.name,
value=str(self.path.absolute()),
tables=self.tables,
)
class FSRepository(Repository):
"""Repository based on files tree structure
- first level: schemas
- second level: tables
- third level: partition (actual datas)
"""
def __init__(self, name: str, basepath: str, id: str):
self._basepath = Path(basepath)
self.name = name
self.id = id
assert self._basepath.exists()
def ls(
self, dir="", only_files=False, only_directories=False, recursive=False
) -> list[str]:
"""List files in dir
:param dir: relative path from self._basepath
:param only_files: if True, return only files
:param only_directories: if True, return only directories
:param recursive: list the content recursively
:return: list of paths (as strings) relative to self._basepath / dir
"""
dirpath = self._basepath / dir
if recursive:
paths = dirpath.rglob("*")
else:
paths = dirpath.iterdir()
if only_files:
return [
str(f.relative_to(dirpath))
for f in paths
if not f.is_dir() and not str(f).startswith(".")
]
if only_directories:
return [
str(f.relative_to(dirpath))
for f in paths
if f.is_dir() and not str(f).startswith(".")
]
return [
str(f.relative_to(dirpath)) for f in paths if not str(f).startswith(".")
]
def schemas(self) -> list[str]:
"""List schemas (sub directories within basepath)"""
subdirectories = self.ls("", only_directories=True)
return [str(d) for d in subdirectories]
def _schema(self, name: str) -> FSSchema:
"""List schemas (sub directories within basepath)"""
schema_path = self._basepath / name
tables = self.ls(name)
return FSSchema(name=name, path=schema_path, tables=tables)
def schema(self, name: str) -> Schema:
return self._schema(name).ref
def _table(self, schema: str, name: str) -> FSTable:
"""Get infos on the table"""
table_path = self._basepath / schema / name
is_partitionned = table_path.is_dir()
if is_partitionned:
partitions = self.ls(f"{schema}/{name}", only_files=True)
else:
partitions = []
return FSTable(
name=name,
path=table_path,
is_partitionned=is_partitionned,
partitions=partitions,
)
def table(self, schema: str, name: str) -> Table:
return self._table(schema, name).ref
def _partition(self, schema: str, table: str, partition: str) -> FSPartition:
"""Get infos on the partition"""
partition_path = self._basepath / schema / table / partition
return FSPartition(name=partition, path=partition_path)
def partition(self, schema: str, table: str, partition: str) -> Partition:
return self._partition(schema, table, partition).ref
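
A short walk through the new FSRepository, assuming a hypothetical ./example_data directory laid out as schemas/tables/partitions like the test fixtures below:

from plesna.storage.repository.fs_repository import FSRepository

repo = FSRepository("example", "./example_data", "example")  # basepath must exist

for schema_name in repo.schemas():        # first level: sub directories
    schema = repo.schema(schema_name)
    for table_name in schema.tables:      # second level: files or directories
        table = repo.table(schema_name, table_name)
        print(table.name, table.partitions)  # partitions only for table directories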

View File

@@ -1,13 +1,12 @@
import abc
from plesna.models.storage import Schema, Table
from plesna.models.storage import Partition, Schema, Table
class DataCatalogue:
class Repository:
def __init__(self):
pass
@property
@abc.abstractmethod
def schemas(self) -> list[str]:
"""List schema's names"""
@@ -19,16 +18,21 @@ class DataCatalogue:
raise NotImplementedError
@abc.abstractmethod
def tables(self, schema:str) -> list[str]:
def tables(self, schema: str) -> list[str]:
"""List table's name in schema"""
raise NotImplementedError
@abc.abstractmethod
def table(self, schema:str, table:str) -> Table:
def table(self, schema: str, name: str) -> Table:
"""Get the table properties"""
raise NotImplementedError
@abc.abstractmethod
def infos(self, table: str, schema: str) -> dict[str, str]:
"""Get infos about the table"""
def partitions(self, schema: str, table: str) -> list[str]:
"""List partition's name in table"""
raise NotImplementedError
@abc.abstractmethod
def partition(self, schema: str, name: str, partition: str) -> Partition:
"""Get the partition properties"""
raise NotImplementedError

View File

@@ -6,12 +6,20 @@ from plesna.models.transformation import Transformation
def test_consume_flux():
sources = {
"src1": Table(id="src1", value="here"),
"src2": Table(id="src2", value="here"),
"src1": Table(
id="src1", repo_id="test", schema_id="test", name="test", value="here"
),
"src2": Table(
id="src2", repo_id="test", schema_id="test", name="test", value="here"
),
}
targets = {
"tgt1": Table(id="tgt1", value="this"),
"tgt2": Table(id="tgt2", value="that"),
"tgt1": Table(
id="tgt1", repo_id="test", schema_id="test", name="test", value="this"
),
"tgt2": Table(
id="tgt2", repo_id="test", schema_id="test", name="test", value="that"
),
}
def func(sources, targets, **kwrds):

View File

@@ -1,43 +1,74 @@
import shutil
from pathlib import Path
import pytest
from plesna.dataplatform import DataPlateform
from plesna.datastore.fs_datacatalogue import FSDataCatalogue
from plesna.storage.repository.fs_repository import FSRepository
FIXTURE_DIR = Path(__file__).parent / Path("raw_data")
FIXTURE_DIR = Path(__file__).parent.parent / Path("raw_datas")
@pytest.fixture
def raw_catalogue(tmp_path):
def repository(tmp_path) -> FSRepository:
raw_path = Path(tmp_path) / "raw"
raw_path.mkdir()
return FSDataCatalogue("raw", raw_path)
example_src = FIXTURE_DIR
assert example_src.exists()
recovery_loc = raw_path / "recovery"
recovery_loc.mkdir()
username_loc = raw_path / "username"
username_loc.mkdir()
salary_loc = raw_path / "salary"
salary_loc.mkdir()
for f in example_src.glob("*"):
if "recovery" in str(f):
shutil.copy(f, recovery_loc)
if "salary" in str(f):
shutil.copy(f, salary_loc)
else:
shutil.copy(f, username_loc)
@pytest.fixture
def bronze_catalogue(tmp_path):
bronze_path = Path(tmp_path) / "bronze"
bronze_path.mkdir()
return FSDataCatalogue("bronze", bronze_path)
silver_path = Path(tmp_path) / "silver"
silver_path.mkdir()
return FSRepository("test", tmp_path, "test")
def test_add_repository(
repository: FSRepository,
):
dp = DataPlateform()
dp.add_repository("test", repository)
assert dp.repositories == ["test"]
assert dp.repository("test") == repository
@pytest.fixture
def silver_catalogue(tmp_path):
silver_path = Path(tmp_path) / "silver"
silver_path.mkdir()
return FSDataCatalogue("silver", silver_path)
def test_add_catalogue(
raw_catalogue: FSDataCatalogue,
bronze_catalogue: FSDataCatalogue,
silver_catalogue: FSDataCatalogue,
):
def dataplatform(
repository: FSRepository,
) -> DataPlateform:
dp = DataPlateform()
dp.add_datacatalague("raw", raw_catalogue)
dp.add_datacatalague("bronze", bronze_catalogue)
dp.add_datacatalague("silver", silver_catalogue)
dp.add_repository("test", repository)
return dp
assert dp.datacatalogues == ["raw", "bronze", "silver"]
assert dp.get_datacatalogue("raw") == raw_catalogue
def test_listing_content(dataplatform: DataPlateform):
assert dataplatform.repository("test").schemas() == ["raw", "bronze", "silver"]
assert dataplatform.repository("test").schema("raw").tables == [
"recovery",
"username",
"salary",
]
def test_add_flux(dataplatform: DataPlateform):
# dataplatform.add_flux()
pass

View File

@@ -1,61 +0,0 @@
import shutil
from pathlib import Path
import pytest
from plesna.datastore.fs_datacatalogue import FSDataCatalogue
from plesna.models.storage import Schema
FIXTURE_DIR = Path(__file__).parent.parent / Path("./raw_datas/")
@pytest.fixture
def location(tmp_path):
loc = tmp_path
username_loc = loc / "username"
username_loc.mkdir()
salary_loc = loc / "salary"
salary_loc.mkdir()
example_src = FIXTURE_DIR
assert example_src.exists()
for f in example_src.glob("*"):
if "username" in str(f):
shutil.copy(f, username_loc)
else:
shutil.copy(f, salary_loc)
return loc
def test_init(location):
repo = FSDataCatalogue("example", location)
assert repo.ls() == [
"username",
"salary",
]
assert repo.ls(recursive=True) == [
"username",
"salary",
]
def test_list_schema(location):
repo = FSDataCatalogue("example", location)
assert repo.schemas == [".", "username", "salary"]
assert repo.schema(".").ref == Schema(id=".", value=".")
assert repo.schema("username").ref == Schema(id="username", value="username")
def test_list_tables_schema(location):
repo = FSDataCatalogue("example", location)
assert repo.schema(".").tables == []
assert repo.schema("username").tables == [
'username.csv',
'username-password-recovery-code.xlsx',
'username-password-recovery-code.xls',
]
assert repo.schema("salary").tables == ["salary.pdf"]

View File

View File

@@ -0,0 +1,77 @@
import shutil
from pathlib import Path
import pytest
from plesna.models.storage import Schema
from plesna.storage.repository.fs_repository import FSRepository
FIXTURE_DIR = Path(__file__).parent.parent / Path("./raw_datas/")
@pytest.fixture
def location(tmp_path):
loc = tmp_path
username_loc = loc / "username"
username_loc.mkdir()
salary_loc = loc / "salary"
salary_loc.mkdir()
example_src = FIXTURE_DIR
assert example_src.exists()
for f in example_src.glob("*"):
if "username" in str(f):
shutil.copy(f, username_loc)
else:
shutil.copy(f, salary_loc)
return loc
def test_init(location):
repo = FSRepository("example", location, "example")
assert repo.ls() == [
"username",
"salary",
]
assert repo.ls(recursive=True) == [
"username",
"salary",
"username/username.csv",
"username/username-password-recovery-code.xlsx",
"username/username-password-recovery-code.xls",
"salary/salary.pdf",
]
@pytest.fixture
def repository(location) -> FSRepository:
return FSRepository("example", location, "example")
def test_list_schema(location, repository):
assert repository.schemas() == ["username", "salary"]
assert repository.schema("username").name == "username"
assert repository.schema("username").id == str(location / "username")
assert repository.schema("username").repo_id == str(location)
assert repository.schema("username").value == str(location / "username")
def test_list_tables_schema(repository):
assert repository.schema("username").tables == [
"username.csv",
"username-password-recovery-code.xlsx",
"username-password-recovery-code.xls",
]
assert repository.schema("salary").tables == ["salary.pdf"]
def test_describe_table(location, repository):
table = repository.table("username", "username.csv")
assert table.id == str(location / "username" / "username.csv")
assert table.repo_id == str(location)
assert table.schema_id == str(location / "username")
assert table.name == "username.csv"
assert table.value == str(location / "username" / "username.csv")
assert table.partitions == []