Commit df5ff8a

Merge pull request #167 from BIH-CEI/165-api-design-with-abc
165 api design with abc
2 parents 8b934fe + 42d9ba8

4 files changed: 82 additions, 1 deletion

.gitignore

Lines changed: 4 additions & 0 deletions
@@ -161,3 +161,7 @@ cython_debug/
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
.idea/
+
+# VsCode
+*.code-workspace
+.vscode

Lines changed: 76 additions & 0 deletions
@@ -0,0 +1,76 @@
"""
This package is intended to expose the PhenopacketMapper API to the user.
"""

import abc
from typing import Tuple, Iterable, Iterator
from dataclasses import dataclass


class DataModelDefiner(metaclass=abc.ABCMeta):
    """
    Take some data model definition and try to load it into :class:`DataModel`.

    E.g. protobuf model "definer".
    """
    pass


class DataModel(metaclass=abc.ABCMeta):
    """
    Value class.
    The fields:
    - label, version
    - a root `DataNode`, it must be there (not `Optional`)
    - resources (maybe generate dynamically, or keep as a list)

    We want to be able to (de)serialize this.
    """
    pass


@dataclass
class DataNode(metaclass=abc.ABCMeta):
    """
    This is very much like Jackson (Java) `TreeNode`,
    because it can be many things.

    The common things may include
    - label
    - maybe it knows about the parent (optional) and children

    We want to be able to (de)serialize this.
    """
    label: str
    id: str
    required: bool


class DataInstance:
    pass


class Transformation(metaclass=abc.ABCMeta):
    """

    """
    steps: Tuple


class Mapper:

    def __init__(
            self,
            transformation: Transformation,
    ):
        pass

    def transform_dataset(
            self,
            data_set: Iterable[DataInstance],
    ) -> Iterator[DataInstance]:
        return map(lambda item: self.transform(item), data_set)

    def transform(self, item: DataInstance) -> DataInstance:
        # TODO: implement based on self.transformation
        pass

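For orientation, below is a minimal, hypothetical sketch of how the ABCs drafted above might be exercised once the stubs are filled in. It assumes the new module is importable as phenopacket_mapper.api (the file path is not shown in this view); the subclass names IdentityTransformation and PassThroughMapper are illustrative placeholders and not part of this commit.

# Hypothetical usage sketch only; the import path and subclass names are
# assumptions for illustration, not part of this commit.
from phenopacket_mapper.api import DataInstance, Mapper, Transformation


class IdentityTransformation(Transformation):
    # Assumed minimal Transformation: an empty tuple of steps.
    steps = ()


class PassThroughMapper(Mapper):
    # Overrides transform() until the TODO in Mapper is implemented;
    # here each DataInstance is returned unchanged.
    def transform(self, item: DataInstance) -> DataInstance:
        return item


mapper = PassThroughMapper(transformation=IdentityTransformation())
dataset = [DataInstance(), DataInstance()]

# transform_dataset() lazily maps transform() over the iterable.
for transformed in mapper.transform_dataset(dataset):
    print(transformed)

Until Mapper.transform is implemented against its Transformation, overriding it in a subclass as above is the simplest way to try the pipeline end to end.
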
src/phenopacket_mapper/data_standards/data_model.py

Lines changed: 1 addition & 0 deletions
@@ -55,6 +55,7 @@ class DataField:
    :ivar required: Required flag of the field
    :ivar ordinal: Ordinal of the field (E.g. 1.1, 1.2, 2.1, etc.)
    """
+    # TODO: change section into path to data
    name: str = field()
    specification: Union[ValueSet, type, List[type]] = field()
    id: str = field(default=None)

src/phenopacket_mapper/mapping/phenopacket_building_block.py

Lines changed: 1 addition & 1 deletion
@@ -44,7 +44,7 @@ def map(self, instance: DataModelInstance):
        return self.phenopacket_element(**kwargs)


-def map_single(key, e, instance, kwargs):
+def map_single(key, e, instance: DataModelInstance, kwargs):
    if isinstance(e, DataField):
        data_field = e
        try:
