Skip to content

Commit

Permalink
Clean up
Browse files Browse the repository at this point in the history
  • Loading branch information
rly committed Jul 6, 2024
1 parent 1f6a7f5 commit 188e5aa
Show file tree
Hide file tree
Showing 9 changed files with 39 additions and 38 deletions.
4 changes: 1 addition & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,7 @@ Support for N-dimensional arrays in LinkML.
# Quick reference for common commands

```bash
cd linkml-model
poetry run gen-json-schema tests/input/examples/schema_definition-array-2.yaml
poetry run gen-pydantic tests/input/examples/schema_definition-array-2.yaml
poetry run gen-pydantic tests/input/temperature_schema.yaml > tests/array_classes_lol.py
```

# Acknowledgements
Expand Down
2 changes: 1 addition & 1 deletion src/linkml_arrays/dumpers/yaml_array_file_dumper.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Base class for dumping a LinkML model to a YAML file with paths to files containing individual arrays."""
"""Base class for dumping a LinkML model to YAML with paths to files containing arrays."""

import os
from abc import ABCMeta, abstractmethod
Expand Down
2 changes: 1 addition & 1 deletion src/linkml_arrays/dumpers/yaml_dumper.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Class for dumping a LinkML model to a YAML file."""
"""Class for dumping a LinkML model to YAML."""

from typing import Union

Expand Down
4 changes: 2 additions & 2 deletions src/linkml_arrays/dumpers/yaml_hdf5_dumper.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Class for dumping a LinkML model to a YAML file with paths to HDF5 files."""
"""Class for dumping a LinkML model to YAML with paths to HDF5 files."""

from pathlib import Path
from typing import List, Union
Expand All @@ -10,7 +10,7 @@


class YamlHdf5Dumper(YamlArrayFileDumper):
"""Dumper class for LinkML models to YAML files with paths to HDF5 files, one for each array.
"""Dumper class for LinkML models to YAML with paths to HDF5 files, one per array.
Each array is written to an HDF5 dataset at path "/data" in a new HDF5 file.
"""
Expand Down
4 changes: 2 additions & 2 deletions src/linkml_arrays/dumpers/yaml_numpy_dumper.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
"""Class for dumping a LinkML model to a YAML file with paths to NumPy files."""
"""Class for dumping a LinkML model to YAML with paths to NumPy files."""

from pathlib import Path
from typing import List, Union
Expand All @@ -9,7 +9,7 @@


class YamlNumpyDumper(YamlArrayFileDumper):
"""Dumper class for LinkML models to YAML files with paths to NumPy .npy files, one for each array.
"""Dumper class for LinkML models to YAML with paths to .npy files, one per array.
Each array is written to a new NumPy .npy file.
"""
Expand Down
1 change: 0 additions & 1 deletion src/linkml_arrays/loaders/yaml_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

from typing import Type, Union

import numpy as np
import yaml
from linkml_runtime import SchemaView
from linkml_runtime.linkml_model import ClassDefinition
Expand Down
16 changes: 8 additions & 8 deletions tests/test_dumpers/test_dumpers.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
"""Test dumping LinkML pydantic models with arrays as lists-of-lists to various file formats."""

import os
import unittest
from pathlib import Path

import h5py
Expand Down Expand Up @@ -30,7 +29,7 @@
INPUT_DIR = Path(__file__).parent.parent / "input"


def create_container() -> Container:
def _create_container() -> Container:
latitude_in_deg = LatitudeInDegSeries(name="my_latitude", values=[[1, 2], [3, 4]])
longitude_in_deg = LongitudeInDegSeries(name="my_longitude", values=[[5, 6], [7, 8]])
date = DateSeries(values=["2020-01-01", "2020-01-02"])
Expand All @@ -39,9 +38,10 @@ def create_container() -> Container:
conversion_factor=1000.0,
values=[[[0, 1], [2, 3]], [[4, 5], [6, 7]]],
)
# NOTE: currently no way to pass in the actual LatitudeInDegSeries object
temperature_dataset = TemperatureDataset(
name="my_temperature",
latitude_in_deg="my_latitude", # currently no way to pass in the actual LatitudeInDegSeries object
latitude_in_deg="my_latitude",
longitude_in_deg="my_longitude",
date=date,
day_in_d=days_in_d_since,
Expand All @@ -59,7 +59,7 @@ def create_container() -> Container:

def test_yaml_dumper():
"""Test YamlDumper dumping to a YAML file."""
container = create_container()
container = _create_container()

schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
ret = YamlDumper().dumps(container, schemaview=schemaview)
Expand All @@ -75,7 +75,7 @@ def test_yaml_dumper():

def test_yaml_numpy_dumper():
"""Test YamlNumpyDumper dumping to a YAML file and NumPy .npy files in a directory."""
container = create_container()
container = _create_container()

schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
ret = YamlNumpyDumper().dumps(container, schemaview=schemaview, output_dir="./out")
Expand All @@ -91,7 +91,7 @@ def test_yaml_numpy_dumper():

def test_yaml_hdf5_dumper():
"""Test YamlNumpyDumper dumping to a YAML file and HDF5 datasets in a directory."""
container = create_container()
container = _create_container()

schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
ret = YamlHdf5Dumper().dumps(container, schemaview=schemaview, output_dir="./out")
Expand All @@ -107,7 +107,7 @@ def test_yaml_hdf5_dumper():

def test_hdf5_dumper(tmp_path):
"""Test Hdf5Dumper dumping to an HDF5 file."""
container = create_container()
container = _create_container()

schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
output_file_path = tmp_path / "my_container.h5"
Expand Down Expand Up @@ -137,7 +137,7 @@ def test_hdf5_dumper(tmp_path):

def test_zarr_directory_store_dumper(tmp_path):
"""Test ZarrDumper dumping to an HDF5 file."""
container = create_container()
container = _create_container()

schemaview = SchemaView(INPUT_DIR / "temperature_schema.yaml")
output_file_path = tmp_path / "my_container.zarr"
Expand Down
22 changes: 10 additions & 12 deletions tests/test_loaders/test_loaders.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
"""Test loading data from various file formats into LinkML pydantic models with arrays as lists-of-lists."""
"""Test loading data from various file formats into pydantic models with arrays as LoLs."""

import unittest
from pathlib import Path

import numpy as np
from hbreader import hbread
from linkml_runtime import SchemaView

Expand All @@ -25,7 +23,7 @@
)


def check_container(container: Container):
def _check_container(container: Container):
assert isinstance(container, Container)
assert container.name == "my_container"

Expand All @@ -40,8 +38,9 @@ def check_container(container: Container):
assert isinstance(container.temperature_dataset, TemperatureDataset)
assert container.temperature_dataset.name == "my_temperature"
assert container.temperature_dataset.latitude_in_deg == "my_latitude"
# currently no way to get the actual LatitudeInDegSeries object from the TemperatureDataset object
# because the TemperatureDataset Pydantic object expects a string for the latitude_in_deg field
# currently no way to get the actual LatitudeInDegSeries object from the
# TemperatureDataset object because the TemperatureDataset Pydantic object
# expects a string for the latitude_in_deg field
# to be isomorphic with the json schema / yaml representation

assert container.temperature_dataset.longitude_in_deg == "my_longitude"
Expand All @@ -65,31 +64,31 @@ def test_yaml_loader():
data_yaml = hbread("container_yaml.yaml", base_path=str(Path(__file__) / "../../input"))
schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
container = YamlLoader().loads(data_yaml, target_class=Container, schemaview=schemaview)
check_container(container)
_check_container(container)


def test_yaml_numpy_loader():
"""Test loading of pydantic-style classes from YAML + Numpy arrays."""
read_yaml = hbread("container_yaml_numpy.yaml", base_path=str(Path(__file__) / "../../input"))
schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
container = YamlNumpyLoader().loads(read_yaml, target_class=Container, schemaview=schemaview)
check_container(container)
_check_container(container)


def test_yaml_hdf5_loader():
"""Test loading of pydantic-style classes from YAML + Numpy arrays."""
read_yaml = hbread("container_yaml_hdf5.yaml", base_path=str(Path(__file__) / "../../input"))
schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
container = YamlHdf5Loader().loads(read_yaml, target_class=Container, schemaview=schemaview)
check_container(container)
_check_container(container)


def test_hdf5_loader():
"""Test loading of pydantic-style classes from HDF5 datasets."""
file_path = str(Path(__file__).parent.parent / "input" / "my_container.h5")
schemaview = SchemaView(Path(__file__) / "../../input/temperature_schema.yaml")
container = Hdf5Loader().loads(file_path, target_class=Container, schemaview=schemaview)
check_container(container)
_check_container(container)


def test_zarr_directory_store_loader():
Expand All @@ -99,5 +98,4 @@ def test_zarr_directory_store_loader():
container = ZarrDirectoryStoreLoader().loads(
file_path, target_class=Container, schemaview=schemaview
)
check_container(container)

_check_container(container)
22 changes: 14 additions & 8 deletions tox.ini
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ description = Run documentation linters.
skip_install = true
deps =
darglint
flake8<5.0.0
flake8
flake8-black
flake8-bandit
flake8-bugbear
Expand All @@ -73,14 +73,20 @@ description = Run the flake8 tool with several plugins (bandit, docstrings, impo
[flake8]
max-line-length = 100
ignore =
DAR101 # Missing parameter(s) in Docstring: - with_git_hash
DAR201 # Missing "Returns" in Docstring: - return
DAR301 # Missing "Yields" in Docstring: - yield
E111 # indentation is not a multiple of 4
T201 # print found.
S101 # Use of assert detected.
DAR101
DAR201
DAR301
E111
T201
S101
; DAR101 # Missing parameter(s) in Docstring: - with_git_hash
; DAR201 # Missing "Returns" in Docstring: - return
; DAR301 # Missing "Yields" in Docstring: - yield
; E111 # indentation is not a multiple of 4
; T201 # print found.
; S101 # Use of assert detected.
exclude =
tests/test_dumpers/array_classes.py
tests/array_classes_lol.py

[testenv:mypy]
deps = mypy
Expand Down

0 comments on commit 188e5aa

Please sign in to comment.