
Commit 7f86813

Enable strict type checking (#615)
1 parent f466a6a commit 7f86813

20 files changed: +239 −169 lines

pyproject.toml

Lines changed: 0 additions & 1 deletion
@@ -24,7 +24,6 @@ dependencies = [
     "deprecation>=2.1.0",
     "pyyaml>=6.0.1",
     "rdflib>=6.0.0",
-    "importlib-resources>=6.1.1",
 ]

 # see https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#dependencies-optional-dependencies
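
The only visible change in this file is dropping the importlib-resources backport; the constants.py and context.py hunks below switch to the standard library's importlib.resources instead, which provides the same files()/joinpath() API on Python 3.9+. A minimal sketch of the stdlib usage this relies on (mirroring the resource path used later in this commit):

```python
# Minimal sketch of the stdlib replacement for the importlib-resources backport.
# Requires Python 3.9+; the package/resource names mirror src/sssom/constants.py.
import importlib.resources

SCHEMA_RESOURCES = importlib.resources.files("sssom_schema")
SCHEMA_YAML = SCHEMA_RESOURCES.joinpath("schema/sssom_schema.yaml")

# Traversable resources can be opened directly, whether or not they are
# real files on disk (e.g. when the package is imported from a zip).
with SCHEMA_YAML.open() as handle:
    print(handle.readline().rstrip())
```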

src/sssom/__init__.py

Lines changed: 31 additions & 7 deletions
@@ -8,11 +8,11 @@
 # package is not installed
 __version__ = "0.0.0" # pragma: no cover

-from sssom_schema import Mapping, MappingSet, slots # noqa:401
+from sssom_schema import Mapping, MappingSet, slots

-from sssom.io import get_metadata_and_prefix_map # noqa:401
-from sssom.sssom_document import MappingSetDocument # noqa:401
-from sssom.util import ( # noqa:401
+from sssom.io import get_metadata_and_prefix_map
+from sssom.sssom_document import MappingSetDocument
+from sssom.util import (
     MappingSetDataFrame,
     collapse,
     compare_dataframes,
@@ -22,6 +22,30 @@
     reconcile_prefix_and_data,
 )

-from .constants import generate_mapping_set_id, get_default_metadata # noqa:401
-from .parsers import parse_csv, parse_sssom_table, parse_tsv # noqa:401
-from .writers import write_json, write_owl, write_rdf, write_tsv # noqa:401
+from .constants import generate_mapping_set_id, get_default_metadata
+from .parsers import parse_csv, parse_sssom_table, parse_tsv
+from .writers import write_json, write_owl, write_rdf, write_tsv
+
+__all__ = [
+    "write_json",
+    "write_owl",
+    "write_rdf",
+    "write_tsv",
+    "parse_csv",
+    "generate_mapping_set_id",
+    "get_default_metadata",
+    "parse_sssom_table",
+    "parse_tsv",
+    "Mapping",
+    "MappingSet",
+    "MappingSetDocument",
+    "MappingSetDataFrame",
+    "slots",
+    "get_metadata_and_prefix_map",
+    "collapse",
+    "compare_dataframes",
+    "dataframe_to_ptable",
+    "filter_redundant_rows",
+    "group_mappings",
+    "reconcile_prefix_and_data",
+]
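
Dropping the `# noqa:401` markers in favour of an explicit `__all__` makes the re-exports part of the declared public API, which is what mypy's strict mode expects (its no-implicit-reexport check treats bare imports in an `__init__.py` as private). A small sketch of the pattern with hypothetical module names:

```python
# pkg/__init__.py -- sketch of explicit re-exporting under mypy --strict.
# Module and symbol names here are hypothetical, purely for illustration.
from pkg.core import Widget
from pkg.io import load_widget

# Without __all__ (or the "from pkg.core import Widget as Widget" idiom),
# strict mode's no-implicit-reexport check would flag
# "from pkg import Widget" in downstream code.
__all__ = [
    "Widget",
    "load_widget",
]
```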

src/sssom/cli.py

Lines changed: 10 additions & 10 deletions
@@ -11,6 +11,8 @@
 .. seealso:: https://click.palletsprojects.com/en/8.0.x/setuptools/
 """

+from __future__ import annotations
+
 import logging as _logging
 import os
 import sys
@@ -214,7 +216,7 @@ def parse(
     strict_clean_prefixes: bool,
     output: TextIO,
     embedded_mode: bool,
-    mapping_predicate_filter: Optional[tuple],
+    mapping_predicate_filter: list[str],
 ) -> None:
     """Parse a file in one of the supported formats (such as obographs) into an SSSOM TSV file."""
     parse_file(
@@ -239,7 +241,7 @@ def parse(
     multiple=True,
     default=DEFAULT_VALIDATION_TYPES,
 )
-def validate(input: str, validation_types: List[SchemaValidationType]):
+def validate(input: str, validation_types: List[SchemaValidationType]) -> None:
     """Produce an error report for an SSSOM file."""
     validation_type_list = [t for t in validation_types]
     validate_file(input_path=input, validation_types=validation_type_list)
@@ -262,7 +264,7 @@ def split(input: str, output_directory: str) -> None:
     type=click.FloatRange(0, 1),
     help="Default confidence to be assigned if absent.",
 )
-def ptable(input, output: TextIO, inverse_factor: float, default_confidence: float) -> None:
+def ptable(input: str, output: TextIO, inverse_factor: float, default_confidence: float) -> None:
     """Convert an SSSOM file to a ptable for kboom/`boomer <https://github.com/INCATools/boomer>`_."""
     # TODO should maybe move to boomer (but for now it can live here, so cjm can tweak
     msdf = parse_sssom_table(input)
@@ -347,10 +349,10 @@ def dosql(query: str, inputs: List[str], output: TextIO) -> None:
 @click.option("-P", "--prefix", type=click.Tuple([str, str]), multiple=True)
 @output_option
 def sparql(
-    url: str,
-    config,
-    graph: str,
-    limit: int,
+    url: str | None,
+    config: TextIO | None,
+    graph: str | None,
+    limit: int | None,
     object_labels: bool,
     prefix: List[Tuple[str, str]],
     output: TextIO,
@@ -403,9 +405,7 @@ def diff(inputs: Tuple[str, str], output: TextIO) -> None:
     msdf = MappingSetDataFrame.with_converter(
         df=d.combined_dataframe.drop_duplicates(), converter=converter
     )
-    msdf.metadata[ # type:ignore
-        "comment"
-    ] = (
+    msdf.metadata["comment"] = (
         f"Diff between {input1} and {input2}. See comment column for information."
     )
     write_table(msdf, output)
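
The new `from __future__ import annotations` line is what allows the updated signatures to use PEP 604 unions such as `str | None` and built-in generics such as `list[str]` on Python versions before 3.10: annotations are then stored as strings and never evaluated at definition time. A quick illustration with a hypothetical function:

```python
# Sketch: postponed evaluation of annotations (PEP 563) makes PEP 604/585
# syntax usable in annotations even on Python 3.8/3.9.
from __future__ import annotations


def pick_first(values: list[str], fallback: str | None = None) -> str | None:
    """Return the first value, or the fallback when the list is empty."""
    return values[0] if values else fallback


print(pick_first(["a", "b"]))        # -> a
print(pick_first([], fallback="x"))  # -> x
```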

src/sssom/constants.py

Lines changed: 7 additions & 8 deletions
@@ -2,20 +2,21 @@

 from __future__ import annotations

+import importlib.resources
 import pathlib
 import uuid
 from enum import Enum
 from functools import cached_property, lru_cache
 from typing import Any, ClassVar, Dict, List, Literal, Mapping, Set, TextIO, Union, cast

-import importlib_resources
 import yaml
 from linkml_runtime.utils.schema_as_dict import schema_as_dict
 from linkml_runtime.utils.schemaview import SchemaView

 HERE = pathlib.Path(__file__).parent.resolve()

-SCHEMA_YAML = importlib_resources.files("sssom_schema").joinpath("schema/sssom_schema.yaml")
+SCHEMA_RESOURCES = importlib.resources.files("sssom_schema")
+SCHEMA_YAML = SCHEMA_RESOURCES.joinpath("schema/sssom_schema.yaml")
 EXTENDED_PREFIX_MAP = HERE / "obo.epm.json"

 OWL_EQUIV_CLASS_URI = "http://www.w3.org/2002/07/owl#equivalentClass"
@@ -240,12 +241,12 @@ def view(self) -> SchemaView:
     @cached_property
     def dict(self) -> Dict[str, Any]:
         """Return SchemaView as a dictionary."""
-        return schema_as_dict(self.view.schema)
+        return schema_as_dict(self.view.schema) # type:ignore

     @cached_property
     def mapping_slots(self) -> List[str]:
         """Return list of mapping slots."""
-        return self.view.get_class("mapping").slots
+        return self.view.get_class("mapping").slots # type:ignore

     @cached_property
     def mapping_set_slots(self) -> List[str]:
@@ -270,7 +271,7 @@ def mapping_enum_keys(self) -> Set[str]:
     @cached_property
     def slots(self) -> Dict[str, str]:
         """Return the slots for SSSOMSchemaView object."""
-        return self.dict["slots"]
+        return self.dict["slots"] # type:ignore

     @cached_property
     def double_slots(self) -> Set[str]:
@@ -292,9 +293,7 @@ def propagatable_slots(self) -> List[str]:
 def _get_sssom_schema_object() -> SSSOMSchemaView:
     """Get a view over the SSSOM schema."""
     sssom_sv_object = (
-        SSSOMSchemaView.instance
-        if hasattr(SSSOMSchemaView, "instance")
-        else SSSOMSchemaView() # type:ignore[misc]
+        SSSOMSchemaView.instance if hasattr(SSSOMSchemaView, "instance") else SSSOMSchemaView()
     )
     return sssom_sv_object

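The bare `# type:ignore` comments appear because `linkml_runtime`'s helpers are not fully typed: under `mypy --strict`, returning their `Any`-typed results from a function with a concrete return annotation is an error. A reduced sketch of that situation (the stand-in function and the error code are illustrative, not from this codebase):

```python
# Sketch: silencing a strict-mode error at the boundary of an untyped library.
from typing import Any, Dict


def untyped_library_call() -> Any:  # stand-in for e.g. schema_as_dict(...)
    return {"slots": {"subject_id": "required"}}


def slots() -> Dict[str, str]:
    # Under --strict this would otherwise fail with "Returning Any from
    # function declared to return ..."; the targeted ignore acknowledges
    # the gap at the library boundary.
    return untyped_library_call()["slots"]  # type: ignore[no-any-return]
```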

src/sssom/context.py

Lines changed: 7 additions & 8 deletions
@@ -1,16 +1,17 @@
 """Utilities for loading JSON-LD contexts."""

+from __future__ import annotations
+
 import json
 from functools import lru_cache
-from typing import Any, Mapping, Union
+from typing import Any, Mapping, Union, cast

 import curies
-import importlib_resources
 from curies import Converter
 from rdflib.namespace import is_ncname
 from typing_extensions import TypeAlias

-from .constants import EXTENDED_PREFIX_MAP
+from .constants import EXTENDED_PREFIX_MAP, SCHEMA_RESOURCES

 __all__ = [
     "SSSOM_BUILT_IN_PREFIXES",
@@ -20,9 +21,7 @@
 ]

 SSSOM_BUILT_IN_PREFIXES = ("sssom", "owl", "rdf", "rdfs", "skos", "semapv")
-SSSOM_CONTEXT = importlib_resources.files("sssom_schema").joinpath(
-    "context/sssom_schema.context.jsonld"
-)
+SSSOM_CONTEXT = SCHEMA_RESOURCES.joinpath("context/sssom_schema.context.jsonld")


 @lru_cache(1)
@@ -47,8 +46,8 @@ def _get_default_converter() -> Converter:


 def _load_sssom_context() -> Context:
-    with open(SSSOM_CONTEXT) as file:
-        return json.load(file, strict=False)
+    with SSSOM_CONTEXT.open() as file:
+        return cast(Context, json.load(file, strict=False))


 @lru_cache(1)
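
`json.load` is typed as returning `Any`, so strict mode would reject `_load_sssom_context`'s declared `Context` return type; wrapping the result in `typing.cast` records the expected shape without changing runtime behaviour, and opening the packaged context through the shared `SCHEMA_RESOURCES` traversable avoids assuming it is a plain file path. A reduced sketch (the `Context` alias definition is not shown in this diff, so `Mapping[str, Any]` is assumed here for illustration):

```python
# Sketch: giving json.load's Any result a concrete type under mypy --strict.
from __future__ import annotations

import io
import json
from typing import Any, Mapping, cast

from typing_extensions import TypeAlias

Context: TypeAlias = Mapping[str, Any]  # assumed shape, for illustration only


def load_context(handle: io.TextIOBase) -> Context:
    # cast() documents the expected type; it performs no runtime conversion.
    return cast(Context, json.load(handle, strict=False))


print(load_context(io.StringIO('{"@context": {"sssom": "https://w3id.org/sssom/"}}')))
```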

src/sssom/io.py

Lines changed: 7 additions & 6 deletions
@@ -1,5 +1,7 @@
 """I/O utilities for SSSOM."""

+from __future__ import annotations
+
 import logging
 import os
 import re
@@ -33,6 +35,9 @@
 from .util import MappingSetDataFrame, are_params_slots, augment_metadata, raise_for_bad_path
 from .writers import get_writer_function, write_table, write_tables

+VV = Union[str, Path]
+RecursivePathList: TypeAlias = Union[VV, Iterable[Union[VV, "RecursivePathList"]]]
+

 def convert_file(
     input_path: str,
@@ -62,7 +67,7 @@ def parse_file(
     clean_prefixes: bool = True,
     strict_clean_prefixes: bool = True,
     embedded_mode: bool = True,
-    mapping_predicate_filter: tuple = None,
+    mapping_predicate_filter: RecursivePathList | None = None,
 ) -> None:
     """Parse an SSSOM metadata file and write to a table.

@@ -131,7 +136,7 @@ def split_file(input_path: str, output_directory: Union[str, Path]) -> None:
     write_tables(splitted, output_directory)


-@deprecated(
+@deprecated( # type:ignore[misc]
     deprecated_in="0.4.3",
     details="This functionality for loading SSSOM metadata from a YAML file is deprecated from the "
     "public API since it has internal assumptions which are usually not valid for downstream users.",
@@ -178,10 +183,6 @@ def _merge_converter(
     raise ValueError(f"Invalid prefix map mode: {prefix_map_mode}")


-VV = Union[str, Path]
-RecursivePathList: TypeAlias = Union[VV, Iterable[Union[VV, "RecursivePathList"]]]
-
-
 def extract_iris(input: RecursivePathList, converter: Converter) -> List[str]:
     """
     Recursively extracts a list of IRIs from a string or file.
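
`VV` and `RecursivePathList` are hoisted above `parse_file`, which now references the alias in its signature. The alias is recursive through a string forward reference, so it accepts a single path or arbitrarily nested iterables of paths; a small sketch of how such an alias behaves:

```python
# Sketch: a recursive type alias for "a path, or nested iterables of paths".
# The quoted "RecursivePathList" is a forward reference that lets the alias
# refer to itself; mypy resolves it when checking the module.
from pathlib import Path
from typing import Iterable, List, Union

from typing_extensions import TypeAlias

VV = Union[str, Path]
RecursivePathList: TypeAlias = Union[VV, Iterable[Union[VV, "RecursivePathList"]]]


def flatten(value: RecursivePathList) -> List[str]:
    """Flatten nested path collections into a flat list of strings."""
    if isinstance(value, (str, Path)):
        return [str(value)]
    return [leaf for item in value for leaf in flatten(item)]


print(flatten(["a.tsv", [Path("b.tsv"), ("c.tsv",)]]))  # ['a.tsv', 'b.tsv', 'c.tsv']
```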
