Skip to content

Commit 9f2951e

Browse files
committed
lint: ruff fixes
1 parent 83ce9f9 commit 9f2951e

20 files changed

+10
-80
lines changed

docs/scripts/autorefs/plugin.py

Lines changed: 8 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -84,9 +84,7 @@ def handleMatch(self, m, data) -> Union[Element, EvalIDType]: # type: ignore[ov
8484

8585
return self.makeTag(identifier, text), m.start(0), end
8686

87-
def evalId(
88-
self, data: str, index: int, text: str
89-
) -> EvalIDType: # noqa: N802 (parent's casing)
87+
def evalId(self, data: str, index: int, text: str) -> EvalIDType: # noqa: N802 (parent's casing)
9088
"""Evaluate the id portion of `[ref][id]`.
9189
9290
If `[ref][]` use `[ref]`.
@@ -157,9 +155,7 @@ def relative_url(url_a: str, url_b: str) -> str:
157155
return f"{relative}#{anchor}"
158156

159157

160-
def fix_ref(
161-
url_mapper: Callable[[str], str], unmapped: List[str]
162-
) -> Callable: # noqa: WPS212,WPS231
158+
def fix_ref(url_mapper: Callable[[str], str], unmapped: List[str]) -> Callable: # noqa: WPS212,WPS231
163159
"""Return a `repl` function for [`re.sub`](https://docs.python.org/3/library/re.html#re.sub).
164160
165161
In our context, we match Markdown references and replace them with HTML links.
@@ -225,9 +221,7 @@ def fix_refs(html: str, url_mapper: Callable[[str], str]) -> Tuple[str, List[str
225221
class AutorefsExtension(Extension):
226222
"""Extension that inserts auto-references in Markdown."""
227223

228-
def extendMarkdown(
229-
self, md: Markdown
230-
) -> None: # noqa: N802 (casing: parent method's name)
224+
def extendMarkdown(self, md: Markdown) -> None: # noqa: N802 (casing: parent method's name)
231225
"""Register the extension.
232226
233227
Add an instance of our [`AutoRefInlineProcessor`][mkdocs_autorefs.references.AutoRefInlineProcessor] to the Markdown parser.
@@ -268,9 +262,7 @@ def __init__(self) -> None:
268262
super().__init__()
269263
self._url_map: Dict[str, str] = {}
270264
self._abs_url_map: Dict[str, str] = {}
271-
self.get_fallback_anchor: Optional[
272-
Callable[[str], Optional[str]]
273-
] = None # noqa: WPS234
265+
self.get_fallback_anchor: Optional[Callable[[str], Optional[str]]] = None # noqa: WPS234
274266
self._priority_patterns = None
275267

276268
@property
@@ -355,9 +347,7 @@ def get_item_url( # noqa: WPS234
355347
"""
356348
return self._get_item_url(identifier, fallback)
357349

358-
def on_config(
359-
self, config: Config, **kwargs
360-
) -> Config: # noqa: W0613,R0201 (unused arguments, cannot be static)
350+
def on_config(self, config: Config, **kwargs) -> Config: # noqa: W0613,R0201 (unused arguments, cannot be static)
361351
"""Instantiate our Markdown extension.
362352
363353
Hook for the [`on_config` event](https://www.mkdocs.org/user-guide/plugins/#on_config).
@@ -375,9 +365,7 @@ def on_config(
375365
config["markdown_extensions"].append(AutorefsExtension())
376366
return config
377367

378-
def on_page_markdown(
379-
self, markdown: str, page: Page, **kwargs
380-
) -> str: # noqa: W0613 (unused arguments)
368+
def on_page_markdown(self, markdown: str, page: Page, **kwargs) -> str: # noqa: W0613 (unused arguments)
381369
"""Remember which page is the current one.
382370
383371
Arguments:
@@ -391,9 +379,7 @@ def on_page_markdown(
391379
self.current_page = page.url # noqa: WPS601
392380
return markdown
393381

394-
def on_page_content(
395-
self, html: str, page: Page, **kwargs
396-
) -> str: # noqa: W0613 (unused arguments)
382+
def on_page_content(self, html: str, page: Page, **kwargs) -> str: # noqa: W0613 (unused arguments)
397383
"""Map anchors to URLs.
398384
399385
Hook for the [`on_page_content` event](https://www.mkdocs.org/user-guide/plugins/#on_page_content).
@@ -431,9 +417,7 @@ def map_urls(self, page: Page, anchor: AnchorLink) -> None:
431417
for child in anchor.children:
432418
self.map_urls(page, child)
433419

434-
def on_post_page(
435-
self, output: str, page: Page, **kwargs
436-
) -> str: # noqa: W0613 (unused arguments)
420+
def on_post_page(self, output: str, page: Page, **kwargs) -> str: # noqa: W0613 (unused arguments)
437421
"""Fix cross-references.
438422
439423
Hook for the [`on_post_page` event](https://www.mkdocs.org/user-guide/plugins/#on_post_page).

scripts/adicap.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,6 @@ def parse_each_dict(df, dictionaryCode: str):
3535
def get_decode_dict(df, dict_keys=["D1", "D2", "D3", "D4", "D5", "D6", "D7"]):
3636
decode_dict = {}
3737
for key in dict_keys:
38-
3938
decode_dict[key] = parse_each_dict(df, dictionaryCode=key)
4039

4140
return decode_dict

scripts/conjugate_verbs.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@
1515
def conjugate_verbs(
1616
output_path: Path = typer.Argument(
1717
"edsnlp/resources/verbs.csv.gz", help="Path to the output CSV table."
18-
)
18+
),
1919
) -> None:
2020
"""
2121
Convenience script to automatically conjugate a set of verbs,

scripts/serve.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,6 @@
3232

3333

3434
class Entity(BaseModel): # (2)
35-
3635
# OMOP-style attributes
3736
start: int
3837
end: int
@@ -56,7 +55,6 @@ class Document(BaseModel): # (1)
5655
async def process(
5756
notes: List[str], # (2)
5857
):
59-
6058
documents = []
6159

6260
for doc in nlp.pipe(notes):

tests/connectors/test_labeltool.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99

1010

1111
def test_docs2labeltool(nlp):
12-
1312
modifiers = ["negated", "hypothesis", "reported_speech"]
1413

1514
docs = list(nlp.pipe(texts))

tests/connectors/test_omop.py

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@ def random_text():
2525

2626

2727
def random_note_nlp(text):
28-
2928
ents = []
3029

3130
for match in re.finditer(r"\w+", text):
@@ -44,7 +43,6 @@ def random_note_nlp(text):
4443

4544
@pytest.fixture
4645
def note():
47-
4846
df = pd.DataFrame(dict(note_text=[random_text() for _ in range(10)]))
4947
df["note_id"] = range(len(df))
5048
df["note_datetime"] = "2021-10-19"
@@ -54,7 +52,6 @@ def note():
5452

5553
@pytest.fixture
5654
def note_nlp(note):
57-
5855
df = note.copy()
5956
df["ents"] = df.note_text.apply(random_note_nlp)
6057
df = df.explode("ents")
@@ -77,7 +74,6 @@ def docs(omop: OmopConnector, note, note_nlp):
7774

7875

7976
def test_omop2docs(docs, note, note_nlp):
80-
8177
lexical_variants = note_nlp.groupby("note_id")["lexical_variant"].agg(list)
8278

8379
for doc, text, lvs in zip(docs, note.note_text, lexical_variants):

tests/matchers/test_phrase.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,6 @@ def test_eds_phrase_matcher(doc, nlp):
2121

2222

2323
def test_offset(blank_nlp):
24-
2524
text = "Ceci est un test de matching"
2625

2726
doc = blank_nlp(text)
@@ -45,7 +44,6 @@ def test_offset(blank_nlp):
4544

4645

4746
def test_remove(blank_nlp):
48-
4947
pattern = blank_nlp("matching")
5048
pattern2 = blank_nlp("Ceci")
5149

tests/pipelines/core/test_contextual_matcher.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -156,7 +156,6 @@
156156

157157
@pytest.mark.parametrize("params,example", list(zip(ALL_PARAMS, EXAMPLES)))
158158
def test_contextual(blank_nlp, params, example):
159-
160159
include_assigned, replace_entity, reduce_mode_stage, reduce_mode_metastase = params
161160

162161
blank_nlp.add_pipe(
@@ -225,9 +224,7 @@ def test_contextual(blank_nlp, params, example):
225224
assert len(doc.ents) == len(entities)
226225

227226
for entity, ent in zip(entities, doc.ents):
228-
229227
for modifier in entity.modifiers:
230-
231228
assert (
232229
rgetattr(ent, modifier.key) == modifier.value
233230
), f"{modifier.key} labels don't match."

tests/pipelines/core/test_normalisation.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,6 @@ def test_full_normalization(doc):
2525
@fixture
2626
def nlp_factory(blank_nlp):
2727
def f(a=False, lc=False, q=False, p=False):
28-
2928
if a:
3029
a = dict(accents=accents)
3130
if q:
@@ -48,7 +47,6 @@ def f(a=False, lc=False, q=False, p=False):
4847

4948

5049
def test_normalization_accents(nlp_factory, text):
51-
5250
nlp = nlp_factory(a=True)
5351
doc = nlp(text)
5452

@@ -58,7 +56,6 @@ def test_normalization_accents(nlp_factory, text):
5856

5957

6058
def test_normalization_spaces(nlp_factory, text):
61-
6259
nlp = nlp_factory(a=True)
6360
doc = nlp("Phrase avec des espaces \n et un retour à la ligne")
6461

@@ -67,7 +64,6 @@ def test_normalization_spaces(nlp_factory, text):
6764

6865

6966
def test_normalization_quotes(nlp_factory, text):
70-
7167
nlp = nlp_factory(q=True)
7268
doc = nlp(text)
7369

@@ -79,7 +75,6 @@ def test_normalization_quotes(nlp_factory, text):
7975

8076

8177
def test_normalization_lowercase(nlp_factory, text):
82-
8378
nlp = nlp_factory(lc=True)
8479
doc = nlp(text)
8580

@@ -89,7 +84,6 @@ def test_normalization_lowercase(nlp_factory, text):
8984

9085

9186
def test_normalization_pollution(nlp_factory, text):
92-
9387
nlp = nlp_factory(p=True)
9488
doc = nlp(text)
9589

tests/pipelines/misc/test_consultation_date.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,6 @@
5353
@pytest.mark.parametrize("date_pipeline", [True, False])
5454
@pytest.mark.parametrize("example", [cons, cons_town, cons_town_doc])
5555
def test_cons_dates(date_pipeline, example, blank_nlp):
56-
5756
blank_nlp.add_pipe(
5857
"eds.normalizer",
5958
config=dict(lowercase=True, accents=True, quotes=True, pollution=False),

0 commit comments

Comments (0)