Skip to content

Commit 585ea0d

Browse files
committed
Migrate to ruff.
1 parent 5523b7d commit 585ea0d

37 files changed

+441
-407
lines changed

.flake8

Lines changed: 0 additions & 53 deletions
This file was deleted.

.pre-commit-config.yaml

Lines changed: 6 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,7 @@
11
repos:
2-
- hooks:
3-
- id: black
4-
language_version: python3
5-
repo: https://github.com/ambv/black
6-
rev: 24.10.0
7-
- hooks:
8-
- id: isort
9-
language_version: python3
10-
repo: https://github.com/PyCQA/isort
11-
rev: 5.13.2
12-
- hooks:
13-
- id: flake8
14-
language_version: python3
15-
additional_dependencies:
16-
- flake8-bugbear
17-
- flake8-comprehensions
18-
- flake8-debugger
19-
- flake8-docstrings
20-
- flake8-string-format
21-
repo: https://github.com/pycqa/flake8
22-
rev: 7.1.1
2+
- repo: https://github.com/astral-sh/ruff-pre-commit
3+
rev: v0.13.1
4+
hooks:
5+
- id: ruff-check
6+
args: [ --fix ]
7+
- id: ruff-format

docs/conf.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,13 +10,12 @@
1010
# add these directories to sys.path here. If the directory is relative to the
1111
# documentation root, use os.path.abspath to make it absolute, like shown here.
1212
#
13-
import os
1413
import pkgutil
1514
import sys
1615
from datetime import datetime
1716
from pathlib import Path
1817

19-
sys.path.insert(0, os.path.abspath("../"))
18+
sys.path.insert(0, str(Path(__file__).parent.parent))
2019

2120

2221
def get_copyright(attribution, *, first_year):
@@ -29,7 +28,7 @@ def get_copyright(attribution, *, first_year):
2928

3029
def get_version_and_release():
3130
try:
32-
import scrapy_poet # noqa: F401
31+
import scrapy_poet # noqa: F401,PLC0415
3332
except ImportError:
3433
return "", ""
3534
version_bytes = pkgutil.get_data("scrapy_poet", "VERSION") or b""
@@ -42,7 +41,7 @@ def get_version_and_release():
4241
# -- Project information -----------------------------------------------------
4342

4443
project = "scrapy-poet"
45-
copyright = get_copyright("Zyte Group Ltd", first_year=2019)
44+
project_copyright = get_copyright("Zyte Group Ltd", first_year=2019)
4645
author = "Zyte"
4746

4847
version, release = get_version_and_release()

example/example/autoextract.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
which even requires an API request.
44
"""
55

6-
from typing import Any, Dict
6+
from typing import Any
77

88
import attr
99
from scrapy import Request
@@ -18,7 +18,7 @@
1818
class AutoextractProductResponse:
1919
"""Input data"""
2020

21-
data: Dict[str, Any]
21+
data: dict[str, Any]
2222

2323

2424
class AutoextractProductProvider(PageObjectInputProvider):
@@ -51,5 +51,4 @@ def url(self):
5151
return self.autoextract_resp.data["product"]["url"]
5252

5353
def to_item(self):
54-
product = self.autoextract_resp.data["product"]
55-
return product
54+
return self.autoextract_resp.data["product"]

example/example/spiders/books_03.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@
33
"""
44

55
import scrapy
6-
from example.autoextract import ProductPage
76

7+
from example.autoextract import ProductPage
88
from scrapy_poet import callback_for
99

1010

example/example/spiders/books_05.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,10 @@
44
"""
55

66
import scrapy
7-
from example.autoextract import ProductPage
87
from web_poet import WebPage
98

9+
from example.autoextract import ProductPage
10+
1011

1112
class BookListPage(WebPage):
1213
def product_urls(self):

example/example/spiders/books_05_1.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,9 +12,9 @@
1212
"""
1313

1414
import scrapy
15-
from example.autoextract import ProductPage
1615
from web_poet import WebPage
1716

17+
from example.autoextract import ProductPage
1818
from scrapy_poet import DummyResponse
1919

2020

pyproject.toml

Lines changed: 151 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,3 @@
1-
[tool.black]
2-
line-length = 88
3-
41
[tool.bumpversion]
52
current_version = "0.26.0"
63
commit = true
@@ -34,12 +31,6 @@ exclude_also = [
3431
"@(abc\\.)?abstractmethod",
3532
]
3633

37-
[tool.isort]
38-
profile = "black"
39-
multi_line_output = 3
40-
# scrapy_poet/__init__.py: Automatic sorting causes circular dependencies.
41-
skip = ["scrapy_poet/__init__.py"]
42-
4334
[[tool.mypy.overrides]]
4435
module = [
4536
"tests.test_cache.*",
@@ -51,3 +42,154 @@ module = [
5142
# when test cases are decorated with @inlineCallbacks. However, the
5243
# tests don't return anything at all.
5344
disable_error_code = "misc"
45+
46+
[tool.ruff.lint]
47+
extend-select = [
48+
# flake8-builtins
49+
"A",
50+
# flake8-async
51+
"ASYNC",
52+
# flake8-bugbear
53+
"B",
54+
# flake8-comprehensions
55+
"C4",
56+
# flake8-commas
57+
"COM",
58+
# pydocstyle
59+
"D",
60+
# flake8-future-annotations
61+
"FA",
62+
# flynt
63+
"FLY",
64+
# refurb
65+
"FURB",
66+
# isort
67+
"I",
68+
# flake8-implicit-str-concat
69+
"ISC",
70+
# flake8-logging
71+
"LOG",
72+
# Perflint
73+
"PERF",
74+
# pygrep-hooks
75+
"PGH",
76+
# flake8-pie
77+
"PIE",
78+
# pylint
79+
"PL",
80+
# flake8-pytest-style
81+
"PT",
82+
# flake8-use-pathlib
83+
"PTH",
84+
# flake8-pyi
85+
"PYI",
86+
# flake8-quotes
87+
"Q",
88+
# flake8-return
89+
"RET",
90+
# flake8-raise
91+
"RSE",
92+
# Ruff-specific rules
93+
"RUF",
94+
# flake8-bandit
95+
"S",
96+
# flake8-simplify
97+
"SIM",
98+
# flake8-slots
99+
"SLOT",
100+
# flake8-debugger
101+
"T10",
102+
# flake8-type-checking
103+
"TC",
104+
# flake8-tidy-imports
105+
"TID",
106+
# pyupgrade
107+
"UP",
108+
# pycodestyle warnings
109+
"W",
110+
# flake8-2020
111+
"YTT",
112+
]
113+
ignore = [
114+
# Trailing comma missing
115+
"COM812",
116+
# Missing docstring in public module
117+
"D100",
118+
# Missing docstring in public class
119+
"D101",
120+
# Missing docstring in public method
121+
"D102",
122+
# Missing docstring in public function
123+
"D103",
124+
# Missing docstring in public package
125+
"D104",
126+
# Missing docstring in magic method
127+
"D105",
128+
# Missing docstring in __init__
129+
"D107",
130+
# One-line docstring should fit on one line with quotes
131+
"D200",
132+
# No blank lines allowed after function docstring
133+
"D202",
134+
# 1 blank line required between summary line and description
135+
"D205",
136+
# Multi-line docstring closing quotes should be on a separate line
137+
"D209",
138+
# First line should end with a period
139+
"D400",
140+
# First line should be in imperative mood; try rephrasing
141+
"D401",
142+
# First line should not be the function's "signature"
143+
"D402",
144+
# Too many return statements
145+
"PLR0911",
146+
# Too many branches
147+
"PLR0912",
148+
# Too many arguments in function definition
149+
"PLR0913",
150+
# Too many statements
151+
"PLR0915",
152+
# Magic value used in comparison
153+
"PLR2004",
154+
# String contains ambiguous {}.
155+
"RUF001",
156+
# Docstring contains ambiguous {}.
157+
"RUF002",
158+
# Comment contains ambiguous {}.
159+
"RUF003",
160+
# Mutable class attributes should be annotated with `typing.ClassVar`
161+
"RUF012",
162+
# Use of `assert` detected
163+
"S101",
164+
# Yoda condition detected
165+
"SIM300",
166+
# Add `from __future__ import annotations` to simplify
167+
# (It's harder to keep annotations resolvable at the runtime with it.)
168+
"FA100",
169+
]
170+
171+
[tool.ruff.lint.flake8-tidy-imports]
172+
banned-module-level-imports = [
173+
"twisted.internet.reactor",
174+
]
175+
176+
[tool.ruff.lint.isort]
177+
split-on-trailing-comma = false
178+
179+
[tool.ruff.lint.per-file-ignores]
180+
"example/*" = ["PLC0415"]
181+
# scrapy_poet/__init__.py: Automatic import sorting causes circular dependencies.
182+
"scrapy_poet/__init__.py" = ["F401", "I"]
183+
"scrapy_poet/page_inputs/__init__.py" = ["F401"]
184+
"tests/*" = ["SLOT000", "S"]
185+
186+
# we need to use typing.Set[] over modern alternatives with web-poet<0.19.0 && Python<3.11
187+
# see https://github.com/scrapinghub/web-poet/pull/219
188+
"scrapy_poet/page_input_providers.py" = ["UP006", "UP035"]
189+
"tests/test_downloader.py" =["UP006", "UP035"]
190+
"tests/test_providers.py" =["UP006", "UP035"]
191+
"tests/test_request_fingerprinter.py" =["UP006", "UP035"]
192+
"tests/test_web_poet_rules.py" =["UP006", "UP035"]
193+
194+
[tool.ruff.lint.pydocstyle]
195+
convention = "pep257"

scrapy_poet/_addon.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,15 +22,15 @@ def _replace_builtin(
2222
f"{builtin_cls} entry with {new_cls}. Add {new_cls} manually to "
2323
f"silence this warning."
2424
)
25-
return None
25+
return
2626

2727
if new_cls in setting_value:
28-
return None
28+
return
2929
for cls_or_path in setting_value:
3030
if isinstance(cls_or_path, str):
3131
_cls = load_object(cls_or_path)
3232
if _cls == new_cls:
33-
return None
33+
return
3434

3535
builtin_entry: object = None
3636
for _setting_value in (setting_value, settings[f"{setting}_BASE"]):
@@ -54,7 +54,7 @@ def _replace_builtin(
5454
f"missing built-in entry {builtin_cls}. Cannot replace it with {new_cls}. "
5555
f"Add {new_cls} manually to silence this warning."
5656
)
57-
return None
57+
return
5858

5959
if pos is None:
6060
logger.warning(

0 commit comments

Comments
 (0)