Skip to content

Commit dd4f9ba

Browse files
committed
Refactor plugin logic with a cleaner structure
1 parent c7716a2 commit dd4f9ba

File tree

6 files changed: +122 additions, -56 deletions

.github/workflows/ci.yml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,5 @@ jobs:
3939
- name: Install dependencies
4040
run: |
4141
pip install .[dev]
42-
pip uninstall pytest-benchmark -y
4342
- name: Run tests
4443
run: pytest

pyproject.toml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,6 @@ dev = [
4444
"pytest ~= 7.0",
4545
"pytest-cov ~= 4.0.0",
4646
"ruff ~= 0.0.100",
47-
"pytest-benchmark>=3.4.1",
4847
]
4948

5049
[project.entry-points]

src/pytest_codspeed/_wrapper/__init__.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
from cffi import FFI # type: ignore
55

66
if TYPE_CHECKING:
7-
from .wrapper import lib as _lib
7+
from .wrapper import lib as LibType
88

99
_wrapper_dir = os.path.dirname(os.path.abspath(__file__))
1010

@@ -21,7 +21,7 @@ def _get_ffi():
2121
return ffi
2222

2323

24-
def get_lib() -> "_lib":
24+
def get_lib() -> "LibType":
2525
try:
2626
ffi = _get_ffi()
2727
ffi.compile(
Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,13 @@
11
class lib:
2-
def start_instrumentation(self) -> None: ...
3-
def stop_instrumentation(self) -> None: ...
4-
def dump_stats(self) -> None: ...
5-
def dump_stats_at(self, trigger: bytes) -> None: ...
6-
def zero_stats(self) -> None: ...
7-
def toggle_collect(self) -> None: ...
2+
@staticmethod
3+
def start_instrumentation() -> None: ...
4+
@staticmethod
5+
def stop_instrumentation() -> None: ...
6+
@staticmethod
7+
def dump_stats() -> None: ...
8+
@staticmethod
9+
def dump_stats_at(trigger: bytes) -> None: ...
10+
@staticmethod
11+
def zero_stats() -> None: ...
12+
@staticmethod
13+
def toggle_collect() -> None: ...

src/pytest_codspeed/plugin.py

Lines changed: 72 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,14 @@
11
import os
2-
from typing import Any, Callable, List
2+
from dataclasses import dataclass
3+
from typing import TYPE_CHECKING, Any, Callable, List, Optional
34

45
import pytest
56

67
from . import __version__
78
from ._wrapper import get_lib
89

9-
lib = get_lib()
10-
11-
_benchmark_count = 0
12-
13-
14-
@pytest.hookimpl(trylast=True)
15-
def pytest_report_header(config: "pytest.Config"):
16-
return f"codspeed: {__version__}"
10+
if TYPE_CHECKING:
11+
from ._wrapper import LibType
1712

1813

1914
@pytest.hookimpl(trylast=True)
@@ -27,6 +22,21 @@ def pytest_addoption(parser: "pytest.Parser"):
2722
)
2823

2924

25+
@dataclass(unsafe_hash=True)
26+
class CodSpeedPlugin:
27+
is_codspeed_enabled: bool
28+
should_measure: bool
29+
lib: Optional["LibType"] = None
30+
benchmark_count: int = 0
31+
32+
33+
PLUGIN_NAME = "codspeed_plugin"
34+
35+
36+
def get_plugin(config: "pytest.Config") -> "CodSpeedPlugin":
37+
return config.pluginmanager.get_plugin(PLUGIN_NAME)
38+
39+
3040
@pytest.hookimpl()
3141
def pytest_configure(config: "pytest.Config"):
3242
config.addinivalue_line(
@@ -35,10 +45,26 @@ def pytest_configure(config: "pytest.Config"):
3545
config.addinivalue_line(
3646
"markers", "benchmark: mark an entire test for codspeed benchmarking"
3747
)
48+
plugin = CodSpeedPlugin(
49+
is_codspeed_enabled=config.getoption("--codspeed")
50+
or os.environ.get("CODSPEED_ENV") is not None,
51+
should_measure=os.environ.get("CODSPEED_ENV") is not None,
52+
)
53+
if plugin.should_measure:
54+
plugin.lib = get_lib()
55+
config.pluginmanager.register(plugin, PLUGIN_NAME)
3856

3957

40-
def is_benchmark_enabled(config: "pytest.Config") -> bool:
41-
return config.getoption("--codspeed") or os.environ.get("CODSPEED_ENV") is not None
58+
@pytest.hookimpl(trylast=True)
59+
def pytest_report_header(config: "pytest.Config"):
60+
out = [f"codspeed: {__version__}"]
61+
plugin = get_plugin(config)
62+
if plugin.is_codspeed_enabled and not plugin.should_measure:
63+
out.append(
64+
"NOTICE: codspeed is enabled, but no performance measurement"
65+
" will be made since it's running in an unknown environment."
66+
)
67+
return "\n".join(out)
4268

4369

4470
def should_benchmark_item(item: "pytest.Item") -> bool:
@@ -51,16 +77,17 @@ def should_benchmark_item(item: "pytest.Item") -> bool:
5177

5278
@pytest.hookimpl()
5379
def pytest_sessionstart(session: "pytest.Session"):
54-
if is_benchmark_enabled(session.config):
55-
global _benchmark_count
56-
_benchmark_count = 0
80+
plugin = get_plugin(session.config)
81+
if plugin.is_codspeed_enabled:
82+
plugin.benchmark_count = 0
5783

5884

5985
@pytest.hookimpl(trylast=True)
6086
def pytest_collection_modifyitems(
6187
session: "pytest.Session", config: "pytest.Config", items: "List[pytest.Item]"
6288
):
63-
if is_benchmark_enabled(session.config):
89+
plugin = get_plugin(config)
90+
if plugin.is_codspeed_enabled:
6491
deselected = []
6592
selected = []
6693
for item in items:
@@ -74,45 +101,51 @@ def pytest_collection_modifyitems(
74101

75102
@pytest.hookimpl()
76103
def pytest_runtest_call(item: "pytest.Item"):
77-
if not is_benchmark_enabled(item.config) or not should_benchmark_item(item):
104+
plugin = get_plugin(item.config)
105+
if not plugin.is_codspeed_enabled or not should_benchmark_item(item):
78106
item.runtest()
79107
else:
80-
global _benchmark_count
81-
_benchmark_count += 1
108+
plugin.benchmark_count += 1
82109
if "benchmark" in getattr(item, "fixturenames", []):
110+
# This is a benchmark fixture, so the measurement is done by the fixture
111+
item.runtest()
112+
elif not plugin.should_measure:
83113
item.runtest()
84114
else:
85-
lib.zero_stats()
86-
lib.start_instrumentation()
115+
assert plugin.lib is not None
116+
plugin.lib.zero_stats()
117+
plugin.lib.start_instrumentation()
87118
item.runtest()
88-
lib.stop_instrumentation()
89-
lib.dump_stats_at(f"{item.nodeid}".encode("ascii"))
119+
plugin.lib.stop_instrumentation()
120+
plugin.lib.dump_stats_at(f"{item.nodeid}".encode("ascii"))
90121

91122

92123
@pytest.hookimpl()
93124
def pytest_sessionfinish(session: "pytest.Session", exitstatus):
94-
if is_benchmark_enabled(session.config):
125+
plugin = get_plugin(session.config)
126+
if plugin.is_codspeed_enabled:
95127
reporter = session.config.pluginmanager.get_plugin("terminalreporter")
96-
reporter.write_sep("=", f"{_benchmark_count} benchmarked")
97-
98-
99-
@pytest.fixture(scope="session")
100-
def _is_benchmark_enabled(request: "pytest.FixtureRequest") -> bool:
101-
return is_benchmark_enabled(request.config)
128+
count_suffix = "benchmarked" if plugin.should_measure else "benchmark tested"
129+
reporter.write_sep(
130+
"=",
131+
f"{plugin.benchmark_count} {count_suffix}",
132+
)
102133

103134

104135
@pytest.fixture
105-
def codspeed_benchmark(
106-
request: "pytest.FixtureRequest", _is_benchmark_enabled: bool
107-
) -> Callable:
136+
def codspeed_benchmark(request: "pytest.FixtureRequest") -> Callable:
137+
plugin = get_plugin(request.config)
138+
108139
def run(func: Callable[..., Any], *args: Any):
109-
if _is_benchmark_enabled:
110-
lib.zero_stats()
111-
lib.start_instrumentation()
112-
func(*args)
113-
if _is_benchmark_enabled:
114-
lib.stop_instrumentation()
115-
lib.dump_stats_at(f"{request.node.nodeid}".encode("ascii"))
140+
if plugin.is_codspeed_enabled and plugin.should_measure:
141+
assert plugin.lib is not None
142+
plugin.lib.zero_stats()
143+
plugin.lib.start_instrumentation()
144+
func(*args)
145+
plugin.lib.stop_instrumentation()
146+
plugin.lib.dump_stats_at(f"{request.node.nodeid}".encode("ascii"))
147+
else:
148+
func(*args)
116149

117150
return run
118151

tests/test_pytest_plugin.py

Lines changed: 36 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,19 @@
11
import os
2+
from contextlib import contextmanager
23

34
import pytest
45

56

6-
def test_plugin_enabled_cli(pytester: pytest.Pytester) -> None:
7+
@contextmanager
8+
def codspeed_env():
9+
os.environ["CODSPEED_ENV"] = "1"
10+
try:
11+
yield
12+
finally:
13+
del os.environ["CODSPEED_ENV"]
14+
15+
16+
def test_plugin_enabled_without_env(pytester: pytest.Pytester) -> None:
717
pytester.makepyfile(
818
"""
919
def test_some_addition_performance(benchmark):
@@ -13,10 +23,30 @@ def _():
1323
"""
1424
)
1525
result = pytester.runpytest("--codspeed")
26+
result.stdout.fnmatch_lines(
27+
[
28+
"NOTICE: codspeed is enabled, but no performance measurement will be made*",
29+
"*1 benchmark tested*",
30+
"*1 passed*",
31+
]
32+
)
33+
34+
35+
def test_plugin_enabled_by_env(pytester: pytest.Pytester) -> None:
36+
pytester.makepyfile(
37+
"""
38+
def test_some_addition_performance(benchmark):
39+
@benchmark
40+
def _():
41+
return 1 + 1
42+
"""
43+
)
44+
with codspeed_env():
45+
result = pytester.runpytest()
1646
result.stdout.fnmatch_lines(["*1 benchmarked*", "*1 passed*"])
1747

1848

19-
def test_plugin_enabled_env(pytester: pytest.Pytester) -> None:
49+
def test_plugin_enabled_and_env(pytester: pytest.Pytester) -> None:
2050
pytester.makepyfile(
2151
"""
2252
def test_some_addition_performance(benchmark):
@@ -25,11 +55,9 @@ def _():
2555
return 1 + 1
2656
"""
2757
)
28-
os.environ["CODSPEED_ENV"] = "1"
29-
pytester.runpytest()
30-
result = pytester.runpytest()
58+
with codspeed_env():
59+
result = pytester.runpytest("--codspeed")
3160
result.stdout.fnmatch_lines(["*1 benchmarked*", "*1 passed*"])
32-
del os.environ["CODSPEED_ENV"]
3361

3462

3563
def test_plugin_disabled(pytester: pytest.Pytester) -> None:
@@ -52,7 +80,8 @@ def test_some_addition_performance():
5280
return 1 + 1
5381
"""
5482
)
55-
result = pytester.runpytest("--codspeed")
83+
with codspeed_env():
84+
result = pytester.runpytest("--codspeed")
5685
result.stdout.fnmatch_lines(["*0 benchmarked*", "*1 deselected*"])
5786

5887

Commit comments: 0