11import os
2- from typing import Any , Callable , List
2+ from dataclasses import dataclass
3+ from typing import TYPE_CHECKING , Any , Callable , List , Optional
34
45import pytest
56
67from . import __version__
78from ._wrapper import get_lib
89
9- lib = get_lib ()
10-
11- _benchmark_count = 0
12-
13-
14- @pytest .hookimpl (trylast = True )
15- def pytest_report_header (config : "pytest.Config" ):
16- return f"codspeed: { __version__ } "
10+ if TYPE_CHECKING :
11+ from ._wrapper import LibType
1712
1813
1914@pytest .hookimpl (trylast = True )
@@ -27,6 +22,21 @@ def pytest_addoption(parser: "pytest.Parser"):
2722 )
2823
2924
@dataclass(unsafe_hash=True)
class CodSpeedPlugin:
    """Shared plugin state, registered once on the pytest plugin manager."""

    # True when benchmarking is requested (--codspeed flag or CODSPEED_ENV set).
    is_codspeed_enabled: bool
    # True only when running inside the CodSpeed environment (CODSPEED_ENV set),
    # i.e. when actual instrumentation/measurement should happen.
    should_measure: bool
    # Instrumentation library handle; populated only when should_measure is True.
    lib: Optional["LibType"] = None
    # Number of benchmarks executed during the session.
    benchmark_count: int = 0
31+
32+
PLUGIN_NAME = "codspeed_plugin"


def get_plugin(config: "pytest.Config") -> "CodSpeedPlugin":
    """Fetch the registered ``CodSpeedPlugin`` instance for this pytest config."""
    manager = config.pluginmanager
    return manager.get_plugin(PLUGIN_NAME)
38+
39+
3040@pytest .hookimpl ()
3141def pytest_configure (config : "pytest.Config" ):
3242 config .addinivalue_line (
@@ -35,10 +45,26 @@ def pytest_configure(config: "pytest.Config"):
3545 config .addinivalue_line (
3646 "markers" , "benchmark: mark an entire test for codspeed benchmarking"
3747 )
48+ plugin = CodSpeedPlugin (
49+ is_codspeed_enabled = config .getoption ("--codspeed" )
50+ or os .environ .get ("CODSPEED_ENV" ) is not None ,
51+ should_measure = os .environ .get ("CODSPEED_ENV" ) is not None ,
52+ )
53+ if plugin .should_measure :
54+ plugin .lib = get_lib ()
55+ config .pluginmanager .register (plugin , PLUGIN_NAME )
3856
3957
@pytest.hookimpl(trylast=True)
def pytest_report_header(config: "pytest.Config"):
    """Add the codspeed version — and, when enabled outside the CodSpeed
    environment, a notice that no measurement will happen — to the report header."""
    lines = [f"codspeed: {__version__}"]
    plugin = get_plugin(config)
    if plugin.is_codspeed_enabled and not plugin.should_measure:
        lines.append(
            "NOTICE: codspeed is enabled, but no performance measurement"
            " will be made since it's running in an unknown environment."
        )
    return "\n".join(lines)
4268
4369
4470def should_benchmark_item (item : "pytest.Item" ) -> bool :
@@ -51,16 +77,17 @@ def should_benchmark_item(item: "pytest.Item") -> bool:
5177
@pytest.hookimpl()
def pytest_sessionstart(session: "pytest.Session"):
    """Reset the benchmark counter at the start of a codspeed-enabled session."""
    plugin = get_plugin(session.config)
    if not plugin.is_codspeed_enabled:
        return
    plugin.benchmark_count = 0
5783
5884
5985@pytest .hookimpl (trylast = True )
6086def pytest_collection_modifyitems (
6187 session : "pytest.Session" , config : "pytest.Config" , items : "List[pytest.Item]"
6288):
63- if is_benchmark_enabled (session .config ):
89+ plugin = get_plugin (config )
90+ if plugin .is_codspeed_enabled :
6491 deselected = []
6592 selected = []
6693 for item in items :
@@ -74,45 +101,51 @@ def pytest_collection_modifyitems(
74101
@pytest.hookimpl()
def pytest_runtest_call(item: "pytest.Item"):
    """Run a test item, wrapping it with instrumentation when it is a benchmark.

    Instrumented measurement only happens when running inside the CodSpeed
    environment (``plugin.should_measure``); in every other case the test is
    simply executed so the benchmark code path is still exercised.
    """
    plugin = get_plugin(item.config)
    if not plugin.is_codspeed_enabled or not should_benchmark_item(item):
        item.runtest()
        return
    plugin.benchmark_count += 1
    uses_benchmark_fixture = "benchmark" in getattr(item, "fixturenames", [])
    if uses_benchmark_fixture or not plugin.should_measure:
        # Either the benchmark fixture performs the measurement itself, or we
        # are outside the instrumented environment: just run the test.
        item.runtest()
        return
    assert plugin.lib is not None
    plugin.lib.zero_stats()
    plugin.lib.start_instrumentation()
    item.runtest()
    plugin.lib.stop_instrumentation()
    plugin.lib.dump_stats_at(f"{item.nodeid}".encode("ascii"))
90121
91122
@pytest.hookimpl()
def pytest_sessionfinish(session: "pytest.Session", exitstatus):
    """Report how many benchmarks ran (or were merely tested) at session end."""
    plugin = get_plugin(session.config)
    if not plugin.is_codspeed_enabled:
        return
    reporter = session.config.pluginmanager.get_plugin("terminalreporter")
    if plugin.should_measure:
        count_suffix = "benchmarked"
    else:
        count_suffix = "benchmark tested"
    reporter.write_sep("=", f"{plugin.benchmark_count} {count_suffix}")
102133
103134
@pytest.fixture
def codspeed_benchmark(request: "pytest.FixtureRequest") -> Callable:
    """Fixture returning a callable that benchmarks ``func(*args, **kwargs)``.

    When codspeed is enabled and running in the measurement environment, the
    call is wrapped with instrumentation and its stats are dumped under the
    requesting test's node id; otherwise the function is simply invoked.

    Generalized over the previous version: keyword arguments are now forwarded
    to ``func`` and its return value is propagated to the caller (backward
    compatible — positional-only callers and callers ignoring the result are
    unaffected).
    """
    plugin = get_plugin(request.config)

    def run(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
        if plugin.is_codspeed_enabled and plugin.should_measure:
            assert plugin.lib is not None
            plugin.lib.zero_stats()
            plugin.lib.start_instrumentation()
            # Run the benchmarked callable under instrumentation.
            result = func(*args, **kwargs)
            plugin.lib.stop_instrumentation()
            plugin.lib.dump_stats_at(f"{request.node.nodeid}".encode("ascii"))
        else:
            result = func(*args, **kwargs)
        return result

    return run
118151
0 commit comments