21 changes: 21 additions & 0 deletions src/pytest_codspeed/instruments/__init__.py
@@ -5,6 +5,7 @@
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from collections.abc import Awaitable
from typing import Any, Callable, ClassVar, TypeVar

import pytest
@@ -37,6 +38,17 @@ def measure(
**kwargs: P.kwargs,
) -> T: ...

@abstractmethod
async def measure_async(
self,
marker_options: BenchmarkMarkerOptions,
name: str,
uri: str,
fn: Callable[P, Awaitable[T]],
*args: P.args,
**kwargs: P.kwargs,
) -> T: ...

@abstractmethod
def measure_pedantic(
self,
@@ -46,6 +58,15 @@ def measure_pedantic(
uri: str,
) -> T: ...

@abstractmethod
async def measure_pedantic_async(
self,
marker_options: BenchmarkMarkerOptions,
pedantic_options: PedanticOptions[Awaitable[T]],
name: str,
uri: str,
) -> T: ...

@abstractmethod
def report(self, session: pytest.Session) -> None: ...

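The hunks above only add the abstract interface. As an illustration of what a concrete instrument now has to provide, a minimal hypothetical implementation of the two new coroutine hooks could look roughly like the sketch below; the class name, the wall-clock timing, and the print-based reporting are assumptions made for this sketch and are not part of the PR (a real instrument also implements measure(), measure_pedantic(), and report()).

import time


class IllustrativeInstrument:  # hypothetical; only mirrors the two new async hooks
    async def measure_async(self, marker_options, name, uri, fn, *args, **kwargs):
        # Await the benchmarked coroutine while timing it with a wall clock.
        start = time.perf_counter()
        try:
            return await fn(*args, **kwargs)
        finally:
            print(f"{uri}: {time.perf_counter() - start:.6f}s")

    async def measure_pedantic_async(self, marker_options, pedantic_options, name, uri):
        # Pedantic mode drives setup/target/teardown itself, mirroring the sync variant.
        args, kwargs = pedantic_options.setup_and_get_args_kwargs()
        start = time.perf_counter()
        try:
            return await pedantic_options.target(*args, **kwargs)
        finally:
            print(f"{uri}: {time.perf_counter() - start:.6f}s")
            if pedantic_options.teardown is not None:
                pedantic_options.teardown(*args, **kwargs)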
109 changes: 83 additions & 26 deletions src/pytest_codspeed/instruments/valgrind.py
@@ -2,6 +2,7 @@

import os
import warnings
from contextlib import contextmanager
from typing import TYPE_CHECKING

from pytest_codspeed import __semver_version__
@@ -10,6 +11,7 @@
from pytest_codspeed.utils import SUPPORTS_PERF_TRAMPOLINE

if TYPE_CHECKING:
from collections.abc import Awaitable, Iterator
from typing import Any, Callable

from pytest import Session
@@ -52,6 +54,24 @@ def get_instrument_config_str_and_warns(self) -> tuple[str, list[str]]:
)
return config, warnings

@contextmanager
def _measure_context(self, uri: str):
self.benchmark_count += 1

if not self.instrument_hooks:
yield
return

# Manually call the library function to avoid an extra stack frame. Also
# call the callgrind markers directly to avoid extra overhead.
self.instrument_hooks.lib.callgrind_start_instrumentation()
try:
yield
finally:
# Ensure instrumentation is stopped even if the test failed
self.instrument_hooks.lib.callgrind_stop_instrumentation()
self.instrument_hooks.set_executed_benchmark(uri)

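The helper above is a plain contextmanager around the start/stop calls, so instrumentation is stopped whether the benchmarked code returns or raises. A stripped-down sketch of the same pattern, using a stand-in hooks object (illustrative only; the real hooks live behind self.instrument_hooks.lib), behaves as follows:

from contextlib import contextmanager


class StubHooks:  # stand-in for the CodSpeed instrument hooks, for illustration only
    def callgrind_start_instrumentation(self):
        print("start instrumentation")

    def callgrind_stop_instrumentation(self):
        print("stop instrumentation")

    def set_executed_benchmark(self, uri):
        print("executed", uri)


@contextmanager
def measure_context(hooks, uri):
    hooks.callgrind_start_instrumentation()
    try:
        yield
    finally:
        # Runs on success and when the benchmarked code raises.
        hooks.callgrind_stop_instrumentation()
        hooks.set_executed_benchmark(uri)


# Even a failing benchmark leaves instrumentation stopped:
try:
    with measure_context(StubHooks(), "tests/test_example.py::test_failing"):
        raise RuntimeError("benchmark failed")
except RuntimeError:
    pass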
def measure(
self,
marker_options: BenchmarkMarkerOptions,
@@ -61,47 +81,71 @@ def measure(
*args: P.args,
**kwargs: P.kwargs,
) -> T:
self.benchmark_count += 1

if not self.instrument_hooks:
return fn(*args, **kwargs)

def __codspeed_root_frame__() -> T:
return fn(*args, **kwargs)

if SUPPORTS_PERF_TRAMPOLINE:
# Warmup CPython performance map cache
__codspeed_root_frame__()

# Manually call the library function to avoid an extra stack frame. Also
# call the callgrind markers directly to avoid extra overhead.
self.instrument_hooks.lib.callgrind_start_instrumentation()
try:
with self._measure_context(uri):
return __codspeed_root_frame__()
finally:
# Ensure instrumentation is stopped even if the test failed
self.instrument_hooks.lib.callgrind_stop_instrumentation()
self.instrument_hooks.set_executed_benchmark(uri)

def measure_pedantic(
async def measure_async(
self,
marker_options: BenchmarkMarkerOptions,
pedantic_options: PedanticOptions[T],
name: str,
uri: str,
fn: Callable[P, Awaitable[T]],
*args: P.args,
**kwargs: P.kwargs,
) -> T:
async def __codspeed_root_frame__() -> T:
return await fn(*args, **kwargs)

if SUPPORTS_PERF_TRAMPOLINE:
# Warmup CPython performance map cache
await __codspeed_root_frame__()

with self._measure_context(uri):
return await __codspeed_root_frame__()

@contextmanager
def _measure_pedantic_context(
self,
pedantic_options: PedanticOptions[Any],
uri: str,
) -> Iterator[None]:
if pedantic_options.rounds != 1 or pedantic_options.iterations != 1:
warnings.warn(
"Valgrind instrument ignores rounds and iterations settings "
"in pedantic mode"
)
if not self.instrument_hooks:
args, kwargs = pedantic_options.setup_and_get_args_kwargs()
out = pedantic_options.target(*args, **kwargs)
yield
if pedantic_options.teardown is not None:
pedantic_options.teardown(*args, **kwargs)
return out
return

# Compute the actual result of the function
args, kwargs = pedantic_options.setup_and_get_args_kwargs()
self.instrument_hooks.lib.callgrind_start_instrumentation()
try:
yield
finally:
self.instrument_hooks.lib.callgrind_stop_instrumentation()
self.instrument_hooks.set_executed_benchmark(uri)
if pedantic_options.teardown is not None:
pedantic_options.teardown(*args, **kwargs)

def measure_pedantic(
self,
marker_options: BenchmarkMarkerOptions,
pedantic_options: PedanticOptions[T],
name: str,
uri: str,
) -> T:
def __codspeed_root_frame__(*args, **kwargs) -> T:
return pedantic_options.target(*args, **kwargs)

Expand All @@ -115,18 +159,31 @@ def __codspeed_root_frame__(*args, **kwargs) -> T:
if pedantic_options.teardown is not None:
pedantic_options.teardown(*args, **kwargs)

# Compute the actual result of the function
args, kwargs = pedantic_options.setup_and_get_args_kwargs()
self.instrument_hooks.lib.callgrind_start_instrumentation()
try:
out = __codspeed_root_frame__(*args, **kwargs)
finally:
self.instrument_hooks.lib.callgrind_stop_instrumentation()
self.instrument_hooks.set_executed_benchmark(uri)
with self._measure_pedantic_context(pedantic_options, uri):
return __codspeed_root_frame__(*args, **kwargs)

async def measure_pedantic_async(
self,
marker_options: BenchmarkMarkerOptions,
pedantic_options: PedanticOptions[Awaitable[T]],
name: str,
uri: str,
) -> T:
async def __codspeed_root_frame__(*args, **kwargs) -> T:
return await pedantic_options.target(*args, **kwargs)

# Warmup
warmup_rounds = max(
pedantic_options.warmup_rounds, 1 if SUPPORTS_PERF_TRAMPOLINE else 0
)
for _ in range(warmup_rounds):
args, kwargs = pedantic_options.setup_and_get_args_kwargs()
await __codspeed_root_frame__(*args, **kwargs)
if pedantic_options.teardown is not None:
pedantic_options.teardown(*args, **kwargs)

return out
with self._measure_pedantic_context(pedantic_options, uri):
return await __codspeed_root_frame__(*args, **kwargs)

def report(self, session: Session) -> None:
reporter = session.config.pluginmanager.get_plugin("terminalreporter")
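None of the hunks above shows the user-facing side of this change, only the instrument hooks. Purely as an assumption about how the feature could surface in a test suite (the exact fixture behaviour and the choice of async test runner are not specified by this diff), an async benchmark might eventually look like this:

import asyncio

import pytest


async def fetch_data():
    await asyncio.sleep(0)  # placeholder for real async work
    return 42


# Hypothetical usage: assumes the benchmark fixture accepts a coroutine
# function once measure_async() is wired up, and that an async-capable test
# runner (e.g. pytest-asyncio or anyio) collects the async test.
@pytest.mark.asyncio
async def test_fetch_data(benchmark):
    result = await benchmark(fetch_data)
    assert result == 42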