
Merge branch 'topic/perf_testsuite' into 'master'
Add performance tests for "lkql_jit_checker"

See merge request eng/libadalang/langkit-query-language!139
HugoGGuerrier committed Apr 4, 2024
2 parents 72947ac + eb7161c commit 95006ae
Showing 13 changed files with 70 additions and 30 deletions.
2 changes: 1 addition & 1 deletion Makefile
@@ -58,7 +58,7 @@ lkql_jit: lkql

lkql_native_jit: lkql
$(MAVEN) -f lkql/build/java/ install
$(MAVEN) -f lkql_jit/ clean install -P native,dev
$(MAVEN) -f lkql_jit/ clean install -P native,$(BUILD_MODE)

.PHONY: lkql_checker

1 change: 1 addition & 0 deletions testsuite/drivers/base_driver.py
@@ -245,6 +245,7 @@ def run(*prefix):
memory, time = result.split()[-2:]
times.append(time)
memories.append(memory)
self.result.info["run_count"] = param
self.result.info["time"] = " ".join(times)
self.result.info["memory"] = " ".join(memories)

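For context, a minimal sketch (not the testsuite's actual perf_run code) of the kind of measurement loop this hunk feeds. It assumes each run is wrapped in GNU time with a format whose last two output tokens are max RSS in kilobytes and elapsed seconds, which matches the result.split()[-2:] unpacking above; the measured command is a placeholder.

import subprocess

def measure(argv, runs=1):
    # Run the command `runs` times and collect one memory/time sample per run.
    times, memories = [], []
    for _ in range(runs):
        proc = subprocess.run(
            ["/usr/bin/time", "-f", "%M %e", *argv],
            capture_output=True, text=True,
        )
        # GNU time writes its report to stderr: %M is max RSS (KB), %e is
        # elapsed real time (seconds), so they are the last two tokens.
        memory, elapsed = proc.stderr.split()[-2:]
        memories.append(memory)
        times.append(elapsed)
    return {"run_count": runs, "time": " ".join(times), "memory": " ".join(memories)}

print(measure(["true"], runs=3))  # placeholder command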
6 changes: 5 additions & 1 deletion testsuite/drivers/checker_driver.py
@@ -21,6 +21,7 @@ class CheckerDriver(BaseDriver):
- rule_arguments: A dict mapping rule argument names to their values
"""

perf_supported = True
flag_checking_supported = True

def run(self) -> None:
@@ -44,7 +45,10 @@ def run(self) -> None:
args += ['--keep-going-on-missing-file']

# Run the checker
self.check_run(args)
if self.perf_mode:
self.perf_run(args)
else:
self.check_run(args)

def parse_flagged_lines(self, output: str) -> Flags:
# Compile the pattern to match a checker output
@@ -0,0 +1,7 @@
driver: checker
project: gnatcoll.gpr
rule_name: integer_types_as_enum

perf:
default: 1
profile-time: true
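The new perf test.yaml files (this one and the five below) all declare a perf section with a default run count and a profile-time switch. A hedged sketch of how a driver might read that section; the key names come from the file above, while the file path and the fallback values are assumptions.

import yaml  # PyYAML

with open("test.yaml") as f:  # hypothetical path
    test_env = yaml.safe_load(f)

perf = test_env.get("perf", {})
run_count = perf.get("default", 1)              # number of measured runs
profile_time = perf.get("profile-time", False)  # whether to also collect a time profile

print(run_count, profile_time)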
@@ -0,0 +1,7 @@
driver: checker
project: gnatcoll.gpr
rule_name: outside_references_from_subprograms

perf:
default: 1
profile-time: true
@@ -0,0 +1,7 @@
driver: checker
project: gnatcoll.gpr
rule_name: recursive_subprograms

perf:
default: 1
profile-time: true
@@ -0,0 +1,7 @@
driver: checker
project: gnatcov.gpr
rule_name: integer_types_as_enum

perf:
default: 1
profile-time: true
@@ -0,0 +1,7 @@
driver: checker
project: gnatcov.gpr
rule_name: outside_references_from_subprograms

perf:
default: 1
profile-time: true
@@ -0,0 +1,7 @@
driver: checker
project: gnatcov.gpr
rule_name: recursive_subprograms

perf:
default: 1
profile-time: true
9 changes: 9 additions & 0 deletions testsuite/tests/perf/gnatcheck/test.yaml
@@ -0,0 +1,9 @@
driver: gnatcheck
project: gnatcoll.gpr
rules:
- +ALL
timeout: 1200

perf:
default: 1
profile-time: false
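This gnatcheck perf test also carries a 1200-second timeout. One plausible way a driver could enforce such a budget around the measured command, sketched with a subprocess-level timeout (an assumption; the real gnatcheck driver may rely on e3.testsuite's own timeout handling instead):

import subprocess

timeout_s = 1200  # value from the test.yaml above

try:
    # Placeholder command standing in for the real gnatcheck invocation.
    subprocess.run(["sleep", "1"], timeout=timeout_s, check=True)
except subprocess.TimeoutExpired:
    print(f"test exceeded its {timeout_s}s budget")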
10 changes: 0 additions & 10 deletions testsuite/tests/perf/gnatcoll/test.yaml

This file was deleted.

10 changes: 0 additions & 10 deletions testsuite/tests/perf/gnatcoverage/test.yaml

This file was deleted.

20 changes: 12 additions & 8 deletions testsuite/testsuite.py
@@ -11,7 +11,9 @@

from e3.fs import mkdir, rm
from e3.testsuite import Testsuite, logger, TestsuiteCore
from e3.testsuite.testcase_finder import ProbingError, YAMLTestFinder, TestFinderResult
from e3.testsuite.testcase_finder import (
ProbingError, TestFinder, YAMLTestFinder, TestFinderResult
)

from drivers import (
checker_driver, gnatcheck_driver, interpreter_driver, parser_driver, java_driver,
@@ -123,7 +125,8 @@ def add_options(self, parser: ArgumentParser) -> None:
'--perf-mode',
help='Run the testsuite in performance mode: only run tests with'
' instructions to measure performance. The argument is the'
' directory in which to put profile data files.'
' directory in which to put profile data files.',
dest='perf_output_dir'
)
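Giving the option an explicit dest is what lets the rest of the file switch from options.perf_mode to options.perf_output_dir. A small self-contained argparse illustration of that behaviour (not the testsuite's code):

from argparse import ArgumentParser

parser = ArgumentParser()
parser.add_argument("--perf-mode", dest="perf_output_dir")
options = parser.parse_args(["--perf-mode", "profiles-out"])
# The flag is still spelled --perf-mode, but its value lives under dest.
assert options.perf_output_dir == "profiles-out"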
parser.add_argument(
'--perf-no-profile',
@@ -134,7 +137,7 @@
)

@property
def test_finders(self) -> list[YAMLTestFinder]:
def test_finders(self) -> list[TestFinder]:
return [
PerfTestFinder()
if self.env.perf_mode else
@@ -146,7 +149,7 @@ def set_up(self) -> None:
self.env.rewrite_baselines = self.env.options.rewrite

# Perf mode is incompatible with some other modes
if self.env.options.perf_mode:
if self.env.options.perf_output_dir:
if self.env.options.coverage:
logger.error(f"--perf-mode incompatible with --coverage")
raise RuntimeError
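In perf mode, the PerfTestFinder wired in above keeps only tests that carry performance instructions. A rough sketch of that selection, under the assumption that it keys on a perf section in test.yaml; it is written as a plain filter rather than the real e3.testsuite TestFinder subclass:

import os
import yaml

def find_perf_tests(tests_root):
    # Walk the test tree and keep directories whose test.yaml declares "perf".
    selected = []
    for dirpath, _dirnames, filenames in os.walk(tests_root):
        if "test.yaml" not in filenames:
            continue
        with open(os.path.join(dirpath, "test.yaml")) as f:
            test_env = yaml.safe_load(f) or {}
        if "perf" in test_env:
            selected.append(dirpath)
    return selected

print(find_perf_tests("testsuite/tests"))  # assumed tests root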
@@ -166,7 +169,7 @@
# If the performance mode is enabled, verify that the user has checked
# out the common-testsuite-sources in the "ada_projects" directory.
# Additionally add the internal sources to the GPR project path.
if self.env.options.perf_mode:
if self.env.options.perf_output_dir:
common_sources = P.join(
self.root_dir,
"ada_projects",
@@ -218,8 +221,8 @@ def in_repo(*args):

# If requested, enable the performance mode and ensure that the output
# profile data exists.
if self.env.options.perf_mode:
perf_dir = P.abspath(self.env.options.perf_mode)
if self.env.options.perf_output_dir:
perf_dir = P.abspath(self.env.options.perf_output_dir)
if not P.isdir(perf_dir):
os.makedirs(perf_dir)

@@ -272,7 +275,7 @@ def format_time(seconds: float) -> str:
return "{:.2f}s".format(seconds)

def format_memory(bytes_count: int) -> str:
units = ["B", "KB", "MB", "GB"]
units = ["KB", "MB", "GB"]
unit = units.pop(0)
while units and bytes_count > 1000:
unit = units.pop(0)
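The unit list now starts at KB, presumably because the value fed in is already in kilobytes (GNU time's max RSS). The rest of the function is collapsed above; a plausible completion, for illustration only:

def format_memory(bytes_count: float) -> str:
    # Assumed completion: scale the KB value while walking the unit list.
    units = ["KB", "MB", "GB"]
    unit = units.pop(0)
    while units and bytes_count > 1000:
        bytes_count /= 1000
        unit = units.pop(0)
    return "{:.2f}{}".format(bytes_count, unit)

print(format_memory(2_345_678))  # 2345678 KB -> "2.35GB"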
@@ -292,6 +295,7 @@ def compute_stats(numbers_str: str,

# Define the function to display the statistics of the ``time`` output
def print_time_stats(entry):
print(f"--- {test_name} (run {entry.info['run_count']} time(s))", file=output_file)
print(
f" time:"
f" {compute_stats(entry.info['time'], float, format_time)}",
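Finally, the new run-count line in print_time_stats sits on top of helpers like compute_stats, which is collapsed in the hunk above. A hedged sketch of the shape such a helper could have, assuming it takes the space-separated samples stored in entry.info, a converter, and a formatter, and reports min/mean/max:

from statistics import mean

def compute_stats(numbers_str, convert, fmt):
    # Parse the space-separated samples, then summarise them.
    samples = [convert(tok) for tok in numbers_str.split()]
    return "min={} mean={} max={}".format(
        fmt(min(samples)), fmt(mean(samples)), fmt(max(samples))
    )

def format_time(seconds: float) -> str:
    return "{:.2f}s".format(seconds)

# Hypothetical entry.info contents, mirroring what perf_run stores:
info = {"run_count": 3, "time": "1.20 1.35 1.28"}
print(f"--- my_test (run {info['run_count']} time(s))")
print("  time: " + compute_stats(info["time"], float, format_time))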
