summaryrefslogtreecommitdiffstats
path: root/benchmark/benchmark.h
diff options
context:
space:
mode:
authorFilip Wandzio <contact@philw.dev>2026-03-01 01:03:39 +0100
committerFilip Wandzio <contact@philw.dev>2026-03-01 01:03:39 +0100
commitbf0d77d7d448e964e9716d5af67c48f3d014f090 (patch)
treee55f1e91a8c20cd737dfb01dc12a954c25711e01 /benchmark/benchmark.h
downloadembedded_guardian-bf0d77d7d448e964e9716d5af67c48f3d014f090.tar.gz
embedded_guardian-bf0d77d7d448e964e9716d5af67c48f3d014f090.zip
Scaffold basic project tree, implement benchmarking logic
Implement unit testing guardian
Diffstat (limited to 'benchmark/benchmark.h')
-rw-r--r--benchmark/benchmark.h76
1 file changed, 76 insertions, 0 deletions
diff --git a/benchmark/benchmark.h b/benchmark/benchmark.h
new file mode 100644
index 0000000..a47e2a6
--- /dev/null
+++ b/benchmark/benchmark.h
@@ -0,0 +1,76 @@
1#pragma once
2/**
3 * @file benchmark.h
4 * @brief High-resolution benchmark helper for embedded IoT/student functions.
5 *
6 * Provides functions to measure average execution time, report results
7 * with colors, and mark slow tests.
8 *
9 * PASS CRITERIA:
10 * - The function returns expected result
11 * - Average execution time <= MAX_ALLOWED_MS
12 *
13 * WCET STRATEGY:
14 * - Average latency is measured; for worst-case, run representative
15 * worst-case inputs, measure maximum, and apply safety margin.
16 *
17 * CROSS-PLATFORM:
18 * - Linux / POSIX: uses clock_gettime(CLOCK_MONOTONIC)
19 * - Windows: uses QueryPerformanceCounter
20 */
21
22#include <stddef.h>
23
/** ANSI color escape sequences for colorized terminal output.
 *  Declared here; defined once in the corresponding .c file. */
extern const char *COLOR_RED;
extern const char *COLOR_GREEN;
extern const char *COLOR_YELLOW;
extern const char *COLOR_RESET;

/** Default maximum allowed average execution time in milliseconds.
 *  Used by report_result() as the pass/slow threshold when callers
 *  adopt the default policy (see PASS CRITERIA in the file header). */
extern const double MAX_ALLOWED_MS;

/** Default benchmark iteration counts.
 *  DEFAULT_BENCHMARKS is substituted by benchmark_func() when the caller
 *  passes benchmarks == 0. ITERATIONS_FAST / ITERATIONS_SLOW are presets
 *  for cheap vs. expensive functions under test — NOTE(review): exact
 *  values live in the .c definition; confirm before relying on them. */
extern const size_t DEFAULT_BENCHMARKS;
extern const size_t ITERATIONS_FAST;
extern const size_t ITERATIONS_SLOW;
37
/** Function pointer type for benchmarked functions.
 *  The function under test receives a single string argument and returns
 *  an int — per report_result()'s convention, nonzero indicates the
 *  function produced the expected (correct) result. */
typedef int (*function_to_benchmark)(const char *arg);
40
/**
 * @brief Measure average execution time of a function.
 *
 * Runs the function `benchmarks` times, with warm-up, and returns the
 * average latency in milliseconds. Timing uses a monotonic clock
 * (clock_gettime(CLOCK_MONOTONIC) on POSIX, QueryPerformanceCounter on
 * Windows — see the file header), so wall-clock adjustments do not skew
 * results.
 *
 * @param func       Function to benchmark; called once per iteration.
 * @param arg        Argument passed to @p func on every call.
 * @param benchmarks Number of timed iterations; 0 selects DEFAULT_BENCHMARKS.
 * @return Average latency per call in milliseconds.
 *
 * NOTE(review): the return value of @p func is not part of this contract —
 * correctness checking is the caller's job (see report_result()).
 */
double benchmark_func(function_to_benchmark func, const char *arg,
                      size_t benchmarks);
54
/**
 * @brief Print a benchmark result with colors and a performance threshold.
 *
 * The result is marked FAIL/SLOW if passed == 0 OR avg_ms > max_allowed_ms;
 * otherwise it is reported as passing. Output goes to the terminal using
 * the ANSI COLOR_* sequences declared above.
 *
 * @param test_name      Name of the test, printed with the result.
 * @param passed         Function correctness result (1 = ok, 0 = fail).
 * @param avg_ms         Measured average latency in milliseconds
 *                       (typically the return value of benchmark_func()).
 * @param max_allowed_ms Threshold above which the result is marked slow;
 *                       MAX_ALLOWED_MS provides the project default.
 */
void report_result(const char *test_name, int passed, double avg_ms,
                   double max_allowed_ms);
67
/**
 * @brief Benchmark a function and report the result together with
 *        resource usage.
 *
 * NOTE(review): the original Doxygen comment was left blank. Judging by
 * the name and signature, this presumably runs the same timing loop as
 * benchmark_func() and additionally samples resource consumption
 * (e.g. memory/CPU) before printing a labeled report — confirm against
 * the implementation in the .c file.
 *
 * @param func       Function to benchmark; called once per iteration.
 * @param arg        Argument passed to @p func on every call.
 * @param benchmarks Number of iterations; presumably 0 selects
 *                   DEFAULT_BENCHMARKS, mirroring benchmark_func() — confirm.
 * @param label      Label used when printing the result.
 */
void benchmark_func_with_resources(function_to_benchmark func, const char *arg,
                                   size_t benchmarks, const char *label);