From e6c91241194cb85edeada092463a3cb3941981b9 Mon Sep 17 00:00:00 2001 From: Uproared Date: Fri, 24 Apr 2026 05:16:28 -0400 Subject: [PATCH 1/3] XenonAnalyse: add optional AnalyzerSwitchTableMap parameter to Function::Analyze MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Defines AnalyzerSwitchTable { defaultLabel, labels[] } and AnalyzerSwitchTableMap = unordered_map<uint32_t, AnalyzerSwitchTable>. Function::Analyze gains an optional `const AnalyzerSwitchTableMap* switchMap = nullptr` parameter. Default nullptr preserves existing behavior exactly: no call site needs to change, and the walker currently ignores the parameter. The next commit wires the parameter into the walker's unconditional- bctr handling: when switchMap is populated and the walker reaches a bctr whose guest VA has a map entry, it will push every switch label (and the default) as a successor block rather than terminating. Rationale for keying by BCTR VA rather than by XenonAnalyse's own `SwitchTable::base` (first-insn-of-pattern VA): the producer of the TOML-emitted switch tables and the consumer of Function::Analyze are often the same caller, but the walker encounters instructions at the BCTR's address directly. Keying the map by BCTR VA removes the need for the walker to track pattern offsets (6 insns for absolute-form switches, 8 for computed / short-offset, 7 for byte-offset) or to handle NOP padding variations — the caller does the offset math once at map-construction time. Fixes the well-known class of issues where switch-dispatched functions have their successor blocks swept away by the discontinuity pass, and downstream consumers (e.g., recompilers) emit "jump outside function" errors for labels that are actually reachable. Observed on an Xbox 360 title whose compiler emitted forward-jumping switch tables in this shape, and referenced in UnleashedRecomp-lineage work; other Xbox 360 recompile targets likely hit it too. 
See also: - A follow-up test commit adds synthetic hand-crafted PPC fixtures covering the happy path + null-map safety + wrong-map miss. --- XenonAnalyse/function.cpp | 8 +++++++- XenonAnalyse/function.h | 36 +++++++++++++++++++++++++++++++----- 2 files changed, 38 insertions(+), 6 deletions(-) diff --git a/XenonAnalyse/function.cpp b/XenonAnalyse/function.cpp index 34add40..bc9392a 100644 --- a/XenonAnalyse/function.cpp +++ b/XenonAnalyse/function.cpp @@ -38,8 +38,14 @@ size_t Function::SearchBlock(size_t address) const return -1; } -Function Function::Analyze(const void* code, size_t size, size_t base) +Function Function::Analyze(const void* code, size_t size, size_t base, + const AnalyzerSwitchTableMap* switchMap) { + (void)switchMap; // consumed in a follow-up commit; receiving now so + // that callers can adopt the signature and pass a + // map immediately without waiting for the walker + // change to land. + Function fn{ base, 0 }; if (*((uint32_t*)code + 1) == 0x04000048) // shifted ptr tail call diff --git a/XenonAnalyse/function.h b/XenonAnalyse/function.h index 7df1c6a..ea71ea6 100644 --- a/XenonAnalyse/function.h +++ b/XenonAnalyse/function.h @@ -1,6 +1,8 @@ #pragma once #include <cstddef> +#include <cstdint> +#include <unordered_map> #include <vector> #ifdef _DEBUG @@ -9,6 +11,29 @@ #define DEBUG(X) #endif +// Optional switch-table map consumed by the block walker in +// Function::Analyze when it reaches an unconditional bctr at a known +// switch-dispatch site. When the caller hands in a populated map, the +// walker pushes every label (and the default) as a successor block +// instead of terminating, which grows `fn.size` to include switch- +// dispatched code that the legacy walker would discard via the end- +// of-Analyze discontinuity pass. +// +// The map is keyed by the BCTR guest VA — NOT by the first-instruction- +// of-pattern VA that XenonAnalyse's own switch-table TOML emits. 
The +// caller (typically XenonRecomp) performs the switch-base → bctr-VA +// offset math at map-construction time and hands us a pre-keyed map. +// +// Passing nullptr (the default) preserves the legacy walker behavior +// byte-for-byte. No existing callers need to change. +struct AnalyzerSwitchTable +{ + uint32_t defaultLabel{}; + std::vector<uint32_t> labels{}; +}; + +using AnalyzerSwitchTableMap = std::unordered_map<uint32_t, AnalyzerSwitchTable>; + struct Function { struct Block @@ -18,16 +43,16 @@ struct Function size_t projectedSize{ static_cast<size_t>(-1) }; // scratch DEBUG(size_t parent{}); - Block() + Block() { } Block(size_t base, size_t size) - : base(base), size(size) + : base(base), size(size) { } - Block(size_t base, size_t size, size_t projectedSize) + Block(size_t base, size_t size, size_t projectedSize) : base(base), size(size), projectedSize(projectedSize) { } @@ -45,7 +70,8 @@ struct Function : base(base), size(size) { } - + size_t SearchBlock(size_t address) const; - static Function Analyze(const void* code, size_t size, size_t base); + static Function Analyze(const void* code, size_t size, size_t base, + const AnalyzerSwitchTableMap* switchMap = nullptr); }; From 861e04db767e3266d579f2aea35d554b8c69d5b2 Mon Sep 17 00:00:00 2001 From: Uproared Date: Fri, 24 Apr 2026 05:18:14 -0400 Subject: [PATCH 2/3] XenonAnalyse: block walker uses switchMap to push successors at known bctrs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit When Function::Analyze's block walker reaches an unconditional bctr (PPC_OP_CTR, xop 528, BO bit 5 set) and switchMap is non-null and contains an entry for the bctr's guest VA, the walker pushes every label (and the default) as a successor block via the existing SearchBlock-guarded emplace path. The walker then reaches those blocks, their reachability propagates, and `fn.size` correctly includes the switch-dispatched code. 
Two guards match the existing `branchDest < base` guard for unconditional `b` (tail-call pattern): label < base → skip. Labels before the function base are malformed or tail-call-style jumps; they should not extend fn.size. label >= base + size → skip. Labels outside the caller's window cannot be walked safely; emplacing them would compute an out-of-buffer data pointer during RESTORE_DATA. Discontinuity-pass suppression: when the walker pushes switch-label successors (tracked via local `switchAwareTermination`), the end-of- Analyze sort-and-erase pass skips the erase. Switch dispatch leaves a legitimate address gap between bctr+4 (end of pre-switch block) and the first case label — the jump-table bytes live in that gap, and the generic discontinuity heuristic would erase every block past it, including exactly the label blocks we just pushed. The sort itself still runs, which is fine: fn.size is computed as max(block.base + block.size) across all blocks afterward, so order does not change the result. Null-map safety: when switchMap is nullptr, the switch-aware branch is skipped entirely; the walker's unconditional-bctr path falls through to the legacy "pop without successors" behavior. Output is byte-identical to pre-patch behavior for all callers that pass nullptr (the default) at the Analyze call site. A companion test commit adds synthetic hand-crafted PPC bytecode fixtures covering the happy path + null-map safety + wrong-map miss. 
--- XenonAnalyse/function.cpp | 107 ++++++++++++++++++++++++++++++++------ 1 file changed, 90 insertions(+), 17 deletions(-) diff --git a/XenonAnalyse/function.cpp b/XenonAnalyse/function.cpp index bc9392a..5c299f9 100644 --- a/XenonAnalyse/function.cpp +++ b/XenonAnalyse/function.cpp @@ -41,11 +41,6 @@ size_t Function::SearchBlock(size_t address) const Function Function::Analyze(const void* code, size_t size, size_t base, const AnalyzerSwitchTableMap* switchMap) { - (void)switchMap; // consumed in a follow-up commit; receiving now so - // that callers can adopt the signature and pass a - // map immediately without waiting for the walker - // change to land. - Function fn{ base, 0 }; if (*((uint32_t*)code + 1) == 0x04000048) // shifted ptr tail call @@ -65,6 +60,11 @@ Function Function::Analyze(const void* code, size_t size, size_t base, blockStack.reserve(32); blockStack.emplace_back(); + // Set when the walker pushes switch-label successor blocks via the + // switchMap. When set, the end-of-Analyze discontinuity erase is + // skipped — see the comment on the discontinuity pass below for why. + bool switchAwareTermination = false; + #define RESTORE_DATA() if (!blockStack.empty()) data = (dataStart + ((blocks[blockStack.back()].base + blocks[blockStack.back()].size) / sizeof(*data))) - 1; // continue adds one // TODO: Branch fallthrough @@ -188,6 +188,66 @@ Function Function::Analyze(const void* code, size_t size, size_t base, { // 5th bit of BO tells cpu to ignore the counter, which is a blr/bctr otherwise it's conditional const bool conditional = !(PPC_BO(instruction) & 0x10); + + // Switch-aware branch: when this is an unconditional + // bctr (xop 528) at a known-switch site, push every + // label (plus the default) as a successor block + // instead of terminating without successors. + // + // Without this branch, an unconditional bctr pops + // the current block and adds no new blocks, so the + // walker never reaches the switch-dispatched code. 
+ // The discontinuity pass at the end of this function + // then erases every block past the first address + // gap — which for a switch-dispatched function is + // immediately after bctr+4, sweeping all label + // blocks away. Downstream consumers (e.g., a + // recompiler's "label in fn.base..fn.base+fn.size" + // boundary check) then mis-flag the labels as out + // of function range. + // + // Two guards mirror the existing pre-base-branch + // handling for unconditional `b` (see the "Branches + // before base are just tail calls" note above): + // + // label < base: skip. Labels that point + // before the function base are either malformed + // TOML entries or tail-call-style jumps; they + // do not extend `fn.size`. + // + // label >= base + size: skip. Labels outside the + // caller's analysis window cannot be walked + // safely; emplacing them here would compute an + // out-of-buffer `data` pointer at the next + // RESTORE_DATA. + if (!conditional && xop == 528 && switchMap) + { + auto it = switchMap->find(addr); + if (it != switchMap->end()) + { + switchAwareTermination = true; + auto pushLabel = [&](uint32_t label) + { + if (label < base) return; + if (label >= base + size) return; + if (fn.SearchBlock(label) == -1) + { + const size_t lBase = label - base; + blocks.emplace_back(lBase, 0); + DEBUG(blocks.back().parent = blockBase); + blockStack.emplace_back(blocks.size() - 1); + } + }; + for (uint32_t label : it->second.labels) + { + pushLabel(label); + } + pushLabel(it->second.defaultLabel); + RESTORE_DATA(); + continue; + } + } + if (conditional) { // right block's just going to return @@ -216,7 +276,17 @@ Function Function::Analyze(const void* code, size_t size, size_t base, } } - // Sort and invalidate discontinuous blocks + // Sort and invalidate discontinuous blocks. + // + // When switchAwareTermination is set, the walker pushed successor + // blocks for every label of at least one switch dispatch. 
Those + // label blocks are separated from the pre-bctr block by the jump- + // table bytes themselves (data region living in the code section), + // which creates a legitimate address gap. The generic discontinuity + // heuristic would erase every block past that gap, which is exactly + // the set of blocks we just worked to make reachable. Skip the + // erase in that case; sort still runs because fn.size below picks + // max(block.base + block.size) and the sort is cheap. if (blocks.size() > 1) { std::sort(blocks.begin(), blocks.end(), [](const Block& a, const Block& b) @@ -224,21 +294,24 @@ Function Function::Analyze(const void* code, size_t size, size_t base, return a.base < b.base; }); - size_t discontinuity = -1; - for (size_t i = 0; i < blocks.size() - 1; i++) + if (!switchAwareTermination) { - if (blocks[i].base + blocks[i].size >= blocks[i + 1].base) + size_t discontinuity = -1; + for (size_t i = 0; i < blocks.size() - 1; i++) { - continue; + if (blocks[i].base + blocks[i].size >= blocks[i + 1].base) + { + continue; + } + + discontinuity = i + 1; + break; } - discontinuity = i + 1; - break; - } - - if (discontinuity != -1) - { - blocks.erase(blocks.begin() + discontinuity, blocks.end()); + if (discontinuity != -1) + { + blocks.erase(blocks.begin() + discontinuity, blocks.end()); + } } } From fce79a7c082ed3f495f122b1ac13435eccd1b4b0 Mon Sep 17 00:00:00 2001 From: Uproared Date: Fri, 24 Apr 2026 05:20:27 -0400 Subject: [PATCH 3/3] XenonAnalyse: add synthetic PPC fixtures for switch-aware walker Three test scenarios hand-crafted in-source from big-endian PPC instruction words, no proprietary bytecode. Each exercises a different path through Function::Analyze's new switchMap handling: 1. Happy path: switchMap populated with an entry for the synthetic function's bctr. Walker pushes all 4 case labels + default as successor blocks; fn.size covers through the default block. 2. Null-map safety: switchMap = nullptr. 
Walker uses legacy pre- patch behavior; discontinuity pass erases label blocks; fn.size covers only the pre-bctr head. Guards against the default-null behavior regressing. 3. Wrong-map miss: switchMap populated but with an entry for an unrelated bctr VA. switchMap->find(our_bctr) misses; walker falls through to legacy. Guards against the switchMap handling spuriously firing on wrong entries. Assertions use fprintf + return-1 rather than a test framework, to keep the test self-contained. If the repository later adopts a standard test runner (Catch2 / doctest / gtest), porting is straightforward. See tests/README.md for the manual build command until CMake integration is decided. --- XenonAnalyse/tests/README.md | 68 ++++++ .../tests/switch_aware_walker_test.cpp | 219 ++++++++++++++++++ 2 files changed, 287 insertions(+) create mode 100644 XenonAnalyse/tests/README.md create mode 100644 XenonAnalyse/tests/switch_aware_walker_test.cpp diff --git a/XenonAnalyse/tests/README.md b/XenonAnalyse/tests/README.md new file mode 100644 index 0000000..289892e --- /dev/null +++ b/XenonAnalyse/tests/README.md @@ -0,0 +1,68 @@ +# XenonAnalyse tests + +Synthetic hand-crafted PPC bytecode fixtures for `Function::Analyze`'s +switch-aware block walker. + +## Fixtures + +`switch_aware_walker_test.cpp` exercises three scenarios against a +synthetic 4-case absolute-form switch function built in-source from +hand-emitted big-endian instruction words: + +1. **Happy path** — `switchMap` populated. Walker pushes all 4 labels + + default as successor blocks, `fn.size` extends to cover through + the default block. +2. **Null-map safety** — `switchMap = nullptr`. Walker uses legacy + pre-patch behavior; discontinuity pass erases the label blocks; + `fn.size` covers only the pre-bctr head. +3. **Wrong-map miss** — `switchMap` populated, but with an entry for + an unrelated bctr VA. `switchMap->find(our_bctr)` returns `end()`; + walker falls through to legacy behavior. 
+ +All three assert on `fn.size` landing in the expected range. The +tests use `assert`-style `fprintf(stderr, "[FAIL ...]" ...) + return 1` +rather than a framework, to keep the test self-contained (no new +dependencies). + +## Running + +Compile against a built `LibXenonAnalyse` + `XenonUtils` + `disasm` + +`fmt`. Example (from a VS 2022 Developer Command Prompt with LLVM in +PATH, after building the rest of the tree): + +``` +cd XenonAnalyse/tests +clang-cl /std:c++20 /EHsc /nologo ^ + /I"..\" /I"..\..\XenonUtils" ^ + switch_aware_walker_test.cpp ^ + "..\..\build\XenonAnalyse\LibXenonAnalyse.lib" ^ + "..\..\build\XenonUtils\XenonUtils.lib" ^ + "..\..\build\thirdparty\disasm\disasm.lib" ^ + "..\..\build\thirdparty\fmt\fmt.lib" ^ + /link /OUT:walker_test.exe +walker_test.exe +``` + +Expected: + +``` +[ok happy-path] fn.base=0x10000, fn.size=0x38, blocks=7 +[ok null-map ] fn.base=0x10000, fn.size=0x1C (pre-patch truncation preserved) +[ok wrong-map] fn.base=0x10000, fn.size=0x1C (unrelated map entry correctly ignored) + +All 3 tests PASSED +``` + +## CMake integration + +Not included in this commit. If the repository adopts a test-runner +pattern, a minimal CMake shape would be: + +```cmake +add_executable(xenonanalyse_switch_walker_test switch_aware_walker_test.cpp) +target_link_libraries(xenonanalyse_switch_walker_test PRIVATE LibXenonAnalyse) +add_test(NAME switch_walker COMMAND xenonanalyse_switch_walker_test) +``` + +Or as an opt-in build behind `-DXENONANALYSE_TESTS=ON`. Whatever +convention the repository prefers. diff --git a/XenonAnalyse/tests/switch_aware_walker_test.cpp b/XenonAnalyse/tests/switch_aware_walker_test.cpp new file mode 100644 index 0000000..c26eeb3 --- /dev/null +++ b/XenonAnalyse/tests/switch_aware_walker_test.cpp @@ -0,0 +1,219 @@ +// Tests for Function::Analyze's switch-aware block walker. +// +// Hand-crafted synthetic PPC bytecode; self-contained; no dependency +// on any particular test corpus. 
+// +// Three scenarios: +// (1) happy path: switchMap populated, walker pushes switch labels +// (2) null-map safety: switchMap == nullptr, walker uses legacy path +// (3) wrong-map miss: switchMap populated but no entry for this bctr; +// walker uses legacy path +// +// See tests/README.md for the build command and expected output. + +#include "function.h" +#include <cstddef> +#include <cstdint> +#include <cstdio> +#include <unordered_map> +#include <utility> +#include <vector> + +namespace { + +// Little helper to emit a big-endian 32-bit insn word into a byte vector. +// Xenon is big-endian PPC; our synthetic blobs must match for the walker +// to decode them correctly. +void emitBE(std::vector<uint8_t>& out, uint32_t insn) +{ + out.push_back((insn >> 24) & 0xFF); + out.push_back((insn >> 16) & 0xFF); + out.push_back((insn >> 8) & 0xFF); + out.push_back((insn >> 0) & 0xFF); +} + +// Encode a handful of PPC instructions we need for the synthetic +// fixtures. These are standard encodings; values are cross-checked +// against capstone decoding as a sanity step. +constexpr uint32_t PPC_BLR = 0x4E800020; // blr +constexpr uint32_t PPC_BCTR = 0x4E800420; // bctr +constexpr uint32_t PPC_NOP = 0x60000000; // ori r0, r0, 0 == nop + +// cmplwi cr6, rA, imm16 — primary opcode 10, field cr=6, rA, imm. +// Encoding: 001010 (6) | 110 (cr=6) | 0 | rA(5) | imm(16) +// = 0x2B000000 | (rA<<16) | imm
+uint32_t cmplwi_cr6(uint32_t rA, uint32_t imm) +{ + return 0x2B000000u | (rA << 16) | (imm & 0xFFFF); +} + +// bgt cr6, BD — primary opcode 16, BO=12 (branch if true), BI=25 (cr6 gt). +// BD is a signed 14-bit displacement in bytes (we pass the displacement). +uint32_t bgt_cr6(int32_t displacement) +{ + // Encoding: 010000 | 01100 | 11001 | BD(14) | 00 + return 0x41990000u | (uint32_t(displacement) & 0xFFFC); +} + +// Build a minimal "switch function" synthetic fixture: +// +// 0x00: cmplwi cr6, r3, 3 ; 4 cases +// 0x04: bgt cr6, +0x30 ; default @ 0x34 +// 0x08: (would be LIS/ADDI/RLWINM/LWZX/MTCTR — stubbed as nops here +// because our walker treats mtctr/bctr by opcode; the prior +// instructions only matter to XenonAnalyse's SCAN side, not +// to Function::Analyze's block walker) +// 0x08: nop +// 0x0C: nop +// 0x10: nop +// 0x14: nop +// (no separate mtctr is emitted — the nops above stand in for the +// whole address-computation sequence; Function::Analyze doesn't +// check for mtctr specifically) +// 0x18: bctr ; <-- switch-dispatch site +// 0x1C: label[0] target @ 0x1C: blr +// 0x20: label[1] target @ 0x20: blr +// 0x24: label[2] target @ 0x24: blr +// 0x28: label[3] target @ 0x28: blr +// 0x2C: (padding) +// 0x34: default target: blr +// +// The walker starts at offset 0, walks to the bctr at 0x18, consults +// switchMap, and (if populated) pushes all 5 successor blocks (4 labels +// + default). The walker then reaches each block, processes its `blr`, +// and terminates. fn.size should cover through 0x34 (the default block's +// blr). +// +// Base address: we use 0x10000 as a synthetic "guest VA" for these 
+// tests. Anything > 0 works; 0x10000 is arbitrary. + +struct SyntheticSwitch +{ + std::vector<uint8_t> bytes; + uint32_t baseVa; + uint32_t bctrVa; + std::vector<uint32_t> labelVas; + uint32_t defaultVa; +}; + +SyntheticSwitch buildFourCaseSwitch() +{ + SyntheticSwitch s; + s.baseVa = 0x10000; + s.bctrVa = s.baseVa + 0x18; + s.defaultVa = s.baseVa + 0x34; + s.labelVas = { s.baseVa + 0x1C, s.baseVa + 0x20, s.baseVa + 0x24, s.baseVa + 0x28 }; + + // Pre-switch head + emitBE(s.bytes, cmplwi_cr6(3, 3)); // cmplwi cr6, r3, 3 + emitBE(s.bytes, bgt_cr6(0x30)); // bgt cr6, +0x30 (→ 0x34 default) + for (int i = 0; i < 4; ++i) emitBE(s.bytes, PPC_NOP); // filler + emitBE(s.bytes, PPC_BCTR); // 0x18: the bctr + + // Label blocks — each a single blr + for (int i = 0; i < 4; ++i) emitBE(s.bytes, PPC_BLR); + + // Padding between the 4 labels (at 0x2C) and default (at 0x34) + emitBE(s.bytes, PPC_NOP); + emitBE(s.bytes, PPC_NOP); + + // Default block + emitBE(s.bytes, PPC_BLR); // 0x34: default blr + + return s; +} + +// Helper to populate a switchMap entry from a SyntheticSwitch. +AnalyzerSwitchTableMap buildMap(const SyntheticSwitch& s) +{ + AnalyzerSwitchTableMap m; + AnalyzerSwitchTable entry; + entry.defaultLabel = s.defaultVa; + entry.labels = s.labelVas; + m.emplace(s.bctrVa, std::move(entry)); + return m; +} + +int testHappyPath() +{ + auto s = buildFourCaseSwitch(); + auto m = buildMap(s); + Function fn = Function::Analyze(s.bytes.data(), s.bytes.size(), s.baseVa, &m); + + // Expected: walker reaches every label + default, all their blocks + // get size = 4 (single-blr). fn.size should extend to cover the + // default block (0x34 + 4 = 0x38). 
+ if (fn.base != s.baseVa) { + fprintf(stderr, "[FAIL happy-path] fn.base = 0x%zX, expected 0x%X\n", + fn.base, s.baseVa); + return 1; + } + if (fn.size < 0x38) { + fprintf(stderr, "[FAIL happy-path] fn.size = 0x%zX, expected >= 0x38 " + "(switch labels + default)\n", fn.size); + return 1; + } + fprintf(stderr, "[ok happy-path] fn.base=0x%zX, fn.size=0x%zX, blocks=%zu\n", + fn.base, fn.size, fn.blocks.size()); + return 0; +} + +int testNullMapSafety() +{ + auto s = buildFourCaseSwitch(); + Function fn = Function::Analyze(s.bytes.data(), s.bytes.size(), s.baseVa, + /* switchMap = */ nullptr); + + // Expected: walker terminates at bctr without pushing successors, + // discontinuity pass erases any blocks past the bctr, fn.size covers + // only the pre-switch head (up to and including the bctr at 0x18, + // so fn.size >= 0x1C, ≤ 0x20 or so). + if (fn.size >= 0x38) { + fprintf(stderr, "[FAIL null-map ] fn.size = 0x%zX, expected < 0x38 " + "(switchMap=nullptr should use legacy walker)\n", fn.size); + return 1; + } + fprintf(stderr, "[ok null-map ] fn.base=0x%zX, fn.size=0x%zX " + "(legacy walker truncation preserved)\n", fn.base, fn.size); + return 0; +} + +int testWrongMapMiss() +{ + auto s = buildFourCaseSwitch(); + // Build a map, but with an entry for a DIFFERENT bctr address. + AnalyzerSwitchTableMap m; + AnalyzerSwitchTable bogus; + bogus.defaultLabel = 0x99999; + bogus.labels = { 0xABCDEF }; + m.emplace(/* bctr VA */ 0xDEADBEEF, std::move(bogus)); // not our bctr + + Function fn = Function::Analyze(s.bytes.data(), s.bytes.size(), s.baseVa, &m); + + // Expected: walker looks up our bctr (0x10018), finds nothing in the + // map, falls through to legacy behavior. Same result as null-map. 
+ if (fn.size >= 0x38) { + fprintf(stderr, "[FAIL wrong-map] fn.size = 0x%zX, expected < 0x38 " + "(unrelated switchMap entry should fall through to legacy)\n", fn.size); + return 1; + } + fprintf(stderr, "[ok wrong-map] fn.base=0x%zX, fn.size=0x%zX " + "(unrelated map entry correctly ignored)\n", fn.base, fn.size); + return 0; +} + +} // anonymous + +int main() +{ + int failures = 0; + failures += testHappyPath(); + failures += testNullMapSafety(); + failures += testWrongMapMiss(); + + if (failures) { + fprintf(stderr, "\n%d test(s) FAILED\n", failures); + return 1; + } + fprintf(stderr, "\nAll 3 tests PASSED\n"); + return 0; +}