diff --git a/WASM_SURGICAL_LINKING_PLAN.md b/WASM_SURGICAL_LINKING_PLAN.md new file mode 100644 index 00000000000..6c6795f5ea0 --- /dev/null +++ b/WASM_SURGICAL_LINKING_PLAN.md @@ -0,0 +1,2679 @@ +# WASM Surgical Linking Plan + +## Status + +| Phase | Description | Status | +|-------|-------------|--------| +| 1 | Padded LEB128 Helpers | Done | +| 2 | Linking Data Structures | Done | +| 3 | WASM Module Parser | Done | +| 4 | Surgical Linking — `linkHostToAppCalls()` | Done | +| 5 | Memory, Table, and Stack Pointer Ownership | Done | +| 6 | WASM Function Pointer Representation & RocOps Layout | Done | +| 7a | Entrypoint ABI Migration | Done | +| 7b | CodeBuilder & WasmCodeGen Refactor | Done | +| 8 | Builtins Migration | Done | +| 9 | Hosted Call Lowering | Done | +| 10 | Dead Code Elimination | Done | +| 11 | Serialization Updates | Done | +| 12 | CLI Integration — `roc build --target=wasm32` | Done | +| 13 | PIC Support & Eval Builtins | Done | +| 14 | Rebase & Integration Fixes | Done | +| 15 | Remaining Test Failures | Done | + +## Overview + +This document specifies the design and implementation plan for surgical linking of WebAssembly +modules in the Roc compiler. Surgical linking enables fast compilation by starting with a +**prebuilt relocatable host module** from the platform, then appending Roc app code and builtins +directly into that module, and finally resolving cross-references by patching relocation entries +in-place — no external linker required. + +### Why Surgical Linking? + +1. **Speed**: The host platform is compiled once. On every `roc build`, only app code is + generated and "surgically" inserted. The linker does not re-process the host — it only + patches the specific relocation sites that reference app symbols. + +2. **Self-contained**: No dependency on `wasm-ld` or any external toolchain. The Roc compiler + handles the entire pipeline from source to final `.wasm`. + +3. **Proven approach**: The old Rust compiler (`crates/wasm_module/`) shipped this successfully. + We are porting the same architecture to Zig, adapted to the new compiler's IR pipeline. + +4. **Minimal output**: Dead code elimination after linking removes unused host functions, + producing smaller `.wasm` files. + +### The Model + +``` +Platform host.wasm (prebuilt, relocatable) + │ + │ parse into in-memory WasmModule + ▼ +┌─────────────────────────────────────────┐ +│ WasmModule (host code + relocations) │ +│ ├─ types, imports, functions, code │ +│ ├─ linking section (symbol table) │ +│ └─ reloc.CODE / reloc.DATA sections │ +└──────────────────┬──────────────────────┘ + │ + merge roc_builtins.o (relocatable WASM object) + │ + ▼ +┌─────────────────────────────────────────┐ +│ WasmModule (host + builtins merged) │ +│ ├─ builtin functions appended │ +│ ├─ symbol tables merged │ +│ └─ relocation entries merged │ +└──────────────────┬──────────────────────┘ + │ + WasmCodeGen appends app function bodies + │ + ▼ +┌─────────────────────────────────────────┐ +│ WasmModule (host + builtins + app) │ +│ ├─ app functions appended to code │ +│ ├─ app symbols added to symbol table │ +│ └─ app relocation entries added │ +└──────────────────┬──────────────────────┘ + │ + linkHostToAppCalls(): resolve host→app imports + via relocation patching + │ + ▼ +┌─────────────────────────────────────────┐ +│ WasmModule (fully resolved) │ +│ ├─ host imports replaced with │ +│ │ defined app functions │ +│ ├─ all call sites patched via relocs │ +│ └─ dummy functions preserve indices │ +└──────────────────┬──────────────────────┘ + │ + eliminateDeadCode(): trace call graph, + stub unreachable functions + │ + ▼ +┌─────────────────────────────────────────┐ +│ WasmModule (trimmed) │ +│ ├─ dead function bodies → unreachable │ +│ └─ dead imports removed │ +└──────────────────┬──────────────────────┘ + │ + serialize(): strip linking metadata, + emit standard WASM binary + │ + ▼ + final.wasm (ready to execute) +``` + +### Key Terminology + +- **Relocatable WASM module**: A `.wasm` file (or `.o` file) that contains custom `linking` + and `reloc.*` sections per the + [WebAssembly Tool Conventions](https://github.com/WebAssembly/tool-conventions/blob/main/Linking.md). + All mutable indices are encoded as 5-byte padded LEB128 so they can be patched in-place. +- **Surgical linking**: Resolving symbol references by overwriting padded LEB128 slots at + known relocation offsets, rather than re-encoding the entire module. +- **Host module**: The platform's prebuilt relocatable WASM, containing host code with + imports for Roc app functions (e.g. `roc__main_for_host_1_exposed`). +- **App functions**: Roc application code compiled by `WasmCodeGen`, appended into the host module. +- **Builtins**: `roc_builtins.o` — Roc's standard library functions (str ops, dec math, list ops) + compiled from `src/builtins/` to a relocatable WASM object. These are internal to the app + side, not provided by the host. + +--- + +## Phase 1: Padded LEB128 Helpers + +### What + +Add two utility functions for working with 5-byte padded LEB128, the encoding format that +makes surgical linking possible. + +### Why + +In standard WASM, LEB128 values use variable-length encoding (1–5 bytes for a u32). If we +changed a function index from `3` (1 byte) to `300` (2 bytes), every byte offset after that +point would shift, invalidating all subsequent relocation offsets. Padded LEB128 solves this +by always using exactly 5 bytes, regardless of value. This means we can overwrite any index +in-place without affecting surrounding code. + +### Implementation + +Add to `WasmModule.zig`: + +```zig +/// Overwrite 5 bytes at `buffer[offset..offset+5]` with a u32 in padded LEB128. +/// The buffer must already have 5 bytes reserved at that position. +/// This is the core primitive for surgical relocation patching. +pub fn overwritePaddedU32(buffer: []u8, offset: u32, value: u32) void { + var x = value; + const off = @as(usize, offset); + for (0..4) |i| { + buffer[off + i] = @as(u8, @truncate(x & 0x7f)) | 0x80; + x >>= 7; + } + buffer[off + 4] = @as(u8, @truncate(x)); +} + +/// Overwrite 5 bytes with a signed i32 in padded LEB128. +/// Used for signed memory address relocations. +pub fn overwritePaddedI32(buffer: []u8, offset: u32, value: i32) void { + var x = value; + const off = @as(usize, offset); + for (0..4) |i| { + buffer[off + i] = @as(u8, @truncate(@as(u32, @bitCast(x)) & 0x7f)) | 0x80; + x >>= 7; + } + buffer[off + 4] = @as(u8, @truncate(@as(u32, @bitCast(x)) & 0x7f)); +} + +/// Append a u32 as exactly 5 bytes of padded LEB128 to an output buffer. +/// Used when emitting new relocatable instructions (call, global.get/set). +pub fn appendPaddedU32(output: *std.ArrayList(u8), value: u32) !void { + var x = value; + for (0..4) |_| { + try output.append(@as(u8, @truncate(x & 0x7f)) | 0x80); + x >>= 7; + } + try output.append(@as(u8, @truncate(x))); +} +``` + +### Rust Reference + +- `crates/wasm_module/src/serialize.rs` lines 149–156: `overwrite_padded_u32()` +- `crates/wasm_module/src/serialize.rs` lines 140–147: `overwrite_padded_i32()` +- `crates/wasm_module/src/serialize.rs` lines 235–240: `encode_padded_u32()` (trait method) + +### Tests + +``` +test "overwritePaddedU32 — value 0 encodes as [0x80, 0x80, 0x80, 0x80, 0x00]" +test "overwritePaddedU32 — value 1 encodes as [0x81, 0x80, 0x80, 0x80, 0x00]" +test "overwritePaddedU32 — value 0x7F encodes as [0xFF, 0x80, 0x80, 0x80, 0x00]" +test "overwritePaddedU32 — value 128 encodes as [0x80, 0x81, 0x80, 0x80, 0x00]" +test "overwritePaddedU32 — max u32 (0xFFFFFFFF) encodes correctly" +test "overwritePaddedU32 — round-trip: write then decode matches original value" +test "overwritePaddedI32 — negative value (-1) encodes correctly" +test "overwritePaddedI32 — positive value round-trips correctly" +test "appendPaddedU32 — appends exactly 5 bytes" +test "appendPaddedU32 — output is decodable as standard LEB128" +``` + +--- + +## Phase 2: Linking Data Structures + +### What + +Define the data structures for WASM relocatable module metadata: symbol table entries, +relocation entries, and the linking section container. + +### Why + +These structures are the "map" that makes surgical linking possible. The **symbol table** +records what symbols exist (functions, data, globals) and whether they are defined or imported. +The **relocation entries** record where in the code/data sections each symbol is referenced. +Together they form a bidirectional index: given a symbol, we can find every instruction that +references it and patch it. + +Without these structures, changing a function's index would require scanning the entire code +section for `call` instructions — slow and error-prone. With relocations, we jump directly +to the exact byte offsets that need patching. + +### Implementation + +Create a new file `src/backend/wasm/WasmLinking.zig`: + +#### Relocation Types + +```zig +/// Index-based relocation types (no addend). +/// These patch indices in instructions like `call`, `global.get`, `call_indirect`. +pub const IndexRelocType = enum(u8) { + function_index_leb = 0, // R_WASM_FUNCTION_INDEX_LEB — function index in `call` + table_index_sleb = 1, // R_WASM_TABLE_INDEX_SLEB — signed table index in `i32.const` + table_index_i32 = 2, // R_WASM_TABLE_INDEX_I32 — table index as raw u32 in data + type_index_leb = 6, // R_WASM_TYPE_INDEX_LEB — type index in `call_indirect` + global_index_leb = 7, // R_WASM_GLOBAL_INDEX_LEB — global index in `global.get/set` + event_index_leb = 10, // R_WASM_EVENT_INDEX_LEB + global_index_i32 = 13, // R_WASM_GLOBAL_INDEX_I32 + table_number_leb = 20, // R_WASM_TABLE_NUMBER_LEB +}; + +/// Offset-based relocation types (have an addend). +/// These patch memory addresses in load/store instructions and data segments. +pub const OffsetRelocType = enum(u8) { + memory_addr_leb = 3, // R_WASM_MEMORY_ADDR_LEB — unsigned addr in load/store + memory_addr_sleb = 4, // R_WASM_MEMORY_ADDR_SLEB — signed addr in `i32.const` + memory_addr_i32 = 5, // R_WASM_MEMORY_ADDR_I32 — raw u32 addr in data segment + function_offset_i32 = 8, // R_WASM_FUNCTION_OFFSET_I32 + section_offset_i32 = 9, // R_WASM_SECTION_OFFSET_I32 +}; +``` + +#### Relocation Entry + +```zig +/// A single relocation entry. Describes one site in the code or data section +/// that references a symbol and needs patching when that symbol's value changes. +pub const RelocationEntry = union(enum) { + /// Index relocations: the value at `offset` is a symbol index (function, type, global). + /// No addend — the patched value is the symbol's resolved index directly. + index: struct { + type_id: IndexRelocType, + offset: u32, // byte offset within the target section body + symbol_index: u32, // index into the linking section's symbol table + }, + + /// Offset relocations: the value at `offset` is a memory address. + /// The patched value is the symbol's address + addend. + offset: struct { + type_id: OffsetRelocType, + offset: u32, + symbol_index: u32, + addend: i32, + }, + + pub fn getSymbolIndex(self: RelocationEntry) u32 { + return switch (self) { + .index => |i| i.symbol_index, + .offset => |o| o.symbol_index, + }; + } + + pub fn getOffset(self: RelocationEntry) u32 { + return switch (self) { + .index => |i| i.offset, + .offset => |o| o.offset, + }; + } +}; +``` + +#### Symbol Info + +```zig +/// Flags for symbol table entries. +pub const SymFlag = struct { + pub const BINDING_WEAK: u32 = 0x01; + pub const BINDING_LOCAL: u32 = 0x02; + pub const VISIBILITY_HIDDEN: u32 = 0x04; + pub const UNDEFINED: u32 = 0x10; + pub const EXPORTED: u32 = 0x20; + pub const EXPLICIT_NAME: u32 = 0x40; + pub const NO_STRIP: u32 = 0x80; +}; + +/// Symbol kinds in the linking section's symbol table. +pub const SymKind = enum(u8) { + function = 0, + data = 1, + global = 2, + section = 3, + event = 4, + table = 5, +}; + +/// A symbol table entry. Each symbol has a kind, flags, and an index +/// into the relevant index space (function index, global index, etc.). +/// +/// Function/global symbols can be **explicitly named** (name stored in the linking +/// section) or **implicitly named** (undefined symbols that inherit their name from +/// the import section entry they reference). The old Rust parser distinguishes these +/// as `ExplicitlyNamed` vs `ImplicitlyNamed` variants (`crates/wasm_module/src/linking.rs` +/// lines 354–385). We model this with an optional name field. +/// +/// Parsing rule: a function/global symbol gets a name from the linking section if +/// `(flags & WASM_SYM_EXPLICIT_NAME) != 0` OR `(flags & WASM_SYM_UNDEFINED) == 0` +/// (i.e. defined symbols always have names). Undefined symbols without EXPLICIT_NAME +/// have `name = null` — their name must be looked up from the import section at the +/// symbol's `index`. +pub const SymInfo = struct { + kind: SymKind, + flags: u32, + /// Explicit name from the linking section, or null for implicitly-named + /// imported symbols (whose name comes from the import section). + name: ?[]const u8, + /// For function symbols: the function index (import or defined). + /// For global symbols: the global index. + /// For data symbols: segment index (stored here, offset/size stored separately). + index: u32, + /// Data symbols only: offset within segment. + data_offset: u32 = 0, + /// Data symbols only: size in bytes. + data_size: u32 = 0, + + pub fn isUndefined(self: SymInfo) bool { + return (self.flags & SymFlag.UNDEFINED) != 0; + } + + pub fn isImplicitlyNamed(self: SymInfo) bool { + return self.name == null; + } + + pub fn isLocal(self: SymInfo) bool { + return (self.flags & SymFlag.BINDING_LOCAL) != 0; + } + + pub fn isFunction(self: SymInfo) bool { + return self.kind == .function; + } + + /// Resolve this symbol's name when one is available. + /// + /// Explicitly named symbols return their stored name. + /// Implicitly named undefined function/global/event/table symbols inherit + /// their name from the import section. + /// Section symbols and other unnamed non-import symbols return null. + pub fn resolveName(self: SymInfo, imports: []const Import) ?[]const u8 { + if (self.name) |n| return n; + + if (!self.isUndefined()) return null; + + return switch (self.kind) { + .function, .global, .event, .table => imports[self.index].field_name, + else => null, + }; + } +}; +``` + +#### Relocation Section + +```zig +/// Holds all relocation entries for one section (either "reloc.CODE" or "reloc.DATA"). +pub const RelocationSection = struct { + /// Name of this reloc section (e.g. "reloc.CODE"). + name: []const u8, + /// Index of the target section these relocations apply to. + target_section_index: u32, + /// The relocation entries, sorted by offset. + entries: std.ArrayList(RelocationEntry), + + /// Patch all sites in `section_bytes` that reference `sym_index` with `value`. + /// This is the core surgical linking primitive. + pub fn applyRelocsU32( + self: *const RelocationSection, + section_bytes: []u8, + sym_index: u32, + value: u32, + ) void { + for (self.entries.items) |entry| { + if (entry.getSymbolIndex() != sym_index) continue; + switch (entry) { + .index => |idx| { + switch (idx.type_id) { + .function_index_leb, + .type_index_leb, + .global_index_leb, + .event_index_leb, + .table_number_leb, + => overwritePaddedU32(section_bytes, idx.offset, value), + .table_index_sleb => overwritePaddedI32( + section_bytes, idx.offset, @as(i32, @intCast(value)), + ), + .table_index_i32, .global_index_i32 => { + const off = @as(usize, idx.offset); + std.mem.writeInt(u32, section_bytes[off..][0..4], value, .little); + }, + } + }, + .offset => |off| { + const patched = @as(i64, value) + @as(i64, off.addend); + switch (off.type_id) { + .memory_addr_leb => overwritePaddedU32( + section_bytes, off.offset, @intCast(patched), + ), + .memory_addr_sleb => overwritePaddedI32( + section_bytes, off.offset, @intCast(patched), + ), + .memory_addr_i32, + .function_offset_i32, + .section_offset_i32, + => { + const o = @as(usize, off.offset); + std.mem.writeInt(u32, section_bytes[o..][0..4], @intCast(patched), .little); + }, + } + }, + } + } + } +}; +``` + +#### Linking Section + +```zig +pub const LINKING_VERSION: u32 = 2; + +/// Linking subsection types (within the "linking" custom section). +pub const LinkingSubsection = enum(u8) { + segment_info = 5, + init_funcs = 6, + comdat_info = 7, + symbol_table = 8, +}; + +/// Container for all linking metadata from a relocatable WASM module. +pub const LinkingSection = struct { + symbol_table: std.ArrayList(SymInfo), + segment_info: std.ArrayList(SegmentInfo), + init_funcs: std.ArrayList(InitFunc), + + /// Find a symbol by name. For implicitly-named imported symbols, resolves + /// the name from the import section. Returns the symbol index, or null. + pub fn findSymbolByName( + self: *const LinkingSection, + name: []const u8, + imports: []const Import, + ) ?u32 { + for (self.symbol_table.items, 0..) |sym, i| { + if (sym.resolveName(imports)) |sym_name| { + if (std.mem.eql(u8, sym_name, name)) return @intCast(i); + } + } + return null; + } + + /// Find the symbol table index for an imported function at the given function index. + pub fn findImportedFnSymIndex(self: *const LinkingSection, fn_index: u32) ?u32 { + for (self.symbol_table.items, 0..) |sym, i| { + if (sym.kind == .function and sym.isUndefined() and sym.index == fn_index) { + return @intCast(i); + } + } + return null; + } + + /// Find the symbol for an imported function at `old_fn_index` and update it + /// to point to `new_fn_index`. Returns the symbol index. + pub fn findAndReindexImportedFn( + self: *LinkingSection, + old_fn_index: u32, + new_fn_index: u32, + ) ?u32 { + for (self.symbol_table.items, 0..) |*sym, i| { + if (sym.kind == .function and sym.isUndefined() and sym.index == old_fn_index) { + sym.index = new_fn_index; + return @intCast(i); + } + } + return null; + } +}; + +pub const SegmentInfo = struct { + name: []const u8, + alignment: u32, + flags: u32, +}; + +pub const InitFunc = struct { + priority: u32, + symbol_index: u32, +}; +``` + +### Rust Reference + +- `crates/wasm_module/src/linking.rs` lines 18–42: `IndexRelocType` enum +- `crates/wasm_module/src/linking.rs` lines 64–87: `OffsetRelocType` enum +- `crates/wasm_module/src/linking.rs` lines 105–118: `RelocationEntry` enum +- `crates/wasm_module/src/linking.rs` lines 321–352: symbol flag constants +- `crates/wasm_module/src/linking.rs` lines 445–453: `SymInfo` enum +- `crates/wasm_module/src/linking.rs` lines 557–562: `LinkingSection` struct +- `crates/wasm_module/src/linking.rs` lines 169–208: `apply_relocs_u32()` +- `crates/wasm_module/src/linking.rs` lines 576–621: symbol lookup/reindex functions + +### Tests + +``` +test "RelocationSection.applyRelocsU32 — patches function_index_leb at correct offset" +test "RelocationSection.applyRelocsU32 — patches multiple sites for same symbol" +test "RelocationSection.applyRelocsU32 — ignores entries for different symbols" +test "RelocationSection.applyRelocsU32 — memory_addr_leb adds addend correctly" +test "RelocationSection.applyRelocsU32 — memory_addr_sleb handles negative addend" +test "LinkingSection.findSymbolByName — finds existing symbol" +test "LinkingSection.findSymbolByName — returns null for missing symbol" +test "LinkingSection.findImportedFnSymIndex — finds undefined function symbol" +test "LinkingSection.findAndReindexImportedFn — updates index and returns sym index" +``` + +--- + +## Phase 3: WASM Module Parser + +### What + +Add a `preload()` function to `WasmModule.zig` that parses a relocatable WASM binary into +the in-memory `WasmModule` representation. This is the reverse of the existing `encode()`. + +### Why + +The surgical linking pipeline starts by loading the platform's prebuilt host module. We need +to parse it into a mutable structure so we can: + +1. Inspect its imports (to find which ones are app function stubs) +2. Append new function bodies (app code and builtins) +3. Modify its symbol table and relocation entries +4. Re-serialize the final combined module + +The parser must handle both standard WASM sections and the custom linking/relocation sections +that are specific to relocatable objects. It must validate that the module is actually +relocatable (has symbol table, has relocations, no internally-defined globals). + +### Data Structure Changes + +The `WasmModule` struct needs to be extended significantly. The current struct is serialization-only +and stores high-level types (function signatures, bodies, exports). For surgical linking, we +need to also store: + +- Raw code section bytes (for in-place relocation patching) +- Function byte offsets within the code section (to map function indices to code ranges) +- The linking section (symbol table + segment info) +- Relocation sections (reloc.CODE + reloc.DATA) +- Raw data section bytes +- Import details including module/field names +- A `dead_import_dummy_count` counter for index stability + +The extended struct should look like: + +```zig +pub const WasmModule = struct { + allocator: Allocator, + + // --- Standard sections --- + func_types: std.ArrayList(FuncType), + func_type_results: std.ArrayList(?ValType), + + /// Import entries. Function imports occupy indices 0..import_fn_count-1. + imports: std.ArrayList(Import), + import_fn_count: u32, + + /// Function section: type index for each locally-defined function. + func_type_indices: std.ArrayList(u32), + + // Table + has_table: bool, + table_func_indices: std.ArrayList(u32), + + // Memory + has_memory: bool, + memory_min_pages: u32, + + // Globals + has_stack_pointer: bool, + stack_pointer_init: u32, + + // Exports + exports: std.ArrayList(Export), + + // Element section (for call_indirect table) + // ... (existing fields) + + // --- Code section (raw bytes for surgical patching) --- + /// Raw bytes of all function bodies in the code section. + /// Relocation offsets refer to positions within this buffer. + code_bytes: std.ArrayList(u8), + /// Byte offset of each function body within code_bytes. + /// Length: func_type_indices.items.len (locally-defined functions only). + function_offsets: std.ArrayList(u32), + /// Number of dummy functions prepended during linking to maintain index stability. + dead_import_dummy_count: u32, + + // --- Data section --- + data_segments: std.ArrayList(DataSegment), + data_offset: u32, + + // --- Linking metadata (from custom sections) --- + linking: WasmLinking.LinkingSection, + reloc_code: WasmLinking.RelocationSection, + reloc_data: WasmLinking.RelocationSection, + + // ... (methods follow) +}; +``` + +### Parser Implementation + +Add a `preload()` method: + +```zig +/// Parse a relocatable WASM binary into a WasmModule. +/// The input bytes must contain `linking` and `reloc.*` custom sections. +/// Returns error if the module is not relocatable or is malformed. +pub fn preload(allocator: Allocator, bytes: []const u8, require_relocatable: bool) !WasmModule { + // 1. Validate magic ("\0asm") and version (1) + // 2. Iterate sections in Wasm binary order: + // - For each standard section: parse into the appropriate field + // - Consume optional DataCount if present (used by Zig-built objects) + // - For custom sections: check name, parse linking/reloc.CODE/reloc.DATA, + // skip all other custom sections unchanged + // 3. If require_relocatable: validate symbol table and reloc.CODE exist + // 4. Return populated WasmModule +} +``` + +**Section parsing order** (section IDs per WASM spec): + +| ID | Section | What to parse | +|----|---------|---------------| +| 1 | Type | Function signatures (param types, result types) | +| 2 | Import | Module name, field name, import descriptor (func/memory/table/global) | +| 3 | Function | Type index for each locally-defined function | +| 4 | Table | Table type and limits | +| 5 | Memory | Memory limits (min pages, optional max pages) | +| 6 | Global | Global type + init expression (must be empty for relocatable modules) | +| 7 | Export | Name, kind, index | +| 8 | Start | Start function index (optional) | +| 9 | Element | Element segments (function table initialization) | +| 12 | DataCount | Consume and ignore (present in some relocatable objects, including the shipped wasm builtins) | +| 10 | Code | **Store raw bytes + record function offsets** | +| 11 | Data | Data segments with memory offsets | +| 0 | Custom | Check name: "linking", "reloc.CODE", "reloc.DATA", "name"; skip all other custom sections such as debug metadata | + +**Code section parsing detail**: Do NOT parse individual instructions. Store the entire section +body as raw bytes in `code_bytes`. Walk through function entries only to record each function's +byte offset in `function_offsets`. This is critical — we need the raw bytes for relocation +patching, and parsing/re-encoding instructions would lose the padded LEB128 encoding. + +**Linking section parsing** (custom section named "linking"): +1. Read version (must be 2) +2. Loop over subsections: + - Subsection 8 (WASM_SYMBOL_TABLE): Parse symbol count, then for each symbol: kind, flags, + and kind-specific fields (function index + optional name, data segment/offset/size, etc.) + - Subsection 5 (WASM_SEGMENT_INFO): Parse segment metadata + - Subsection 6 (WASM_INIT_FUNCS): Parse init function list + - Subsection 7 (WASM_COMDAT_INFO): Parse COMDAT groups (can skip for now) + +**Relocation section parsing** (custom sections named "reloc.CODE" / "reloc.DATA"): +1. Read target section index +2. Read relocation count +3. For each entry: read type byte, decode as index or offset relocation, read fields + +**Validation for relocatable modules**: +- Symbol table must exist and be non-empty +- `reloc.CODE` must exist and be non-empty (`reloc.DATA` remains optional) +- No internally-defined globals (the `__stack_pointer` global must come from an import, + because its index needs to be relocatable) + +### Rust Reference + +- `crates/wasm_module/src/lib.rs` lines 118–234: `preload()` — the complete parser +- `crates/wasm_module/src/lib.rs` lines 146–151: magic/version validation +- `crates/wasm_module/src/lib.rs` lines 153–232: section dispatch loop +- `crates/wasm_module/src/lib.rs` lines 189–201: relocatable validation +- `crates/wasm_module/src/linking.rs` lines 520–543: linking subsection parsing +- `crates/wasm_module/src/linking.rs` lines 355–445: symbol table entry parsing +- `crates/wasm_module/src/linking.rs` lines 119–152: relocation section parsing +- `crates/wasm_module/src/sections.rs` lines 1410–1449: code section parsing (function offset tracking) + +### Tests + +``` +test "preload — rejects bytes without WASM magic number" +test "preload — rejects wrong version" +test "preload — parses type section with multiple signatures" +test "preload — parses import section with function and memory imports" +test "preload — records correct function_offsets for code section" +test "preload — parses linking section symbol table" +test "preload — parses reloc.CODE section entries" +test "preload — parses reloc.DATA section entries" +test "preload — accepts relocatable object with optional DataCount section" +test "preload — require_relocatable rejects module without linking section" +test "preload — require_relocatable rejects module without reloc sections" +test "preload — parsed module has correct function count, import count, symbol count" +``` + +Note: there is **no round-trip requirement**. The parser reads relocatable objects; the +serializer emits final (non-relocatable) modules with linking sections stripped. These are +different formats by design. To validate the parser independently, check that parsed field +counts and symbol names match expected values from known test fixtures. + +**Test fixture**: Use `clang --target=wasm32 -c -o test.o test.c` to produce a minimal +relocatable WASM object file with known symbols and relocations. Alternatively, hand-craft +a minimal relocatable module in a byte array literal. + +--- + +## Phase 4: Surgical Linking — `linkHostToAppCalls()` + +### What + +Implement the core surgical linking operation: given a mapping of +`(app_fn_name, app_fn_index)` pairs, find the host's imports for those names and replace +them with calls to the defined app functions, patching all relocation sites. + +### Why + +The host module has imports like `roc__main_for_host_1_exposed` — these are stubs that the +host calls but doesn't define. The app compiler generates the actual implementations and +appends them to the module at known function indices. Surgical linking bridges the gap: +it removes the import and redirects all call sites to the defined function. + +The tricky part is **index stability**. In WASM, function indices are a single global +namespace: imports occupy indices 0..N-1, locally-defined functions occupy N..M-1. Removing +an import shifts every defined function's index down by 1, which would require patching +every reference to every defined function. Instead, we use the **dummy function trick**: +insert a 3-byte `unreachable; end` stub at the vacated position so the total function count +stays the same and only the specific swapped indices need updating. + +### Algorithm + +For each `(app_fn_name, app_fn_index)` in the host-to-app map: + +``` +1. FIND the import: + Walk imports, counting only function imports. + Find the one whose field name matches app_fn_name. + Record: host_import_index (position in imports array) + host_fn_index (its function index = position among fn imports) + +2. FIND the last JS/env function import: + This is the import we'll move into the vacated slot. + Record: swap_import_index, swap_fn_index + +3. SWAP imports: + Remove the swap import from its position. + If swap_import_index != host_import_index: + Insert swap import at host_import_index position. + (This puts the swap import where the app import was, + and removes one import from the total count.) + +4. INSERT dummy function: + Increment dead_import_dummy_count. + Insert a dummy type signature at position 0 in func_type_indices. + (Dummy functions are prepended to the code section during serialization.) + +5. UPDATE symbol table and apply relocations for the host function: + sym_index = linking.findAndReindexImportedFn(host_fn_index, app_fn_index) + reloc_code.applyRelocsU32(code_bytes, sym_index, app_fn_index) + +6. UPDATE symbol table and apply relocations for the swapped function: + (If swap_fn_index != host_fn_index) + sym_index = linking.findAndReindexImportedFn(swap_fn_index, host_fn_index) + reloc_code.applyRelocsU32(code_bytes, sym_index, host_fn_index) +``` + +**Why the swap?** We can't just remove the app import and leave a gap — that would shift +all subsequent import indices. Instead we move the last import into the gap, and only two +symbols need relocation updates (the removed app import → now points to defined function, +the moved last import → now at the removed import's index). Everything else is untouched. + +### The Dummy Function + +```zig +const DUMMY_FUNCTION = [3]u8{ + 0x00, // zero local variable declarations + Op.unreachable, // trap if called (means DCE was wrong) + Op.end, // end of function body +}; +``` + +During serialization, `dead_import_dummy_count` dummy functions are prepended before the +real code section functions. This means: + +- Function index `import_count + 0` → first dummy +- Function index `import_count + dead_import_dummy_count` → first real defined function +- Original defined-function indices remain unchanged overall: the import count goes down by + exactly the same amount that dummy functions are inserted at the front of the code section. + +### Rust Reference + +- `crates/wasm_module/src/lib.rs` lines 524–627: `link_host_to_app_calls()` — the complete algorithm +- `crates/wasm_module/src/lib.rs` lines 544–570: finding host and swap imports +- `crates/wasm_module/src/lib.rs` lines 572–586: swapping imports +- `crates/wasm_module/src/lib.rs` lines 588–627: updating symbols and applying relocations +- `crates/wasm_module/src/lib.rs` lines 900–904: `DUMMY_FUNCTION` constant + +### Tests + +``` +test "linkHostToAppCalls — single app function: import removed, dummy inserted" +test "linkHostToAppCalls — verifies call instruction patched to app function index" +test "linkHostToAppCalls — last import swapped into vacated slot" +test "linkHostToAppCalls — swap import's call sites updated to new index" +test "linkHostToAppCalls — multiple app functions linked in sequence" +test "linkHostToAppCalls — dead_import_dummy_count incremented correctly" +test "linkHostToAppCalls — func_type_indices has dummy signature at position 0" +test "linkHostToAppCalls — total function count unchanged after linking" +``` + +**Test approach**: Construct a minimal WasmModule in memory with: +- 3 function imports: `js_foo`, `roc__main_exposed`, `js_bar` +- 2 defined functions with `call` instructions referencing the imports +- Proper symbol table and relocation entries +- Then call `linkHostToAppCalls` with `roc__main_exposed → fn_index_5` +- Verify: import count decreased by 1, `js_bar` moved to slot 1, dummy inserted, + call instructions patched to correct indices. + +--- + +## Phase 5: Memory, Table, and Stack Pointer Ownership + +### What + +Define and implement the ownership rules for WASM linear memory, the function reference +table, and the `__stack_pointer` global. These must be set up correctly before any code +generation or linking can proceed. + +### Why + +In a relocatable WASM object, memory, table, and `__stack_pointer` are **imported** — the +object doesn't own them. But in the final linked module, they must be **defined** (owned by +the module). The surgical linking pipeline must handle this transition: + +1. The host module imports memory and table from the environment +2. During setup, we **remove** those imports (the final module will define them) +3. We define memory with the correct page count and table with the correct size +4. The `__stack_pointer` global is imported in relocatable objects (so its index can be + relocated) but must become a defined mutable global in the final module + +The old Rust compiler handled this explicitly in `WasmBackend::new()` at +`crates/compiler/gen_wasm/src/backend.rs` lines 94–99 (import removal) and in `finalize()` +at lines 296–304 (memory layout and table sizing). + +### Design + +**During host module setup** (after `preload()`, before code generation): + +``` +1. REMOVE only Memory and Table imports from the host module's import section: + host_module.imports.retain(|imp| imp is not Memory and not Table) + Update import_fn_count if any non-function imports were before function imports. + KEEP the __stack_pointer global import — it is needed during code generation + and linking because relocation entries reference it by its global index. +``` + +**During finalization** (after all code generation and surgical linking, before serialization): + +``` +2. REPLACE __stack_pointer global import with a defined global: + Remove the __stack_pointer import from the import section. + Define it as: type=i32, mutable=true, init=i32.const(stack_pointer_init) + stack_pointer_init = memory_pages * 65536 (top of memory) + The old Rust compiler does this in set_memory_layout() at + crates/compiler/gen_wasm/src/backend.rs lines 151–174. + +3. DEFINE memory: + host_module.has_memory = true + host_module.memory_min_pages = calculated from stack_bytes and data size + Memory is exported as "memory" for host access. + +4. DEFINE table: + host_module.has_table = true + Table size = 1 + max element index (computed after all functions are registered) + Table type = funcref, limits = MinMax(size, size) +``` + +**Important**: The split between setup (step 1) and finalization (steps 2–4) is critical. +During the linking phase, `__stack_pointer` references are resolved via its global import +index and relocation entries. Only after all linking is complete can we replace the import +with a defined global at the correct initial value. + +**Memory layout**: + +``` +┌────────────────────────────────┐ ← memory_pages * 65536 +│ Stack (grows downward) │ +│ ← __stack_pointer starts here│ +├────────────────────────────────┤ +│ (free space) │ +├────────────────────────────────┤ +│ Data segments │ +│ (constants, string literals) │ +├────────────────────────────────┤ +│ Reserved (offset 0–1023) │ +│ RocOps struct at offset 0 │ +└────────────────────────────────┘ ← offset 0 +``` + +### Platform Host Artifact Type + +The CLI target selection at `src/cli/main.zig` lines 3989–4017 prefers `.exe` over +`.static_lib`. For WASM, the host artifact should be declared under the **`exe`** target +key in the platform's `TargetsConfig`: + +```roc +targets: { + files: "targets/", + exe: { + wasm32: ["host.wasm", app], + } +} +``` + +The `host.wasm` is a **relocatable WASM object** (produced with `clang --target=wasm32 -c` +or equivalent), not a final executable. Despite being listed under `exe`, it's an object file +that the surgical linker will combine with app code to produce the final executable `.wasm`. +This matches the pattern used by native targets where hosts provide `.o` files under `exe`. + +**Important CLI requirement**: the existing generic `LinkItem.file_path` handling in +`src/cli/main.zig` cannot treat `host.wasm` as an ordinary linker input. For wasm32 +executable builds, the CLI must resolve one host-module path from the link spec and hand +its bytes directly to `WasmModule.preload()`. It must NOT append `host.wasm` to +`platform_files_pre` / `platform_files_post`, and it must NOT pass it through the normal +`wasm-ld` input collection path. + +This can be implemented either by: + +1. adding a dedicated `LinkItem.host_wasm`, or +2. adding a wasm32-specific branch in `src/cli/main.zig` that extracts the first pre-`app` + host `.wasm` path from `targets.exe.wasm32`. + +The plan does not require one specific encoding in `TargetsConfig`; it does require that the +resolved host module path be consumed by the surgical-linking pipeline rather than by the +generic linker-input pipeline. + +### Rust Reference + +- `crates/compiler/gen_wasm/src/backend.rs` lines 94–99: removing Memory/Table imports +- `crates/compiler/gen_wasm/src/backend.rs` lines 296–304: `finalize()` — `set_memory_layout()`, + `export_globals()`, table sizing + +### Tests + +``` +test "setup — memory and table imports removed from host module" +test "setup — import_fn_count unchanged after removing non-function imports" +test "setup — __stack_pointer global defined with correct initial value" +test "setup — memory section has correct minimum pages" +test "setup — table size matches element count after finalization" +test "setup — memory exported as 'memory'" +``` + +--- + +## Phase 6: WASM Function Pointer Representation & RocOps Layout + +### What + +Define exactly how function pointers are represented in WASM for the RocOps struct and +HostedFunctions array. WASM cannot use raw function pointers — all indirect calls must go +through a `funcref` table via `call_indirect`. + +### Why + +The generic `RocOps` struct (`src/builtins/host_abi.zig` lines 88–179) stores raw function +pointers on native targets: + +```zig +// Native (64-bit): RocOps is 72 bytes +pub const RocOps = extern struct { + env: *anyopaque, // offset 0 (8 bytes) + roc_alloc: *const fn(...), // offset 8 (8 bytes) + roc_dealloc: *const fn(...), // offset 16 (8 bytes) + roc_realloc: *const fn(...), // offset 24 (8 bytes) + roc_dbg: *const fn(...), // offset 32 (8 bytes) + roc_expect_failed: *const fn(...), // offset 40 (8 bytes) + roc_crashed: *const fn(...), // offset 48 (8 bytes) + hosted_fns: HostedFunctions, // offset 56 (count: u32 + pad + fns: *[]) +}; +``` + +On WASM, function pointers don't exist in linear memory. Instead, functions are referenced +by **table indices** — integers that index into a `funcref` table. The `call_indirect` +instruction takes a table index from the stack and dispatches to the corresponding function. + +The current WASM backend already uses this pattern (`WasmCodeGen.zig` lines 214–240): +RocOps functions are added to the function table, and their table indices are stored in +a 36-byte struct in linear memory. But this was designed for the standalone eval mode — it +needs to be formalized for the surgical linking pipeline. + +### WASM RocOps Memory Layout (36 bytes on wasm32) + +``` +Offset Field Size Contents +────── ────────────────────────── ──── ──────────────────────────────── + 0 env_ptr 4 Host environment pointer (i32) + 4 roc_alloc_table_idx 4 Table index for roc_alloc + 8 roc_dealloc_table_idx 4 Table index for roc_dealloc +12 roc_realloc_table_idx 4 Table index for roc_realloc +16 roc_dbg_table_idx 4 Table index for roc_dbg +20 roc_expect_failed_table_idx 4 Table index for roc_expect_failed +24 roc_crashed_table_idx 4 Table index for roc_crashed +28 hosted_fns_count 4 Number of hosted functions +32 hosted_fns_ptr 4 Pointer to table index array in memory +``` + +This is **NOT** the same layout as the native `RocOps` struct. It is a WASM-specific +encoding where function pointers are replaced by `u32` table indices. This is acceptable +because the builtins compiled to `roc_builtins.o` for wasm32 will use the WASM calling +convention, and the `roc_ops` parameter they receive is already expected to contain +table indices (the `dev_wrappers.zig` functions receive `*RocOps` and call through it). + +**Critical detail**: The builtins in `roc_builtins.o` receive a `*RocOps` pointer and call +`roc_ops.roc_alloc(...)` etc. On native targets, these are direct function pointer calls. +On WASM, the builtins `.o` must be compiled with a WASM-specific `RocOps` definition that +stores table indices, and the call sequences must use `call_indirect`. This is handled by +the Zig compiler when targeting wasm32 — function pointers in `extern struct` fields become +table-index-based `call_indirect` calls. + +### How the Host Populates RocOps + +The host module's entry point receives a pointer to the RocOps struct in linear memory. +The host is responsible for: + +1. Writing the `env_ptr` field +2. Importing the RocOps functions (`roc_alloc`, etc.) from the embedding environment +3. Adding them to the function table +4. Writing their table indices into the struct at the correct offsets + +In practice, the host's initialization code does something like: + +```wasm +;; Store roc_alloc's table index at RocOps offset 4 +i32.const 0 ;; RocOps base address +i32.const +i32.store offset=4 +``` + +The surgical linker must ensure that the host's RocOps function imports are in the function +table (element section) so they have valid table indices. + +### HostedFunctions Representation + +`HostedFunctions.fns` on native targets is a pointer to an array of function pointers. +On WASM, it is a pointer to an array of **`u32` table indices** in linear memory: + +``` +Memory at hosted_fns_ptr: + [0]: table_index_of_hosted_fn_0 (u32) + [4]: table_index_of_hosted_fn_1 (u32) + [8]: table_index_of_hosted_fn_2 (u32) + ... +``` + +The host writes these table indices at initialization time. To call hosted function N: + +```wasm +;; Load table index from hosted_fns array +local.get $roc_ops_ptr +i32.load offset=32 ;; Load hosted_fns_ptr +i32.const N +i32.const 4 +i32.mul +i32.add +i32.load ;; Load table index of hosted fn N +;; Push args: (roc_ops, ret_ptr, args_ptr, table_idx) +;; call_indirect with RocCall type signature +call_indirect (type $roc_call) 0 +``` + +### Two Distinct Indirect Call ABIs + +There are **two different `call_indirect` type signatures** in use, and they must not +be conflated: + +**1. RocOps core callbacks** — 2-argument ABI: `(i32 args_struct_ptr, i32 env_ptr) → void` + +The 6 core RocOps callbacks (`roc_alloc`, `roc_dealloc`, `roc_realloc`, `roc_dbg`, +`roc_expect_failed`, `roc_crashed`) each take a pointer to a **specific args struct** +and the host `env` pointer. This matches `host_abi.zig` lines 88–106 where each field +has signature `fn(*SpecificStruct, *anyopaque) callconv(.c) void`. + +```wasm +(type $roc_ops_callback (func (param i32 i32))) ;; (args_struct_ptr, env_ptr) -> void +``` + +This is what the current WASM backend registers at `WasmCodeGen.zig` line 215 and what +builtins depend on when calling `roc_ops.alloc()` (`dev_wrappers.zig` line 405). The args +struct is laid out in linear memory (e.g. `RocAlloc` at lines 186–190: alignment, length, +answer), the callback reads inputs and writes its answer into the struct, and the caller +reads the answer back. + +Example — `roc_alloc` call: +```wasm +;; Write RocAlloc struct to stack slot +local.get $fp +local.get $alignment +i32.store offset=0 ;; RocAlloc.alignment +local.get $fp +local.get $length +i32.store offset=4 ;; RocAlloc.length + +;; Push args: (alloc_struct_ptr, env_ptr) +local.get $fp ;; args_struct_ptr +local.get $roc_ops_ptr +i32.load offset=0 ;; env_ptr (RocOps offset 0) + +;; Load table index, dispatch +local.get $roc_ops_ptr +i32.load offset=4 ;; roc_alloc table index +call_indirect (type $roc_ops_callback) 0 + +;; Read result +local.get $fp +i32.load offset=8 ;; RocAlloc.answer +``` + +**2. Hosted functions** — 3-argument RocCall ABI: `(i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void` + +Hosted functions (platform-provided) use the `RocCall` ABI from `host_abi.zig` line 15. +This is a different signature — 3 arguments, not 2. + +```wasm +(type $roc_call (func (param i32 i32 i32))) ;; (roc_ops, ret_ptr, args_ptr) -> void +``` + +Both type signatures must be registered in the module's type section, and callers must +use the correct one. Using the wrong type in `call_indirect` will trap at runtime +(WASM validates the type signature of indirect calls). + +### Codegen Must Register Both Types + +```zig +// Register the 2-arg RocOps callback type: (args_struct_ptr, env_ptr) → void +self.roc_ops_callback_type_idx = try self.module.addFuncType(&.{ .i32, .i32 }, &.{}); + +// Register the 3-arg RocCall type: (roc_ops, ret_ptr, args_ptr) → void +self.roc_call_type_idx = try self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); +``` + +| Call target | Type signature | Type index field | +|-------------|---------------|-----------------| +| `roc_alloc` | `(i32, i32) → void` | `roc_ops_callback_type_idx` | +| `roc_dealloc` | `(i32, i32) → void` | `roc_ops_callback_type_idx` | +| `roc_realloc` | `(i32, i32) → void` | `roc_ops_callback_type_idx` | +| `roc_dbg` | `(i32, i32) → void` | `roc_ops_callback_type_idx` | +| `roc_expect_failed` | `(i32, i32) → void` | `roc_ops_callback_type_idx` | +| `roc_crashed` | `(i32, i32) → void` | `roc_ops_callback_type_idx` | +| Hosted function N | `(i32, i32, i32) → void` | `roc_call_type_idx` | + +### Reference + +- `src/builtins/host_abi.zig` lines 88–106: RocOps callback field signatures (2-arg) +- `src/builtins/host_abi.zig` lines 186–242: RocAlloc/RocDealloc/etc. argument structs +- `src/builtins/host_abi.zig` lines 15–30: RocCall signature (3-arg, for hosted fns) +- `src/builtins/host_abi.zig` lines 37–44: HostedFn type definition +- `src/builtins/dev_wrappers.zig` lines 147–162: `alloc()` wrapper calling `self.roc_alloc(&args, self.env)` +- `src/backend/wasm/WasmCodeGen.zig` lines 215–219: current 2-arg type registration +- `src/backend/wasm/WasmCodeGen.zig` lines 4886–4908: current `call_indirect` for roc_alloc + +### Tests + +``` +test "RocOps struct — correct field offsets for wasm32 (36 bytes total)" +test "call_indirect — roc_alloc uses 2-arg callback type, not RocCall type" +test "call_indirect — hosted function uses 3-arg RocCall type" +test "call_indirect — mismatched type index would trap (validate type separation)" +test "function table — all RocOps functions have valid table entries after linking" +test "function table — hosted functions added to table with correct indices" +``` + +--- + +## Phase 7a: Entrypoint ABI Migration + +### What + +Migrate app-exposed functions from the current standalone eval ABI +`(i32 env_ptr) → result_val_type` to the real `RocCall` ABI +`(i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void`. + +### Why + +The current WASM backend (`WasmCodeGen.zig` line 376) generates a main function with +signature `(i32 env_ptr) → ` and synthesizes its own RocOps struct in linear +memory at offset 0 (line 428). This was designed for the standalone eval/REPL mode where +the host is the bytebox test harness. + +In the surgical linking model, the **host** owns the RocOps struct and passes it to the app. +The app-exposed function must accept `RocCall` ABI: + +```zig +// host_abi.zig line 15 +pub const RocCall = fn (*RocOps, *anyopaque, *anyopaque) callconv(.c) void; +``` + +On wasm32 this becomes: + +```wasm +(func $roc__main_for_host_1_exposed (param $roc_ops i32) (param $ret_ptr i32) (param $args_ptr i32)) +``` + +The host module imports this function. After surgical linking, the import is replaced with +the app's defined function. The host calls it with a pointer to its own RocOps struct, +a return buffer, and an arguments buffer. + +### Design Decision + +**App-exposed functions use RocCall ABI directly.** The host does NOT provide wrappers. + +This means: +- The app's main function signature changes from `(i32) → ` to `(i32, i32, i32) → void` +- The app reads arguments from `args_ptr` instead of receiving them as direct parameters +- The app writes its return value to `ret_ptr` instead of returning it on the WASM stack +- The app receives `roc_ops_ptr` from the host instead of building its own RocOps struct + +### Changes to WasmCodeGen + +The `generateModule()` method (current line 376) must be restructured: + +**Before** (standalone eval ABI): +```zig +// Create main: (i32 env_ptr) → result_vt +const main_type = try self.module.addFuncType(&.{.i32}, &.{result_vt}); +// ... generate body ... +// Return result on stack +``` + +**After** (RocCall ABI): +```zig +// Create main: (i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void +const main_type = try self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); +// roc_ops_ptr is parameter 0 — store as self.roc_ops_local +// args_ptr is parameter 2 — read arguments from memory at this address +// ... generate body ... +// Write result to ret_ptr (parameter 1) instead of returning on stack +``` + +**Key changes**: +- `roc_ops_local` comes from parameter 0 (no longer synthesized at offset 0) +- Arguments are loaded from `args_ptr` using `i32.load` with appropriate offsets +- Return value is stored to `ret_ptr` using `i32.store` / `i64.store` / `f64.store` +- No RocOps struct initialization in the app — the host provides it + +### Backward Compatibility for Eval + +The eval pipeline can either: +1. Build a tiny host shim (precompiled) that creates a RocOps struct and calls the app + with RocCall ABI — this is the cleanest approach. +2. Keep a separate `generateModuleForEval()` path with the old ABI — pragmatic but + maintains two code paths. + +Option 1 is preferred. The eval host shim would be ~50 lines of WAT that: +- Imports `roc_alloc`, `roc_dealloc`, etc. from the embedding environment +- Builds a RocOps struct in linear memory +- Calls the app's main function with `(roc_ops_ptr, ret_ptr, args_ptr)` +- Returns the result to the embedder + +### Rust Reference + +- `src/builtins/host_abi.zig` lines 15–30: `RocCall` function signature +- `src/backend/wasm/WasmCodeGen.zig` lines 376–452: current main function generation +- `src/backend/dev/LirCodeGen.zig` lines 4145–4241: dev backend hosted call lowering + (shows the same `(roc_ops, ret_ptr, args_ptr)` pattern) + +### Tests + +``` +test "app entrypoint — exports both roc__main_for_host_1_exposed and main" +test "app entrypoint — roc__main_for_host_1_exposed has RocCall type (i32, i32, i32) → void" +test "app entrypoint — main (eval wrapper) returns a value, not void" +test "app entrypoint — roc__main and main are distinct functions" +``` + +Structural tests parse the encoded wasm bytes with `WasmModule.preload` — no bytebox +execution needed. Behavioral correctness is validated by 1289 eval tests that exercise +the eval wrapper end-to-end. Full integration tests (calling `roc__main_for_host_1_exposed` +from a surgically-linked host) will be added in Phase 12. + +### Implementation Notes (Done) + +`generateModule()` now produces two exported functions in the same module: + +1. **``** — RocCall ABI `(i32, i32, i32) → void`. + The export name is passed as a parameter to `generateModule()`, derived from the + platform's `provides` section (e.g. `roc__main_for_host_1_exposed`). Parameter 0 + is `roc_ops_ptr` (provided by host, not synthesized). The expression body runs and + the result is written to `ret_ptr` via `emitStoreResultToRetPtr()`, which handles + both primitives (scalar store) and composites (unrolled 4-byte copy). + +2. **`main`** — Eval wrapper `(i32 env_ptr) → result_vt`. Built by `emitEvalWrapper()`. + Allocates its own stack frame for a return buffer, constructs the RocOps struct at + memory offset 0 (same as before), calls the RocCall function via `call`, loads the + result from the return buffer, and returns it on the wasm stack. + +Helper methods extracted: `emitStackPrologue()`, `emitStackEpilogue()`. + +--- + +## Phase 7b: CodeBuilder & WasmCodeGen Refactor + +### What + +Introduce a `CodeBuilder` pattern for accumulating function bodies, and refactor +`WasmCodeGen` to append completed functions into the parsed host module via an explicit +`insertIntoModule()` step. + +### Why + +The previous plan (v1) naively suggested recording relocation offsets during instruction +emission (`emitRelocatableCall` at the point of each `call` instruction). This is wrong. +The final byte offset of a relocation within the code section depends on: + +1. **Where the function lands in the code section** — the function's base offset within + `code_bytes`, which isn't known until insertion time +2. **The function body length prefix** — a LEB128-encoded size prepended to each function + body, whose byte length varies +3. **The locals declaration preamble** — encoded before the instruction bytes +4. **Deferred insertions** — the Rust code builder uses an insertion mechanism where chunks + of code are generated out-of-order and spliced in during finalization + +The old Rust compiler solved this with a `CodeBuilder` (`crates/compiler/gen_wasm/src/code_builder.rs` +lines 48–76) that tracks relocations as **(code_position, function_index)** pairs relative +to the function's own instruction stream. Only during `insert_into_module()` (lines 211–256) +are these converted to absolute code section offsets: + +```rust +// Rust: offset computation at insertion time +let offset = reloc_code_pos + code_offset + insertion_bytes; +``` + +We must follow the same pattern. + +### Design + +**CodeBuilder struct** — accumulates one function's body: + +```zig +const CodeBuilder = struct { + /// Main instruction bytes for the current function. + code: std.ArrayList(u8), + /// Locals declaration preamble (local count groups + types). + preamble: std.ArrayList(u8), + /// Relocations within this function: (code_pos, symbol_index). + /// code_pos is relative to the start of self.code, NOT the module's code section. + import_relocations: std.ArrayList(struct { code_pos: u32, symbol_index: u32 }), + + /// Emit a relocatable call instruction. + /// Records the relocation position relative to this function's code buffer. + pub fn emitRelocatableCall(self: *CodeBuilder, symbol_idx: u32) !void { + try self.code.append(Op.call); + const code_pos: u32 = @intCast(self.code.items.len); + try self.import_relocations.append(.{ + .code_pos = code_pos, + .symbol_index = symbol_idx, + }); + try appendPaddedU32(&self.code, 0); // 5-byte placeholder + } + + /// Finalize this function and insert it into the module's code section. + /// Computes final relocation offsets based on actual position in code_bytes. + pub fn insertIntoModule(self: *const CodeBuilder, module: *WasmModule) !u32 { + const fn_offset: u32 = @intCast(module.code_bytes.items.len); + module.function_offsets.append(fn_offset); + + // Encode body length (LEB128) + const body_len = self.preamble.items.len + self.code.items.len; + try leb128WriteU32(&module.code_bytes, @intCast(body_len)); + + // Append preamble (locals declaration) + try module.code_bytes.appendSlice(self.preamble.items); + + // Record the code start offset (after length prefix + preamble) + const code_start: u32 = @intCast(module.code_bytes.items.len); + + // Append instruction bytes + try module.code_bytes.appendSlice(self.code.items); + + // Create relocation entries with absolute offsets + for (self.import_relocations.items) |reloc| { + try module.reloc_code.entries.append(.{ .index = .{ + .type_id = .function_index_leb, + .offset = code_start + reloc.code_pos, + .symbol_index = reloc.symbol_index, + }}); + } + + // Compute and return the global function index + const func_idx = module.import_fn_count + + module.dead_import_dummy_count + + @as(u32, @intCast(module.func_type_indices.items.len)); + return func_idx; + } + + pub fn clear(self: *CodeBuilder) void { + self.code.clearRetainingCapacity(); + self.preamble.clearRetainingCapacity(); + self.import_relocations.clearRetainingCapacity(); + } +}; +``` + +**WasmCodeGen changes**: + +Replace the current `body: std.ArrayList(u8)` with a `CodeBuilder`. The codegen emits +instructions into `self.code_builder.code`. After each function is complete: + +```zig +// After generating a procedure body: +fn finalizeProcedure(self: *Self) !void { + // Build preamble (locals declaration) + try self.code_builder.encodePreamble(self.storage.local_types.items); + // Insert completed function into module + const func_idx = try self.code_builder.insertIntoModule(self.module); + // Add to function section + try self.module.func_type_indices.append(type_idx); + // Add symbol + try self.module.linking.symbol_table.append(.{ ... }); + // Clear for next function + self.code_builder.clear(); + self.storage.reset(); +} +``` + +This ensures relocation offsets are **always** computed at insertion time, never during +instruction emission. + +### Rust Reference + +- `crates/compiler/gen_wasm/src/code_builder.rs` lines 48–76: `CodeBuilder` struct definition +- `crates/compiler/gen_wasm/src/code_builder.rs` lines 211–256: `insert_into_module()` with + relocation offset computation: `offset = reloc_code_pos + code_offset + insertion_bytes` +- `crates/compiler/gen_wasm/src/code_builder.rs` lines 348–349: `call_import()` recording + relocation as `(code_pos, imported_fn_id)` pair +- `crates/compiler/gen_wasm/src/backend.rs` lines 390–396: `reset()` calls + `self.code_builder.insert_into_module(&mut self.module)` after each procedure + +### Tests + +``` +test "CodeBuilder.insertIntoModule — relocation offset accounts for body length prefix" +test "CodeBuilder.insertIntoModule — relocation offset accounts for preamble size" +test "CodeBuilder.insertIntoModule — multiple relocations in one function" +test "CodeBuilder.insertIntoModule — function appended at correct code_bytes position" +test "CodeBuilder — two functions inserted sequentially have non-overlapping offsets" +test "CodeBuilder.emitRelocatableCall — records code_pos relative to function start" +test "CodeBuilder — clear resets state for next function without leaking relocations" +``` + +--- + +## Phase 8: Builtins Migration — Symbol Mapping, ABI Rewrites, Import Removal + +### What + +Parse and merge `roc_builtins.o` into the host module, rewrite all builtin call sites +in WasmCodeGen to use the correct symbol names and decomposed C ABI, and verify that +no legacy `env.roc_*` builtin imports survive in the final module. + +### Why + +This phase addresses three intertwined problems: + +1. **The current backend registers ~40 builtin imports** (`WasmCodeGen.zig` lines 182–308) + with names like `roc_str_trim`, `roc_dec_mul`, etc. These are imported from the `"env"` + module namespace and implemented by the host (bytebox test harness). + +2. **The real builtins in `roc_builtins.o` export different symbol names** with different + ABIs. For example: + - Current import: `roc_str_trim(str_ptr: i32, result_ptr: i32) → void` (pointer-to-struct) + - Real builtin export: `roc_builtins_str_trim(out: *RocStr, bytes: ?[*]u8, len: usize, cap: usize, roc_ops: *RocOps) → void` (decomposed C ABI) + +3. **If we merge `roc_builtins.o` without rewriting call sites**, the app code would still + emit `call roc_str_trim` with the old ABI, but the module would contain + `roc_builtins_str_trim` with the new ABI — symbol mismatch and ABI mismatch. + +All three must be addressed together. The merge alone is not enough. + +### Sub-phase 8a: Merge `roc_builtins.o` + +Parse `roc_builtins.o` as a relocatable WASM module and merge it into the host module. +This is the same merge algorithm described in the previous plan version: + +``` +1. MERGE TYPE SECTIONS (with deduplication) +2. MERGE FUNCTION SECTIONS (remap type indices) +3. MERGE CODE SECTION (append bytes, track offsets) +4. MERGE DATA SECTION (adjust memory offsets) +5. MERGE SYMBOL TABLE (resolve shared symbols like roc_alloc) +6. MERGE RELOCATION ENTRIES (remap offsets and symbol indices) +``` + +After merging, the module contains the builtin function bodies and their symbols +(e.g. `roc_builtins_str_trim` as a defined function symbol). + +### Sub-phase 8b: Symbol Name Mapping + +Build a lookup table mapping the builtin operation to its symbol index in the merged module: + +```zig +const BuiltinSymbols = struct { + // Decimal/math + dec_mul: u32, // → "roc_builtins_dec_mul_saturated" + dec_div: u32, // → "roc_builtins_dec_div" + dec_div_trunc: u32, // → "roc_builtins_dec_div_trunc" + dec_to_str: u32, // → "roc_builtins_dec_to_str" + // String operations + str_eq: u32, // → "roc_builtins_str_equal" + str_concat: u32, // → "roc_builtins_str_concat" + str_trim: u32, // → "roc_builtins_str_trim" + str_trim_start: u32, // → "roc_builtins_str_trim_start" + str_trim_end: u32, // → "roc_builtins_str_trim_end" + // ... etc for all ~40 builtins +}; +``` + +Populated after merge by looking up each symbol name in the merged module's symbol table: +```zig +builtin_syms.str_trim = module.linking.findSymbolByName("roc_builtins_str_trim") orelse + return error.MissingBuiltinSymbol; +``` + +The exact symbol names come from `src/builtins/static_lib.zig` lines 28–112 +and `src/builtins/dev_wrappers.zig`. + +### Sub-phase 8c: ABI Rewrite at Call Sites + +Every place WasmCodeGen currently emits a host import call must be rewritten to: +1. Use the builtin symbol index (not an import function index) +2. Decompose struct arguments into individual fields +3. Pass `roc_ops_ptr` as the last argument (where required) +4. Use `emitRelocatableCall()` (via CodeBuilder) instead of direct `call` instruction + +**ABI categories** (from `src/builtins/dev_wrappers.zig`): + +| Category | Old ABI (pointer-to-struct) | New ABI (decomposed C) | Examples | +|----------|---------------------------|----------------------|----------| +| **Str unary** | `(str_ptr, result_ptr)` | `(result_ptr, bytes, len, cap, roc_ops)` | trim, trim_start, trim_end, lowercased, uppercased | +| **Str binary** | `(a_ptr, b_ptr, result_ptr)` | `(result_ptr, a_bytes, a_len, a_cap, b_bytes, b_len, b_cap, roc_ops)` | concat, drop_prefix, split, join_with | +| **Str equality** | `(a_ptr, b_ptr) → i32` | `(a_bytes, a_len, a_cap, b_bytes, b_len, b_cap) → i32` | str_eq, caseless_ascii_equals | +| **i128 binop** | `(lhs_ptr, rhs_ptr, result_ptr)` | `(result_ptr, lhs_lo, lhs_hi, rhs_lo, rhs_hi)` | i128_div_s, u128_div, dec_mul | +| **Dec to str** | `(dec_ptr, buf_ptr) → i32` | `(result: *RocStr, lo: u64, hi: u64, roc_ops: *RocOps)` | dec_to_str | +| **List ops** | `(list_ptr, ...)` | `(result_ptr, bytes, len, cap, ..., roc_ops)` | list_append, list_reverse | +| **Callback-bearing** | `(list_ptr, cmp_fn, cmp_data, ...)` | `(result_ptr, bytes, len, cap, cmp_fn_ptr, cmp_data, alignment, elem_width, roc_ops)` | list_sort_with | + +**Example rewrite** — `str_trim`: + +```zig +// BEFORE (old ABI): +// str_ptr and result_ptr are i32 pointers to 12-byte RocStr structs in memory +try self.emitCall(self.str_trim_import.?, &.{str_ptr, result_ptr}); + +// AFTER (decomposed C ABI): +// Decompose the RocStr at str_ptr into (bytes, len, cap) +try self.emitLocalGet(result_ptr_local); // arg 0: output pointer +try self.emitI32Load(str_ptr_local, 0); // arg 1: bytes (offset 0) +try self.emitI32Load(str_ptr_local, 4); // arg 2: length (offset 4) +try self.emitI32Load(str_ptr_local, 8); // arg 3: capacity (offset 8) +try self.emitLocalGet(self.roc_ops_local); // arg 4: roc_ops pointer +try self.code_builder.emitRelocatableCall(self.builtin_syms.str_trim); +``` + +**Callback-bearing builtins** require special treatment. `list_sort_with` is the primary +example. Its wrapper signature (`dev_wrappers.zig` lines 382–424): + +```zig +pub fn roc_builtins_list_sort_with( + out: *RocList, + list_bytes: ?[*]u8, list_len: usize, list_cap: usize, + cmp_fn_ptr: ?*const anyopaque, // comparator function pointer + cmp_data: ?[*]u8, // comparator closure data + alignment: u32, + element_width: usize, + roc_ops: *RocOps, +) callconv(.c) void +``` + +On WASM, function pointers don't exist in linear memory. The `cmp_fn_ptr` parameter must +be a **function table index** (a `u32` that indexes into the `funcref` table). The current +WASM backend already handles this: `WasmCodeGen.zig` line 11994 compiles the comparator +as a separate procedure and passes its function index. The dev backend does the equivalent +via PC-relative addressing (`LirCodeGen.zig` line 3366). + +The call-site rewrite for `list_sort_with` must: +1. Compile the comparator procedure and get its `func_idx` (already done by `compileAllProcSpecs`) +2. Pass `func_idx` as `cmp_fn_ptr` (it will be interpreted as a table index by the builtin) +3. Pass the closure data pointer as `cmp_data` +4. Decompose the list struct and pass remaining args in the new ABI + +Any future callback-bearing builtins (e.g. `list_map`, `list_keep_if`) will follow the +same pattern: compile the callback as a proc, pass its table index. + +### Sub-phase 8d: Remove Legacy Imports + +After all call sites are rewritten: + +1. **Remove** the ~40 `?u32` import fields from `WasmCodeGen` (`dec_mul_import`, `str_trim_import`, etc.) +2. **Remove** their registration in `registerHostImports()` — only keep the 6 RocOps imports +3. **Remove** the ~45 host function implementations from `src/eval/test/helpers.zig` + (`hostDecMul`, `hostStrTrim`, `hostListEq`, etc.) +4. **Remove** corresponding implementations from `src/repl/wasm_runner.zig` + +### Sub-phase 8e: Verification + +Add a verification pass that runs after surgical linking and before serialization: + +```zig +/// Verify that no builtin roc_* imports remain in the final module. +/// Only the 6 RocOps functions should be imported. +fn verifyNoBuiltinImports(module: *const WasmModule) !void { + const allowed = [_][]const u8{ + "roc_alloc", "roc_dealloc", "roc_realloc", + "roc_dbg", "roc_expect_failed", "roc_crashed", + }; + for (module.imports.items) |imp| { + if (imp.kind != .function) continue; + var is_allowed = false; + for (allowed) |name| { + if (std.mem.eql(u8, imp.field_name, name)) { is_allowed = true; break; } + } + if (!is_allowed and std.mem.startsWith(u8, imp.field_name, "roc_")) { + return error.UnresolvedBuiltinImport; + } + } +} +``` + +### Rust Reference + +- `src/builtins/static_lib.zig` lines 28–112: exported builtin symbol names +- `src/builtins/dev_wrappers.zig` line 134: `roc_builtins_str_trim` signature +- `src/builtins/dev_wrappers.zig` line 950: `roc_builtins_dec_mul_saturated` signature +- `src/backend/wasm/WasmCodeGen.zig` lines 182–308: current import registrations to remove +- `crates/wasm_module/src/lib.rs` lines 267–393: `eliminate_dead_code()` — index remapping + patterns reusable for merge +- `crates/compiler/gen_wasm/src/code_builder.rs` lines 211–256: `insert_into_module()` — + appending code with correct relocation offsets + +### Tests + +``` +test "mergeModule — type deduplication: identical signatures share index" +test "mergeModule — function indices remapped correctly" +test "mergeModule — code bytes appended at correct offset" +test "mergeModule — undefined symbol in builtins resolved to host's roc_alloc import" +test "mergeModule — relocation offsets shifted by base_code_offset" +test "BuiltinSymbols — all symbols found after merge" +test "str_trim ABI — decomposed args match roc_builtins_str_trim signature" +test "dec_mul ABI — decomposed i128 args match roc_builtins_dec_mul_saturated" +test "str_eq ABI — returns i32 with decomposed RocStr args" +test "verifyNoBuiltinImports — passes when only RocOps imports remain" +test "verifyNoBuiltinImports — fails if roc_str_trim import still present" +test "end-to-end: app using Str.trim produces correct output via builtins" +``` + +--- + +## Phase 9: Hosted Call Lowering + +### What + +Implement `hosted_call` expression lowering in WasmCodeGen so that app code can call +platform-provided hosted functions via the `RocOps.hosted_fns` array. + +### Why + +The WASM backend currently panics on `.hosted_call` expressions (`WasmCodeGen.zig` line 1016) +and hardcodes `hosted_fns` count to zero (`WasmCodeGen.zig` line 449). Hosted functions are +how platforms expose capabilities to Roc apps (e.g. HTTP requests, file I/O, rendering). +Without this, no real platform can work. + +The runtime contract already exists: +- Hosted functions are indexed during build (`src/compile/compile_build.zig` line 721) +- The dev backend lowers them by marshalling args and calling + `roc_ops.hosted_fns.fns[hc.index]` (`src/backend/dev/LirCodeGen.zig` line 4146) +- All hosted functions follow the `RocCall` ABI: `fn(roc_ops, ret_ptr, args_ptr) → void` + +### WASM Implementation + +Given a `hosted_call` expression with `index`, `args`, and `ret_layout`: + +```zig +fn generateHostedCall(self: *Self, hc: HostedCallInfo) !void { + // 1. Allocate return slot on stack frame + const ret_size = self.layout_store.layoutSize(hc.ret_layout); + const ret_slot = self.allocStackSlot(@max(ret_size, 4)); + + // 2. Marshal arguments into contiguous buffer on stack + var total_args_size: u32 = 0; + for (hc.args) |arg| { + const arg_size = self.layout_store.layoutSize(arg.layout); + const arg_align = self.layout_store.layoutAlign(arg.layout); + total_args_size = std.mem.alignForward(u32, total_args_size, arg_align); + total_args_size += arg_size; + } + const args_slot = self.allocStackSlot(@max(total_args_size, 4)); + + // Copy each argument into the args buffer + var offset: u32 = 0; + for (hc.args) |arg| { + const arg_size = self.layout_store.layoutSize(arg.layout); + const arg_align = self.layout_store.layoutAlign(arg.layout); + offset = std.mem.alignForward(u32, offset, arg_align); + self.copyToStackSlot(arg.loc, args_slot + offset, arg_size); + offset += arg_size; + } + + // 3. Load hosted function's table index from RocOps struct: + // roc_ops_ptr → load hosted_fns_ptr at offset 32 + // → load table_index at hosted_fns_ptr + (hc.index * 4) + self.emitLocalGet(self.roc_ops_local); + self.emitI32Load(32); // hosted_fns_ptr + self.emitI32Const(hc.index * 4); + self.emitOp(.i32_add); + self.emitI32Load(0); // table index of hosted fn + + // 4. Push RocCall args: (roc_ops_ptr, ret_ptr, args_ptr) + // Note: table index is already on stack from step 3, but call_indirect + // expects it LAST. So we need to push args first, then the table index. + // Reorder: save table index to temp local, push args, reload table index. + const table_idx_local = self.storage.allocAnonymousLocal(.i32); + self.emitLocalSet(table_idx_local); + + self.emitLocalGet(self.roc_ops_local); // arg 0: roc_ops + self.emitFpOffset(ret_slot); // arg 1: ret_ptr + self.emitFpOffset(args_slot); // arg 2: args_ptr + + self.emitLocalGet(table_idx_local); // table index (consumed by call_indirect) + + // 5. call_indirect with RocCall type signature, table 0 + self.emitOp(.call_indirect); + self.emitLeb128U32(self.roc_call_type_idx); // type index + self.emitLeb128U32(0); // table index + + // 6. Result is now at ret_slot — load it according to ret_layout + self.loadFromStackSlot(ret_slot, hc.ret_layout); +} +``` + +### Integration with Build System + +The `hosted_fns_count` in the RocOps struct must be set to the actual count of hosted +functions discovered during compilation (`compile_build.zig` line 721). The host is +responsible for populating the `hosted_fns_ptr` array in linear memory with the correct +table indices before calling the app's entry point. + +The `roc_call_type_idx` must be registered during codegen initialization: +```zig +// Register the RocCall function type: (i32, i32, i32) → void +self.roc_call_type_idx = try self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); +``` + +### Rust Reference + +- `src/backend/dev/LirCodeGen.zig` lines 4145–4241: dev backend `generateHostedCall()` + — the native implementation we're porting to WASM +- `src/backend/dev/LirCodeGen.zig` lines 4201–4218: loading function pointer from + `RocOps.hosted_fns.fns[index]` +- `src/compile/compile_build.zig` lines 721–739: hosted function indexing +- `src/builtins/host_abi.zig` lines 37–44: `HostedFn` type definition +- `src/builtins/host_abi.zig` lines 67–84: `HostedFunctions` struct + +### Tests + +``` +test "hosted_call — marshals arguments into contiguous stack buffer" +test "hosted_call — loads table index from RocOps.hosted_fns at correct offset" +test "hosted_call — emits call_indirect with RocCall type signature" +test "hosted_call — reads return value from ret_ptr after call" +test "hosted_call — handles zero-sized return type (no read from ret_ptr)" +test "hosted_call — multiple hosted calls use correct indices (0, 1, 2, ...)" +test "hosted_call — argument alignment respected in args buffer" +test "end-to-end: app calling hosted function through platform" +``` + +--- + +## Phase 10: Dead Code Elimination + +### What + +After surgical linking, trace the call graph from exported/live functions and replace +unreachable function bodies with `unreachable; end` stubs. + +### Why + +The host module may contain functions that are never called by the app (e.g. host helper +functions for features the app doesn't use). Similarly, the merged builtins module contains +all Roc builtins, but the app may only use a subset. Dead code elimination reduces the +final `.wasm` size by stubbing out unreachable functions. + +Importantly, we do NOT remove dead functions — we replace their bodies with 3-byte stubs. +This preserves all function indices so no relocation updates are needed. The WASM runtime +will trap if a dead function is somehow called (which indicates a bug in the DCE). + +### Algorithm + +``` +1. INITIALIZE live set: + - Mark all exported functions as live. + - Mark all functions referenced in element sections (indirect call targets) as live. + - Mark all init functions as live. + +2. TRACE call graph (iterate until stable): + For each newly-live function: + Find its byte range in code_bytes via function_offsets. + Find all relocation entries within that byte range. + For each relocation of type function_index_leb: + Look up the symbol → get target function index. + Mark target function as live. + For each relocation of type type_index_leb (call_indirect): + Mark all functions with matching type signature as potentially live. + (Conservative: indirect calls could target any function of that type.) + +3. ELIMINATE dead imports: + Iterate imports, but only over the FUNCTION-import index space: + If a function import is not live: + Remove it from imports array. + Increment dead_import_dummy_count. + If an import is memory/table/global: + Keep it. The live set is indexed by function index only. + Reindex remaining imported-function symbols and patch their relocations + before mutating code_bytes. + +4. ELIMINATE dead defined functions: + Rebuild the code section body: + For each defined function: + If live: copy its serialized body bytes verbatim. + If dead: serialize DUMMY_FUNCTION instead. + (No attempt to preserve original per-function byte lengths is required + after relocation-based import reindexing is complete.) +``` + +### Rust Reference + +- `crates/wasm_module/src/lib.rs` lines 267–393: `eliminate_dead_code()` — the complete implementation +- `crates/wasm_module/src/lib.rs` lines 395–503: `trace_live_functions()` — call graph tracing +- `crates/wasm_module/src/lib.rs` lines 276: `fn_index_min` calculation using `dead_import_dummy_count` +- `crates/wasm_module/src/lib.rs` lines 293–328: dead import elimination with reindexing +- `crates/wasm_module/src/lib.rs` lines 330–391: dead code body replacement + +### Tests + +``` +test "eliminateDeadCode — exported function and its callees are preserved" +test "eliminateDeadCode — unreachable function body replaced with unreachable stub" +test "eliminateDeadCode — function indices unchanged after elimination" +test "eliminateDeadCode — dead import removed, dead_import_dummy_count incremented" +test "eliminateDeadCode — non-function imports are preserved" +test "eliminateDeadCode — indirect call targets (element section) preserved" +test "eliminateDeadCode — transitive callees preserved (A calls B calls C → all live)" +test "eliminateDeadCode — init functions preserved" +test "eliminateDeadCode — call_indirect conservatively keeps matching-signature functions" +``` + +--- + +## Phase 11: Serialization Updates ✅ + +### What + +Extend the existing `encode()` method to handle the new fields from surgical linking: +raw code bytes, dummy functions, and stripping of linking metadata. + +### Why + +After surgical linking, the module contains: +- `code_bytes` with patched relocation values (not the old `func_bodies` array) +- `dead_import_dummy_count` dummy functions that must be prepended +- Linking and relocation sections that should be stripped from the final output + (they were only needed during the linking process) + +The serializer must produce a valid, standard WASM module (no custom linking sections). + +### Changes to `encode()` + +The code section serialization becomes: + +```zig +fn encodeCodeSection(self: *const WasmModule, output: *std.ArrayList(u8)) !void { + // Section header + try output.append(10); // section ID: Code + const size_offset = output.items.len; + try appendPaddedU32(output, 0); // placeholder for section size + + // Function count = dummies + real functions + const total = self.dead_import_dummy_count + @as(u32, @intCast(self.function_offsets.items.len)); + try leb128WriteU32(output, total); + + // Prepend dummy functions + for (0..self.dead_import_dummy_count) |_| { + try leb128WriteU32(output, DUMMY_FUNCTION.len); // body size + try output.appendSlice(&DUMMY_FUNCTION); + } + + // Append real function bodies from raw code_bytes + // (starting from the first function's offset) + if (self.function_offsets.items.len > 0) { + const first = self.function_offsets.items[0]; + try output.appendSlice(self.code_bytes.items[first..]); + } + + // Patch section size + const section_size = output.items.len - size_offset - 5; + overwritePaddedU32(output.items, @intCast(size_offset), @intCast(section_size)); +} +``` + +**Important**: The linking section, reloc.CODE, and reloc.DATA sections are NOT serialized +into the final output. They are only used during the surgical linking process. The final +`.wasm` is a standard module that any WASM runtime can execute. + +### Rust Reference + +- `crates/wasm_module/src/sections.rs` lines 1451–1468: code section serialization with + `dead_import_dummy_count` prepending +- `crates/wasm_module/src/lib.rs` lines 80–99: `serialize()` — section ordering and + selective serialization (skips empty sections) + +### Tests + +``` +test "encode — dummy functions prepended before real functions in code section" +test "encode — code section function count includes dummies" +test "encode — linking section NOT present in output" +test "encode — reloc.CODE section NOT present in output" +test "encode — output is valid WASM (magic, version, section ordering)" +``` + +--- + +## Phase 12: CLI Integration — `roc build --target=wasm32` ✅ + +### What + +Wire the surgical linking pipeline into the `roc build` command for the `wasm32` target. + +### Status: Complete + +The wasm32 target is now enabled in `src/cli/main.zig`. The full surgical linking +pipeline runs when `--target=wasm32` is specified, producing a standalone `.wasm` binary +without invoking `wasm-ld`. + +### Implementation + +#### Files Changed + +- **`src/backend/wasm/WasmModule.zig`** + - `findImportFuncIdx(module_name, field_name) → ?u32` — find an import by name + - `BuiltinSymbols.populate()` — **fixed** to return actual function indices + (`sym.index`) instead of symbol table indices + - `transferAppFunctions()` — copies app function bodies from `func_bodies` into + `code_bytes`/`function_offsets` so they're compatible with surgical linking, + relocation resolution, and dead code elimination + +- **`src/backend/wasm/WasmCodeGen.zig`** + - `initWithHostModule(allocator, store, layout_store, host_module, builtin_syms)` — + init variant that takes ownership of a preloaded host `WasmModule` + - `registerRocOpsFromModule()` — finds existing `roc_alloc`/`roc_dealloc`/etc. + imports in the host module (after builtins merge) and registers them in the + funcref table for `call_indirect` + - `generateEntrypointWrapper(proc, name, arg_layouts, ret_layout) → u32` — + generates a RocCall ABI wrapper `(i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void` + that reads args from `args_ptr`, calls the compiled proc, stores the result to + `ret_ptr`, and exports the function by name + +- **`src/cli/main.zig`** + - Removed the wasm32 error block — wasm32 is now supported alongside x86_64/aarch64 + - Added `target_usize` selection: uses `TargetUsize.u32` for wasm32 layouts + - Added the complete wasm32 surgical linking pipeline as a branch after LIR lowering + +#### Build Pipeline (as implemented) + +``` + 1. Get host.wasm path from platform's TargetsConfig link spec + (first file_path entry before `app` in the wasm32 exe spec) + + 2. Read and parse host module + host_module = WasmModule.preload(allocator, host_bytes, true) + + 3. Remove memory and table imports + host_module.removeMemoryAndTableImports() + + 4. Parse and merge builtins + builtins_module = WasmModule.preload(allocator, BuiltinsObjects.forTarget(.wasm32), true) + host_module.mergeModule(&builtins_module) + + 5. Build BuiltinSymbols lookup + builtin_syms = BuiltinSymbols.populate(&host_module) + + 6. Create code generator with host module + codegen = WasmCodeGen.initWithHostModule(allocator, &lir_store, &layout_store, host_module, builtin_syms) + + 7. Register RocOps callbacks from existing module imports + codegen.registerRocOpsFromModule() + + 8. Compile all LIR proc specs + codegen.compileAllProcSpecs(procs) + + 9. Generate entrypoint wrappers + build host-to-app map + for each entrypoint: + wrapper_idx = codegen.generateEntrypointWrapper(proc, name, arg_layouts, ret_layout) + host_to_app_map.append({ name, wrapper_idx }) + +10. Transfer app functions from func_bodies → code_bytes + codegen.module.transferAppFunctions() + +11. Surgical linking — redirect host imports to app functions + codegen.module.linkHostToAppCalls(host_to_app_map) + +12. Resolve code relocations (patches builtin call sites) + codegen.module.resolveCodeRelocations() + +13. Finalize memory and table (1MB stack) + codegen.module.finalizeMemoryAndTable(1024 * 1024) + +14. Verify no stale builtin imports + codegen.module.verifyNoBuiltinImports() + +15. Dead code elimination + codegen.module.eliminateDeadCode(called_fns) + +16. Materialize function bodies from code_bytes + codegen.module.materializeFuncBodies() + +17. Encode and write output + final_bytes = codegen.module.encode(allocator) + writeFile(final_output_path, final_bytes) +``` + +#### Key Design Decisions + +- **Two-representation bridge**: The preloaded host module uses `code_bytes`/`function_offsets` + while the codegen uses `func_bodies`/`setFunctionBody`. The `transferAppFunctions()` method + bridges these by copying app bodies into `code_bytes` before surgical linking. After DCE, + `materializeFuncBodies()` converts everything back to `func_bodies` for encoding. + +- **BuiltinSymbols fix**: `populate()` was returning symbol table indices but the codegen + used them as function indices in `call` instructions. Fixed to return `sym.index` (the + actual function index) from the symbol table entry. + +- **Canonical RocOps callbacks**: Eval-style modules reuse existing `roc_alloc`/`roc_dbg`/ + etc. imports, while host modules bind their locally-defined RocOps callback functions + (the implementations assigned into `host_abi.RocOps`) into the funcref table without + mutating the import section. + +- **Entrypoint wrappers**: Each entrypoint is compiled as a regular LIR proc via + `compileAllProcSpecs`. A thin RocCall wrapper reads args from `args_ptr`, calls the + compiled proc, and stores the result to `ret_ptr`. The wrapper (not the proc) is + exported and linked. + +### Platform Configuration + +Platforms declare their WASM host via the existing `TargetsConfig` system in +`src/compile/targets_config.zig`. The CLI target selection prefers `exe` over `static_lib`, +so the host should be declared under `exe`: + +```roc +targets: { + files: "targets/", + exe: { + wasm32: ["host.wasm", app], + } +} +``` + +The `host.wasm` file must be a relocatable WASM object (compiled with +`clang --target=wasm32 -c -o host.wasm host.c` or Zig's `--target=wasm32-freestanding`). +It must contain `linking` and `reloc.*` custom sections. + +### Remaining Work (not blocking Phase 12) + +- **Broader verification**: The surgical linking pipeline is wired into the CLI and eval + path now, but the full wasm verification story still needs to be hardened: + - rerun the full eval/REPL test suite after the latest linker fixes and update the + recorded pass/fail status + - make the end-to-end wasm build/test harness self-contained in CI (`test-backend`, + `test-wasm-static-lib`, bytebox/runner integration) + - add at least one true end-to-end test that compiles a Roc program via + `roc build --target=wasm32` and executes the emitted `.wasm` + +- **Hosted effects coverage**: `generateHostedCall()` exists and the RocOps layout has + slots for hosted functions, but the current wasm test platform still uses + `hosted_fns.count = 0`. We need one wasm host/platform test that exposes a non-empty + hosted-function table and proves `call_indirect` through `host_abi.RocOps.hosted_fns` + works end-to-end. + +### Rust Reference + +- `crates/compiler/gen_wasm/src/lib.rs` lines 55–60: `parse_host()` +- `crates/compiler/gen_wasm/src/lib.rs` lines 80–188: `build_app_module()` +- `crates/compiler/gen_wasm/src/backend.rs` lines 79–114: `WasmBackend::new()` +- `crates/compiler/gen_wasm/src/backend.rs` lines 296–304: `finalize()` + +--- + +## Phase 13: PIC Support & Eval Builtins — Done + +### What + +Support Position Independent Code (PIC) WASM modules in the surgical linker, and +wire the eval/REPL pipeline to merge real compiled builtins instead of using host +import reimplementations. + +### Status + +Implementation is in place: +- PIC imports/globals/tables are handled during preload/merge +- eval now merges real `roc_builtins.o` and resolves relocations +- `reloc.DATA` is normalized, merged, and resolved before encoding +- wasm host RocOps callbacks use canonical `host_abi` callback symbols instead of + late import insertion + +The remaining work is now primarily broader verification and end-to-end coverage, +captured below in Appendix C. + +### Root Causes Fixed + +**Relocation offset mismatch**: `reloc.CODE` offsets are relative to the code section +body (which includes a function-count LEB128 prefix), but `code_bytes` starts after +that prefix. Fix: record the function count's LEB128 byte size during +`parseCodeSection` and subtract it from all `reloc_code` entry offsets after parsing. + +**type_index_leb resolution**: `R_WASM_TYPE_INDEX_LEB` relocations (for `call_indirect`) +need to remap placeholder type indices from the source type section to the merged type +section. The original code used `sym.index` (function index) which is wrong. Fix: +resolve `type_index_leb` during `mergeModule` when `type_remap` is available. + +**Builtin ABI mismatch**: The merged `roc_builtins_*` functions use the wasm32 native +ABI (ptr/len/cap decomposition, split i128 args, sret result slots), but the codegen +was passing pointer-to-struct arguments. Fix: added shared helpers for the merged +builtins' ABI and migrated all call sites (string/list equality, string transforms, +numeric conversions, list operations). + +### Implementation + +#### PIC Module Support (WasmModule.zig, WasmLinking.zig) + +- **GlobalImport / TableImport**: New types stored during `parseImportSection` so + PIC globals (`__memory_base`, `__table_base`) and tables (`__indirect_function_table`) + can be resolved by name during merge. +- **resolveName dispatch**: Fixed to look up function symbols in `imports`, global + symbols in `global_imports`, and table symbols in `table_imports` (was incorrectly + indexing into `imports` for all symbol kinds). +- **PIC globals in mergeModule**: `__memory_base` and `__table_base` are defined as + immutable i32 globals initialized to 0. `__indirect_function_table` enables the + module's table. These are encoded in the global section alongside `__stack_pointer`. +- **Element section parsing**: `parseElementSection_` now extracts function indices + into `table_func_indices`. `mergeModule` remaps them through `func_remap`. +- **table_index_rel_sleb (type 12)**: Added as `IndexRelocType` (no addend), with + correct signed LEB128 patching in both `applyRelocsU32` and `resolveCodeRelocations`. +- **Table index resolution**: `table_index_sleb`, `table_index_rel_sleb`, and + `table_index_i32` resolve to the element section position (table index) rather + than the function index. + +#### Eval Pipeline (wasm_evaluator.zig, build.zig) + +- **Build system**: `wasm32_builtins` module embeds `roc_builtins.o` for wasm32 via + `addWriteFiles` + `@embedFile`, available to the eval module. +- **prepareModuleWithBuiltins**: Creates a fresh module, adds RocOps imports (so the + import count is correct before merge), merges builtins, populates `BuiltinSymbols`, + resolves relocations, and materializes `func_bodies`. +- **generateModule**: Reuses existing RocOps imports when the module was prepared with + them up front, instead of adding imports after builtins have already been merged. + +### Root Causes Fixed (continued) + +**list_append must use safe version**: The LIR op `list_append_unsafe` is a misnomer — +the dev backend actually calls `roc_builtins_list_append_safe` (which handles capacity +reservation). The wasm codegen was calling the unsafe version (no allocation), causing +wrong values or crashes. Fix: use `list_append_safe` for non-ZST elements, passing +alignment, element_width, and elements_refcounted. + +**List literal allocation needs refcount headers**: List literals were allocated via +`emitHeapAllocConst` (raw roc_alloc), but builtins expect data pointers with refcount +headers (for isUnique checks, reallocation, etc.). Fix: call +`roc_builtins_allocate_with_refcount` from `generateList` instead of raw heap alloc. + +### Integration Gaps Found During Review (2026-04-01) + +These are not theoretical cleanup items; they were found while diffing this branch + against `origin/main` and checking the current wasm platform artifact. + +**`reloc.DATA` was merged but never resolved**: `roc_builtins.o` contains a real +`reloc.DATA` section. The linker already preserved those entries through preload and +`mergeModule()`, but only `reloc.CODE` was ever patched before the final module was +encoded. Fix: record each parsed data segment's byte range within the original data +section body, normalize `reloc.DATA` entries to `(segment_index, in_segment_offset)`, +carry that remap through `mergeModule()`, and resolve data relocations before encode. + +**Host RocOps registration was mutating function indices after preload**: +`registerRocOpsFromModule()` was allowed to add imports when the current module already +contained defined functions from a preloaded host object or merged builtins. That +violates the `WasmModule.addImport()` invariant that imports must be finalized before +defined functions exist, and it corrupts every downstream function index. The current +wasm test platform also does not expose RocOps callbacks as imports: it builds a +`RocOps` struct from local callback functions and only imports `roc_dbg`, +`roc_expect_failed`, and `roc_panic`. Fix: expose canonical host-ABI callback symbols in +the wasm host module, bind those existing callback functions into the table without +adding imports, and keep eval-style modules on a separate "reuse existing imports" +path instead of introducing a callback/import fallback. + +**Builtin-import verification was stricter than the current platform contract**: +`verifyNoBuiltinImports()` only allowed the 6 RocOps callback names, but the current +platform legitimately imports `env.roc_panic` and uses it behind the local +`roc_crashed` wrapper. Verification runs before DCE, so rejecting `roc_panic` here is +a false failure against the current platform. Fix: treat `roc_panic` as an allowed +platform import at verification time. + +--- + +## Phase 14: Rebase & Integration Fixes — Done + +### What + +Rebased the 24-commit surgical-wasm branch from `lir-interpreter` onto `origin/main`, +resolving conflicts and fixing post-rebase issues. + +### Changes + +**Rebase** (2026-03-31): Rebased all 24 commits from `01701f9ed5` to HEAD onto +`origin/main` (`902f567ccf`), dropping the lir-interpreter branch history. Resolved +merge conflicts in `build.zig`, `wasm_evaluator.zig`, `WasmCodeGen.zig` across 6 +conflict rounds. + +**Post-rebase fixups**: +- Removed dead `generateI128Shift` and `emitI128DivByConst` functions (were from the + lir-interpreter branch, superseded by host-call based i128 ops) +- Added missing `dec_to_*_trunc` variants to `generateLowLevel` switch (main added + these enum variants; our branch had a consolidated handler that only listed + `dec_to_i64_trunc`) +- Exported `WasmCodeResult` from `eval/mod.zig` (it moved from inside `WasmEvaluator` + to module level during our refactor) +- Wired `wasm32_builtins` module import for eval test step in `build.zig` +- Migrated regression tests from `eval_tests.zig` (which didn't exist on main) to + `eval_test.zig` + +**`mergeModule` func_remap offset bug** (critical fix): +`self_defined_base` was computed BEFORE the import remapping loop, but that loop can +add new imports via `addImport()` which increases `importCount()`. All defined function +indices in `func_remap` were off by the number of new imports added during merge. This +caused every `call` instruction in merged builtins to reference the wrong function, +producing hundreds of type mismatch errors in the encoded module. +Fix: compute `self_defined_base` AFTER the import loop completes. +Location: `WasmModule.zig` `mergeModule()`, ~line 899. + +**Compiler-rt host functions**: +The wasm32 `roc_builtins.o` imports `__multi3` (128-bit multiply) and `__muloti4` +(128-bit multiply with overflow) from the Zig compiler-rt. These are function imports +in the `.o` file (not defined functions), so `mergeModule` correctly propagates them as +imports in the final module. Added host implementations in `wasm_runner.zig` using the +existing `compiler_rt_128.mul_i128` function. + +**`preload` require_relocatable flag**: Changed from `true` to `false` for the eval +pipeline. The wasm32 builtins object is a valid relocatable module but may not pass +all strict validation checks (e.g. missing reloc sections for some targets). The merge +pipeline only needs the linking section and reloc entries, not strict validation. + +**reloc.CODE offset test**: Fixed expected offset from 2 to 1 — `preload` adjusts +reloc offsets by subtracting the code section's function count LEB128 size (1 byte). + +**Unused variable suppressions**: Removed 9 `_ = variable;` patterns (replaced with +`_` parameter names or deleted unused lookups) to pass the codebase lint check. + +### Results + +- Repl tests: **38/40 pass** (up from 11/40 before fixes) +- Build + lints + snapshots: all pass +- Remaining 2 repl failures: `Str.from_utf8` tests (TrapUnreachable, pre-existing) + +--- + +## Phase 15: Remaining Test Failures — Done + +### Status + +**Repl**: 40/40 passing. +**Eval**: 1249/1249 passing (full suite). + +### Fixes Applied + +#### 1. `Str.from_utf8` FromUtf8Try ↔ tag union layout mismatch (FIXED) + +The C builtin `roc_builtins_str_from_utf8` writes its result in `FromUtf8Try` layout +(byte_index@0, string@8, is_ok@20, problem_code@21 on wasm32), but the codegen +was writing this directly into the Roc tag union buffer without conversion. The Roc +tag union has a different layout (Ok: Str@0, disc@disc_offset; Err: byte_index@0, +problem@8, disc@disc_offset). + +**Fix**: Added conversion code in the `str_from_utf8` codegen (WasmCodeGen.zig) that +calls the C builtin into a temporary 24-byte buffer, then reads `is_ok` and copies +the appropriate fields into the tag union result buffer: +- Ok: copy RocStr (12 bytes) from raw+8 to result+0, set disc=1 +- Err: copy byte_index (8 bytes) from raw+0, problem_code from raw+21 to result+8, set disc=0 + +#### 2. WASM stack frame alignment (FIXED) + +The WASM codegen's `emitStackPrologue` did not round the stack frame size to 8-byte +alignment. When a function's accumulated `stack_frame_size` was not a multiple of 8, +the frame pointer (obtained by subtracting from an 8-byte-aligned stack pointer) +would be misaligned. This caused `@alignCast` assertions in builtins that cast output +pointers to structs containing u64 fields (like `FromUtf8Try`). + +**Fix**: Round `stack_frame_size` up to 8-byte alignment in `emitStackPrologue`: +`self.stack_frame_size = (self.stack_frame_size + 7) & ~@as(u32, 7);` + +#### 3. Missing `transferAppFunctions()` before encode (FIXED) + +App function bodies added via `setFunctionBody` (RocCall entrypoint, eval wrapper) +were not transferred into `code_bytes` before `encode()`. This meant +`function_offsets` had no entries for app functions, causing `materializeFuncBodies` +to go out of bounds during encoding — resulting in tests hanging. + +**Fix**: Call `self.module.transferAppFunctions()` after all `setFunctionBody` calls +and before `encode()` in `WasmCodeGen.generateModule`. + +#### 4. RocOps struct at address 0 causes null pointer trap (FIXED) + +The eval wrapper placed the RocOps struct at WASM linear memory address 0. When +builtins cast `roc_ops` pointers to Zig optional pointers (`?*anyopaque`), address 0 +is treated as `null`. In `strDecref` (called during `list.decref` for +`Str.join_with` and `Str.split_on`), the null context check hit an explicit +`unreachable`, causing `TrapUnreachable`. + +**Fix**: Allocate the RocOps struct in the eval wrapper's stack frame (at a non-zero +address) instead of at memory offset 0. + +#### 5. Memory leaks in eval pipeline (FIXED) + +- `builtins_module` from `WasmModule.preload` was not freed after merge. Added + `defer builtins_module.deinit()`. +- `MergeResult.symbol_remap` from `mergeModule` was discarded. Changed to call + `merge_result.deinit()`. + +### Known Limitation: Compiler-rt imports + +The wasm32 `roc_builtins.o` imports `__multi3` and `__muloti4` from compiler-rt. +Currently handled by host function implementations in `helpers.zig`. For the +CLI `roc build` path, these will need to be resolved differently — either +by bundling compiler-rt into the builtins object for wasm32 only, or by providing +them through the host platform's object. + +--- + +## Implementation Order and Dependencies + +``` +Phase 1: Padded LEB128 Helpers + │ (no dependencies, pure utility functions) + ▼ +Phase 2: Linking Data Structures + │ (depends on Phase 1 for overwritePaddedU32 in applyRelocsU32) + ▼ +Phase 3: WASM Module Parser + │ (depends on Phase 2 for LinkingSection, RelocationSection types) + ▼ +Phase 4: Surgical Linking (linkHostToAppCalls) + │ (depends on Phases 1-3) + │ + │ ★ MILESTONE 1: Surgical linking works on test fixtures + │ Can parse, link, and serialize clang-produced .wasm objects. + │ + ├─────────────────────────┐ + ▼ ▼ +Phase 5: Memory/Table Phase 6: Function Pointer + Ownership Representation & RocOps + │ │ + │ (design decisions │ (design decisions that + │ that inform codegen) │ inform codegen + hosted calls) + │ │ + ├─────────────────────────┘ + ▼ +Phase 7a: Entrypoint ABI Migration + │ (depends on Phase 6 for RocOps layout) + ▼ +Phase 7b: CodeBuilder & WasmCodeGen Refactor + │ (depends on Phase 7a for RocCall signature) + │ + │ ★ MILESTONE 2: App code appends into host module + │ CodeBuilder inserts functions with correct relocation offsets. + │ + ├─────────────────────────┐ + ▼ │ +Phase 8: Builtins Migration │ + │ 8a: Merge .o │ + │ 8b: Symbol mapping │ + │ 8c: ABI rewrites │ + │ 8d: Remove old imports │ + │ 8e: Verification │ + │ │ + │ ★ MILESTONE 3: Builtins work via surgical linking + │ No env.roc_* builtin imports. All call sites use + │ decomposed C ABI through roc_builtins_* symbols. + │ │ + ▼ ▼ +Phase 9: Hosted Call Lowering + │ (depends on Phase 6 for function pointer representation) + │ + │ ★ MILESTONE 4: Hosted functions work + │ App can call platform-provided hosted functions + │ via RocOps.hosted_fns table indices. + │ + ▼ +Phase 10: Dead Code Elimination + │ (depends on all code generation phases being complete) + ▼ +Phase 11: Serialization Updates ✅ + │ (depends on Phase 10) + ▼ +Phase 12: CLI Integration ✅ + │ (depends on all previous phases) + │ + │ ★ MILESTONE 5: End-to-end WASM builds + │ `roc build --target=wasm32` produces working .wasm files. + │ Eval tests pass using surgical linking pipeline. + │ All host function reimplementations removed from helpers.zig. + ▼ + DONE +``` + +**Parallelism opportunities**: +- Phases 5 and 6 can be developed in parallel (design-only, no code dependencies) +- Phase 8 (builtins) and Phase 9 (hosted calls) can be developed in parallel after + Phase 7b is complete, since they are independent code generation concerns + +--- + +## Appendix A: WASM Relocatable Object Format Reference + +This section documents the binary format details needed for the parser (Phase 3). + +### Section Layout + +A relocatable `.wasm` file follows the standard WASM binary format with additional +custom sections: + +``` +┌──────────────────────────────────────┐ +│ Magic: 0x00 0x61 0x73 0x6D ("\0asm")│ +│ Version: 0x01 0x00 0x00 0x00 (1) │ +├──────────────────────────────────────┤ +│ Type Section (ID=1) │ +│ Import Section (ID=2) │ +│ Function Section (ID=3) │ +│ Table Section (ID=4) [optional] │ +│ Memory Section (ID=5) [optional] │ +│ Global Section (ID=6) [must be empty]│ +│ Export Section (ID=7) │ +│ Element Section (ID=9) [optional] │ +│ DataCount Section (ID=12) [optional] │ +│ Code Section (ID=10) │ +│ Data Section (ID=11) [optional] │ +├──────────────────────────────────────┤ +│ Custom: "linking" (symbol table) │ +│ Custom: "reloc.CODE" │ +│ Custom: "reloc.DATA" [optional] │ +│ Custom: "name" [optional] │ +│ Custom: other/debug [optional] │ +└──────────────────────────────────────┘ +``` + +Notes: +- `DataCount` appears in real relocatable objects produced by Zig, including the shipped + `src/cli/targets/wasm32/roc_builtins.o`, so the parser must consume it even though we do + not use its payload. +- Additional custom sections such as debug metadata and `reloc..debug_*` may appear and + should be skipped unless the parser explicitly needs them. + +### Linking Custom Section Format + +``` +Section header: ID=0 (custom), size, name="linking" +Body: + version: u32 (LEB128) = 2 + Subsections (repeated): + subsection_id: u8 + subsection_size: u32 (LEB128) + subsection_body: [subsection_size bytes] + +Subsection 8 (WASM_SYMBOL_TABLE): + count: u32 (LEB128) + For each symbol: + kind: u8 (0=func, 1=data, 2=global, 3=section, 4=event, 5=table) + flags: u32 (LEB128) + If kind == func or kind == global or kind == event or kind == table: + index: u32 (LEB128) // function/global/event/table index + If flags & WASM_SYM_EXPLICIT_NAME or NOT flags & WASM_SYM_UNDEFINED: + name_len: u32 (LEB128) + name: [name_len bytes] + If kind == data: + name_len: u32 (LEB128) + name: [name_len bytes] + If NOT flags & WASM_SYM_UNDEFINED: + segment_index: u32 (LEB128) + data_offset: u32 (LEB128) + data_size: u32 (LEB128) + +Subsection 5 (WASM_SEGMENT_INFO): + count: u32 (LEB128) + For each segment: + name_len: u32 (LEB128) + name: [name_len bytes] + alignment: u32 (LEB128) // log2 alignment + flags: u32 (LEB128) + +Subsection 6 (WASM_INIT_FUNCS): + count: u32 (LEB128) + For each init func: + priority: u32 (LEB128) + symbol_index: u32 (LEB128) +``` + +### Relocation Custom Section Format + +``` +Section header: ID=0 (custom), size, name="reloc.CODE" or "reloc.DATA" +Body: + target_section_index: u32 (LEB128) // which section these relocations apply to + count: u32 (LEB128) + For each relocation: + type: u8 + offset: u32 (LEB128) // byte offset within target section body + symbol_index: u32 (LEB128) // index into linking section's symbol table + If type in {3, 4, 5, 8, 9, 14, 15, 16}: // offset-type relocations + addend: i32 (LEB128, signed) +``` + +### Padded LEB128 Encoding in Code Section + +In a relocatable object, all `call`, `global.get`, `global.set`, and `call_indirect` +instructions use 5-byte padded LEB128 for their operands: + +``` +Standard: call 3 → 10 03 (2 bytes) +Padded: call 3 → 10 83 80 80 80 00 (6 bytes: opcode + 5-byte operand) +``` + +This padding is only present in relocatable objects. The final serialized module uses +standard variable-length LEB128 (the relocation values are already patched in, and +the raw bytes are emitted as-is — so the padding persists in the output, which is valid +WASM since padded LEB128 is a valid encoding of any value). + +--- + +## Appendix B: Files Modified/Created + +| File | Action | Description | +|------|--------|-------------| +| `src/backend/wasm/WasmModule.zig` | **Major rewrite** | Add parser (`preload`), extend struct, update serializer, memory/table ownership | +| `src/backend/wasm/WasmLinking.zig` | **New file** | Linking data structures, relocation types, `applyRelocsU32` | +| `src/backend/wasm/CodeBuilder.zig` | **New file** | Function body accumulator with deferred relocation offset computation | +| `src/backend/wasm/WasmCodeGen.zig` | **Major refactor** | RocCall ABI, CodeBuilder integration, builtin ABI rewrites, hosted call lowering | +| `src/backend/wasm/mod.zig` | **Minor update** | Export WasmLinking, CodeBuilder | +| `src/eval/wasm_evaluator.zig` | **Refactor** | Use surgical linking pipeline with eval host shim | +| `src/eval/test/helpers.zig` | **Simplify** | Remove ~45 host function reimplementations | +| `src/repl/wasm_runner.zig` | **Simplify** | Same host function removal as helpers.zig | +| `src/cli/main.zig` | **Enable** | Remove wasm32 block, wire up surgical linking build pipeline | +| `build.zig` | **Minor update** | Ensure `roc_builtins.o` for wasm32 is built and embedded | + +--- + +## Appendix C: Status and Remaining Open Questions + +1. **Builtins `.o` availability**: Resolved. + `roc_builtins.o` for wasm32 is already built by `build.zig` and embedded in the CLI, so + Phase 8 can treat it as an existing input rather than a prerequisite build task. + +1b. **Phase 8 completed** (2026-03-29): + All sub-phases implemented: + - 8a: `mergeModule()` — type dedup, function/code/data/symbol/reloc merging + - 8b: `BuiltinSymbols` — 45 builtin ops mapped to symbol indices via `populate()` + - 8c: All call sites rewritten to use `builtin_syms` directly (no legacy imports) + - 8d: All ~40 legacy import fields removed; `registerHostImports` → `registerRocOpsImports` + - 8e: `verifyNoBuiltinImports()`, `resolveCodeRelocations()`, `materializeFuncBodies()` + Missing builtins added to `dev_wrappers.zig`: list_eq, list_str_eq, list_list_eq, + list_reverse, i32_mod_by, i64_mod_by. + +1c. **Phase 9 completed** (2026-03-29): + Implemented `generateHostedCall()` in `WasmCodeGen.zig` (~100 lines): + - Generates arg expressions, stabilizes composites into locals + - Marshals args into contiguous stack buffer with alignment + - Loads table index from `RocOps.hosted_fns_ptr + (index * 4)` in linear memory + - Emits `call_indirect` with `roc_call_type_idx` (3-arg RocCall ABI) + - Loads return value (primitive, composite pointer, or ZST) + Not yet testable end-to-end: eval wrapper still hardcodes `hosted_fns_count=0`, + test platform provides no hosted functions, and build system doesn't thread + hosted function count to WASM backend. Will be exercisable after CLI integration + (Phase 12) connects the full pipeline. + +1d. **Phase 10 completed** (2026-03-29): + Implemented `eliminateDeadCode()` and `traceLiveFunctions()` in `WasmModule.zig`: + - Iterative call graph tracing from exports, init funcs, element section entries, and called_fns + - Dead JS imports removed (not just stubbed), `dead_import_dummy_count` incremented + - Remaining import call sites reindexed via relocation patching + - Dead defined-function bodies replaced with 3-byte `unreachable; end` stubs + - Function indices preserved (stubs maintain index stability) + - Conservative indirect call handling: `type_index_leb` relocs mark all + element-section functions with matching type signature as live + Also fixed `init_funcs` parsing in `WasmLinking.zig` (was previously skipped). + All 9 planned tests passing. + +1e. **Phase 14 completed** (2026-03-31): + Rebased onto origin/main, fixed critical `mergeModule` func_remap offset bug (defined + function indices were off by the count of imports added during merge), added compiler-rt + host functions for `__multi3`/`__muloti4`, and various post-rebase cleanups. Repl tests + went from 11/40 to 38/40. + +1f. **Current follow-up completed** (2026-04-01): + Three post-integration issues found during review were fixed: + - `reloc.DATA` entries are now normalized, remapped during merge, and resolved into + final data bytes before encoding + - host-side RocOps registration no longer adds late imports; host modules expose + canonical `host_abi` callback functions and bind those existing callbacks into the table + - `verifyNoBuiltinImports()` now allows the platform's legitimate `roc_panic` import + +1g. **Remaining work to call surgical WASM complete**: + - **Full eval/regression rerun**: rerun the full eval/REPL suites after the latest fixes, + update the recorded pass rate, and fix any remaining builtin/runtime failures if they + still exist + - **End-to-end wasm harness**: make the wasm integration tests self-contained and green. + In the current checkout, `zig build test-backend` is blocked by a missing `bytebox` + test-module import and `zig build test-wasm-static-lib` expects `test/wasm/app.wasm` + to already exist + - **Hosted effects coverage**: add a wasm platform/test with a non-empty hosted function + table so `generateHostedCall()` is exercised end-to-end instead of only by local/unit + coverage + - **Status cleanup**: keep this document's recorded status synchronized with actual + verification results instead of leaving stale “in progress” or old pass-count notes + +2. **Host module authoring guidance**: What exact compilation flags do we want to support and + document for platform authors producing relocatable `host.wasm` artifacts? + We should publish one blessed example, ideally based on the existing WASM test platform. + +3. **COMDAT groups**: The linking section can contain COMDAT metadata. The old Rust compiler + parsed but did not semantically use it. We should preserve that behavior: parse enough to + keep symbol-table indices correct, but defer any COMDAT-aware deduplication logic. + +4. **Compiler-rt dependency**: The wasm32 `roc_builtins.o` currently imports `__multi3` and + `__muloti4` from compiler-rt. The builtins codebase has `is_wasm` guards in + `compiler_rt_128.zig` that decompose most i128 ops into 64-bit ops for wasm32, but some + code paths still trigger native i128 multiply (likely in `num.zig` overflow detection or + `dec.zig`). Ideally these should be found and fixed so the builtins have zero compiler-rt + imports on wasm32, matching the approach used for all other targets. diff --git a/build.zig b/build.zig index 37003dfbef4..0a95d69f23b 100644 --- a/build.zig +++ b/build.zig @@ -1839,6 +1839,38 @@ fn buildAndCopyTestPlatformHostLib( return ©_step.step; } +/// Build the wasm test platform host as a relocatable .wasm object (not an archive). +/// Surgical linking operates on a single relocatable object with linking/reloc sections. +fn buildAndCopyWasmHostObject( + b: *std.Build, + target: ResolvedTarget, + optimize: OptimizeMode, + roc_modules: modules.RocModules, + strip: bool, + omit_frame_pointer: ?bool, +) *Step { + const obj = b.addObject(.{ + .name = "host", + .root_module = b.createModule(.{ + .root_source_file = b.path("test/wasm/platform/host.zig"), + .target = target, + .optimize = optimize, + .strip = strip, + .omit_frame_pointer = omit_frame_pointer, + .pic = true, + }), + }); + configureBackend(obj, target); + obj.root_module.addImport("builtins", roc_modules.builtins); + obj.root_module.addImport("build_options", roc_modules.build_options); + + const dest_path = "test/wasm/platform/targets/wasm32/host.wasm"; + const copy_step = b.addUpdateSourceFiles(); + copy_step.addCopyFileToSource(obj.getEmittedBin(), dest_path); + + return ©_step.step; +} + // Workaround for Zig bug https://codeberg.org/ziglang/zig/issues/30572 const FixArchivePaddingStep = struct { step: Step, @@ -2044,7 +2076,7 @@ fn setupTestPlatforms( strip: bool, omit_frame_pointer: ?bool, platform_filter: ?[]const u8, -) void { +) *Step { // Clear the Roc cache when test platforms are rebuilt to ensure stale cached hosts aren't used const clear_cache_step = createClearCacheStep(b); const native_target_name = roc_target.RocTarget.fromStdTarget(target.result).toName(); @@ -2111,24 +2143,22 @@ fn setupTestPlatforms( } } - // Build the wasm test platform host for wasm32-freestanding - { - const wasm_target = b.resolveTargetQuery(.{ .cpu_arch = .wasm32, .os_tag = .freestanding, .abi = .none }); - const copy_step = buildAndCopyTestPlatformHostLib( - b, - "wasm", - wasm_target, - "wasm32", - optimize, - roc_modules, - strip, - omit_frame_pointer, - ); - clear_cache_step.dependOn(copy_step); - } + // Build the wasm test platform host as a relocatable .wasm object for surgical linking + const wasm_target = b.resolveTargetQuery(.{ .cpu_arch = .wasm32, .os_tag = .freestanding, .abi = .none }); + const wasm_host_step = buildAndCopyWasmHostObject( + b, + wasm_target, + optimize, + roc_modules, + strip, + omit_frame_pointer, + ); + clear_cache_step.dependOn(wasm_host_step); b.getInstallStep().dependOn(clear_cache_step); test_platforms_step.dependOn(clear_cache_step); + + return wasm_host_step; } pub fn build(b: *std.Build) void { @@ -2324,8 +2354,38 @@ pub fn build(b: *std.Build) void { roc_modules.eval.addImport("bytebox", bytebox.module("bytebox")); roc_modules.lsp.addImport("compiled_builtins", compiled_builtins_module); + // Build wasm32 builtins object at build time so the eval/REPL pipeline can + // merge real compiled builtins into WASM modules (instead of using host imports). + const wasm32_resolved_target = b.resolveTargetQuery(.{ .cpu_arch = .wasm32, .os_tag = .freestanding, .abi = .none }); + const wasm32_builtins_obj = b.addObject(.{ + .name = "roc_builtins_wasm32_eval", + .root_module = b.createModule(.{ + .root_source_file = b.path("src/builtins/static_lib.zig"), + .target = wasm32_resolved_target, + .optimize = optimize, + .strip = strip, + .omit_frame_pointer = omit_frame_pointer, + .pic = true, + }), + }); + wasm32_builtins_obj.root_module.addImport("tracy", b.addModule("tracy_stub_wasm32_eval", .{ + .root_source_file = b.path("src/builtins/tracy_stub.zig"), + })); + wasm32_builtins_obj.bundle_compiler_rt = false; + configureBackend(wasm32_builtins_obj, wasm32_resolved_target); + + const wasm32_builtins_files = b.addWriteFiles(); + _ = wasm32_builtins_files.addCopyFile(wasm32_builtins_obj.getEmittedBin(), "roc_builtins.o"); + const wasm32_builtins_module = b.createModule(.{ + .root_source_file = wasm32_builtins_files.add("wasm32_builtins.zig", + \\pub const bytes = @embedFile("roc_builtins.o"); + \\ + ), + }); + roc_modules.eval.addImport("wasm32_builtins", wasm32_builtins_module); + // Setup test platform host libraries - setupTestPlatforms(b, target, optimize, roc_modules, test_platforms_step, strip, omit_frame_pointer, platform_filter); + const wasm_host_step = setupTestPlatforms(b, target, optimize, roc_modules, test_platforms_step, strip, omit_frame_pointer, platform_filter); const roc_exe = addMainExe(b, roc_modules, target, optimize, strip, omit_frame_pointer, use_system_llvm, user_llvm_path, flag_enable_tracy, zstd, compiled_builtins_module, write_compiled_builtins, flag_enable_tracy) orelse return; roc_modules.addAll(roc_exe); @@ -2818,9 +2878,10 @@ pub fn build(b: *std.Build) void { module_test.test_step.root_module.addImport("bytebox", bytebox.module("bytebox")); } - // Add bytebox to eval tests for wasm backend testing + // Add bytebox and wasm32 builtins to eval tests for wasm backend testing if (std.mem.eql(u8, module_test.test_step.name, "eval")) { module_test.test_step.root_module.addImport("bytebox", bytebox.module("bytebox")); + module_test.test_step.root_module.addImport("wasm32_builtins", wasm32_builtins_module); const compile_build_module = b.createModule(.{ .root_source_file = b.path("src/compile/compile_build.zig"), }); @@ -2854,6 +2915,13 @@ pub fn build(b: *std.Build) void { ); } + // Backend tests need the wasm host object and builtins for WASM linking tests + if (std.mem.eql(u8, module_test.test_step.name, "backend")) { + module_test.test_step.step.dependOn(wasm_host_step); + module_test.test_step.root_module.addImport("wasm32_builtins", wasm32_builtins_module); + module_test.test_step.root_module.addImport("bytebox", bytebox.module("bytebox")); + } + if (std.mem.eql(u8, module_test.test_step.name, "repl")) { try addLlvmSupportToStep( b, @@ -3623,7 +3691,7 @@ fn addMainExe( for (cross_compile_builtins_targets) |cross_target| { const cross_resolved_target = b.resolveTargetQuery(cross_target.query); - // Build builtins object file for this target + // Build builtins object file for this target. const cross_builtins_obj = b.addObject(.{ .name = b.fmt("roc_builtins_{s}", .{cross_target.name}), .root_module = b.createModule(.{ diff --git a/src/backend/wasm/CodeBuilder.zig b/src/backend/wasm/CodeBuilder.zig new file mode 100644 index 00000000000..65c6308ec52 --- /dev/null +++ b/src/backend/wasm/CodeBuilder.zig @@ -0,0 +1,357 @@ +//! Accumulates one WASM function body with deferred relocation resolution. +//! +//! During code generation, instructions are emitted into `code` and relocatable +//! call sites are recorded as relative offsets. Only when `insertIntoModule()` is +//! called are relocations resolved to absolute code-section offsets. This matches +//! the Rust compiler's `CodeBuilder` / `insert_into_module()` pattern. + +const std = @import("std"); +const Allocator = std.mem.Allocator; +const WasmModule = @import("WasmModule.zig"); + +const Self = @This(); + +/// Main instruction bytes for the current function. +code: std.ArrayList(u8), +/// Locals declaration preamble (local count groups + types). +preamble: std.ArrayList(u8), +/// Relocations within this function: (code_pos, symbol_index). +/// code_pos is relative to the start of self.code, NOT the module's code section. +import_relocations: std.ArrayList(Relocation), + +/// A relocation entry recording a call site that needs patching during linking. +pub const Relocation = struct { + /// Byte position of the 5-byte padded LEB128 placeholder, relative to start of `code`. + code_pos: u32, + /// Index into the module's linking symbol table. + symbol_index: u32, +}; + +pub fn init() Self { + return .{ + .code = .empty, + .preamble = .empty, + .import_relocations = .empty, + }; +} + +pub fn deinit(self: *Self, allocator: Allocator) void { + self.code.deinit(allocator); + self.preamble.deinit(allocator); + self.import_relocations.deinit(allocator); +} + +/// Emit a relocatable call instruction. +/// +/// Writes a `call` opcode followed by a 5-byte padded LEB128 placeholder (value 0). +/// Records the relocation position relative to this function's code buffer so that +/// `insertIntoModule` can compute the absolute offset later. +pub fn emitRelocatableCall(self: *Self, allocator: Allocator, symbol_idx: u32) !void { + try self.code.append(allocator, WasmModule.Op.call); + const code_pos: u32 = @intCast(self.code.items.len); + try self.import_relocations.append(allocator, .{ + .code_pos = code_pos, + .symbol_index = symbol_idx, + }); + try WasmModule.appendPaddedU32(allocator, &self.code, 0); // 5-byte placeholder +} + +/// Finalize this function and insert it into the module's code section. +/// +/// Computes final relocation offsets based on the function's actual position in +/// `code_bytes` (accounting for the LEB128 body-length prefix and preamble). +/// Returns the byte offset where this function's body starts in `code_bytes` +/// (i.e. the position of the body-length prefix). +pub fn insertIntoModule(self: *const Self, allocator: Allocator, module: *WasmModule) !u32 { + const fn_offset: u32 = @intCast(module.code_bytes.items.len); + try module.function_offsets.append(allocator, fn_offset); + + // Encode body length as LEB128 (preamble + instructions) + const body_len: u32 = @intCast(self.preamble.items.len + self.code.items.len); + try WasmModule.leb128WriteU32(allocator, &module.code_bytes, body_len); + + // Append preamble (locals declaration) + try module.code_bytes.appendSlice(allocator, self.preamble.items); + + // Record the code start offset (after length prefix + preamble) + const code_start: u32 = @intCast(module.code_bytes.items.len); + + // Append instruction bytes + try module.code_bytes.appendSlice(allocator, self.code.items); + + // Create relocation entries with absolute offsets + for (self.import_relocations.items) |reloc| { + try module.reloc_code.entries.append(allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = code_start + reloc.code_pos, + .symbol_index = reloc.symbol_index, + } }); + } + + return fn_offset; +} + +/// Prepend bytes to the code buffer and shift all relocation offsets accordingly. +/// +/// Used for stack prologues and other code that must appear before the main +/// instruction bytes but whose content isn't known until after code generation +/// (e.g. because the stack frame size depends on what instructions were emitted). +pub fn prependToCode(self: *Self, allocator: Allocator, prefix: []const u8) !void { + if (prefix.len == 0) return; + const prefix_len: u32 = @intCast(prefix.len); + // Adjust relocation offsets to account for the prefix + for (self.import_relocations.items) |*reloc| { + reloc.code_pos += prefix_len; + } + // Grow capacity if needed + try self.code.ensureTotalCapacity(allocator, self.code.items.len + prefix.len); + // Shift existing bytes right + const old_len = self.code.items.len; + self.code.items.len += prefix.len; + std.mem.copyBackwards(u8, self.code.items[prefix.len..], self.code.items[0..old_len]); + // Copy prefix into the beginning + @memcpy(self.code.items[0..prefix.len], prefix); +} + +/// Reset the builder for the next function without deallocating memory. +pub fn clear(self: *Self) void { + self.code.clearRetainingCapacity(); + self.preamble.clearRetainingCapacity(); + self.import_relocations.clearRetainingCapacity(); +} + +// Tests + +const testing = std.testing; + +/// Create a minimal WasmModule for testing (only code_bytes, function_offsets, +/// and reloc_code are needed). +fn testModule() WasmModule { + return WasmModule.init(testing.allocator); +} + +test "CodeBuilder.emitRelocatableCall — records code_pos relative to function start" { + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // Emit some dummy instructions first + try cb.code.append(testing.allocator, WasmModule.Op.nop); // byte 0 + try cb.code.append(testing.allocator, WasmModule.Op.nop); // byte 1 + + // Now emit a relocatable call — the call opcode lands at byte 2, + // and the 5-byte placeholder starts at byte 3. + try cb.emitRelocatableCall(testing.allocator, 42); + + try testing.expectEqual(@as(usize, 1), cb.import_relocations.items.len); + // code_pos should be 3: after 2 nops + 1 call opcode byte + try testing.expectEqual(@as(u32, 3), cb.import_relocations.items[0].code_pos); + try testing.expectEqual(@as(u32, 42), cb.import_relocations.items[0].symbol_index); + + // Total code: 2 nops + 1 call opcode + 5 padded bytes = 8 + try testing.expectEqual(@as(usize, 8), cb.code.items.len); + // The call opcode should be at position 2 + try testing.expectEqual(WasmModule.Op.call, cb.code.items[2]); +} + +test "CodeBuilder.insertIntoModule — function appended at correct code_bytes position" { + var module = testModule(); + defer module.deinit(); + + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // Simulate a simple preamble (1 byte: 0 local groups) + try cb.preamble.append(testing.allocator, 0x00); + // Simulate simple code: just an `end` opcode + try cb.code.append(testing.allocator, WasmModule.Op.end); + + const fn_offset = try cb.insertIntoModule(testing.allocator, &module); + + // First function — should start at offset 0 + try testing.expectEqual(@as(u32, 0), fn_offset); + // function_offsets should have one entry + try testing.expectEqual(@as(usize, 1), module.function_offsets.items.len); + try testing.expectEqual(@as(u32, 0), module.function_offsets.items[0]); + + // code_bytes: [body_len_leb128] [preamble: 0x00] [code: 0x0B] + // body_len = 1 (preamble) + 1 (code) = 2, which encodes as single byte 0x02 + try testing.expectEqual(@as(usize, 3), module.code_bytes.items.len); + try testing.expectEqual(@as(u8, 0x02), module.code_bytes.items[0]); // body length = 2 + try testing.expectEqual(@as(u8, 0x00), module.code_bytes.items[1]); // preamble + try testing.expectEqual(WasmModule.Op.end, module.code_bytes.items[2]); // code +} + +test "CodeBuilder.insertIntoModule — relocation offset accounts for body length prefix" { + var module = testModule(); + defer module.deinit(); + + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // Empty preamble (0 local groups) + try cb.preamble.append(testing.allocator, 0x00); + // Emit a relocatable call as the first instruction + try cb.emitRelocatableCall(testing.allocator, 7); + // End opcode + try cb.code.append(testing.allocator, WasmModule.Op.end); + + _ = try cb.insertIntoModule(testing.allocator, &module); + + // Check the relocation was created with correct absolute offset. + // code_bytes layout: + // [0]: body_len LEB128 (1 byte for small values) + // [1]: preamble (0x00) + // [2]: call opcode (0x10) + // [3..7]: 5-byte padded placeholder <-- relocation points here + // [8]: end opcode + try testing.expectEqual(@as(usize, 1), module.reloc_code.entries.items.len); + const reloc = module.reloc_code.entries.items[0]; + const expected_offset: u32 = 1 + 1 + 1; // body_len_prefix(1) + preamble(1) + call_opcode(1) + try testing.expectEqual(expected_offset, reloc.index.offset); + try testing.expectEqual(@as(u32, 7), reloc.index.symbol_index); +} + +test "CodeBuilder.insertIntoModule — relocation offset accounts for preamble size" { + var module = testModule(); + defer module.deinit(); + + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // Larger preamble: 5 bytes (simulating multiple local groups) + try cb.preamble.appendSlice(testing.allocator, &.{ 0x02, 0x03, 0x7F, 0x01, 0x7E }); + // Emit a relocatable call + try cb.emitRelocatableCall(testing.allocator, 99); + try cb.code.append(testing.allocator, WasmModule.Op.end); + + _ = try cb.insertIntoModule(testing.allocator, &module); + + const reloc = module.reloc_code.entries.items[0]; + // body_len = 5 (preamble) + 6 (call) + 1 (end) = 12 + // 12 encodes as 1 byte LEB128 + // code_start = 1 (body_len) + 5 (preamble) = 6 + // reloc.code_pos = 1 (after call opcode byte) + // absolute offset = 6 + 1 = 7 + const body_len_prefix_size: u32 = 1; // 12 fits in 1 LEB128 byte + const preamble_size: u32 = 5; + const code_pos: u32 = 1; // after the call opcode + try testing.expectEqual(body_len_prefix_size + preamble_size + code_pos, reloc.index.offset); +} + +test "CodeBuilder.insertIntoModule — multiple relocations in one function" { + var module = testModule(); + defer module.deinit(); + + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + try cb.preamble.append(testing.allocator, 0x00); // empty locals + + // First relocatable call + try cb.emitRelocatableCall(testing.allocator, 10); + // Some instructions between calls + try cb.code.append(testing.allocator, WasmModule.Op.drop); + // Second relocatable call + try cb.emitRelocatableCall(testing.allocator, 20); + // End + try cb.code.append(testing.allocator, WasmModule.Op.end); + + _ = try cb.insertIntoModule(testing.allocator, &module); + + try testing.expectEqual(@as(usize, 2), module.reloc_code.entries.items.len); + + // code_start = 1 (body_len) + 1 (preamble) = 2 + const code_start: u32 = 2; + + // First relocation: code_pos = 1 (after call opcode at byte 0) + const reloc0 = module.reloc_code.entries.items[0]; + try testing.expectEqual(code_start + 1, reloc0.index.offset); + try testing.expectEqual(@as(u32, 10), reloc0.index.symbol_index); + + // Second relocation: call opcode at byte 7 (1 call + 5 padded + 1 drop), placeholder at 8 + const reloc1 = module.reloc_code.entries.items[1]; + try testing.expectEqual(code_start + 8, reloc1.index.offset); + try testing.expectEqual(@as(u32, 20), reloc1.index.symbol_index); +} + +test "CodeBuilder — two functions inserted sequentially have non-overlapping offsets" { + var module = testModule(); + defer module.deinit(); + + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // --- Function 1 --- + try cb.preamble.append(testing.allocator, 0x00); + try cb.emitRelocatableCall(testing.allocator, 1); + try cb.code.append(testing.allocator, WasmModule.Op.end); + const offset1 = try cb.insertIntoModule(testing.allocator, &module); + cb.clear(); + + // --- Function 2 --- + try cb.preamble.append(testing.allocator, 0x00); + try cb.emitRelocatableCall(testing.allocator, 2); + try cb.code.append(testing.allocator, WasmModule.Op.end); + const offset2 = try cb.insertIntoModule(testing.allocator, &module); + + // Function 2 must start after function 1 ends + try testing.expect(offset2 > offset1); + + // Two function_offsets entries + try testing.expectEqual(@as(usize, 2), module.function_offsets.items.len); + + // Relocation offsets must not overlap + try testing.expectEqual(@as(usize, 2), module.reloc_code.entries.items.len); + const reloc1_offset = module.reloc_code.entries.items[0].index.offset; + const reloc2_offset = module.reloc_code.entries.items[1].index.offset; + // Second reloc must be at a higher offset than the first + try testing.expect(reloc2_offset > reloc1_offset); + // And the gap should be at least 5 bytes (size of the padded LEB128 placeholder) + try testing.expect(reloc2_offset >= reloc1_offset + 5); +} + +test "CodeBuilder — clear resets state for next function without leaking relocations" { + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // Populate with some data + try cb.code.appendSlice(testing.allocator, &.{ 0x01, 0x02, 0x03 }); + try cb.preamble.appendSlice(testing.allocator, &.{0x00}); + try cb.import_relocations.append(testing.allocator, .{ .code_pos = 0, .symbol_index = 5 }); + + cb.clear(); + + try testing.expectEqual(@as(usize, 0), cb.code.items.len); + try testing.expectEqual(@as(usize, 0), cb.preamble.items.len); + try testing.expectEqual(@as(usize, 0), cb.import_relocations.items.len); + + // Capacity should be retained (not deallocated) + try testing.expect(cb.code.capacity > 0); + try testing.expect(cb.import_relocations.capacity > 0); +} + +test "CodeBuilder.prependToCode — shifts relocations and inserts prefix" { + var cb = Self.init(); + defer cb.deinit(testing.allocator); + + // Emit a relocatable call (call opcode at byte 0, placeholder at byte 1) + try cb.emitRelocatableCall(testing.allocator, 5); + try cb.code.append(testing.allocator, WasmModule.Op.end); + + // Before prepend: reloc code_pos = 1 (after call opcode) + try testing.expectEqual(@as(u32, 1), cb.import_relocations.items[0].code_pos); + + // Prepend a 3-byte prologue + try cb.prependToCode(testing.allocator, &.{ 0xAA, 0xBB, 0xCC }); + + // After prepend: code is [AA, BB, CC, call, 5-byte-pad(5 bytes), end] = 10 + try testing.expectEqual(@as(usize, 10), cb.code.items.len); + try testing.expectEqual(@as(u8, 0xAA), cb.code.items[0]); + try testing.expectEqual(@as(u8, 0xBB), cb.code.items[1]); + try testing.expectEqual(@as(u8, 0xCC), cb.code.items[2]); + try testing.expectEqual(WasmModule.Op.call, cb.code.items[3]); + try testing.expectEqual(WasmModule.Op.end, cb.code.items[9]); + + // Relocation code_pos should be shifted by 3 + try testing.expectEqual(@as(u32, 4), cb.import_relocations.items[0].code_pos); +} diff --git a/src/backend/wasm/WasmCodeGen.zig b/src/backend/wasm/WasmCodeGen.zig index efc24cdc61f..9a6dddbcfa1 100644 --- a/src/backend/wasm/WasmCodeGen.zig +++ b/src/backend/wasm/WasmCodeGen.zig @@ -17,6 +17,7 @@ const LirExprId = LIR.LirExprId; const LirPattern = LIR.LirPattern; const Symbol = LIR.Symbol; const WasmModule = @import("WasmModule.zig"); +const WasmLinking = @import("WasmLinking.zig"); const WasmLayout = @import("WasmLayout.zig"); const Storage = @import("Storage.zig"); const Op = WasmModule.Op; @@ -29,13 +30,16 @@ const RcOpKind = enum { incref, decref, free }; const LayoutStore = layout.Store; +const WasmRocOps = WasmModule.WasmRocOps; +const CodeBuilder = @import("CodeBuilder.zig"); + const Self = @This(); allocator: Allocator, store: *const LirExprStore, layout_store: *const LayoutStore, module: WasmModule, -body: std.ArrayList(u8), // instruction bytes for current function +code_builder: CodeBuilder, // accumulates instruction bytes + relocations for current function storage: Storage, /// Accumulated stack frame size for the current function (for stack memory values). stack_frame_size: u32 = 0, @@ -45,8 +49,13 @@ uses_stack_memory: bool = false, fp_local: u32 = 0, /// Map from proc symbol key → compiled wasm function index (for LirProcSpec compilation). registered_procs: std.AutoHashMap(u64, u32), -/// Type index for the RocOps function signature: (i32, i32) -> void. +/// Type index for the RocOps callback signature: (i32 args_struct_ptr, i32 env_ptr) -> void. +/// Used with call_indirect for roc_alloc, roc_dealloc, roc_realloc, roc_dbg, +/// roc_expect_failed, and roc_crashed. roc_ops_type_idx: u32 = 0, +/// Type index for the RocCall (hosted function) signature: (i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) -> void. +/// Used with call_indirect for platform-provided hosted functions. +roc_call_type_idx: u32 = 0, /// Table indices for RocOps functions (used with call_indirect). roc_alloc_table_idx: u32 = 0, roc_dealloc_table_idx: u32 = 0, @@ -55,8 +64,8 @@ roc_dbg_table_idx: u32 = 0, roc_expect_failed_table_idx: u32 = 0, roc_crashed_table_idx: u32 = 0, /// Local index holding the roc_ops_ptr (pointer to RocOps struct in linear memory). -/// In main(), this is a local storing the constant 0 (struct at memory offset 0). -/// In compiled functions, this is parameter 0. +/// In the RocCall entrypoint, this is parameter 0 (provided by the host). +/// In compiled proc functions, this is also parameter 0. roc_ops_local: u32 = 0, /// CFStmt block nesting depth (for br targets in proc compilation). cf_depth: u32 = 0, @@ -70,83 +79,47 @@ join_point_depths: std.AutoHashMap(u32, u32), join_point_param_locals: std.AutoHashMap(u32, []u32), /// Stack of expression-level loop exit label depths for lowering break_expr. loop_break_target_depths: std.ArrayList(u32), -/// Wasm function index for imported roc_dec_mul host function. -dec_mul_import: ?u32 = null, -/// Wasm function index for imported roc_dec_to_str host function. -dec_to_str_import: ?u32 = null, -/// Wasm function index for imported roc_str_eq host function. -str_eq_import: ?u32 = null, -/// Wasm function index for imported roc_list_eq host function. -list_eq_import: ?u32 = null, -/// Wasm function index for imported roc_i128_div_s host function. -i128_div_s_import: ?u32 = null, -/// Wasm function index for imported roc_i128_mod_s host function. -i128_mod_s_import: ?u32 = null, -/// Wasm function index for imported roc_i32_mod_by host function. -i32_mod_by_import: ?u32 = null, -/// Wasm function index for imported roc_i64_mod_by host function. -i64_mod_by_import: ?u32 = null, -/// Wasm function index for imported roc_u128_div host function. -u128_div_import: ?u32 = null, -/// Wasm function index for imported roc_u128_mod host function. -u128_mod_import: ?u32 = null, -/// Wasm function index for imported roc_dec_div host function. -dec_div_import: ?u32 = null, -/// Wasm function index for imported roc_dec_div_trunc host function. -dec_div_trunc_import: ?u32 = null, -/// Wasm function index for imported roc_i128_to_str host function. -i128_to_str_import: ?u32 = null, -/// Wasm function index for imported roc_u128_to_str host function. -u128_to_str_import: ?u32 = null, -/// Wasm function index for imported roc_float_to_str host function. -float_to_str_import: ?u32 = null, -/// Wasm function index for imported roc_u128_to_dec host function. -u128_to_dec_import: ?u32 = null, -/// Wasm function index for imported roc_i128_to_dec host function. -i128_to_dec_import: ?u32 = null, -/// Wasm function index for imported roc_dec_to_i128 host function. -dec_to_i128_import: ?u32 = null, -/// Wasm function index for imported roc_dec_to_u128 host function. -dec_to_u128_import: ?u32 = null, -/// Wasm function index for imported roc_dec_to_f32 host function. -dec_to_f32_import: ?u32 = null, -/// Wasm function index for imported roc_list_str_eq host function. -list_str_eq_import: ?u32 = null, -/// Wasm function index for imported roc_list_list_eq host function. -list_list_eq_import: ?u32 = null, -str_repeat_import: ?u32 = null, -str_concat_import: ?u32 = null, -str_trim_import: ?u32 = null, -str_trim_start_import: ?u32 = null, -str_trim_end_import: ?u32 = null, -str_split_import: ?u32 = null, -str_join_with_import: ?u32 = null, -str_reserve_import: ?u32 = null, -str_release_excess_capacity_import: ?u32 = null, -str_with_capacity_import: ?u32 = null, -str_drop_prefix_import: ?u32 = null, -str_drop_suffix_import: ?u32 = null, -str_with_ascii_lowercased_import: ?u32 = null, -str_with_ascii_uppercased_import: ?u32 = null, -str_caseless_ascii_equals_import: ?u32 = null, -str_from_utf8_import: ?u32 = null, -int_from_str_import: ?u32 = null, -dec_from_str_import: ?u32 = null, -float_from_str_import: ?u32 = null, -list_append_unsafe_import: ?u32 = null, -list_sort_with_import: ?u32 = null, -list_reverse_import: ?u32 = null, +/// Builtin symbol indices for emitting relocatable calls to roc_builtins functions. +/// Populated via mergeModule + BuiltinSymbols.populate() before code generation. +builtin_syms: WasmModule.BuiltinSymbols, /// Configurable wasm stack size in bytes (default 1MB). wasm_stack_bytes: u32 = 1024 * 1024, /// Configurable wasm memory pages (0 = auto-compute from stack size). wasm_memory_pages: u32 = 0, -pub fn init(allocator: Allocator, store: *const LirExprStore, layout_store: *const LayoutStore) Self { +pub fn init(allocator: Allocator, store: *const LirExprStore, layout_store: *const LayoutStore, builtin_syms: WasmModule.BuiltinSymbols) Self { return .{ .allocator = allocator, .store = store, .layout_store = layout_store, .module = WasmModule.init(allocator), - .body = .empty, + .code_builder = CodeBuilder.init(), + .storage = Storage.init(allocator), + .stack_frame_size = 0, + .uses_stack_memory = false, + .fp_local = 0, + .registered_procs = std.AutoHashMap(u64, u32).init(allocator), + .join_point_depths = std.AutoHashMap(u32, u32).init(allocator), + .join_point_param_locals = std.AutoHashMap(u32, []u32).init(allocator), + .loop_break_target_depths = .empty, + .builtin_syms = builtin_syms, + }; +} + +/// Initialize a WasmCodeGen backed by an existing host module (for surgical linking). +/// Takes ownership of `host_module`. The caller should NOT deinit it separately. +pub fn initWithHostModule( + allocator: Allocator, + store: *const LirExprStore, + layout_store: *const LayoutStore, + host_module: WasmModule, + builtin_syms: WasmModule.BuiltinSymbols, +) Self { + return .{ + .allocator = allocator, + .store = store, + .layout_store = layout_store, + .module = host_module, + .code_builder = CodeBuilder.init(), .storage = Storage.init(allocator), .stack_frame_size = 0, .uses_stack_memory = false, @@ -155,12 +128,13 @@ pub fn init(allocator: Allocator, store: *const LirExprStore, layout_store: *con .join_point_depths = std.AutoHashMap(u32, u32).init(allocator), .join_point_param_locals = std.AutoHashMap(u32, []u32).init(allocator), .loop_break_target_depths = .empty, + .builtin_syms = builtin_syms, }; } pub fn deinit(self: *Self) void { self.module.deinit(); - self.body.deinit(self.allocator); + self.code_builder.deinit(self.allocator); self.storage.deinit(); self.registered_procs.deinit(); self.join_point_depths.deinit(); @@ -173,181 +147,115 @@ pub fn deinit(self: *Self) void { self.loop_break_target_depths.deinit(self.allocator); } -/// Register host function imports. Must be called before any addFunction calls -/// because wasm imports must come before locally-defined functions. -fn registerHostImports(self: *Self) !void { - // roc_dec_mul: (i32 lhs_ptr, i32 rhs_ptr, i32 result_ptr) -> void - // Takes pointers to 16-byte Dec values in linear memory, - // stores the result at result_ptr. - const dec_mul_type = try self.module.addFuncType( - &.{ .i32, .i32, .i32 }, - &.{}, - ); - self.dec_mul_import = try self.module.addImport("env", "roc_dec_mul", dec_mul_type); - - // roc_dec_to_str: (i32 dec_ptr, i32 buf_ptr) -> i32 str_len - // Reads 16-byte Dec value from dec_ptr, formats it as a string, - // writes the string bytes to buf_ptr, returns the length. - const dec_to_str_type = try self.module.addFuncType( +/// Register the 6 RocOps callback imports (roc_alloc, roc_dealloc, etc.) +/// and set up the funcref table. These are the only remaining imports — +/// all builtin operations are now provided via merged roc_builtins.o. +fn registerRocOpsImports(self: *Self) !void { + // RocOps callback type: (i32 args_struct_ptr, i32 env_ptr) -> void + self.roc_ops_type_idx = try self.module.addFuncType( &.{ .i32, .i32 }, - &.{.i32}, - ); - self.dec_to_str_import = try self.module.addImport("env", "roc_dec_to_str", dec_to_str_type); - - // roc_str_eq: (i32 str_a_ptr, i32 str_b_ptr) -> i32 (0 or 1) - // Compares two 12-byte RocStr structs for content equality. - const str_eq_type = try self.module.addFuncType( - &.{ .i32, .i32 }, - &.{.i32}, + &.{}, ); - self.str_eq_import = try self.module.addImport("env", "roc_str_eq", str_eq_type); - // roc_list_eq: (i32 list_a_ptr, i32 list_b_ptr, i32 elem_size) -> i32 - // Compares two 12-byte RocList structs for content equality (byte-wise comparison of elements). - const list_eq_type = try self.module.addFuncType( + // RocCall (hosted function) type: (i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) -> void + self.roc_call_type_idx = try self.module.addFuncType( &.{ .i32, .i32, .i32 }, - &.{.i32}, - ); - self.list_eq_import = try self.module.addImport("env", "roc_list_eq", list_eq_type); - - // RocOps function imports: all have signature (i32 args_ptr, i32 env_ptr) -> void - // These are called via call_indirect through the funcref table. - const roc_ops_type = try self.module.addFuncType( - &.{ .i32, .i32 }, &.{}, ); - self.roc_ops_type_idx = roc_ops_type; // Enable table and add each RocOps function as a table element self.module.enableTable(); - const roc_alloc_idx = try self.module.addImport("env", "roc_alloc", roc_ops_type); + const roc_alloc_idx = try self.module.addImport("env", "roc_alloc", self.roc_ops_type_idx); self.roc_alloc_table_idx = try self.module.addTableElement(roc_alloc_idx); - const roc_dealloc_idx = try self.module.addImport("env", "roc_dealloc", roc_ops_type); + const roc_dealloc_idx = try self.module.addImport("env", "roc_dealloc", self.roc_ops_type_idx); self.roc_dealloc_table_idx = try self.module.addTableElement(roc_dealloc_idx); - const roc_realloc_idx = try self.module.addImport("env", "roc_realloc", roc_ops_type); + const roc_realloc_idx = try self.module.addImport("env", "roc_realloc", self.roc_ops_type_idx); self.roc_realloc_table_idx = try self.module.addTableElement(roc_realloc_idx); - const roc_dbg_idx = try self.module.addImport("env", "roc_dbg", roc_ops_type); + const roc_dbg_idx = try self.module.addImport("env", "roc_dbg", self.roc_ops_type_idx); self.roc_dbg_table_idx = try self.module.addTableElement(roc_dbg_idx); - const roc_expect_failed_idx = try self.module.addImport("env", "roc_expect_failed", roc_ops_type); + const roc_expect_failed_idx = try self.module.addImport("env", "roc_expect_failed", self.roc_ops_type_idx); self.roc_expect_failed_table_idx = try self.module.addTableElement(roc_expect_failed_idx); - const roc_crashed_idx = try self.module.addImport("env", "roc_crashed", roc_ops_type); + const roc_crashed_idx = try self.module.addImport("env", "roc_crashed", self.roc_ops_type_idx); self.roc_crashed_table_idx = try self.module.addTableElement(roc_crashed_idx); +} - // i128/u128 division and modulo host functions - // All take (lhs_ptr, rhs_ptr, result_ptr) -> void - const i128_binop_type = try self.module.addFuncType( +/// Reuse RocOps imports that already exist in the current module and ensure +/// they have table entries for call_indirect. +fn bindExistingRocOpsImports(self: *Self) !void { + self.roc_ops_type_idx = try self.module.addFuncType( + &.{ .i32, .i32 }, + &.{}, + ); + self.roc_call_type_idx = try self.module.addFuncType( &.{ .i32, .i32, .i32 }, &.{}, ); - self.i128_div_s_import = try self.module.addImport("env", "roc_i128_div_s", i128_binop_type); - self.i128_mod_s_import = try self.module.addImport("env", "roc_i128_mod_s", i128_binop_type); - self.u128_div_import = try self.module.addImport("env", "roc_u128_div", i128_binop_type); - self.u128_mod_import = try self.module.addImport("env", "roc_u128_mod", i128_binop_type); - self.dec_div_import = try self.module.addImport("env", "roc_dec_div", i128_binop_type); - self.dec_div_trunc_import = try self.module.addImport("env", "roc_dec_div_trunc", i128_binop_type); - const i32_mod_by_type = try self.module.addFuncType(&.{ .i32, .i32 }, &.{.i32}); - self.i32_mod_by_import = try self.module.addImport("env", "roc_i32_mod_by", i32_mod_by_type); - - const i64_mod_by_type = try self.module.addFuncType(&.{ .i64, .i64 }, &.{.i64}); - self.i64_mod_by_import = try self.module.addImport("env", "roc_i64_mod_by", i64_mod_by_type); + self.module.enableTable(); - // i128/u128 to string: (val_ptr, buf_ptr) -> i32 str_len - const i128_to_str_type = try self.module.addFuncType( - &.{ .i32, .i32 }, - &.{.i32}, - ); - self.i128_to_str_import = try self.module.addImport("env", "roc_i128_to_str", i128_to_str_type); - self.u128_to_str_import = try self.module.addImport("env", "roc_u128_to_str", i128_to_str_type); + const names = [_]struct { name: []const u8, field: *u32 }{ + .{ .name = "roc_alloc", .field = &self.roc_alloc_table_idx }, + .{ .name = "roc_dealloc", .field = &self.roc_dealloc_table_idx }, + .{ .name = "roc_realloc", .field = &self.roc_realloc_table_idx }, + .{ .name = "roc_dbg", .field = &self.roc_dbg_table_idx }, + .{ .name = "roc_expect_failed", .field = &self.roc_expect_failed_table_idx }, + .{ .name = "roc_crashed", .field = &self.roc_crashed_table_idx }, + }; - const float_to_str_type = try self.module.addFuncType( - &.{ .i64, .i32, .i32 }, - &.{.i32}, - ); - self.float_to_str_import = try self.module.addImport("env", "roc_float_to_str", float_to_str_type); + for (names) |entry| { + const func_idx = self.module.findImportFuncIdx("env", entry.name) orelse + return error.MissingRocOpsCallback; + entry.field.* = try self.module.ensureTableElement(func_idx); + } +} - // 128-bit ↔ Dec conversions: (val_ptr, result_ptr) -> i32 (success flag) - const i128_dec_conv_type = try self.module.addFuncType( - &.{ .i32, .i32 }, - &.{.i32}, - ); - self.u128_to_dec_import = try self.module.addImport("env", "roc_u128_to_dec", i128_dec_conv_type); - self.i128_to_dec_import = try self.module.addImport("env", "roc_i128_to_dec", i128_dec_conv_type); - self.dec_to_i128_import = try self.module.addImport("env", "roc_dec_to_i128", i128_dec_conv_type); - self.dec_to_u128_import = try self.module.addImport("env", "roc_dec_to_u128", i128_dec_conv_type); - - // Dec to f32: (val_ptr) -> f32 - const dec_to_f32_type = try self.module.addFuncType( - &.{.i32}, - &.{.f32}, - ); - self.dec_to_f32_import = try self.module.addImport("env", "roc_dec_to_f32", dec_to_f32_type); +/// Find the host module's canonical RocOps callback functions and register +/// them in the funcref table for call_indirect. +fn resolveExistingRocOpsCallback(self: *Self, name: []const u8) !u32 { + if (self.module.findFunctionIdxBySuffix(name)) |func_idx| return func_idx; + return error.MissingRocOpsCallback; +} - // List of strings equality: (list_a_ptr, list_b_ptr) -> i32 - const list_str_eq_type = try self.module.addFuncType( +/// Bind existing RocOps callback functions from the host module into the funcref table. +/// Used when the module was prepared with `initWithHostModule` and already contains +/// the callback symbols — no new imports are added. +pub fn registerRocOpsFromModule(self: *Self) !void { + // Register type indices for call_indirect type checking + self.roc_ops_type_idx = try self.module.addFuncType( &.{ .i32, .i32 }, - &.{.i32}, + &.{}, ); - self.list_str_eq_import = try self.module.addImport("env", "roc_list_str_eq", list_str_eq_type); - - // List of lists equality: (list_a_ptr, list_b_ptr, inner_elem_size) -> i32 - const list_list_eq_type = try self.module.addFuncType( + self.roc_call_type_idx = try self.module.addFuncType( &.{ .i32, .i32, .i32 }, - &.{.i32}, + &.{}, ); - self.list_list_eq_import = try self.module.addImport("env", "roc_list_list_eq", list_list_eq_type); - - // String ops: (str_ptr, result_ptr) -> void - const str_unary_type = try self.module.addFuncType(&.{ .i32, .i32 }, &.{}); - self.str_trim_import = try self.module.addImport("env", "roc_str_trim", str_unary_type); - self.str_trim_start_import = try self.module.addImport("env", "roc_str_trim_start", str_unary_type); - self.str_trim_end_import = try self.module.addImport("env", "roc_str_trim_end", str_unary_type); - self.str_with_ascii_lowercased_import = try self.module.addImport("env", "roc_str_with_ascii_lowercased", str_unary_type); - self.str_with_ascii_uppercased_import = try self.module.addImport("env", "roc_str_with_ascii_uppercased", str_unary_type); - self.str_release_excess_capacity_import = try self.module.addImport("env", "roc_str_release_excess_capacity", str_unary_type); - self.str_with_capacity_import = try self.module.addImport("env", "roc_str_with_capacity", str_unary_type); - const str_from_utf8_type = try self.module.addFuncType(&.{ .i32, .i32, .i32, .i32 }, &.{}); - self.str_from_utf8_import = try self.module.addImport("env", "roc_str_from_utf8", str_from_utf8_type); - - const int_from_str_type = try self.module.addFuncType(&.{ .i32, .i32, .i32, .i32, .i32 }, &.{}); - self.int_from_str_import = try self.module.addImport("env", "roc_int_from_str", int_from_str_type); - - const dec_from_str_type = try self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); - self.dec_from_str_import = try self.module.addImport("env", "roc_dec_from_str", dec_from_str_type); - - const float_from_str_type = try self.module.addFuncType(&.{ .i32, .i32, .i32, .i32 }, &.{}); - self.float_from_str_import = try self.module.addImport("env", "roc_float_from_str", float_from_str_type); - - const list_append_unsafe_type = try self.module.addFuncType(&.{ .i32, .i32, .i32, .i32, .i32 }, &.{}); - self.list_append_unsafe_import = try self.module.addImport("env", "roc_list_append_unsafe", list_append_unsafe_type); - - const list_sort_with_type = try self.module.addFuncType(&.{ .i32, .i32, .i32, .i32, .i32 }, &.{}); - self.list_sort_with_import = try self.module.addImport("env", "roc_list_sort_with", list_sort_with_type); - - const list_reverse_type = try self.module.addFuncType(&.{ .i32, .i32, .i32, .i32 }, &.{}); - self.list_reverse_import = try self.module.addImport("env", "roc_list_reverse", list_reverse_type); + self.module.enableTable(); - // String ops: (arg1, arg2, result_ptr) -> void - const str_binary_type = try self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); - self.str_drop_prefix_import = try self.module.addImport("env", "roc_str_drop_prefix", str_binary_type); - self.str_drop_suffix_import = try self.module.addImport("env", "roc_str_drop_suffix", str_binary_type); - self.str_split_import = try self.module.addImport("env", "roc_str_split", str_binary_type); - self.str_join_with_import = try self.module.addImport("env", "roc_str_join_with", str_binary_type); - self.str_concat_import = try self.module.addImport("env", "roc_str_concat", str_binary_type); - self.str_repeat_import = try self.module.addImport("env", "roc_str_repeat", str_binary_type); - self.str_reserve_import = try self.module.addImport("env", "roc_str_reserve", str_binary_type); + const names = [_]struct { name: []const u8, field: *u32 }{ + .{ .name = "roc_alloc", .field = &self.roc_alloc_table_idx }, + .{ .name = "roc_dealloc", .field = &self.roc_dealloc_table_idx }, + .{ .name = "roc_realloc", .field = &self.roc_realloc_table_idx }, + .{ .name = "roc_dbg", .field = &self.roc_dbg_table_idx }, + .{ .name = "roc_expect_failed", .field = &self.roc_expect_failed_table_idx }, + .{ .name = "roc_crashed", .field = &self.roc_crashed_table_idx }, + }; - // Caseless equals: (str_a, str_b) -> i32 - self.str_caseless_ascii_equals_import = try self.module.addImport("env", "roc_str_caseless_ascii_equals", str_eq_type); + for (names) |entry| { + const func_idx = try self.resolveExistingRocOpsCallback(entry.name); + entry.field.* = try self.module.ensureTableElement(func_idx); + } } +/// Errors that can occur during wasm module generation. +pub const GenerateError = Allocator.Error || error{MissingRocOpsCallback}; + /// Result of generating a wasm module pub const GenerateResult = struct { wasm_bytes: []u8, @@ -356,10 +264,25 @@ pub const GenerateResult = struct { }; /// Generate a complete wasm module for a single expression. -/// The expression becomes the body of an exported "main" function. -pub fn generateModule(self: *Self, expr_id: LirExprId, result_layout: layout.Idx) Allocator.Error!GenerateResult { - // Register host function imports (must be done before addFunction calls) - self.registerHostImports() catch return error.OutOfMemory; +/// +/// Produces two exported functions: +/// - `entrypoint_name`: RocCall ABI `(i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void` +/// The app entrypoint used by surgical linking. The name comes from the platform's +/// `provides` section (e.g. `roc__main_for_host_1_exposed`). Receives the RocOps struct +/// from the host, writes its return value to `ret_ptr`. +/// - `main`: Eval wrapper `(i32 env_ptr) → result_vt` +/// Backward-compatible shim for the eval/REPL pipeline. Builds a RocOps struct, calls +/// the RocCall function, and returns the result on the wasm stack. +pub fn generateModule(self: *Self, expr_id: LirExprId, result_layout: layout.Idx, entrypoint_name: []const u8) GenerateError!GenerateResult { + // Register RocOps callback imports (roc_alloc, etc.) — these stay as imports + // because they come from the host platform, not from builtins. + // Eval prepares its module with canonical RocOps imports up front; reuse them + // rather than mutating the import section after builtins have been merged. + if (self.module.importCount() > 0) { + try self.bindExistingRocOpsImports(); + } else { + try self.registerRocOpsImports(); + } // Compile any procedures (recursive functions) before the main expression const proc_specs = self.store.getProcSpecs(); @@ -367,33 +290,41 @@ pub fn generateModule(self: *Self, expr_id: LirExprId, result_layout: layout.Idx self.compileAllProcSpecs(proc_specs) catch return error.OutOfMemory; } - // Determine return type from the expression's actual wasm type. - // We use exprValType because nominal layout indices can collide - // with well-known sentinel values (e.g., Bool's nominal layout - // index may equal the i64 sentinel). + // Determine return type and representation from the expression. const result_vt = self.exprValType(expr_id); + const result_repr = WasmLayout.wasmReprWithStore(result_layout, self.getLayoutStore()); + const result_byte_size: u32 = switch (result_repr) { + .primitive => |vt| switch (vt) { + .i32, .f32 => 4, + .i64, .f64 => 8, + }, + .stack_memory => |size| size, + }; - // Add function type: (i32 env_ptr) -> (result_type) - const type_idx = self.module.addFuncType(&.{.i32}, &.{result_vt}) catch return error.OutOfMemory; - - // Add function - const func_idx = self.module.addFunction(type_idx) catch return error.OutOfMemory; + // RocCall function: (i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void + const roc_call_type_idx = self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}) catch return error.OutOfMemory; + const roc_call_func_idx = self.module.addFunction(roc_call_type_idx) catch return error.OutOfMemory; - // Generate the expression body into self.body - self.body.clearRetainingCapacity(); + // Generate the expression body into self.code_builder.code + self.code_builder.clear(); self.storage.reset(); self.stack_frame_size = 0; self.uses_stack_memory = false; - // Local 0 = env_ptr parameter - const env_ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - // Local 1 = roc_ops_local (will hold constant 0, the RocOps struct address) + // Local 0 = roc_ops_ptr parameter (provided by caller) self.roc_ops_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + // Local 1 = ret_ptr parameter + const ret_ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + // Local 2 = args_ptr parameter (unused for eval, reserved for future use) + _ = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; // Pre-allocate frame pointer local so it doesn't collide with user locals self.fp_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.generateExpr(expr_id); + // Store result to ret_ptr instead of returning on the wasm stack. + try self.emitStoreResultToRetPtr(ret_ptr_local, result_vt, result_repr); + // Always enable memory + stack pointer (RocOps struct + allocations need linear memory) const stack_pages = (self.wasm_stack_bytes + 65535) / 65536; // round up to page boundary const memory_pages = if (self.wasm_memory_pages > 0) self.wasm_memory_pages else stack_pages; @@ -402,87 +333,287 @@ pub fn generateModule(self: *Self, expr_id: LirExprId, result_layout: layout.Idx self.uses_stack_memory = true; self.module.addExport("memory", .memory, 0) catch return error.OutOfMemory; - // Build function body: locals declaration + prologue + instructions + epilogue + end - var func_body: std.ArrayList(u8) = .empty; - defer func_body.deinit(self.allocator); + // Build RocCall function body: locals + prologue + instructions + epilogue + end + var roc_call_body: std.ArrayList(u8) = .empty; + defer roc_call_body.deinit(self.allocator); - // Encode locals declaration (skip 1 for the env_ptr parameter) - try self.encodeLocalsDecl(&func_body, 1); + // Encode locals declaration (skip 3 for the RocCall parameters) + try self.encodeLocalsDecl(&roc_call_body, 3); // Prologue: allocate stack frame + try self.emitStackPrologue(&roc_call_body); + + // Resolve deferred relocatable calls before copying instructions + self.resolvePendingRelocations(); + + // Main body instructions + roc_call_body.appendSlice(self.allocator, self.code_builder.code.items) catch return error.OutOfMemory; + + // Epilogue: restore stack pointer + try self.emitStackEpilogue(&roc_call_body); + + // End opcode + roc_call_body.append(self.allocator, Op.end) catch return error.OutOfMemory; + + self.module.setFunctionBody(roc_call_func_idx, roc_call_body.items) catch return error.OutOfMemory; + self.module.addExport(entrypoint_name, .func, roc_call_func_idx) catch return error.OutOfMemory; + + // Eval wrapper: (i32 env_ptr) → result_vt + // Builds RocOps struct at memory offset 0, allocates a return buffer on + // the stack, calls the RocCall function, and returns the result. + const eval_type_idx = self.module.addFuncType(&.{.i32}, &.{result_vt}) catch return error.OutOfMemory; + const eval_func_idx = self.module.addFunction(eval_type_idx) catch return error.OutOfMemory; + + var eval_body: std.ArrayList(u8) = .empty; + defer eval_body.deinit(self.allocator); + + try self.emitEvalWrapper(&eval_body, roc_call_func_idx, result_repr, result_byte_size); + + self.module.setFunctionBody(eval_func_idx, eval_body.items) catch return error.OutOfMemory; + self.module.addExport("main", .func, eval_func_idx) catch return error.OutOfMemory; + + // Transfer app function bodies into code_bytes so that + // function_offsets is populated before encode/materializeFuncBodies. + self.module.transferAppFunctions() catch return error.OutOfMemory; + + // Encode the module + const wasm_bytes = self.module.encode(self.allocator) catch return error.OutOfMemory; + + return .{ + .wasm_bytes = wasm_bytes, + .result_layout = result_layout, + .has_imports = self.module.importCount() > 0, + }; +} + +/// Store the result from the wasm value stack into ret_ptr. +/// For primitives, stores the scalar value directly. +/// For composites (stack_memory), copies the data from the pointer on the stack. +fn emitStoreResultToRetPtr(self: *Self, ret_ptr_local: u32, result_vt: ValType, result_repr: WasmLayout.WasmRepr) Allocator.Error!void { + switch (result_repr) { + .primitive => { + // Stack has: [result_value] + // Save result to temp, then store to ret_ptr + const tmp = self.storage.allocAnonymousLocal(result_vt) catch return error.OutOfMemory; + try self.emitLocalSet(tmp); + try self.emitLocalGet(ret_ptr_local); + try self.emitLocalGet(tmp); + try self.emitStoreOp(result_vt, 0); + }, + .stack_memory => |byte_size| { + if (byte_size == 0) { + // Zero-sized type — drop the value, nothing to store + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; + return; + } + // Stack has: [src_ptr] (i32 pointer to composite data) + const src_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalSet(src_ptr); + + // Copy byte_size bytes from src_ptr to ret_ptr using unrolled 4-byte chunks + var offset: u32 = 0; + while (offset + 4 <= byte_size) : (offset += 4) { + try self.emitLocalGet(ret_ptr_local); + try self.emitLocalGet(src_ptr); + try self.emitLoadOp(.i32, offset); + try self.emitStoreOp(.i32, offset); + } + // Handle remaining bytes (1-3) + while (offset < byte_size) : (offset += 1) { + // local.get ret_ptr + try self.emitLocalGet(ret_ptr_local); + // local.get src_ptr + try self.emitLocalGet(src_ptr); + // i32.load8_u offset + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + // i32.store8 offset + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + } + }, + } +} + +/// Emit the stack frame allocation prologue into a function body buffer. +fn emitStackPrologue(self: *Self, func_body: *std.ArrayList(u8)) Allocator.Error!void { + // Round up frame size to 8-byte alignment to ensure the frame pointer + // maintains 8-byte alignment (required by builtins that use @alignCast + // on stack-allocated pointers, e.g. FromUtf8Try with u64 fields). + self.stack_frame_size = (self.stack_frame_size + 7) & ~@as(u32, 7); + // global.get $__stack_pointer (global 0) func_body.append(self.allocator, Op.global_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &func_body, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, 0) catch return error.OutOfMemory; // i32.const frame_size func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &func_body, @intCast(self.stack_frame_size)) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, func_body, @intCast(self.stack_frame_size)) catch return error.OutOfMemory; // i32.sub func_body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; // local.tee $fp func_body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &func_body, self.fp_local) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, self.fp_local) catch return error.OutOfMemory; // global.set $__stack_pointer func_body.append(self.allocator, Op.global_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &func_body, 0) catch return error.OutOfMemory; - - // Build RocOps struct at memory offset 0 (36 bytes on wasm32) - // Set roc_ops_local = 0 (constant address of the struct) - func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &func_body, 0) catch return error.OutOfMemory; - func_body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &func_body, self.roc_ops_local) catch return error.OutOfMemory; - - // Write env pointer (offset 0) - try self.emitI32StoreToBody(&func_body, 0, env_ptr_local, null); - // Write roc_alloc table index (offset 4) - try self.emitI32StoreConstToBody(&func_body, 4, self.roc_alloc_table_idx); - // Write roc_dealloc table index (offset 8) - try self.emitI32StoreConstToBody(&func_body, 8, self.roc_dealloc_table_idx); - // Write roc_realloc table index (offset 12) - try self.emitI32StoreConstToBody(&func_body, 12, self.roc_realloc_table_idx); - // Write roc_dbg table index (offset 16) - try self.emitI32StoreConstToBody(&func_body, 16, self.roc_dbg_table_idx); - // Write roc_expect_failed table index (offset 20) - try self.emitI32StoreConstToBody(&func_body, 20, self.roc_expect_failed_table_idx); - // Write roc_crashed table index (offset 24) - try self.emitI32StoreConstToBody(&func_body, 24, self.roc_crashed_table_idx); - // Write hosted_fns.count = 0 (offset 28) - try self.emitI32StoreConstToBody(&func_body, 28, 0); - // Write hosted_fns.fns = 0 (offset 32) - try self.emitI32StoreConstToBody(&func_body, 32, 0); - - // Main body instructions - func_body.appendSlice(self.allocator, self.body.items) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, 0) catch return error.OutOfMemory; +} - // Epilogue: restore stack pointer +/// Emit the stack frame deallocation epilogue into a function body buffer. +fn emitStackEpilogue(self: *Self, func_body: *std.ArrayList(u8)) Allocator.Error!void { // local.get $fp func_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &func_body, self.fp_local) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, self.fp_local) catch return error.OutOfMemory; // i32.const frame_size func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &func_body, @intCast(self.stack_frame_size)) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, func_body, @intCast(self.stack_frame_size)) catch return error.OutOfMemory; // i32.add func_body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; // global.set $__stack_pointer func_body.append(self.allocator, Op.global_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &func_body, 0) catch return error.OutOfMemory; - - // End opcode - func_body.append(self.allocator, Op.end) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, 0) catch return error.OutOfMemory; +} - self.module.setFunctionBody(func_idx, func_body.items) catch return error.OutOfMemory; +/// Emit the eval wrapper function body. +/// +/// The wrapper has signature `(i32 env_ptr) → result_vt` and: +/// 1. Builds a RocOps struct at memory offset 0 +/// 2. Allocates a return buffer on the wasm stack +/// 3. Calls the RocCall function with (roc_ops_ptr=0, ret_buf, args_ptr=0) +/// 4. Loads the result from the return buffer and returns it +fn emitEvalWrapper( + self: *Self, + eval_body: *std.ArrayList(u8), + roc_call_func_idx: u32, + result_repr: WasmLayout.WasmRepr, + result_byte_size: u32, +) Allocator.Error!void { + // Eval wrapper locals (manually managed, not using self.storage): + // Local 0: env_ptr (parameter) + // Local 1: fp (frame pointer) + // Local 2: ret_buf_ptr + // Local 3: roc_ops_ptr + const eval_env_ptr_local: u32 = 0; + const eval_fp_local: u32 = 1; + const eval_ret_buf_local: u32 = 2; + const eval_roc_ops_local: u32 = 3; + + // Locals declaration: 3 locals of type i32 (fp + ret_buf_ptr + roc_ops_ptr) + WasmModule.leb128WriteU32(self.allocator, eval_body, 1) catch return error.OutOfMemory; // 1 group + WasmModule.leb128WriteU32(self.allocator, eval_body, 3) catch return error.OutOfMemory; // 3 locals + eval_body.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; + + // Stack frame layout (grows downward from stack pointer): + // [ret_buf: ret_buf_size bytes, aligned to 8] + // [roc_ops: 36 bytes, aligned to 4] + // The RocOps struct MUST be at a non-zero address because builtins cast + // roc_ops pointers to ?*anyopaque, and Zig treats address 0 as null. + const ret_buf_size = if (result_byte_size > 0) result_byte_size else 8; + const ret_buf_aligned: u32 = (ret_buf_size + 7) & ~@as(u32, 7); + const roc_ops_size: u32 = WasmRocOps.total_size; // 36 bytes + const eval_frame_size: u32 = ret_buf_aligned + ((roc_ops_size + 7) & ~@as(u32, 7)); - // Export the function as "main" - self.module.addExport("main", .func, func_idx) catch return error.OutOfMemory; + // Prologue: allocate stack frame + // global.get $__stack_pointer + eval_body.append(self.allocator, Op.global_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, 0) catch return error.OutOfMemory; + // i32.const eval_frame_size + eval_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, eval_body, @intCast(eval_frame_size)) catch return error.OutOfMemory; + // i32.sub + eval_body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + // local.tee $fp + eval_body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_fp_local) catch return error.OutOfMemory; + // global.set $__stack_pointer + eval_body.append(self.allocator, Op.global_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, 0) catch return error.OutOfMemory; + + // ret_buf_ptr = fp + 0 (return buffer at start of frame) + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_fp_local) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_ret_buf_local) catch return error.OutOfMemory; + + // roc_ops_ptr = fp + ret_buf_aligned (RocOps struct after return buffer) + // The RocOps struct MUST be at a non-zero address because builtins cast + // roc_ops pointers to ?*anyopaque, and Zig treats address 0 as null. + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_fp_local) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, eval_body, @intCast(ret_buf_aligned)) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_roc_ops_local) catch return error.OutOfMemory; + + // Build RocOps struct in the stack frame (at roc_ops_ptr) + // Write env pointer + try self.emitI32StoreToBody(eval_body, WasmRocOps.env_ptr, eval_env_ptr_local, eval_roc_ops_local); + // Write RocOps callback table indices + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.roc_alloc_table_idx, self.roc_alloc_table_idx, eval_roc_ops_local); + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.roc_dealloc_table_idx, self.roc_dealloc_table_idx, eval_roc_ops_local); + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.roc_realloc_table_idx, self.roc_realloc_table_idx, eval_roc_ops_local); + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.roc_dbg_table_idx, self.roc_dbg_table_idx, eval_roc_ops_local); + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.roc_expect_failed_table_idx, self.roc_expect_failed_table_idx, eval_roc_ops_local); + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.roc_crashed_table_idx, self.roc_crashed_table_idx, eval_roc_ops_local); + // No hosted functions in standalone eval mode + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.hosted_fns_count, 0, eval_roc_ops_local); + try self.emitI32StoreConstToBody(eval_body, WasmRocOps.hosted_fns_ptr, 0, eval_roc_ops_local); + + // Call RocCall function: (roc_ops_ptr, ret_buf, args_ptr=0) + // arg 0: roc_ops_ptr (non-zero stack address) + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_roc_ops_local) catch return error.OutOfMemory; + // arg 1: ret_buf_ptr + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_ret_buf_local) catch return error.OutOfMemory; + // arg 2: args_ptr = 0 (no arguments for eval) + eval_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, eval_body, 0) catch return error.OutOfMemory; + // call + eval_body.append(self.allocator, Op.call) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, roc_call_func_idx) catch return error.OutOfMemory; + + // Load result from return buffer + switch (result_repr) { + .primitive => |vt| { + // Load the scalar value from ret_buf and return it + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_ret_buf_local) catch return error.OutOfMemory; + const load_op: u8 = switch (vt) { + .i32 => Op.i32_load, + .i64 => Op.i64_load, + .f32 => Op.f32_load, + .f64 => Op.f64_load, + }; + eval_body.append(self.allocator, load_op) catch return error.OutOfMemory; + const align_log2: u32 = switch (vt) { + .i32, .f32 => 2, + .i64, .f64 => 3, + }; + WasmModule.leb128WriteU32(self.allocator, eval_body, align_log2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, 0) catch return error.OutOfMemory; // offset + }, + .stack_memory => { + // Composite data was written to ret_buf by the RocCall function. + // Return the pointer to the buffer (i32). + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_ret_buf_local) catch return error.OutOfMemory; + }, + } - // Encode the module - const wasm_bytes = self.module.encode(self.allocator) catch return error.OutOfMemory; + // Epilogue: restore stack pointer + eval_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, eval_fp_local) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, eval_body, @intCast(eval_frame_size)) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + eval_body.append(self.allocator, Op.global_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, eval_body, 0) catch return error.OutOfMemory; - return .{ - .wasm_bytes = wasm_bytes, - .result_layout = result_layout, - .has_imports = self.module.importCount() > 0, - }; + // End opcode + eval_body.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Encode the locals declaration vector for a function body. @@ -526,38 +657,38 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { .i64_literal => |val| { switch (self.resolveValType(val.layout_idx)) { .i32 => { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @truncate(val.value)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @truncate(val.value)) catch return error.OutOfMemory; }, .i64 => { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, val.value) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, val.value) catch return error.OutOfMemory; }, .f32 => { - self.body.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; const bytes: [4]u8 = @bitCast(@as(f32, @floatFromInt(val.value))); - self.body.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; }, .f64 => { - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; const bytes: [8]u8 = @bitCast(@as(f64, @floatFromInt(val.value))); - self.body.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; }, } }, .f64_literal => |val| { - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; const bytes: [8]u8 = @bitCast(val); - self.body.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; }, .f32_literal => |val| { - self.body.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; const bytes: [4]u8 = @bitCast(val); - self.body.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; }, .bool_literal => |val| { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, if (val) 1 else 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, if (val) 1 else 0) catch return error.OutOfMemory; }, .dec_literal => |val| { // Dec is i128 stored in 16 bytes of linear memory @@ -569,17 +700,17 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { const high: i64 = @bitCast(@as(u64, @truncate(unsigned >> 64))); // Store low 8 bytes - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; try self.emitI64Store(base_offset); // Store high 8 bytes - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; try self.emitI64Store(base_offset + 8); // Push pointer to the 16-byte value @@ -595,17 +726,17 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { const high: i64 = @bitCast(@as(u64, @truncate(unsigned >> 64))); // Store low 8 bytes - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; try self.emitI64Store(base_offset); // Store high 8 bytes - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; try self.emitI64Store(base_offset + 8); // Push pointer to the 16-byte value @@ -650,8 +781,8 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { // Load the scalar from the pointer (lower bytes on little-endian) try self.emitLoadOpSized(vt2, target_size, 0); const local_idx = self.getOrAllocTypedLocal(bind.symbol, vt2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; continue; } unreachable; @@ -669,8 +800,8 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { // Zero-init the target memory first const base_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(stack_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; try self.emitZeroInit(base_local, target_size); // Generate the scalar and store at offset 0 (lower bytes) @@ -684,16 +815,16 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { (bind.layout_idx == .i128 or bind.layout_idx == .dec)) { // Store -1 (all ones) in upper 8 bytes for sign extension - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, -1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, -1) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); } // Bind pointer to the symbol's local - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; const local_idx = self.getOrAllocTypedLocal(bind.symbol, .i32) catch return error.OutOfMemory; try self.emitLocalSet(local_idx); continue; @@ -715,9 +846,9 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { // Bind pointer (fp + stack_offset) to the symbol's local try self.emitLocalGet(self.fp_local); if (stack_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(stack_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(stack_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const local_idx = self.getOrAllocTypedLocal(bind.symbol, .i32) catch return error.OutOfMemory; try self.emitLocalSet(local_idx); @@ -751,21 +882,21 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { try self.emitConversion(expr_vt, vt); // Allocate a local (or reuse existing one for mutable rebinding) const local_idx = self.getOrAllocTypedLocal(bind.symbol, vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; } }, .wildcard => { // Evaluate expression for side effects, drop result try self.generateExpr(stmt.expr); - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; }, .struct_ => |s| { // Struct destructuring: generate expr → pointer, then bind each field try self.generateExpr(stmt.expr); const ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr) catch return error.OutOfMemory; try self.bindStructPattern(ptr, s); }, .as_pattern => |as_pat| { @@ -773,22 +904,22 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { try self.generateExpr(stmt.expr); const vt = self.resolveValType(as_pat.layout_idx); const local_idx = self.storage.allocLocal(as_pat.symbol, vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; // Now bind inner pattern with the same value on the stack const inner = self.store.getPattern(as_pat.inner); switch (inner) { .bind => |inner_bind| { const inner_vt = self.resolveValType(inner_bind.layout_idx); const inner_local = self.storage.allocLocal(inner_bind.symbol, inner_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, inner_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, inner_local) catch return error.OutOfMemory; }, .wildcard => { - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; }, else => { - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; }, } }, @@ -796,16 +927,16 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { // Tag destructuring in let-binding try self.generateExpr(stmt.expr); const ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr) catch return error.OutOfMemory; try self.bindTagPattern(ptr, tag_pat); }, .list => |list_pat| { // List destructuring in let-binding try self.generateExpr(stmt.expr); const ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr) catch return error.OutOfMemory; try self.bindListPattern(ptr, list_pat); }, // Comparison patterns don't appear in let bindings @@ -818,8 +949,8 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { .lookup => |l| { const key: u64 = @bitCast(l.symbol); if (self.storage.locals.get(key)) |local_info| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_info.idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_info.idx) catch return error.OutOfMemory; // Convert if the local's actual type differs from the expression's layout type. // This can happen when a function parameter is i64 (Roc I64) but the body // expression's layout resolves to i32 (e.g., used as a list count). @@ -837,8 +968,8 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { .cell_load => |l| { const key: u64 = @bitCast(l.cell); if (self.storage.locals.get(key)) |local_info| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_info.idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_info.idx) catch return error.OutOfMemory; const expected_vt = self.resolveValType(l.layout_idx); if (local_info.val_type != expected_vt) { try self.emitConversion(local_info.val_type, expected_vt); @@ -874,23 +1005,23 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { const base_local = self.fp_local; // Zero out the 12 bytes for (0..3) |i| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOp(.i32, base_offset + @as(u32, @intCast(i)) * 4); } // Push pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; if (base_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(base_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(base_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } }, .runtime_error => { - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; }, .crash => |crash| { const msg_bytes = self.store.getString(crash.msg); @@ -901,43 +1032,39 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { // Write utf8_bytes pointer try self.emitFpOffset(crashed_slot); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(data_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(data_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // Write len try self.emitFpOffset(crashed_slot); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(msg_bytes.len)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(msg_bytes.len)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; // Push call_indirect args: (crashed_args_ptr, env_ptr) try self.emitFpOffset(crashed_slot); try self.emitLocalGet(self.roc_ops_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + try self.emitLoadOp(.i32, WasmRocOps.env_ptr); - // Load roc_crashed table index from roc_ops_ptr offset 24 + // Load roc_crashed table index from RocOps struct try self.emitLocalGet(self.roc_ops_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 24) catch return error.OutOfMemory; + try self.emitLoadOp(.i32, WasmRocOps.roc_crashed_table_idx); // call_indirect - self.body.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, self.roc_ops_type_idx) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, self.roc_ops_type_idx) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; }, .early_return => |er| { try self.generateExpr(er.expr); - self.body.append(self.allocator, Op.@"return") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"return") catch return error.OutOfMemory; }, .proc_call => |c| { try self.generateCall(c); @@ -958,7 +1085,7 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { .expect => |e| { // Expect: evaluate condition (drop result), then evaluate body try self.generateExpr(e.cond); - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; try self.generateExpr(e.body); }, .low_level => |ll| { @@ -1013,18 +1140,17 @@ fn generateExpr(self: *Self, expr_id: LirExprId) Allocator.Error!void { try self.emitLoadOpForLayout(tpa.payload_layout, 0); } }, - .hosted_call => { - // TODO: Implement hosted_call expression lowering for wasm. - @panic("TODO: wasm hosted_call expression path is not implemented"); + .hosted_call => |hc| { + try self.generateHostedCall(hc); }, .break_expr => { const target_depth = self.currentLoopBreakDepth(); std.debug.assert(self.expr_control_depth >= target_depth); - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; WasmModule.leb128WriteU32( self.allocator, - &self.body, + &self.code_builder.code, self.expr_control_depth - target_depth, ) catch return error.OutOfMemory; }, @@ -1127,9 +1253,9 @@ fn emitRcAtPtr( const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(value_ptr_local); if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } try self.emitLocalSet(field_ptr); try self.emitRcAtPtr(kind, field_ptr, field_layout_idx, inc_count); @@ -1142,8 +1268,8 @@ fn emitRcAtPtr( const disc_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; if (tu_data.discriminant_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { try self.emitLocalGet(value_ptr_local); try self.emitLoadBySize(tu_data.discriminant_size, tu_data.discriminant_offset); @@ -1156,19 +1282,19 @@ fn emitRcAtPtr( if (!ls.layoutContainsRefcounted(payload_layout)) continue; if (ls.layoutSizeAlign(payload_layout).size == 0) continue; - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(disc_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(variant_i)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(variant_i)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitRcAtPtr(kind, value_ptr_local, payload_layout_idx, inc_count); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } }, .closure => { @@ -1186,22 +1312,22 @@ fn emitDecodeListAllocPtr(self: *Self, list_ptr_local: u32, out_alloc_ptr: u32, try self.emitLocalSet(cap_local); try self.emitLocalGet(cap_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; try self.emitLocalSet(out_is_slice); try self.emitLocalGet(out_is_slice); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; try self.emitLocalGet(cap_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_shl) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_shl) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.emitLocalGet(list_ptr_local); try self.emitLoadOp(.i32, 0); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; try self.emitLocalSet(out_alloc_ptr); } @@ -1212,51 +1338,51 @@ fn emitDecodeStrAllocPtr(self: *Self, str_ptr_local: u32, out_alloc_ptr: u32, ou try self.emitLocalSet(cap_local); try self.emitLocalGet(cap_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; try self.emitLocalSet(out_is_small); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(out_alloc_ptr); - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(out_is_small); - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; const is_slice = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(str_ptr_local); try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; try self.emitLocalSet(is_slice); try self.emitLocalGet(is_slice); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; try self.emitLocalGet(cap_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_shl) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_shl) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.emitLocalGet(str_ptr_local); try self.emitLoadOp(.i32, 0); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; try self.emitLocalSet(out_alloc_ptr); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } fn emitPtrWithOffset(self: *Self, ptr_local: u32, offset: i32) Allocator.Error!void { try self.emitLocalGet(ptr_local); if (offset != 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } } @@ -1274,41 +1400,41 @@ fn emitPrepareListSliceMetadata(self: *Self, list_ptr_local: u32, elements_refco if (elements_refcounted) { const rc_val = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(source_alloc_ptr); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(source_is_slice); - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLoadI32AtPtrOffset(source_alloc_ptr, -4, rc_val); try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitPtrWithOffset(source_alloc_ptr, -8); try self.emitLocalGet(list_ptr_local); try self.emitLoadOp(.i32, 4); try self.emitStoreOp(.i32, 0); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } try self.emitLocalGet(source_alloc_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @as(i32, @bitCast(@as(u32, 0x80000000)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @as(i32, @bitCast(@as(u32, 0x80000000)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; try self.emitLocalSet(out_encoded_cap); } @@ -1316,8 +1442,8 @@ fn emitCallRocDealloc(self: *Self, ptr_local: u32, alignment: u32) Allocator.Err const dealloc_slot = try self.allocStackMemory(8, 4); try self.emitFpOffset(dealloc_slot); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(alignment)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(alignment)) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); try self.emitFpOffset(dealloc_slot); @@ -1326,14 +1452,14 @@ fn emitCallRocDealloc(self: *Self, ptr_local: u32, alignment: u32) Allocator.Err try self.emitFpOffset(dealloc_slot); try self.emitLocalGet(self.roc_ops_local); - try self.emitLoadOp(.i32, 0); // env ptr + try self.emitLoadOp(.i32, WasmRocOps.env_ptr); try self.emitLocalGet(self.roc_ops_local); - try self.emitLoadOp(.i32, 8); // roc_dealloc table idx + try self.emitLoadOp(.i32, WasmRocOps.roc_dealloc_table_idx); - self.body.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, self.roc_ops_type_idx) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, self.roc_ops_type_idx) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } fn emitFreeRcPtr(self: *Self, rc_ptr_local: u32, element_alignment: u32, elements_refcounted: bool) Allocator.Error!void { @@ -1346,9 +1472,9 @@ fn emitFreeRcPtr(self: *Self, rc_ptr_local: u32, element_alignment: u32, element const alloc_ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(rc_ptr_local); if (alloc_adjust > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, alloc_adjust) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, alloc_adjust) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; } try self.emitLocalSet(alloc_ptr_local); try self.emitCallRocDealloc(alloc_ptr_local, alloc_alignment); @@ -1361,40 +1487,40 @@ fn emitDataPtrIncref(self: *Self, data_ptr_local: u32, amount: u16) Allocator.Er const rc_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const rc_val = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(data_ptr_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(data_ptr_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -4) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; try self.emitLocalSet(masked_ptr); try self.emitLocalGet(masked_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -4) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rc_ptr); try self.emitLoadI32AtPtrOffset(rc_ptr, 0, rc_val); try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(rc_ptr); try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(amount)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(amount)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } fn emitDataPtrDecref(self: *Self, data_ptr_local: u32, alignment: u32, elements_refcounted: bool) Allocator.Error!void { @@ -1402,77 +1528,77 @@ fn emitDataPtrDecref(self: *Self, data_ptr_local: u32, alignment: u32, elements_ const rc_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const rc_val = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(data_ptr_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(data_ptr_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -4) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; try self.emitLocalSet(masked_ptr); try self.emitLocalGet(masked_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -4) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rc_ptr); try self.emitLoadI32AtPtrOffset(rc_ptr, 0, rc_val); try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitFreeRcPtr(rc_ptr, alignment, elements_refcounted); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.emitLocalGet(rc_ptr); try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } fn emitDataPtrFree(self: *Self, data_ptr_local: u32, alignment: u32, elements_refcounted: bool) Allocator.Error!void { const masked_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const rc_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(data_ptr_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(data_ptr_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -4) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; try self.emitLocalSet(masked_ptr); try self.emitLocalGet(masked_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -4) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rc_ptr); try self.emitFreeRcPtr(rc_ptr, alignment, elements_refcounted); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } fn emitListElementDecrefsIfUnique( @@ -1490,69 +1616,69 @@ fn emitListElementDecrefsIfUnique( const idx_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const elem_ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(alloc_ptr_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLoadI32AtPtrOffset(alloc_ptr_local, -4, rc_val); try self.emitLocalGet(rc_val); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(is_slice_local); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLoadI32AtPtrOffset(alloc_ptr_local, -8, count_local); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.emitLocalGet(list_ptr_local); try self.emitLoadOp(.i32, 4); try self.emitLocalSet(count_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(idx_local); - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(idx_local); try self.emitLocalGet(count_local); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalGet(alloc_ptr_local); try self.emitLocalGet(idx_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(elem_ptr_local); try self.emitRcAtPtr(.decref, elem_ptr_local, elem_layout_idx, 1); try self.emitLocalGet(idx_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(idx_local); - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } fn emitListRc( @@ -1602,11 +1728,11 @@ fn emitStrRc(self: *Self, comptime kind: RcOpKind, str_ptr_local: u32, inc_count const is_small_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitDecodeStrAllocPtr(str_ptr_local, alloc_ptr_local, is_small_local); - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitLocalGet(is_small_local); - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; switch (kind) { .incref => try self.emitDataPtrIncref(alloc_ptr_local, inc_count), @@ -1614,7 +1740,7 @@ fn emitStrRc(self: *Self, comptime kind: RcOpKind, str_ptr_local: u32, inc_count .free => try self.emitDataPtrFree(alloc_ptr_local, 1, false), } - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } fn emitBoxRc(self: *Self, comptime kind: RcOpKind, box_ptr_local: u32, box_layout_idx: layout.Idx, inc_count: u16) Allocator.Error!void { @@ -1647,14 +1773,14 @@ fn generateIfChain(self: *Self, branches: []const LIR.LirIfBranch, final_else: L // Generate first branch condition try self.generateExpr(branches[0].cond); // if (block_type) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); // then body try self.generateExpr(branches[0].body); // else - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; // Remaining branches become nested if/else, or just the final_else if (branches.len > 1) { try self.generateIfChain(branches[1..], final_else, bt); @@ -1662,7 +1788,7 @@ fn generateIfChain(self: *Self, branches: []const LIR.LirIfBranch, final_else: L try self.generateExpr(final_else); } // end - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Generate a match expression (pattern matching). @@ -1672,7 +1798,7 @@ fn generateMatch(self: *Self, w: anytype) Allocator.Error!void { if (branches.len == 0) { // No branches — unreachable - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; return; } @@ -1681,8 +1807,8 @@ fn generateMatch(self: *Self, w: anytype) Allocator.Error!void { try self.generateExpr(w.value); const temp_local = self.storage.allocAnonymousLocal(value_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp_local) catch return error.OutOfMemory; // Generate cascading if/else for each branch try self.generateMatchBranches(branches, temp_local, value_vt, bt); @@ -1691,7 +1817,7 @@ fn generateMatch(self: *Self, w: anytype) Allocator.Error!void { fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, value_local: u32, value_vt: ValType, bt: BlockType) Allocator.Error!void { if (branches.len == 0) { // Fallthrough — unreachable - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; return; } @@ -1708,26 +1834,26 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu // Bind the value to the symbol and generate the body const local_idx = self.storage.allocLocal(bind.symbol, value_vt) catch return error.OutOfMemory; // Copy value from temp to the bound local - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; try self.generateExpr(branch.body); }, .int_literal => |int_pat| { // Compare value to the integer literal - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; // Push the pattern value switch (value_vt) { .i32 => { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @truncate(@as(i64, @truncate(int_pat.value)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @truncate(@as(i64, @truncate(int_pat.value)))) catch return error.OutOfMemory; }, .i64 => { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, @truncate(int_pat.value)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, @truncate(int_pat.value)) catch return error.OutOfMemory; }, .f32, .f64 => unreachable, } @@ -1738,17 +1864,17 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu .i64 => Op.i64_eq, .f32, .f64 => unreachable, }; - self.body.append(self.allocator, eq_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, eq_op) catch return error.OutOfMemory; // if match - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); try self.generateExpr(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateMatchBranches(remaining, value_local, value_vt, bt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .tag => |tag_pat| { // Match on tag discriminant @@ -1770,28 +1896,28 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu const disc_offset = tu_data.discriminant_offset; const disc_size: u32 = tu_data.discriminant_size; if (disc_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { // Load discriminant: value_local[disc_offset] - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; try self.emitLoadOpSized(.i32, disc_size, disc_offset); } } else { // Value is the discriminant itself - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; } // Push discriminant to compare against - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(tag_pat.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(tag_pat.discriminant)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); @@ -1814,24 +1940,24 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu // stored inline in the tag union memory. The "value" is // a pointer to the start of the data within the tag union. // Compute: value_local + payload_offset - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; if (payload_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(payload_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(payload_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } }, .primitive => { // Primitive types: load the value from memory - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; try self.emitLoadOpForLayout(bind.layout_idx, payload_offset); }, } - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; payload_offset += bind_byte_size; }, .wildcard => |wc| { @@ -1841,16 +1967,16 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu .struct_ => |inner_struct| { // Struct destructuring of tag payload field const field_byte_size = self.layoutByteSize(inner_struct.struct_layout); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; if (payload_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(payload_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(payload_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindStructPattern(field_ptr, inner_struct); payload_offset += field_byte_size; }, @@ -1864,16 +1990,16 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu const field_byte_size = self.layoutByteSize(inner_tag.union_layout); if (self.store.getPatternSpan(inner_tag.args).len != 0) { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; if (payload_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(payload_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(payload_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindTagPattern(field_ptr, inner_tag); } payload_offset += field_byte_size; @@ -1897,9 +2023,9 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu } try self.generateExpr(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateMatchBranches(remaining, value_local, value_vt, bt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .struct_ => |struct_pat| { // Struct destructuring: bind each field to a local @@ -1916,33 +2042,33 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu const bind_byte_size = self.layoutStorageByteSize(bind.layout_idx); const local_idx = self.storage.allocLocal(bind.symbol, bind_vt) catch return error.OutOfMemory; const field_offset = ls.getStructFieldOffset(l.data.struct_.idx, @intCast(i)); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; if (self.isCompositeLayout(bind.layout_idx)) { if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } } else { try self.emitLoadOpSized(bind_vt, bind_byte_size, field_offset); } - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; }, .wildcard => {}, .struct_ => |inner_struct| { const field_offset = ls.getStructFieldOffset(l.data.struct_.idx, @intCast(i)); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindStructPattern(field_ptr, inner_struct); }, else => unreachable, @@ -1953,10 +2079,10 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu .as_pattern => |as_pat| { const bind_vt = self.resolveValType(as_pat.layout_idx); const local_idx = self.storage.allocLocal(as_pat.symbol, bind_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; const inner_pat = self.store.getPattern(as_pat.inner); switch (inner_pat) { @@ -1966,10 +2092,10 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu .bind => |bind| { const inner_vt = self.resolveValType(bind.layout_idx); const inner_local = self.storage.allocLocal(bind.symbol, inner_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, inner_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, inner_local) catch return error.OutOfMemory; try self.generateExpr(branch.body); }, .struct_ => |inner_struct| { @@ -1980,37 +2106,37 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu } }, .float_literal => |float_pat| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; switch (value_vt) { .f64 => { - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; const bytes: [8]u8 = @bitCast(float_pat.value); - self.body.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; }, .f32 => { - self.body.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; const bytes: [4]u8 = @bitCast(@as(f32, @floatCast(float_pat.value))); - self.body.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_eq) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &bytes) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_eq) catch return error.OutOfMemory; }, .i32, .i64 => unreachable, } - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); try self.generateExpr(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateMatchBranches(remaining, value_local, value_vt, bt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .str_literal => |str_idx| { // String literal comparison in match branch - const import_idx = self.str_eq_import orelse unreachable; + const import_idx = self.builtin_syms.str_equal; // Generate the pattern string as a RocStr try self.generateStrLiteral(str_idx); @@ -2018,20 +2144,19 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu try self.emitLocalSet(pat_str); // Compare value with pattern using roc_str_eq - try self.emitLocalGet(value_local); - try self.emitLocalGet(pat_str); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitPtrLenCapArgs(value_local); + try self.emitPtrLenCapArgs(pat_str); + try self.emitCallBuiltin(import_idx); // if match - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); try self.generateExpr(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateMatchBranches(remaining, value_local, value_vt, bt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .list => |list_pat| { // List destructuring in match branch @@ -2040,24 +2165,24 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu const prefix_count: u32 = @intCast(prefix_patterns.len); // Load list length from RocList (offset 4) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); // Compare length - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(prefix_count)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(prefix_count)) catch return error.OutOfMemory; if (list_pat.rest.isNone()) { // Exact match: length == prefix_count - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; } else { // Has rest: length >= prefix_count - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); @@ -2068,8 +2193,8 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu const is_composite = self.isCompositeLayout(list_pat.elem_layout); // Load elements pointer from RocList (offset 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); const elems_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(elems_ptr); @@ -2083,9 +2208,9 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu // Composite: pointer = elems_ptr + offset try self.emitLocalGet(elems_ptr); if (elem_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } } else { // Scalar: load from elems_ptr + offset @@ -2099,9 +2224,9 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu .struct_ => |inner_struct| { try self.emitLocalGet(elems_ptr); if (elem_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(field_ptr); @@ -2113,9 +2238,9 @@ fn generateMatchBranches(self: *Self, branches: []const LIR.LirMatchBranch, valu } try self.generateExpr(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateMatchBranches(remaining, value_local, value_vt, bt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, } } @@ -2447,7 +2572,7 @@ fn generateStructuralEq(self: *Self, lhs: LirExprId, rhs: LirExprId, negate: boo .struct_ => { try self.compareCompositeByLayout(lhs_local, rhs_local, lay_idx); if (negate) { - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; } return; }, @@ -2456,7 +2581,7 @@ fn generateStructuralEq(self: *Self, lhs: LirExprId, rhs: LirExprId, negate: boo if (tu_info.contains_refcounted) { try self.compareTagUnionByLayout(lhs_local, rhs_local, lay_idx); if (negate) { - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; } return; } @@ -2471,7 +2596,7 @@ fn generateStructuralEq(self: *Self, lhs: LirExprId, rhs: LirExprId, negate: boo try self.emitBytewiseEq(lhs_local, rhs_local, byte_size); if (negate) { - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; } } @@ -2487,8 +2612,8 @@ fn compareCompositeByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_ const struct_data = ls.getStructData(struct_idx); const field_count = struct_data.fields.count; if (field_count == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; return; } @@ -2504,15 +2629,15 @@ fn compareCompositeByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_ try self.compareFieldByLayout(lhs_local, rhs_local, field_offset, field_size, field_layout_idx); if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; } if (first) { // All fields were zero-size - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; } }, else => { @@ -2539,8 +2664,8 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i // Load LHS discriminant if (disc_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { try self.emitLocalGet(lhs_local); try self.emitLoadBySize(disc_size, disc_offset); @@ -2550,8 +2675,8 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i // Load RHS discriminant if (disc_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { try self.emitLocalGet(rhs_local); try self.emitLoadBySize(disc_size, disc_offset); @@ -2562,7 +2687,7 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i // Compare discriminants try self.emitLocalGet(lhs_disc); try self.emitLocalGet(rhs_disc); - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; const disc_eq_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(disc_eq_local); @@ -2577,12 +2702,12 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i // Only compare payloads if discriminants are equal // Only compare payloads if discriminants are equal // block { if disc_ne: br 0; ... payload comparison ... } end - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, 0x40) catch return error.OutOfMemory; // void block type + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x40) catch return error.OutOfMemory; // void block type try self.emitLocalGet(disc_eq_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; // disc_ne - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // break out of block + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; // disc_ne + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // break out of block // Payload comparison: compare based on variant // For simplicity, compare the payload bytes up to discriminant_offset @@ -2597,8 +2722,8 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i const payload_eq_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; // Default: payload equal (1) - will be overwritten - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalSet(payload_eq_local); for (0..variants.len) |variant_i| { @@ -2608,16 +2733,16 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i if (variant_payload_size == 0) continue; // Check: if (disc == variant_i) { compare payload with this variant's layout } - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, 0x40) catch return error.OutOfMemory; // void block + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x40) catch return error.OutOfMemory; // void block // Skip if disc != variant_i try self.emitLocalGet(lhs_disc); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(variant_i)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(variant_i)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // This variant matches - compare its payload const variant_layout_tag = ls.getLayout(variant_payload_layout).tag; @@ -2633,18 +2758,18 @@ fn compareTagUnionByLayout(self: *Self, lhs_local: u32, rhs_local: u32, layout_i } try self.emitLocalSet(payload_eq_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block } // result = disc_eq AND payload_eq try self.emitLocalGet(disc_eq_local); try self.emitLocalGet(payload_eq_local); - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; try self.emitLocalSet(result_local); } } - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end outer block + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end outer block // Push result try self.emitLocalGet(result_local); @@ -2661,22 +2786,17 @@ fn compareFieldByLayout( field_layout_idx: layout.Idx, ) Allocator.Error!void { if (field_layout_idx == .str) { - // String: call roc_str_eq(lhs_ptr + offset, rhs_ptr + offset) - const import_idx = self.str_eq_import orelse unreachable; - try self.emitLocalGet(lhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - try self.emitLocalGet(rhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + // String: compare the embedded RocStr fields at the struct offset. + const import_idx = self.builtin_syms.str_equal; + const lhs_field_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + const rhs_field_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalGetPlusOffset(lhs_local, field_offset); + try self.emitLocalSet(lhs_field_local); + try self.emitLocalGetPlusOffset(rhs_local, field_offset); + try self.emitLocalSet(rhs_field_local); + try self.emitPtrLenCapArgs(lhs_field_local); + try self.emitPtrLenCapArgs(rhs_field_local); + try self.emitCallBuiltin(import_idx); return; } @@ -2685,89 +2805,44 @@ fn compareFieldByLayout( const field_layout = ls.getLayout(field_layout_idx); switch (field_layout.tag) { .list => { - // List: call roc_list_eq or roc_list_str_eq + // List: compare the embedded RocList fields at the struct offset. + const lhs_list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + const rhs_list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalGetPlusOffset(lhs_local, field_offset); + try self.emitLocalSet(lhs_list_local); + try self.emitLocalGetPlusOffset(rhs_local, field_offset); + try self.emitLocalSet(rhs_list_local); + const elem_layout = field_layout.data.list; if (elem_layout == .str) { - const import_idx = self.list_str_eq_import orelse unreachable; - try self.emitLocalGet(lhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - try self.emitLocalGet(rhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.list_str_eq; + try self.emitPtrLenCapArgs(lhs_list_local); + try self.emitPtrLenCapArgs(rhs_list_local); + try self.emitCallBuiltin(import_idx); } else if (ls.getLayout(elem_layout).tag == .list) { // List of lists - use specialized host function with inner element size const inner_elem_layout = ls.getLayout(elem_layout).data.list; const inner_elem_size = self.layoutByteSize(inner_elem_layout); - const import_idx = self.list_list_eq_import orelse unreachable; - try self.emitLocalGet(lhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - try self.emitLocalGet(rhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(inner_elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.list_list_eq; + try self.emitPtrLenCapArgs(lhs_list_local); + try self.emitPtrLenCapArgs(rhs_list_local); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(inner_elem_size)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } else if (ls.layoutContainsRefcounted(ls.getLayout(elem_layout))) { // Composite elements (records/tuples/tag-unions with refcounted fields): // inline element-by-element structural comparison loop. const elem_size = self.layoutByteSize(elem_layout); - const lhs_list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - const rhs_list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - - try self.emitLocalGet(lhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - try self.emitLocalSet(lhs_list_local); - - try self.emitLocalGet(rhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - try self.emitLocalSet(rhs_list_local); - try self.emitListEqLoop(lhs_list_local, rhs_list_local, elem_layout, elem_size); } else { // Simple scalar elements: bytewise comparison via host function - const import_idx = self.list_eq_import orelse unreachable; + const import_idx = self.builtin_syms.list_eq; const elem_size = self.layoutByteSize(elem_layout); - try self.emitLocalGet(lhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - try self.emitLocalGet(rhs_local); - if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - } - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitPtrLenCapArgs(lhs_list_local); + try self.emitPtrLenCapArgs(rhs_list_local); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } }, .struct_, .tag_union => { @@ -2777,17 +2852,17 @@ fn compareFieldByLayout( try self.emitLocalGet(lhs_local); if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } try self.emitLocalSet(lhs_field_local); try self.emitLocalGet(rhs_local); if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } try self.emitLocalSet(rhs_field_local); @@ -2844,7 +2919,7 @@ fn emitListEqLoop( // result = (lhs_len == rhs_len) try self.emitLocalGet(lhs_len); try self.emitLocalGet(rhs_len); - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; try self.emitLocalSet(result_local); // Load data pointers (offset 0 in RocList) @@ -2857,45 +2932,45 @@ fn emitListEqLoop( try self.emitLocalSet(rhs_data); // idx = 0 - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(idx_local); // block { loop { - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // if result == 0, break (lengths didn't match or previous elem failed) try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // if idx >= lhs_len, break (all elements compared) try self.emitLocalGet(idx_local); try self.emitLocalGet(lhs_len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // lhs_elem = lhs_data + idx * elem_size try self.emitLocalGet(lhs_data); try self.emitLocalGet(idx_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(lhs_elem); // rhs_elem = rhs_data + idx * elem_size try self.emitLocalGet(rhs_data); try self.emitLocalGet(idx_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rhs_elem); // Compare elements structurally: pushes i32 result onto stack @@ -2903,23 +2978,23 @@ fn emitListEqLoop( // result = result AND elem_eq try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; try self.emitLocalSet(result_local); // idx++ try self.emitLocalGet(idx_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(idx_local); // br 0 (continue loop) - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // end loop, end block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Push final result try self.emitLocalGet(result_local); @@ -2930,8 +3005,8 @@ fn emitListEqLoop( /// Pushes an i32 (1=equal, 0=not equal) onto the WASM stack. fn emitBytewiseEq(self: *Self, lhs_local: u32, rhs_local: u32, byte_size: u32) Allocator.Error!void { if (byte_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; return; } @@ -2940,36 +3015,36 @@ fn emitBytewiseEq(self: *Self, lhs_local: u32, rhs_local: u32, byte_size: u32) A while (offset + 4 <= byte_size) : (offset += 4) { try self.emitLocalGet(lhs_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; } if (offset + 2 <= byte_size) { try self.emitLocalGet(lhs_local); - self.body.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; offset += 2; @@ -2977,25 +3052,25 @@ fn emitBytewiseEq(self: *Self, lhs_local: u32, rhs_local: u32, byte_size: u32) A if (offset < byte_size) { try self.emitLocalGet(lhs_local); - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; } if (first) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; } } @@ -3003,8 +3078,8 @@ fn emitBytewiseEq(self: *Self, lhs_local: u32, rhs_local: u32, byte_size: u32) A /// Pushes an i32 (1=equal, 0=not equal) onto the WASM stack. fn emitBytewiseEqAtOffset(self: *Self, lhs_local: u32, rhs_local: u32, base_offset: u32, byte_size: u32) Allocator.Error!void { if (byte_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; return; } @@ -3013,36 +3088,36 @@ fn emitBytewiseEqAtOffset(self: *Self, lhs_local: u32, rhs_local: u32, base_offs while (offset + 4 <= byte_size) : (offset += 4) { try self.emitLocalGet(lhs_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + offset) catch return error.OutOfMemory; try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; } if (offset + 2 <= byte_size) { try self.emitLocalGet(lhs_local); - self.body.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + offset) catch return error.OutOfMemory; try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; offset += 2; @@ -3050,25 +3125,25 @@ fn emitBytewiseEqAtOffset(self: *Self, lhs_local: u32, rhs_local: u32, base_offs if (offset < byte_size) { try self.emitLocalGet(lhs_local); - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + offset) catch return error.OutOfMemory; try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; if (!first) { - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } first = false; } if (first) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; } } @@ -3077,31 +3152,31 @@ fn emitBytewiseEqAtOffset(self: *Self, lhs_local: u32, rhs_local: u32, base_offs fn emitLoadBySize(self: *Self, disc_size: u8, offset: u16) Allocator.Error!void { switch (disc_size) { 0 => { - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; }, 1 => { - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; }, 2 => { - self.body.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; }, 4 => { - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; }, 8 => { - self.body.append(self.allocator, Op.i64_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 3) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 3) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; // Wrap to i32 since callers use i32 locals for discriminant values - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; }, else => unreachable, } @@ -3148,55 +3223,41 @@ fn generateCompositeNumericOp(self: *Self, op: anytype, args: []const LirExprId, .num_minus => try self.emitI128Sub(lhs_local, rhs_local), .num_times => { if (operand_layout == .dec) { - const import_idx = self.dec_mul_import orelse unreachable; - const result_offset = try self.allocStackMemory(16, 8); - const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.dec_mul; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, false); return; } try self.emitI128Mul(lhs_local, rhs_local); }, .num_div_by => { if (operand_layout == .dec) { - const import_idx = self.dec_div_import orelse unreachable; - try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx); + const import_idx = self.builtin_syms.dec_div; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, true); } else { const is_signed = operand_layout == .i128; - const import_idx = if (is_signed) self.i128_div_s_import else self.u128_div_import; - try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx orelse unreachable); + const import_idx = if (is_signed) self.builtin_syms.i128_div_s else self.builtin_syms.u128_div; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, true); } }, .num_div_trunc_by => { if (operand_layout == .dec) { - const import_idx = self.dec_div_trunc_import orelse unreachable; - try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx); + const import_idx = self.builtin_syms.dec_div_trunc; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, true); } else { const is_signed = operand_layout == .i128; - const import_idx = if (is_signed) self.i128_div_s_import else self.u128_div_import; - try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx orelse unreachable); + const import_idx = if (is_signed) self.builtin_syms.i128_div_s else self.builtin_syms.u128_div; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, true); } }, .num_rem_by => { const is_signed = operand_layout == .i128 or operand_layout == .dec; - const import_idx = if (is_signed) self.i128_mod_s_import else self.u128_mod_import; - try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx orelse unreachable); + const import_idx = if (is_signed) self.builtin_syms.i128_mod_s else self.builtin_syms.u128_mod; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, true); }, .num_mod_by => { const is_signed = operand_layout == .i128 or operand_layout == .dec; - const import_idx = if (is_signed) self.i128_mod_s_import else self.u128_mod_import; - try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx orelse unreachable); + const import_idx = if (is_signed) self.builtin_syms.i128_mod_s else self.builtin_syms.u128_mod; + try self.emitI128HostBinOp(lhs_local, rhs_local, import_idx, true); }, .num_is_gt => try self.emitI128Compare(lhs_local, rhs_local, .gt), .num_is_gte => try self.emitI128Compare(lhs_local, rhs_local, .gte), @@ -3205,12 +3266,12 @@ fn generateCompositeNumericOp(self: *Self, op: anytype, args: []const LirExprId, .num_abs_diff => { const is_signed = operand_layout == .i128 or operand_layout == .dec; try self.emitI128CompareWithSignedness(lhs_local, rhs_local, .gte, is_signed); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; try self.emitI128Sub(lhs_local, rhs_local); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.emitI128Sub(rhs_local, lhs_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, else => unreachable, } @@ -3233,18 +3294,77 @@ fn generateCompositeNumericOp(self: *Self, op: anytype, args: []const LirExprId, /// Emit an i128 binary operation via host function call. /// The host function takes (lhs_ptr, rhs_ptr, result_ptr) and returns void. /// Pushes an i32 pointer to the 16-byte result on the wasm stack. -fn emitI128HostBinOp(self: *Self, lhs_local: u32, rhs_local: u32, import_idx: u32) Allocator.Error!void { - const result_offset = try self.allocStackMemory(16, 8); +// --- Phase 8c: Builtin Call Helpers --- + +/// Resolve any pending relocatable call placeholders in code_builder. +fn resolvePendingRelocations(self: *Self) void { + for (self.code_builder.import_relocations.items) |reloc| { + const sym = self.module.linking.symbol_table.items[reloc.symbol_index]; + WasmModule.overwritePaddedU32(self.code_builder.code.items, reloc.code_pos, sym.index); + } + self.code_builder.import_relocations.clearRetainingCapacity(); +} + +fn allocStackResultPtr(self: *Self, size: u32, alignment: u32) Allocator.Error!u32 { + const result_offset = try self.allocStackMemory(size, alignment); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); try self.emitLocalSet(result_local); + return result_local; +} + +fn emitCallBuiltin(self: *Self, func_idx: u32) Allocator.Error!void { + self.code_builder.code.append(self.allocator, Op.call) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, func_idx) catch return error.OutOfMemory; +} + +fn emitLocalGetPlusOffset(self: *Self, base_local: u32, offset: u32) Allocator.Error!void { + try self.emitLocalGet(base_local); + if (offset == 0) return; + + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; +} + +fn emitLoadI32FromPtr(self: *Self, ptr_local: u32, offset: u32) Allocator.Error!void { + try self.emitLocalGet(ptr_local); + try self.emitLoadOp(.i32, offset); +} + +fn emitLoadI64FromPtr(self: *Self, ptr_local: u32, offset: u32) Allocator.Error!void { + try self.emitLocalGet(ptr_local); + try self.emitLoadOp(.i64, offset); +} + +/// Emit the raw `{ ptr/bytes, len, cap }` fields used by RocStr and RocList builtins. +fn emitPtrLenCapArgs(self: *Self, value_local: u32) Allocator.Error!void { + try self.emitLoadI32FromPtr(value_local, 0); + try self.emitLoadI32FromPtr(value_local, 4); + try self.emitLoadI32FromPtr(value_local, 8); +} + +/// Emit a wasm32 ABI i128/u128/Dec value as two i64 halves. +fn emitI128AbiArgs(self: *Self, value_local: u32) Allocator.Error!void { + try self.emitLoadI64FromPtr(value_local, 0); + try self.emitLoadI64FromPtr(value_local, 8); +} + +/// Emit an i128/Dec builtin call using the merged wasm32 ABI. +/// Builtins take two output pointers (`out_low`, `out_high`), followed by the +/// decomposed lhs/rhs words, and optionally the trailing `roc_ops` pointer. +fn emitI128HostBinOp(self: *Self, lhs_local: u32, rhs_local: u32, import_idx: u32, needs_roc_ops: bool) Allocator.Error!void { + const result_local = try self.allocStackResultPtr(16, 8); - // Call host function: (lhs_ptr, rhs_ptr, result_ptr) -> void - try self.emitLocalGet(lhs_local); - try self.emitLocalGet(rhs_local); try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitLocalGetPlusOffset(result_local, 8); + try self.emitI128AbiArgs(lhs_local); + try self.emitI128AbiArgs(rhs_local); + if (needs_roc_ops) { + try self.emitLocalGet(self.roc_ops_local); + } + + try self.emitCallBuiltin(import_idx); // Push result pointer try self.emitLocalGet(result_local); @@ -3258,84 +3378,84 @@ fn emitI128Add(self: *Self, lhs_local: u32, rhs_local: u32) Allocator.Error!void const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Pre-load all operand words into locals (prevents aliasing with result memory) const a_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_low) catch return error.OutOfMemory; const a_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; const b_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_low) catch return error.OutOfMemory; const b_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; // result_low = a_low + b_low const result_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low) catch return error.OutOfMemory; // Store result_low - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // carry = (result_low < a_low) ? 1 : 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; // result_high = a_high + b_high + carry - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // Store result_high const result_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_high) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Emit i128 subtraction: result = lhs - rhs @@ -3346,94 +3466,91 @@ fn emitI128Sub(self: *Self, lhs_local: u32, rhs_local: u32) Allocator.Error!void const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Pre-load all operand words into locals (prevents aliasing with result memory) const a_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_low) catch return error.OutOfMemory; const a_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; const b_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_low) catch return error.OutOfMemory; const b_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; // result_low = a_low - b_low const result_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low) catch return error.OutOfMemory; // Store result_low - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // borrow = (a_low < b_low) ? 1 : 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; const borrow_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, borrow_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, borrow_local) catch return error.OutOfMemory; // result_high = (a_high - b_high) - borrow - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, borrow_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, borrow_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; // Store result_high const result_high_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_high_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_high_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_high_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_high_local) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } -/// Emit i128 × i128 → i128 truncating multiply. -/// Takes two i32 pointers to 16-byte i128 values in linear memory. -/// Pushes an i32 pointer to the 16-byte result. /// /// Algorithm: /// a = (a_hi, a_lo), b = (b_hi, b_lo) (each hi/lo is i64) @@ -3446,207 +3563,207 @@ fn emitI128Mul(self: *Self, lhs_local: u32, rhs_local: u32) Allocator.Error!void const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Pre-load all operand words into locals const a_lo = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_lo) catch return error.OutOfMemory; const a_hi = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_hi) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_hi) catch return error.OutOfMemory; const b_lo = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_lo) catch return error.OutOfMemory; const b_hi = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_hi) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_hi) catch return error.OutOfMemory; // --- result_lo = a_lo * b_lo (truncating i64.mul) --- const result_lo_val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_lo_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_lo_val) catch return error.OutOfMemory; // Store result_lo - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_lo_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_lo_val) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // --- Compute high64(a_lo * b_lo) using 32-bit schoolbook method --- // Split a_lo into 32-bit halves: al0 = a_lo & 0xFFFFFFFF, al1 = a_lo >> 32 const al0 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; const al1 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0xFFFFFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, al0) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, al1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0xFFFFFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, al0) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, al1) catch return error.OutOfMemory; // Split b_lo similarly const bl0 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; const bl1 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0xFFFFFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, bl0) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, bl1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0xFFFFFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, bl0) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, bl1) catch return error.OutOfMemory; // t = al0 * bl0 const t = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, al0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, bl0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, t) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, al0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, bl0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, t) catch return error.OutOfMemory; // cross = (t >> 32) + al1*bl0 const cross = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, t) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, al1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, bl0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, t) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, al1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, bl0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; // cross = cross + al0*bl1 (may overflow u64 — need to track carry) // Save old cross for overflow detection const old_cross = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_cross) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, al0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, bl1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_cross) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, al0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, bl1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; // carry = (cross < old_cross) ? 1 : 0 (unsigned overflow detection) const carry = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, carry) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, carry) catch return error.OutOfMemory; // high64_of_lo_mul = al1*bl1 + (cross >> 32) + (carry << 32) const hi_of_lo_mul = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, al1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, bl1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, al1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, bl1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // Add carry << 32 (carry is 0 or 1, so carry << 32 is 0 or 0x100000000) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, carry) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shl) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, hi_of_lo_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, carry) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shl) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, hi_of_lo_mul) catch return error.OutOfMemory; // --- result_hi = high64(a_lo * b_lo) + (a_lo * b_hi) + (a_hi * b_lo) --- // Start with hi_of_lo_mul - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, hi_of_lo_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, hi_of_lo_mul) catch return error.OutOfMemory; // + a_lo * b_hi (truncating, only lower 64 bits matter) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_hi) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_hi) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // + a_hi * b_lo (truncating, only lower 64 bits matter) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_hi) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_lo) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_hi) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_lo) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // Store result_hi const result_hi_val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_hi_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_hi_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_hi_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_hi_val) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } const I128CmpOp = enum { lt, lte, gt, gte }; @@ -3669,36 +3786,36 @@ fn emitI128CompareWithSignedness(self: *Self, lhs_local: u32, rhs_local: u32, cm // result = a_high b_high // Load high words - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); const a_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); const b_high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; // if (a_high == b_high) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; // if (result is i32) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; // Then: compare low words unsigned - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); const low_cmp: u8 = switch (cmp_op) { .lt => Op.i64_lt_u, @@ -3706,24 +3823,24 @@ fn emitI128CompareWithSignedness(self: *Self, lhs_local: u32, rhs_local: u32, cm .gt => Op.i64_gt_u, .gte => Op.i64_ge_u, }; - self.body.append(self.allocator, low_cmp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, low_cmp) catch return error.OutOfMemory; // Else: compare high words signed - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_high) catch return error.OutOfMemory; const high_cmp: u8 = switch (cmp_op) { .lt => if (is_signed) Op.i64_lt_s else Op.i64_lt_u, .lte => if (is_signed) Op.i64_le_s else Op.i64_le_u, .gt => if (is_signed) Op.i64_gt_s else Op.i64_gt_u, .gte => if (is_signed) Op.i64_ge_s else Op.i64_ge_u, }; - self.body.append(self.allocator, high_cmp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, high_cmp) catch return error.OutOfMemory; // End if - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Emit i128 bitwise operation (AND, OR, XOR) on both halves. @@ -3732,8 +3849,8 @@ fn emitI128CompareWithSignedness(self: *Self, lhs_local: u32, rhs_local: u32, cm fn generateCompositeI128Negate(self: *Self, expr: LirExprId, _: layout.Idx) Allocator.Error!void { try self.generateExpr(expr); const src_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; try self.emitCompositeI128NegateFromLocal(src_local); } @@ -3743,8 +3860,8 @@ fn emitCompositeI128NegateFromLocal(self: *Self, src_local: u32) Allocator.Error const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Two's complement: -x = ~x + 1 // low = ~a_low + 1 @@ -3752,81 +3869,81 @@ fn emitCompositeI128NegateFromLocal(self: *Self, src_local: u32) Allocator.Error // high = ~a_high + carry // Compute ~a_low + 1 - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, -1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, -1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_xor) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_xor) catch return error.OutOfMemory; // Stack: ~a_low - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // Stack: result_low = ~a_low + 1 const result_low_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low_local) catch return error.OutOfMemory; // Store result_low - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low_local) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // carry = (result_low == 0) ? 1 : 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_low_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_low_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; // high = ~a_high + carry - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, -1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, -1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.i64_xor) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_xor) catch return error.OutOfMemory; // Stack: [carry, ~a_high] - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // Stack: [result_high] // Store result_high const result_high_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_high_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_high_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_high_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_high_local) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } fn generateCompositeI128Abs(self: *Self, expr: LirExprId) Allocator.Error!void { try self.generateExpr(expr); const src_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_lt_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; try self.emitCompositeI128NegateFromLocal(src_local); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Multiply two i64 values producing a 128-bit result stored in stack memory. @@ -3843,55 +3960,55 @@ fn emitI64MulToI128(self: *Self, a_local: u32, b_local: u32) Allocator.Error!voi const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Split a into 32-bit halves: a0 = a & 0xFFFFFFFF, a1 = a >> 32 const a0 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; const a1 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; // a0 = a & 0xFFFFFFFF - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0xFFFFFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0xFFFFFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a0) catch return error.OutOfMemory; // a1 = a >>> 32 (unsigned shift) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a1) catch return error.OutOfMemory; // Split b similarly const b0 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; const b1 = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0xFFFFFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0xFFFFFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b1) catch return error.OutOfMemory; // Compute low = a * b (truncating multiply gives lower 64 bits) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // Compute high word using schoolbook method: @@ -3901,149 +4018,86 @@ fn emitI64MulToI128(self: *Self, a_local: u32, b_local: u32) Allocator.Error!voi // t = a0 * b0 const t = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, t) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, t) catch return error.OutOfMemory; // cross1 = (t >> 32) + a1*b0 const cross = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, t) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, t) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; // cross2 = cross1 + a0*b1 (can carry past 64 bits — must track carry) // Save old cross for overflow detection const old_cross = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_cross) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_cross) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; // carry = (cross < old_cross) ? 1 : 0 (unsigned overflow detection) const carry = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, carry) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, carry) catch return error.OutOfMemory; // high = a1*b1 + (cross >> 32) + (carry << 32) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, cross) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, cross) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; // Add carry << 32 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, carry) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shl) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, carry) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shl) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_add) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; -} - -/// Emit i128 signed division: result = a / b (truncating). -/// Takes two i32 pointers to 16-byte i128 values. -/// For Dec→int conversions, we only need division by a constant (10^18). -/// This implementation handles the general case for positive divisors. -fn emitI128DivByConst(self: *Self, numerator_local: u32, divisor_val: i64) Allocator.Error!void { - // For Dec→int: we divide by 10^18 (positive constant). - // Strategy: use signed division. - // For simplicity, handle only the case where the numerator fits in i64 - // after division (which is always true for Dec→i64 and smaller). - // - // result = (i128 as i64-pair) / divisor - // Since divisor fits in i64 and result fits in i64, we can compute: - // result = ((high * 2^64) + low) / divisor - // - // For signed division when high == 0 or high == -1 (sign extension), - // the value fits in i64 and we can do i64.div_s directly. - // - // General approach: extract the full i128, then truncate to i64 and divide. - // This works because the result of Dec→int always fits in i64. - - const result_offset = try self.allocStackMemory(16, 8); - const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - - // Load the low i64 from the numerator - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, numerator_local) catch return error.OutOfMemory; - try self.emitLoadOp(.i64, 0); - - // Divide by divisor - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, divisor_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - - // Store as i128 (sign-extend to high word) - const quotient = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, quotient) catch return error.OutOfMemory; - - // Store low word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, quotient) catch return error.OutOfMemory; - try self.emitStoreOp(.i64, 0); - - // Store high word (sign extension) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, quotient) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 63) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; - try self.emitStoreOp(.i64, 8); - - // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Convert an i64 value on the wasm stack to a 16-byte i128 in stack memory. @@ -4054,41 +4108,41 @@ fn emitIntToI128(self: *Self, signed: bool) Allocator.Error!void { const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Save the i64 value from the stack const val_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; // Store low word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // Store high word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; if (signed) { // Sign extend: high = value >> 63 (arithmetic shift) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 63) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 63) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; } else { // Zero extend: high = 0 - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Convert f64 value (in val_local) to i128, storing result at result_local pointer. @@ -4096,50 +4150,50 @@ fn emitIntToI128(self: *Self, signed: bool) Allocator.Error!void { /// Pushes the result pointer onto the stack. fn emitF64ToI128(self: *Self, val_local: u32, result_local: u32, signed: bool) Allocator.Error!void { // high = trunc(val / 2^64) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_div) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_trunc) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_div) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_trunc) catch return error.OutOfMemory; const high_f = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high_f) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high_f) catch return error.OutOfMemory; if (signed) { - self.body.append(self.allocator, Op.i64_trunc_f64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f64_s) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i64_trunc_f64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f64_u) catch return error.OutOfMemory; } const high_i = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high_i) catch return error.OutOfMemory; // low = (val - high_f * 2^64) as u64 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high_f) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_trunc_f64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high_f) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f64_u) catch return error.OutOfMemory; const low_i = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low_i) catch return error.OutOfMemory; // Store low and high words - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low_i) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high_i) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Emit float-to-int try_unsafe conversion. @@ -4154,87 +4208,87 @@ fn emitFloatToIntTryUnsafe(self: *Self, val_size: u32, is_i64: bool, min_f: f64, const result_offset = try self.allocStackMemory(total_size, alignment); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Save the f64 value const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; // Compute is_int: !isNaN(val) && !isInf(val) && trunc(val) == val // !isNaN: val == val - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; // !isInf: abs(val) != inf - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(std.math.inf(f64)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(std.math.inf(f64)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; // trunc(val) == val - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_trunc) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_trunc) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; const is_int = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, is_int) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, is_int) catch return error.OutOfMemory; // Compute in_range: val >= min_f && val <= max_f - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(min_f))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_ge) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(max_f))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_le) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(min_f))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_ge) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(max_f))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_le) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; const in_range = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, in_range) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, in_range) catch return error.OutOfMemory; // Store value (only if is_int && in_range — but for try_unsafe we always store) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; if (is_i64) { - self.body.append(self.allocator, Op.i64_trunc_f64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f64_s) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); } else { - self.body.append(self.allocator, Op.i32_trunc_f64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_trunc_f64_s) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); } // Store is_int at offset val_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, is_int) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, is_int) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, val_size); // Store in_range at offset val_size + 1 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, in_range) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, in_range) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, val_size + 1); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Emit f64→i128/u128 try_unsafe conversion. @@ -4245,93 +4299,93 @@ fn emitFloatToI128TryUnsafe(self: *Self, signed: bool) Allocator.Error!void { const result_offset = try self.allocStackMemory(24, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Save the f64 value const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; // Compute is_int: !isNaN(val) && !isInf(val) && trunc(val) == val - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(std.math.inf(f64)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_trunc) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(std.math.inf(f64)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_trunc) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; const is_int = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, is_int) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, is_int) catch return error.OutOfMemory; // Compute in_range using f64 bounds if (signed) { // i128 range: roughly -1.7e38 to 1.7e38 const min_f: f64 = -170141183460469231731687303715884105728.0; const max_f: f64 = 170141183460469231731687303715884105727.0; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(min_f))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_ge) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(max_f))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_le) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(min_f))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_ge) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(max_f))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_le) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } else { // u128 range: 0 to ~3.4e38 const max_f: f64 = 340282366920938463463374607431768211455.0; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 0.0)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_ge) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(max_f))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_le) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 0.0)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_ge) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(max_f))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_le) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } const in_range = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, in_range) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, in_range) catch return error.OutOfMemory; // Convert f64 to i128 and store as val (16 bytes at offset 0) try self.emitF64ToI128(val, result_local, signed); // Store is_int at offset 16 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, is_int) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, is_int) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 16); // Store in_range at offset 17 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, in_range) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, in_range) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 17); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Emit an integer try conversion that returns a Result(TargetInt, {}) tag union. @@ -4351,8 +4405,8 @@ fn emitIntTryResult( .i64 => self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory, .f32, .f64 => unreachable, }; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; // Allocate result const total_size = disc_offset + 1; @@ -4361,14 +4415,14 @@ fn emitIntTryResult( const result_offset = try self.allocStackMemory(padded, alignment); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Zero out discriminant (Err by default) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, disc_offset); return .{ .result_local = result_local, .val_local = val_local }; @@ -4385,12 +4439,12 @@ fn emitIntTryOk( disc_offset: u32, ) Allocator.Error!void { // Store value - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val_local) catch return error.OutOfMemory; if (src_vt == .i64 and payload_size <= 4) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, payload_size, 0); } else if (src_vt == .i32 and payload_size < 4) { try self.emitStoreOpSized(.i32, payload_size, 0); @@ -4401,10 +4455,10 @@ fn emitIntTryOk( } // Set discriminant to 1 (Ok) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, disc_offset); } @@ -4421,24 +4475,24 @@ fn emitI128TryNarrow( ) Allocator.Error!void { // Save source pointer const src_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; // Load low i64 from [src_ptr + 0] - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); const low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; // Load high i64 from [src_ptr + 8] - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); const high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; // Determine result layout // Payload is the target type in wasm representation @@ -4452,14 +4506,14 @@ fn emitI128TryNarrow( const result_offset = try self.allocStackMemory(padded, alignment); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Zero discriminant (Err by default) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, disc_offset); // Build the range check condition @@ -4468,34 +4522,34 @@ fn emitI128TryNarrow( // For signed target from signed source: high must be sign-extension of low's upper bits if (!signed_target) { // Unsigned target: high == 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; if (target_bytes < 8) { // AND low <= max_unsigned_target - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; const max_val: i64 = (@as(i64, 1) << @intCast(target_bytes * 8)) - 1; - WasmModule.leb128WriteI64(self.allocator, &self.body, max_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, max_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } // For target_bytes == 8: just high == 0 is sufficient } else if (!signed_source) { // Signed target from unsigned source: high == 0 AND low <= max_signed_target - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; const max_signed: i64 = (@as(i64, 1) << @intCast(target_bytes * 8 - 1)) - 1; - WasmModule.leb128WriteI64(self.allocator, &self.body, max_signed) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, max_signed) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } else { // Signed target from signed source (i128 → i8/i16/i32/i64) // Value fits if sign-extending the low N bits back to i128 gives the same value. @@ -4515,76 +4569,76 @@ fn emitI128TryNarrow( // Sign-extend low from target_bytes: const bit_count = target_bytes * 8; const sign_ext_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; // Shift left by (64 - bit_count), then arithmetic shift right by (64 - bit_count) - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, @intCast(64 - bit_count)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shl) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, @intCast(64 - bit_count)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sign_ext_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, @intCast(64 - bit_count)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shl) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, @intCast(64 - bit_count)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sign_ext_low) catch return error.OutOfMemory; // Check: sign_ext_low == low - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sign_ext_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sign_ext_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; // AND high == (sign_ext_low >> 63) (sign extension of the sign-extended low) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sign_ext_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 63) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sign_ext_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 63) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; } else { // i128 → i64: high must equal (low >> 63) (sign extension) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 63) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 63) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_shr_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; } } // If condition is true, store Ok result - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // Store payload (truncated value) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; if (target_bytes <= 4) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, target_bytes, 0); } else { try self.emitStoreOp(.i64, 0); } // Set discriminant = 1 (Ok) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, disc_offset); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Emit i128 → u128 try conversion (or signed widening → u128 try). @@ -4592,144 +4646,144 @@ fn emitI128TryNarrow( /// Result is a Result(U128, {}) — 16-byte payload at offset 0, disc at offset 16. fn emitI128TryToU128(self: *Self, _: bool) Allocator.Error!void { const src_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; // Load high word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); const high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; // Load low word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); const low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; // Allocate result: 16 bytes payload + 1 byte disc, aligned to 8 const result_offset = try self.allocStackMemory(24, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Zero discriminant at offset 16 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 16); // Check: high >= 0 (sign bit not set) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // Store payload (copy both words) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Set disc = 1 (Ok) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 16); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Emit u128 → i128 try conversion. /// Source is an i32 pointer to 16 bytes. Check value < 2^127 (high bit not set). fn emitI128TryToI128(self: *Self) Allocator.Error!void { const src_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; // Load high word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); const high = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; // Load low word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); const low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; // Allocate result: 16 bytes payload + 1 byte disc, aligned to 8 const result_offset = try self.allocStackMemory(24, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Zero discriminant - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 16); // Check: high >= 0 (MSB not set, value < 2^127) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // Store payload - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, low) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, high) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, high) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 8); // Set disc = 1 (Ok) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 16); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; } /// Resolve a layout.Idx to its wasm ValType, using the layout store for dynamic indices. @@ -4766,42 +4820,38 @@ fn emitHeapAlloc(self: *Self, size_local: u32, alignment: u32) Allocator.Error!v // Write alignment field try self.emitFpOffset(alloc_slot); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(alignment)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(alignment)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // Write length field try self.emitFpOffset(alloc_slot); try self.emitLocalGet(size_local); - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; // Push call_indirect args: (alloc_args_ptr, env_ptr) - try self.emitFpOffset(alloc_slot); // args_ptr - try self.emitLocalGet(self.roc_ops_local); // load roc_ops_ptr - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; // load env from offset 0 - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + try self.emitFpOffset(alloc_slot); + try self.emitLocalGet(self.roc_ops_local); + try self.emitLoadOp(.i32, WasmRocOps.env_ptr); - // Load roc_alloc table index from roc_ops_ptr offset 4 + // Load roc_alloc table index from RocOps struct try self.emitLocalGet(self.roc_ops_local); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + try self.emitLoadOp(.i32, WasmRocOps.roc_alloc_table_idx); // call_indirect with RocOps function type, table 0 - self.body.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, self.roc_ops_type_idx) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // table index 0 + self.code_builder.code.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, self.roc_ops_type_idx) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // table index 0 // Read answer from struct (offset +8) → result pointer on stack try self.emitFpOffset(alloc_slot); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 8) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 8) catch return error.OutOfMemory; } /// Emit heap allocation via roc_alloc with a constant size. @@ -4809,19 +4859,24 @@ fn emitHeapAlloc(self: *Self, size_local: u32, alignment: u32) Allocator.Error!v fn emitHeapAllocConst(self: *Self, size: u32, alignment: u32) Allocator.Error!void { // Store size in a temp local, then delegate to emitHeapAlloc const size_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, size_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, size_local) catch return error.OutOfMemory; try self.emitHeapAlloc(size_local, alignment); } /// Emit i32.store to a func_body buffer: stores a local's value at memory offset 0 + field_offset. /// Used during main() prologue to build the RocOps struct. -fn emitI32StoreToBody(self: *Self, func_body: *std.ArrayList(u8), field_offset: u32, local_idx: u32, _: ?void) Allocator.Error!void { - // i32.const 0 (base address of RocOps struct) - func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, func_body, 0) catch return error.OutOfMemory; +fn emitI32StoreToBody(self: *Self, func_body: *std.ArrayList(u8), field_offset: u32, local_idx: u32, base_local: ?u32) Allocator.Error!void { + // base address: local.get $base_local or i32.const 0 + if (base_local) |bl| { + func_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, bl) catch return error.OutOfMemory; + } else { + func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, func_body, 0) catch return error.OutOfMemory; + } // local.get $local_idx func_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; WasmModule.leb128WriteU32(self.allocator, func_body, local_idx) catch return error.OutOfMemory; @@ -4831,12 +4886,17 @@ fn emitI32StoreToBody(self: *Self, func_body: *std.ArrayList(u8), field_offset: WasmModule.leb128WriteU32(self.allocator, func_body, field_offset) catch return error.OutOfMemory; } -/// Emit i32.store to a func_body buffer: stores a constant value at memory offset 0 + field_offset. +/// Emit i32.store to a func_body buffer: stores a constant value at base_local + field_offset. /// Used during main() prologue to build the RocOps struct. -fn emitI32StoreConstToBody(self: *Self, func_body: *std.ArrayList(u8), field_offset: u32, value: u32) Allocator.Error!void { - // i32.const 0 (base address of RocOps struct) - func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, func_body, 0) catch return error.OutOfMemory; +fn emitI32StoreConstToBody(self: *Self, func_body: *std.ArrayList(u8), field_offset: u32, value: u32, base_local: ?u32) Allocator.Error!void { + // base address: local.get $base_local or i32.const 0 + if (base_local) |bl| { + func_body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, func_body, bl) catch return error.OutOfMemory; + } else { + func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, func_body, 0) catch return error.OutOfMemory; + } // i32.const value func_body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; WasmModule.leb128WriteI32(self.allocator, func_body, @intCast(value)) catch return error.OutOfMemory; @@ -4849,22 +4909,22 @@ fn emitI32StoreConstToBody(self: *Self, func_body: *std.ArrayList(u8), field_off /// Emit: local.get $fp; i32.const offset; i32.add /// Leaves (fp + offset) on the stack as an i32 pointer. fn emitFpOffset(self: *Self, offset: u32) Allocator.Error!void { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, self.fp_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, self.fp_local) catch return error.OutOfMemory; if (offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } } /// Emit: i64.store with alignment 3 (8 bytes) and the given offset. fn emitI64Store(self: *Self, offset: u32) Allocator.Error!void { - self.body.append(self.allocator, Op.i64_store) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_store) catch return error.OutOfMemory; // alignment (log2 of bytes): 3 = 8-byte aligned - WasmModule.leb128WriteU32(self.allocator, &self.body, 3) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 3) catch return error.OutOfMemory; // offset - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; } /// Copy a stack_memory value from a (potentially dangling) source pointer to @@ -4905,7 +4965,7 @@ fn emitConversion(self: *Self, source: ValType, target: ValType) Allocator.Error }, }; if (op) |opcode| { - self.body.append(self.allocator, opcode) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, opcode) catch return error.OutOfMemory; } } @@ -4925,6 +4985,128 @@ pub fn compileAllProcSpecs(self: *Self, proc_specs: []const LirProcSpec) Allocat } } +/// Generate a RocCall ABI entrypoint wrapper for a compiled proc. +/// +/// Creates a function with signature `(i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void` +/// that reads arguments from `args_ptr`, calls the compiled proc, and stores the result +/// to `ret_ptr`. The function is exported with the given `entrypoint_name`. +/// +/// The proc must already be compiled via `compileAllProcSpecs`. +/// Returns the global function index of the wrapper. +pub fn generateEntrypointWrapper( + self: *Self, + proc: LirProcSpec, + entrypoint_name: []const u8, + arg_layouts: []const layout.Idx, + ret_layout: layout.Idx, +) Allocator.Error!u32 { + const ret_vt = self.resolveValType(ret_layout); + const ret_repr = WasmLayout.wasmReprWithStore(ret_layout, self.getLayoutStore()); + const ret_byte_size: u32 = switch (ret_repr) { + .primitive => |vt| switch (vt) { + .i32, .f32 => 4, + .i64, .f64 => 8, + }, + .stack_memory => |size| size, + }; + + // Create the RocCall function: (i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void + const roc_call_type_idx = try self.module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); + const func_idx = try self.module.addFunction(roc_call_type_idx); + + // Save and reset codegen state + const saved = try self.saveState(); + + self.code_builder.code = .empty; + self.code_builder.preamble = .empty; + self.code_builder.import_relocations = .empty; + self.storage.locals = std.AutoHashMap(u64, Storage.LocalInfo).init(self.allocator); + self.storage.next_local_idx = 0; + self.storage.local_types = .empty; + self.stack_frame_size = 0; + self.uses_stack_memory = false; + self.fp_local = 0; + self.cf_depth = 0; + self.in_proc = false; + + // Allocate wrapper parameters + const roc_ops_local = try self.storage.allocAnonymousLocal(.i32); // param 0: roc_ops_ptr + const ret_ptr_local = try self.storage.allocAnonymousLocal(.i32); // param 1: ret_ptr + const args_ptr_local = try self.storage.allocAnonymousLocal(.i32); // param 2: args_ptr + + // --- Build the call to the compiled proc --- + + // Push roc_ops_ptr (first arg to the proc) + try self.emitLocalGet(roc_ops_local); + + // Load each argument from the args struct at args_ptr + var args_offset: u32 = 0; + for (arg_layouts) |arg_layout| { + const arg_repr = WasmLayout.wasmReprWithStore(arg_layout, self.getLayoutStore()); + switch (arg_repr) { + .primitive => |vt| { + // Load the value from args_ptr + offset + try self.emitLocalGet(args_ptr_local); + try self.emitLoadOp(vt, args_offset); + const size: u32 = switch (vt) { + .i32, .f32 => 4, + .i64, .f64 => 8, + }; + args_offset += size; + }, + .stack_memory => |size| { + // Pass pointer to the data: args_ptr + offset + try self.emitLocalGet(args_ptr_local); + if (args_offset > 0) { + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(args_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + } + // Align to 4 bytes for the next arg + const aligned_size = (size + 3) & ~@as(u32, 3); + args_offset += aligned_size; + }, + } + } + + // Call the compiled proc + const proc_key: u64 = @bitCast(proc.name); + const proc_func_idx = self.registered_procs.get(proc_key) orelse return error.OutOfMemory; + try self.emitCall(proc_func_idx); + + // Store the return value to ret_ptr + if (ret_byte_size == 0) { + // Zero-sized type — drop the return value, nothing to store + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; + } else { + try self.emitStoreResultToRetPtr(ret_ptr_local, ret_vt, ret_repr); + } + + // Build function body (wrapper doesn't use stack memory — no prologue/epilogue) + var wrapper_body: std.ArrayList(u8) = .empty; + defer wrapper_body.deinit(self.allocator); + + // Locals declaration (skip 3 for the RocCall parameters) + try self.encodeLocalsDecl(&wrapper_body, 3); + + // Resolve deferred relocatable calls + self.resolvePendingRelocations(); + + // Body instructions + try wrapper_body.appendSlice(self.allocator, self.code_builder.code.items); + + // End opcode + try wrapper_body.append(self.allocator, Op.end); + + try self.module.setFunctionBody(func_idx, wrapper_body.items); + try self.module.addExport(entrypoint_name, .func, func_idx); + + // Restore state + self.restoreState(saved); + + return func_idx; +} + /// Compile a single LirProcSpec as a wasm function. /// Does NOT compile the body — that's done by compileProcSpecBody. fn registerProcSpec(self: *Self, proc: LirProcSpec) Allocator.Error!void { @@ -4962,7 +5144,9 @@ fn compileProcSpecBody(self: *Self, proc: LirProcSpec) Allocator.Error!void { const saved = self.saveState() catch return error.OutOfMemory; // Initialize fresh state with ALL registered proc_specs (for mutual recursion) - self.body = .empty; + self.code_builder.code = .empty; + self.code_builder.preamble = .empty; + self.code_builder.import_relocations = .empty; self.storage.locals = std.AutoHashMap(u64, Storage.LocalInfo).init(self.allocator); self.storage.next_local_idx = 0; self.storage.local_types = .empty; @@ -5003,8 +5187,8 @@ fn compileProcSpecBody(self: *Self, proc: LirProcSpec) Allocator.Error!void { self.fp_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; // Emit proc body block (ret targets this block) - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(ret_vt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(ret_vt)) catch return error.OutOfMemory; self.cf_depth = 1; // inside the ret block // Generate CFStmt body @@ -5014,7 +5198,7 @@ fn compileProcSpecBody(self: *Self, proc: LirProcSpec) Allocator.Error!void { }; // End of ret block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Build function body var func_body: std.ArrayList(u8) = .empty; @@ -5047,8 +5231,11 @@ fn compileProcSpecBody(self: *Self, proc: LirProcSpec) Allocator.Error!void { WasmModule.leb128WriteU32(self.allocator, &func_body, 0) catch return error.OutOfMemory; } + // Resolve deferred relocatable calls before copying instructions + self.resolvePendingRelocations(); + // Body instructions - func_body.appendSlice(self.allocator, self.body.items) catch return error.OutOfMemory; + func_body.appendSlice(self.allocator, self.code_builder.code.items) catch return error.OutOfMemory; if (self.uses_stack_memory) { // Epilogue: restore stack pointer @@ -5087,8 +5274,12 @@ fn compileProcSpecBody(self: *Self, proc: LirProcSpec) Allocator.Error!void { /// Saved codegen state for restoring after compiling a nested function. const SavedState = struct { - body_items: []u8, - body_capacity: usize, + code_items: []u8, + code_capacity: usize, + preamble_items: []u8, + preamble_capacity: usize, + import_relocs_items: []CodeBuilder.Relocation, + import_relocs_capacity: usize, locals: std.AutoHashMap(u64, Storage.LocalInfo), next_local_idx: u32, local_types_items: []ValType, @@ -5106,8 +5297,12 @@ const SavedState = struct { /// Capture current codegen state for later restoration. fn saveState(self: *Self) Allocator.Error!SavedState { return .{ - .body_items = self.body.items, - .body_capacity = self.body.capacity, + .code_items = self.code_builder.code.items, + .code_capacity = self.code_builder.code.capacity, + .preamble_items = self.code_builder.preamble.items, + .preamble_capacity = self.code_builder.preamble.capacity, + .import_relocs_items = self.code_builder.import_relocations.items, + .import_relocs_capacity = self.code_builder.import_relocations.capacity, .locals = self.storage.locals, .next_local_idx = self.storage.next_local_idx, .local_types_items = self.storage.local_types.items, @@ -5123,9 +5318,15 @@ fn saveState(self: *Self) Allocator.Error!SavedState { /// Restore codegen state after compiling a nested function. fn restoreState(self: *Self, saved: SavedState) void { - self.body.deinit(self.allocator); - self.body.items = saved.body_items; - self.body.capacity = saved.body_capacity; + self.code_builder.code.deinit(self.allocator); + self.code_builder.code.items = saved.code_items; + self.code_builder.code.capacity = saved.code_capacity; + self.code_builder.preamble.deinit(self.allocator); + self.code_builder.preamble.items = saved.preamble_items; + self.code_builder.preamble.capacity = saved.preamble_capacity; + self.code_builder.import_relocations.deinit(self.allocator); + self.code_builder.import_relocations.items = saved.import_relocs_items; + self.code_builder.import_relocations.capacity = saved.import_relocs_capacity; self.storage.locals.deinit(); self.storage.locals = saved.locals; self.storage.next_local_idx = saved.next_local_idx; @@ -5161,13 +5362,13 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { // Generate return value try self.generateExpr(r.value); // Break out to the proc ret block - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, self.cf_depth - 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, self.cf_depth - 1) catch return error.OutOfMemory; }, .expr_stmt => |es| { // Generate value for side effects, then drop try self.generateExpr(es.value); - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; try self.generateCFStmt(es.next); }, .switch_stmt => |sw| { @@ -5185,23 +5386,23 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { // Compare cond to branch value try self.emitLocalGet(cond_local); if (cond_vt == .i64) { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, @bitCast(branch.value)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, @bitCast(branch.value)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eq) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(@as(i64, @bitCast(branch.value)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(@as(i64, @bitCast(branch.value)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; } // if (result_type) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(ret_vt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(ret_vt)) catch return error.OutOfMemory; self.cf_depth += 1; try self.generateCFStmt(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; } // Default branch @@ -5209,7 +5410,7 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { // Close all if/else blocks for (0..branches.len) |_| { - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; self.cf_depth -= 1; } }, @@ -5253,8 +5454,8 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { try self.generateCFStmt(j.remainder); // Emit loop for the join point body - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, 0x40) catch return error.OutOfMemory; // void block type + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x40) catch return error.OutOfMemory; // void block type self.cf_depth += 1; // Record the loop depth for jump targeting @@ -5263,7 +5464,7 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { try self.generateCFStmt(j.body); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; self.cf_depth -= 1; }, .jump => |jmp| { @@ -5305,8 +5506,8 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { if (self.join_point_depths.get(jp_key)) |loop_depth| { // br to the loop (br 0 from directly inside the loop) const br_target = self.cf_depth - loop_depth; - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, br_target) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, br_target) catch return error.OutOfMemory; } // If loop not entered yet (initial jump in remainder), just fall through }, @@ -5330,7 +5531,7 @@ fn generateCFStmt(self: *Self, stmt_id: CFStmtId) Allocator.Error!void { /// Each branch body is a CF statement (handles its own ret/jump). fn generateCFMatchBranches(self: *Self, branches: []const LIR.CFMatchBranch, value_local: u32, value_vt: ValType) Allocator.Error!void { if (branches.len == 0) { - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; return; } @@ -5344,24 +5545,24 @@ fn generateCFMatchBranches(self: *Self, branches: []const LIR.CFMatchBranch, val }, .bind => |bind| { const local_idx = self.storage.allocLocal(bind.symbol, value_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; try self.generateCFStmtWithGuard(branch, remaining, value_local, value_vt); }, .int_literal => |int_pat| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; switch (value_vt) { .i32 => { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @truncate(@as(i64, @truncate(int_pat.value)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @truncate(@as(i64, @truncate(int_pat.value)))) catch return error.OutOfMemory; }, .i64 => { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, @truncate(int_pat.value)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, @truncate(int_pat.value)) catch return error.OutOfMemory; }, .f32, .f64 => unreachable, } @@ -5371,16 +5572,16 @@ fn generateCFMatchBranches(self: *Self, branches: []const LIR.CFMatchBranch, val .i64 => Op.i64_eq, .f32, .f64 => unreachable, }; - self.body.append(self.allocator, eq_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, eq_op) catch return error.OutOfMemory; // if match: void block type since branch bodies handle their own control flow - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, 0x40) catch return error.OutOfMemory; // void + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x40) catch return error.OutOfMemory; // void self.cf_depth += 1; try self.generateCFStmtWithGuard(branch, remaining, value_local, value_vt); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateCFMatchBranches(remaining, value_local, value_vt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; self.cf_depth -= 1; }, .tag => |tag_pat| { @@ -5401,30 +5602,30 @@ fn generateCFMatchBranches(self: *Self, branches: []const LIR.CFMatchBranch, val const disc_offset = tu_data.discriminant_offset; const disc_size: u32 = tu_data.discriminant_size; if (disc_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; try self.emitLoadOpSized(.i32, disc_size, disc_offset); } } else { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, value_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, value_local) catch return error.OutOfMemory; } // Compare against the discriminant constant - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(tag_pat.discriminant)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(tag_pat.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, 0x40) catch return error.OutOfMemory; // void + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x40) catch return error.OutOfMemory; // void self.cf_depth += 1; try self.generateCFStmtWithGuard(branch, remaining, value_local, value_vt); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateCFMatchBranches(remaining, value_local, value_vt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; self.cf_depth -= 1; }, .struct_, .list, .as_pattern => { @@ -5452,13 +5653,13 @@ fn generateCFStmtWithGuard( if (!branch.guard.isNone()) { // Evaluate the guard expression (pushes i32 0 or 1) try self.generateExpr(branch.guard); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, 0x40) catch return error.OutOfMemory; // void + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x40) catch return error.OutOfMemory; // void self.cf_depth += 1; try self.generateCFStmt(branch.body); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateCFMatchBranches(remaining, value_local, value_vt); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; self.cf_depth -= 1; } else { try self.generateCFStmt(branch.body); @@ -5493,9 +5694,9 @@ fn bindCFLetPattern(self: *Self, pat: LirPattern, value_expr: LirExprId) Allocat try self.emitStoreOp(scalar_vt, stack_offset); try self.emitLocalGet(self.fp_local); if (stack_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(stack_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(stack_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const local_idx = self.getOrAllocTypedLocal(bind.symbol, .i32) catch return error.OutOfMemory; try self.emitLocalSet(local_idx); @@ -5524,7 +5725,7 @@ fn bindCFLetPattern(self: *Self, pat: LirPattern, value_expr: LirExprId) Allocat } }, .wildcard => { - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; }, .struct_ => |s| { const ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; @@ -5540,8 +5741,8 @@ fn bindCFLetPattern(self: *Self, pat: LirPattern, value_expr: LirExprId) Allocat // Bind the outer symbol, then recurse on the inner pattern const vt = self.resolveValType(as_pat.layout_idx); const local_idx = self.storage.allocLocal(as_pat.symbol, vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; const inner_pat = self.store.getPattern(as_pat.inner); try self.bindCFLetPattern(inner_pat, value_expr); }, @@ -5576,8 +5777,8 @@ fn generateCall(self: *Self, c: anytype) Allocator.Error!void { /// Emit a call instruction. fn emitCall(self: *Self, func_idx: u32) Allocator.Error!void { - try self.body.append(self.allocator, Op.call); - try WasmModule.leb128WriteU32(self.allocator, &self.body, func_idx); + try self.code_builder.code.append(self.allocator, Op.call); + try WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, func_idx); } /// Generate call arguments (helper to avoid duplication). @@ -5603,6 +5804,148 @@ fn generateCallArgs(self: *Self, args: LIR.LirExprSpan) Allocator.Error!void { } } +/// Generate a hosted function call via `call_indirect` through RocOps.hosted_fns. +/// Hosted functions follow the RocCall ABI: fn(roc_ops_ptr, ret_ptr, args_ptr) -> void. +/// The table index is loaded from linear memory at hosted_fns_ptr + (index * 4). +fn generateHostedCall(self: *Self, hc: anytype) Allocator.Error!void { + const ls = self.getLayoutStore(); + const arg_exprs = self.store.getExprSpan(hc.args); + + // 1. Generate all argument expressions and stabilize composite results. + // We collect locals holding each arg value (primitive on wasm stack → save to local, + // composite → pointer in local). + const ArgInfo = struct { local: u32, layout_idx: layout.Idx, is_composite: bool, byte_size: u32 }; + var arg_infos: std.ArrayList(ArgInfo) = .empty; + defer arg_infos.deinit(self.allocator); + + for (arg_exprs) |arg_id| { + try self.generateExpr(arg_id); + const layout_idx = self.exprLayoutIdx(arg_id); + const repr = WasmLayout.wasmReprWithStore(layout_idx, ls); + const is_composite = switch (repr) { + .stack_memory => |sz| sz > 0, + .primitive => false, + }; + const byte_size: u32 = switch (repr) { + .stack_memory => |sz| sz, + .primitive => |vt| switch (vt) { + .i32, .f32 => @as(u32, 4), + .i64, .f64 => @as(u32, 8), + }, + }; + // Save value to a local so it survives subsequent arg generation. + if (is_composite) { + const stabilized = try self.stabilizeCompositeResult(byte_size); + arg_infos.append(self.allocator, .{ .local = stabilized, .layout_idx = layout_idx, .is_composite = true, .byte_size = byte_size }) catch return error.OutOfMemory; + } else { + const vt = self.resolveValType(layout_idx); + const tmp = self.storage.allocAnonymousLocal(vt) catch return error.OutOfMemory; + try self.emitLocalSet(tmp); + arg_infos.append(self.allocator, .{ .local = tmp, .layout_idx = layout_idx, .is_composite = false, .byte_size = byte_size }) catch return error.OutOfMemory; + } + } + + // 2. Allocate return slot on stack frame. + const ret_repr = WasmLayout.wasmReprWithStore(hc.ret_layout, ls); + const ret_size: u32 = switch (ret_repr) { + .stack_memory => |sz| sz, + .primitive => |vt| switch (vt) { + .i32, .f32 => @as(u32, 4), + .i64, .f64 => @as(u32, 8), + }, + }; + const ret_align: u32 = if (ret_size >= 8) 8 else if (ret_size >= 4) 4 else if (ret_size >= 2) 2 else 1; + const ret_slot = if (ret_size > 0) + try self.allocStackMemory(@max(ret_size, 4), @max(ret_align, 4)) + else + try self.allocStackMemory(4, 4); // Minimum slot for ZST (need valid pointer) + + // 3. Marshal arguments into a contiguous buffer on stack. + var total_args_size: u32 = 0; + for (arg_infos.items) |arg| { + const arg_align: u32 = self.layoutByteAlign(arg.layout_idx); + total_args_size = std.mem.alignForward(u32, total_args_size, @max(arg_align, 1)); + total_args_size += arg.byte_size; + } + + const args_slot = if (total_args_size > 0) + try self.allocStackMemory(total_args_size, 4) + else + try self.allocStackMemory(4, 4); // Minimum slot for empty args + + // Copy each argument into the args buffer. + var offset: u32 = 0; + for (arg_infos.items) |arg| { + const arg_align: u32 = self.layoutByteAlign(arg.layout_idx); + offset = std.mem.alignForward(u32, offset, @max(arg_align, 1)); + + if (arg.byte_size > 0) { + if (arg.is_composite) { + // Composite: arg.local is a pointer to the data. Copy bytes. + // Destination: fp + args_slot + offset + const dst_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitFpOffset(args_slot + offset); + try self.emitLocalSet(dst_local); + try self.emitMemCopy(dst_local, 0, arg.local, arg.byte_size); + } else { + // Primitive: arg.local holds the scalar value. Store it. + try self.emitFpOffset(args_slot + offset); + const vt = self.resolveValType(arg.layout_idx); + try self.emitLocalGet(arg.local); + try self.emitStoreOp(vt, 0); + } + } + offset += arg.byte_size; + } + + // 4. Load hosted function's table index from RocOps: + // roc_ops_ptr → i32.load offset=32 (hosted_fns_ptr) + // → i32.load offset=(index * 4) (table index) + try self.emitLocalGet(self.roc_ops_local); + try self.emitLoadOp(.i32, WasmRocOps.hosted_fns_ptr); + try self.emitLoadOp(.i32, hc.index * 4); + + // Save table index to temp local (call_indirect expects it last on stack). + const table_idx_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalSet(table_idx_local); + + // 5. Push RocCall args: (roc_ops_ptr, ret_ptr, args_ptr), then table index. + try self.emitLocalGet(self.roc_ops_local); // arg 0: roc_ops + try self.emitFpOffset(ret_slot); // arg 1: ret_ptr + try self.emitFpOffset(args_slot); // arg 2: args_ptr + try self.emitLocalGet(table_idx_local); // table index (consumed by call_indirect) + + // 6. call_indirect with RocCall type signature, table 0. + self.code_builder.code.append(self.allocator, Op.call_indirect) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, self.roc_call_type_idx) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + + // 7. Load result from ret_slot. + switch (ret_repr) { + .primitive => |vt| { + if (ret_size == 0) { + // ZST — push unit value + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + } else { + // Load scalar from ret_slot + try self.emitFpOffset(ret_slot); + try self.emitLoadOp(vt, 0); + } + }, + .stack_memory => |sz| { + if (sz == 0) { + // ZST — push dummy pointer + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + } else { + // Composite: push pointer to ret_slot + try self.emitFpOffset(ret_slot); + } + }, + } +} + // ---- Lambda set / Closure value generation ---- // These functions handle runtime dispatch for lambda sets with multiple members. // Used when closures have enum_dispatch or union_repr representations. @@ -5704,14 +6047,14 @@ fn emitStoreOp(self: *Self, vt: ValType, mem_offset: u32) Allocator.Error!void { .f32 => Op.f32_store, .f64 => Op.f64_store, }; - self.body.append(self.allocator, op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, op) catch return error.OutOfMemory; // Alignment (log2): i32=2, i64=3, f32=2, f64=3 const align_log2: u32 = switch (vt) { .i32, .f32 => 2, .i64, .f64 => 3, }; - WasmModule.leb128WriteU32(self.allocator, &self.body, align_log2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, mem_offset) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, align_log2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, mem_offset) catch return error.OutOfMemory; } /// Emit a size-aware store instruction for a field with a known byte size. @@ -5720,10 +6063,10 @@ fn emitStoreOpSized(self: *Self, vt: ValType, byte_size: u32, mem_offset: u32) A if (vt == .i32 and byte_size < 4) { // Sub-word store for i32 values in small fields const op: u8 = if (byte_size == 1) Op.i32_store8 else Op.i32_store16; - self.body.append(self.allocator, op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, op) catch return error.OutOfMemory; const align_log2: u32 = if (byte_size == 1) 0 else 1; - WasmModule.leb128WriteU32(self.allocator, &self.body, align_log2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, mem_offset) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, align_log2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, mem_offset) catch return error.OutOfMemory; } else { try self.emitStoreOp(vt, mem_offset); } @@ -5737,36 +6080,36 @@ fn emitLoadOp(self: *Self, vt: ValType, mem_offset: u32) Allocator.Error!void { .f32 => Op.f32_load, .f64 => Op.f64_load, }; - self.body.append(self.allocator, op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, op) catch return error.OutOfMemory; const align_log2: u32 = switch (vt) { .i32, .f32 => 2, .i64, .f64 => 3, }; - WasmModule.leb128WriteU32(self.allocator, &self.body, align_log2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, mem_offset) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, align_log2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, mem_offset) catch return error.OutOfMemory; } /// Emit a size-aware load instruction for a field with a known byte size. /// For sub-32-bit fields (1 or 2 bytes), uses i32.load8_u/i32.load16_u. fn emitLoadOpSized(self: *Self, vt: ValType, byte_size: u32, mem_offset: u32) Allocator.Error!void { if (byte_size == 0) { - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; switch (vt) { .i32 => { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; }, .i64 => { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; }, .f32 => { - self.body.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; - try self.body.appendSlice(self.allocator, std.mem.asBytes(&@as(f32, 0))); + self.code_builder.code.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; + try self.code_builder.code.appendSlice(self.allocator, std.mem.asBytes(&@as(f32, 0))); }, .f64 => { - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - try self.body.appendSlice(self.allocator, std.mem.asBytes(&@as(f64, 0))); + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + try self.code_builder.code.appendSlice(self.allocator, std.mem.asBytes(&@as(f64, 0))); }, } return; @@ -5775,10 +6118,10 @@ fn emitLoadOpSized(self: *Self, vt: ValType, byte_size: u32, mem_offset: u32) Al if (vt == .i32 and byte_size < 4) { // Sub-word load for i32 values in small fields const op: u8 = if (byte_size == 1) Op.i32_load8_u else Op.i32_load16_u; - self.body.append(self.allocator, op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, op) catch return error.OutOfMemory; const align_log2: u32 = if (byte_size == 1) 0 else 1; - WasmModule.leb128WriteU32(self.allocator, &self.body, align_log2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, mem_offset) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, align_log2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, mem_offset) catch return error.OutOfMemory; } else { try self.emitLoadOp(vt, mem_offset); } @@ -5794,10 +6137,10 @@ fn emitLoadOpForLayout(self: *Self, lay: layout.Idx, mem_offset: u32) Allocator. (if (is_signed) Op.i32_load8_s else Op.i32_load8_u) else (if (is_signed) Op.i32_load16_s else Op.i32_load16_u); - self.body.append(self.allocator, op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, op) catch return error.OutOfMemory; const align_log2: u32 = if (byte_size == 1) 0 else 1; - WasmModule.leb128WriteU32(self.allocator, &self.body, align_log2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, mem_offset) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, align_log2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, mem_offset) catch return error.OutOfMemory; } else { try self.emitLoadOp(vt, mem_offset); } @@ -5811,14 +6154,14 @@ fn emitStoreToMem(self: *Self, base_local: u32, field_offset: u32, vt: ValType) // Strategy: store value in a temp local, push address, get value back, then store. const temp = self.storage.allocAnonymousLocal(vt) catch return error.OutOfMemory; // local.set temp (pop value) - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; // local.get base - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; // local.get temp (push value) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; // store with field_offset as immediate try self.emitStoreOp(vt, field_offset); } @@ -5827,18 +6170,18 @@ fn emitStoreToMem(self: *Self, base_local: u32, field_offset: u32, vt: ValType) fn emitStoreToMemSized(self: *Self, base_local: u32, field_offset: u32, vt: ValType, byte_size: u32) Allocator.Error!void { if (byte_size == 0) { const temp = self.storage.allocAnonymousLocal(vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; return; } const temp = self.storage.allocAnonymousLocal(vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; try self.emitStoreOpSized(vt, byte_size, field_offset); } @@ -5850,47 +6193,47 @@ fn emitMemCopy(self: *Self, dst_local: u32, dst_offset: u32, src_local: u32, byt // Copy i32-sized chunks while (offset + 4 <= byte_count) : (offset += 4) { // dst_local - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_local) catch return error.OutOfMemory; // load from src - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; // store to dst - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_offset + offset) catch return error.OutOfMemory; } // Copy i16 chunk if (offset + 2 <= byte_count) { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store16) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load16_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store16) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_offset + offset) catch return error.OutOfMemory; offset += 2; } // Copy remaining byte if (offset < byte_count) { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_offset + offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_offset + offset) catch return error.OutOfMemory; } } @@ -5899,22 +6242,22 @@ fn emitMemCopy(self: *Self, dst_local: u32, dst_offset: u32, src_local: u32, byt fn emitZeroInit(self: *Self, base_local: u32, byte_count: u32) Allocator.Error!void { var offset: u32 = 0; while (offset + 4 <= byte_count) : (offset += 4) { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; } while (offset < byte_count) : (offset += 1) { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; } } @@ -5925,16 +6268,16 @@ fn generateStruct(self: *Self, r: anytype) Allocator.Error!void { const l = ls.getLayout(r.struct_layout); // Empty structs (ZST) have scalar layout, not struct_ — push dummy pointer if (l.tag != .struct_) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; return; } const size = ls.layoutSize(l); if (size == 0) { // Zero-sized struct — push dummy pointer - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; return; } @@ -5945,8 +6288,8 @@ fn generateStruct(self: *Self, r: anytype) Allocator.Error!void { // Allocate a local to hold the base pointer const base_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(frame_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; // Generate all field expressions FIRST and save values to locals. // This must happen before zero-init because field expressions may read from @@ -5992,8 +6335,8 @@ fn generateStruct(self: *Self, r: anytype) Allocator.Error!void { // Save value to a local (i32 pointer for composite, value type for primitive) const save_vt: ValType = if (is_composite) .i32 else field_vt; const local_idx = self.storage.allocAnonymousLocal(save_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; field_val_locals[i] = local_idx; field_val_types[i] = save_vt; @@ -6014,15 +6357,15 @@ fn generateStruct(self: *Self, r: anytype) Allocator.Error!void { try self.emitMemCopy(base_local, field_offset, field_val_locals[i], field_byte_size); } else { // Primitive — push value from local, then store to record - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_val_locals[i]) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_val_locals[i]) catch return error.OutOfMemory; try self.emitStoreToMemSized(base_local, field_offset, field_val_types[i], field_byte_size); } } // Push the base pointer as the result - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; } /// Bind a struct destructuring pattern: load each field from a struct pointer @@ -6046,40 +6389,40 @@ fn bindStructPattern(self: *Self, ptr_local: u32, s: anytype) Allocator.Error!vo const is_composite = self.isCompositeLayout(field_layout_idx); if (is_composite and field_byte_size > 0) { // Composite field: compute pointer = ptr + offset - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const local_idx = self.storage.allocLocal(bind.symbol, .i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; } else { // Scalar field: load from memory const field_vt = WasmLayout.resultValTypeWithStore(field_layout_idx, ls); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; try self.emitLoadOpSized(field_vt, field_byte_size, field_offset); const local_idx = self.storage.allocLocal(bind.symbol, field_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; } }, .wildcard => {}, .struct_ => |inner_struct| { // Nested struct destructuring: compute pointer to field - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindStructPattern(field_ptr, inner_struct); }, .tag => |inner_tag| { @@ -6091,16 +6434,16 @@ fn bindStructPattern(self: *Self, ptr_local: u32, s: anytype) Allocator.Error!vo } if (self.store.getPatternSpan(inner_tag.args).len == 0) continue; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindTagPattern(field_ptr, inner_tag); }, else => unreachable, @@ -6152,24 +6495,24 @@ fn bindTagPattern(self: *Self, ptr_local: u32, tag: anytype) Allocator.Error!voi const is_composite = self.isCompositeLayout(bind.layout_idx); if (is_composite and bind_byte_size > 0) { // Composite field: compute pointer = ptr + offset - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (payload_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(payload_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(payload_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const local_idx = self.storage.allocLocal(bind.symbol, .i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; } else { // Scalar field: load from memory - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; try self.emitLoadOpForLayout(bind.layout_idx, payload_offset); const local_idx = self.storage.allocLocal(bind.symbol, bind_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; } payload_offset += bind_byte_size; }, @@ -6178,16 +6521,16 @@ fn bindTagPattern(self: *Self, ptr_local: u32, tag: anytype) Allocator.Error!voi }, .struct_ => |inner_struct| { const field_byte_size = self.layoutStorageByteSize(inner_struct.struct_layout); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (payload_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(payload_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(payload_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindStructPattern(field_ptr, inner_struct); payload_offset += field_byte_size; }, @@ -6200,16 +6543,16 @@ fn bindTagPattern(self: *Self, ptr_local: u32, tag: anytype) Allocator.Error!voi } const field_byte_size = self.layoutStorageByteSize(inner_tag.union_layout); if (self.store.getPatternSpan(inner_tag.args).len != 0) { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (payload_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(payload_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(payload_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, field_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, field_ptr) catch return error.OutOfMemory; try self.bindTagPattern(field_ptr, inner_tag); } payload_offset += field_byte_size; @@ -6230,8 +6573,8 @@ fn bindListPattern(self: *Self, ptr_local: u32, list_pat: anytype) Allocator.Err const is_composite = self.isCompositeLayout(list_pat.elem_layout); // Load elements pointer from RocList (offset 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); const elems_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(elems_ptr); @@ -6245,9 +6588,9 @@ fn bindListPattern(self: *Self, ptr_local: u32, list_pat: anytype) Allocator.Err if (is_composite and elem_size > 0) { try self.emitLocalGet(elems_ptr); if (elem_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } } else { try self.emitLocalGet(elems_ptr); @@ -6260,9 +6603,9 @@ fn bindListPattern(self: *Self, ptr_local: u32, list_pat: anytype) Allocator.Err .struct_ => |inner_struct| { try self.emitLocalGet(elems_ptr); if (elem_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } const field_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(field_ptr); @@ -6296,9 +6639,9 @@ fn generateStructAccess(self: *Self, sa: anytype) Allocator.Error!void { const src_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(struct_ptr); if (field_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(field_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(field_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } try self.emitLocalSet(src_local); @@ -6326,8 +6669,8 @@ fn generateZeroArgTag(self: *Self, z: anytype) Allocator.Error!void { const tu_size = ls.layoutSize(l); if (tu_size <= 4) { // Small tag union — fits in an i32 discriminant - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(z.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(z.discriminant)) catch return error.OutOfMemory; return; } // Larger tag union — allocate memory, store discriminant @@ -6336,8 +6679,8 @@ fn generateZeroArgTag(self: *Self, z: anytype) Allocator.Error!void { const base_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(frame_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; // Store discriminant (size-aware) const tu_data = ls.getTagUnionData(l.data.tag_union.idx); @@ -6345,18 +6688,18 @@ fn generateZeroArgTag(self: *Self, z: anytype) Allocator.Error!void { const disc_size: u32 = tu_data.discriminant_size; // Push discriminant value if (disc_size != 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(z.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(z.discriminant)) catch return error.OutOfMemory; try self.emitStoreToMemSized(base_local, disc_offset, .i32, disc_size); } // Push base pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; } else { // Possibly a simple bool/enum tag - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(z.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(z.discriminant)) catch return error.OutOfMemory; } } @@ -6378,10 +6721,10 @@ fn generateTag(self: *Self, t: anytype) Allocator.Error!void { const small_args = self.store.getExprSpan(t.args); for (small_args) |arg_expr_id| { try self.generateExpr(arg_expr_id); - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(t.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(t.discriminant)) catch return error.OutOfMemory; return; } @@ -6390,8 +6733,8 @@ fn generateTag(self: *Self, t: anytype) Allocator.Error!void { const base_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(frame_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; // Store payload args at offset 0 FIRST (payload may overlap discriminant // if the expression generates a wider type than the payload slot, e.g. i64 @@ -6405,8 +6748,8 @@ fn generateTag(self: *Self, t: anytype) Allocator.Error!void { // Composite types (Str, List, records, etc.) produce a pointer on // the stack. Copy the full data from the source to the tag union. const src_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; try self.emitMemCopy(base_local, payload_offset, src_local, arg_byte_size); } else { const arg_vt = self.exprValType(arg_expr_id); @@ -6418,14 +6761,14 @@ fn generateTag(self: *Self, t: anytype) Allocator.Error!void { // Store discriminant AFTER payload (so it can't be overwritten) const disc_size: u32 = tu_data.discriminant_size; if (disc_size != 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(t.discriminant)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(t.discriminant)) catch return error.OutOfMemory; try self.emitStoreToMemSized(base_local, disc_offset, .i32, disc_size); } // Push base pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; } /// Generate a discriminant switch expression. @@ -6436,7 +6779,7 @@ fn generateDiscriminantSwitch(self: *Self, ds: anytype) Allocator.Error!void { const branches = self.store.getExprSpan(ds.branches); if (branches.len == 0) { - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; return; } @@ -6463,28 +6806,28 @@ fn generateDiscriminantSwitch(self: *Self, ds: anytype) Allocator.Error!void { if (tu_size <= 4) { // Small tag union — the value IS the discriminant (already on stack as i32) - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, disc_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, disc_local) catch return error.OutOfMemory; } else { // Value is a pointer — load discriminant from memory const ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; if (disc_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; try self.emitLoadOpSized(.i32, disc_size, disc_offset); } - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, disc_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, disc_local) catch return error.OutOfMemory; } } else { // Scalar/ZST — the value itself is the discriminant - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, disc_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, disc_local) catch return error.OutOfMemory; } // Determine result block type from the first branch @@ -6503,35 +6846,35 @@ fn generateDiscSwitchBranches(self: *Self, branches: []const LirExprId, disc_loc } // Compare discriminant to disc_value - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, disc_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(disc_value)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, disc_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(disc_value)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; // if (disc == disc_value) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(bt)) catch return error.OutOfMemory; self.pushExprControlFrame(); defer self.popExprControlFrame(); try self.generateExpr(branches[0]); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; try self.generateDiscSwitchBranches(branches[1..], disc_local, bt, disc_value + 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Generate a while loop expression. /// Wasm structure: block { loop { i32.eqz br_if 1 drop br 0 } } i32.const 0 fn generateWhileLoop(self: *Self, wl: anytype) Allocator.Error!void { // block (void) — exit target for br_if - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; self.pushExprControlFrame(); const break_target_depth = self.expr_control_depth; // loop (void) — back-edge target for br - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; self.pushExprControlFrame(); self.loop_break_target_depths.append(self.allocator, break_target_depth) catch return error.OutOfMemory; defer { @@ -6544,26 +6887,26 @@ fn generateWhileLoop(self: *Self, wl: anytype) Allocator.Error!void { try self.generateExpr(wl.cond); // If condition is false (0), break out of the block - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; // break out of block (depth 1) + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // break out of block (depth 1) // Generate body (result is discarded) try self.generateExpr(wl.body); - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; // Branch back to loop start - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // continue loop (depth 0) + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // continue loop (depth 0) // end loop - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // While loops return unit — push dummy i32 0 - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } /// Generate a for loop expression. @@ -6574,43 +6917,43 @@ fn generateForLoopExpr(self: *Self, fl: anytype) Allocator.Error!void { // Generate the list expression → i32 pointer to RocList struct try self.generateExpr(fl.list_expr); const list_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_ptr) catch return error.OutOfMemory; // Load elements pointer (offset 0 in RocList) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); const elems_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, elems_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, elems_ptr) catch return error.OutOfMemory; // Load list length (offset 4 in RocList) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_ptr) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); const list_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_len) catch return error.OutOfMemory; // Loop index (initialized to 0) const idx_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; // Get element size const elem_size: u32 = self.layoutStorageByteSize(fl.elem_layout); const elem_vt = WasmLayout.resultValTypeWithStore(fl.elem_layout, ls); // block { loop { - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; self.pushExprControlFrame(); const break_target_depth = self.expr_control_depth; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; self.pushExprControlFrame(); self.loop_break_target_depths.append(self.allocator, break_target_depth) catch return error.OutOfMemory; defer { @@ -6620,13 +6963,13 @@ fn generateForLoopExpr(self: *Self, fl: anytype) Allocator.Error!void { } // Check: if idx >= len, break - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_len) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; // break out of block + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // break out of block // Bind element to pattern const elem_pattern = self.store.getPattern(fl.elem_pattern); @@ -6637,50 +6980,50 @@ fn generateForLoopExpr(self: *Self, fl: anytype) Allocator.Error!void { if (elem_size == 0) { // ZST elements — push dummy value - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else if (self.isCompositeLayout(fl.elem_layout)) { // Composite element — compute pointer: elems_ptr + idx * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, elems_ptr) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, elems_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } else { // Primitive element — load from elems_ptr + idx * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, elems_ptr) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, elems_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLoadOpForLayout(fl.elem_layout, 0); } - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local_idx) catch return error.OutOfMemory; }, .wildcard => { // No binding needed }, .struct_ => |s| { // Compute element pointer: elems_ptr + idx * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, elems_ptr) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, elems_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; const elem_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, elem_ptr) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, elem_ptr) catch return error.OutOfMemory; try self.bindStructPattern(elem_ptr, s); }, else => unreachable, @@ -6688,28 +7031,28 @@ fn generateForLoopExpr(self: *Self, fl: anytype) Allocator.Error!void { // Generate body (result is discarded) try self.generateExpr(fl.body); - self.body.append(self.allocator, Op.drop) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.drop) catch return error.OutOfMemory; // Increment index - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; // Branch back to loop start - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // end loop, end block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // For loops return unit — push dummy i32 0 - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } /// Generate a list construction expression. @@ -6725,11 +7068,11 @@ fn generateList(self: *Self, l: anytype) Allocator.Error!void { // Actually we need to zero-init at the right location const base_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(base_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; try self.emitZeroInit(base_local, 12); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; return; } @@ -6737,22 +7080,35 @@ fn generateList(self: *Self, l: anytype) Allocator.Error!void { const elem_size: u32 = self.layoutStorageByteSize(l.elem_layout); const elem_align: u32 = self.layoutStorageByteAlign(l.elem_layout); - // Allocate space for all elements on the heap so list literals remain valid - // when returned from functions (callee stack frames are reclaimed on return). + // Allocate space for all elements on the heap with a refcount header, + // so list builtins (append, reserve, etc.) can manage the allocation. const total_data_size = elem_size * @as(u32, @intCast(elems.len)); + const elements_refcounted: bool = if (ls.getLayout(l.elem_layout).tag != .list_of_zst) + ls.layoutContainsRefcounted(ls.getLayout(l.elem_layout)) + else + false; const data_base = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; if (total_data_size > 0) { - try self.emitHeapAllocConst(total_data_size, elem_align); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, data_base) catch return error.OutOfMemory; + // Call roc_builtins_allocate_with_refcount(data_bytes, alignment, elements_refcounted, roc_ops) + // Returns pointer to data (after refcount header) + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(total_data_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_align)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, if (elements_refcounted) 1 else 0) catch return error.OutOfMemory; + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(self.builtin_syms.allocate_with_refcount); + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, data_base) catch return error.OutOfMemory; // Zero-initialize element data for consistent padding try self.emitZeroInit(data_base, total_data_size); } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, data_base) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, data_base) catch return error.OutOfMemory; } // Store each element @@ -6764,8 +7120,8 @@ fn generateList(self: *Self, l: anytype) Allocator.Error!void { if (self.isCompositeLayout(l.elem_layout) and elem_size > 0) { // Composite element — copy from source pointer const src_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_local) catch return error.OutOfMemory; try self.emitMemCopy(data_base, offset, src_local, elem_size); } else { // Primitive element — store directly @@ -6779,27 +7135,27 @@ fn generateList(self: *Self, l: anytype) Allocator.Error!void { const list_offset = try self.allocStackMemory(12, 4); const list_base = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(list_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_base) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_base) catch return error.OutOfMemory; // Store elements pointer (offset 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, data_base) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, data_base) catch return error.OutOfMemory; try self.emitStoreToMem(list_base, 0, .i32); // Store length (offset 4) - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elems.len)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elems.len)) catch return error.OutOfMemory; try self.emitStoreToMem(list_base, 4, .i32); // Store capacity (offset 8) — same as length for stack-allocated lists - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elems.len)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elems.len)) catch return error.OutOfMemory; try self.emitStoreToMem(list_base, 8, .i32); // Push pointer to the RocList struct - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_base) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_base) catch return error.OutOfMemory; } /// Generate a low-level operation. @@ -6827,7 +7183,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .bool_not => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; }, // Safe integer widenings (no-op or single instruction) @@ -6836,29 +7192,29 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); // If arg produces i64 (e.g. from i64_literal), wrap to i32 if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } }, .i8_to_i16, .i8_to_i32 => { // i8 is i32 in wasm, sign-extend from 8 bits try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; }, .u16_to_i32, .u16_to_u32 => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } }, .i16_to_i32 => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.i32_extend16_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend16_s) catch return error.OutOfMemory; }, // i32/u32 → i64/u64 @@ -6875,7 +7231,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Already i64 — no extension needed return; } - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; }, .i8_to_i64, .i16_to_i64, .i32_to_i64 => { try self.generateExpr(args[0]); @@ -6884,13 +7240,13 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Already i64 — no extension needed return; } - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; }, // Narrowing/wrapping conversions .i64_to_i32_wrap, .u64_to_u32_wrap, .u64_to_i32_wrap, .i64_to_u32_wrap => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; }, .i32_to_i8_wrap, .u32_to_u8_wrap, @@ -6909,12 +7265,12 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // May need to wrap i64 to i32 first const arg_vt = self.exprValType(args[0]); if (arg_vt == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } // Mask to 8 bits - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0xFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0xFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; }, .i32_to_i16_wrap, .u32_to_u16_wrap, @@ -6928,12 +7284,12 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); const arg_vt = self.exprValType(args[0]); if (arg_vt == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } // Mask to 16 bits - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0xFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0xFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; }, .i32_to_u32_wrap, .u32_to_i32_wrap, @@ -6953,42 +7309,42 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Signed sub-i32 to unsigned wider wrapping (needs sign extension) .i8_to_u32_wrap => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; }, .i16_to_u32_wrap => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_extend16_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend16_s) catch return error.OutOfMemory; }, .i8_to_u16_wrap => { try self.generateExpr(args[0]); // Sign-extend from 8 bits then mask to 16 bits - self.body.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0xFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0xFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; }, .i8_to_u64_wrap => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend8_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; }, .i16_to_u64_wrap => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.i32_extend16_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_extend16_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; }, .i32_to_u64_wrap => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; }, .i32_to_u128_wrap => { // Signed i32→u128 wrap: sign-extend to i64, then to i128 @@ -6996,7 +7352,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { if (self.exprValType(args[0]) == .i64) { // Already i64 } else { - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; } try self.emitIntToI128(true); }, @@ -7005,36 +7361,36 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 8, 8); // Check: val >= 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // Ok path: sign-extend i32 to i64, store payload - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; try self.emitStoreOp(.i64, 0); // Set discriminant = 1 (Ok) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 8); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i32_to_u128_try => { // Signed i32 → unsigned u128: check >= 0, then sign-extend to i128 try self.generateExpr(args[0]); // Sign-extend to i64 first if (self.exprValType(args[0]) != .i64) { - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; } try self.emitIntToI128(true); try self.emitI128TryToU128(true); @@ -7043,91 +7399,91 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Float conversions .f32_to_f64 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; }, .f64_to_f32_wrap => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; }, // Int to float .i32_to_f32, .i8_to_f32, .i16_to_f32 => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.f32_convert_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_convert_i32_s) catch return error.OutOfMemory; }, .u32_to_f32, .u8_to_f32, .u16_to_f32 => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.f32_convert_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_convert_i32_u) catch return error.OutOfMemory; }, .i32_to_f64, .i8_to_f64, .i16_to_f64 => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.f64_convert_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i32_s) catch return error.OutOfMemory; }, .u32_to_f64, .u8_to_f64, .u16_to_f64 => { try self.generateExpr(args[0]); if (self.exprValType(args[0]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.f64_convert_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i32_u) catch return error.OutOfMemory; }, .i64_to_f32 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f32_convert_i64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_convert_i64_s) catch return error.OutOfMemory; }, .u64_to_f32 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f32_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_convert_i64_u) catch return error.OutOfMemory; }, .i64_to_f64 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; }, .u64_to_f64 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; }, // Float to int (truncating) .f32_to_i32_trunc, .f32_to_i8_trunc, .f32_to_i16_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_trunc_f32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_trunc_f32_s) catch return error.OutOfMemory; }, .f32_to_u32_trunc, .f32_to_u8_trunc, .f32_to_u16_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_trunc_f32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_trunc_f32_u) catch return error.OutOfMemory; }, .f64_to_i32_trunc, .f64_to_i8_trunc, .f64_to_i16_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_trunc_f64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_trunc_f64_s) catch return error.OutOfMemory; }, .f64_to_u32_trunc, .f64_to_u8_trunc, .f64_to_u16_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_trunc_f64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_trunc_f64_u) catch return error.OutOfMemory; }, .f32_to_i64_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_trunc_f32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f32_s) catch return error.OutOfMemory; }, .f32_to_u64_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_trunc_f32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f32_u) catch return error.OutOfMemory; }, .f64_to_i64_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_trunc_f64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f64_s) catch return error.OutOfMemory; }, .f64_to_u64_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_trunc_f64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_trunc_f64_u) catch return error.OutOfMemory; }, // Float math functions (direct wasm opcodes) @@ -7139,7 +7495,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .f64 => Op.f64_sqrt, .i32, .i64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_floor => { try self.generateExpr(args[0]); @@ -7149,7 +7505,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .f64 => Op.f64_floor, .i32, .i64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_ceiling => { try self.generateExpr(args[0]); @@ -7159,7 +7515,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .f64 => Op.f64_ceil, .i32, .i64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_round => { try self.generateExpr(args[0]); @@ -7169,7 +7525,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .f64 => Op.f64_nearest, .i32, .i64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, // Modulo (integer only — float mod not yet supported) @@ -7186,7 +7542,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // If ret_layout expects i64, extend const ret_vt = self.resolveValType(ll.ret_layout); if (ret_vt == .i64) { - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; } }, .list_get_unsafe => { @@ -7218,17 +7574,17 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Dec literals are scaled by 10^18. Convert back to integer. const one_point_zero: i128 = 1_000_000_000_000_000_000; const actual: i32 = if (v == 0) 0 else @intCast(@divExact(v, one_point_zero)); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, actual) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, actual) catch return error.OutOfMemory; }, .i64_literal => |v| { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(v.value)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(v.value)) catch return error.OutOfMemory; }, else => { try self.generateExpr(args[1]); if (self.exprValType(args[1]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } }, } @@ -7244,10 +7600,10 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.emitLocalGet(list_local); try self.emitLoadOp(.i32, 0); try self.emitLocalGet(index_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; if (!elem_is_composite) { try self.emitLoadOpForLayout(elem_layout_idx, 0); @@ -7296,28 +7652,28 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // list_last(list) -> elem (loads last element) try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; // Load elements_ptr (offset 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); // Load length (offset 4) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); // Compute address: elements_ptr + (len-1) * elem_size - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; const ret_byte_size = self.layoutStorageByteSize(ll.ret_layout); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(ret_byte_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(ret_byte_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; // Load last element if (self.isCompositeLayout(ll.ret_layout)) { @@ -7333,178 +7689,178 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // No allocation needed — returns a view try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.generateExpr(args[1]); if (self.exprValType(args[1]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } const count_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; // Allocate result RocList (12 bytes) const result_offset = try self.allocStackMemory(12, 4); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Get element size from ret_layout (which is the list layout, not elem) const elem_size = self.getListElemSize(ll.ret_layout); // new_ptr = old_ptr + count * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); // new_len = old_len - count - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 4); // Encode seamless-slice cap from the source allocation pointer. const encoded_cap = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitPrepareListSliceMetadata(list_local, self.listContainsRefcounted(ll.ret_layout), encoded_cap); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, encoded_cap) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, encoded_cap) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, .list_drop_last => { // list_drop_last(list, count) -> list // Returns a RocList with adjusted length (pointer stays same) try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.generateExpr(args[1]); if (self.exprValType(args[1]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } const count_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; // Allocate result RocList (12 bytes) const result_offset = try self.allocStackMemory(12, 4); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Same elements_ptr - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); try self.emitStoreOp(.i32, 0); // new_len = old_len - count - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 4); // Same capacity - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 8); try self.emitStoreOp(.i32, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, .list_take_first => { // list_take_first(list, count) -> list // Same as list but with length = min(count, len) try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.generateExpr(args[1]); if (self.exprValType(args[1]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } const count_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(12, 4); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Same elements_ptr - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); try self.emitStoreOp(.i32, 0); // new_len = min(count, old_len) using select // Stack: count, old_len, count <= old_len - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.select) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.select) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 4); // Same capacity - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 8); try self.emitStoreOp(.i32, 8); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, .list_take_last => { // list_take_last(list, count) -> list @@ -7512,84 +7868,84 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // length = min(count, len) try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.generateExpr(args[1]); if (self.exprValType(args[1]) == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } const count_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; // Load length - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); const len_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; // actual_count = min(count, len) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, count_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.select) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, count_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.select) catch return error.OutOfMemory; const actual_count = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_count) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_count) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(12, 4); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; const elem_size = self.getListElemSize(ll.ret_layout); // new_ptr = old_ptr + (len - actual_count) * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_count) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_count) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); // new_len = actual_count - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_count) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_count) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 4); // Encode seamless-slice cap from the source allocation pointer. const encoded_cap = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitPrepareListSliceMetadata(list_local, self.listContainsRefcounted(ll.ret_layout), encoded_cap); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, encoded_cap) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, encoded_cap) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 8); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, .list_contains => blk: { @@ -7597,8 +7953,8 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Linear scan through list elements using layout-aware equality. try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; const ls = self.getLayoutStore(); const list_layout_idx = self.exprLayoutIdx(args[0]); @@ -7607,12 +7963,12 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .list => ls.getListInfo(list_layout), .list_of_zst => { // contains for ZST elements is true iff the list is non-empty - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; break :blk; }, else => unreachable, @@ -7636,14 +7992,14 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.emitLocalGet(dst_local); } - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, needle_ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, needle_ptr_local) catch return error.OutOfMemory; } else { try self.generateExpr(args[1]); const needle_vt = self.exprValType(args[1]); const needle_tmp = self.storage.allocAnonymousLocal(needle_vt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, needle_tmp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, needle_tmp) catch return error.OutOfMemory; const alignment: u32 = if (elem_byte_size >= 8) 8 else if (elem_byte_size >= 4) 4 else if (elem_byte_size >= 2) 2 else 1; const needle_offset = try self.allocStackMemory(elem_byte_size, alignment); @@ -7654,101 +8010,101 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.emitLocalGet(needle_tmp); try self.emitStoreOpSized(needle_vt, elem_byte_size, 0); try self.emitLocalGet(needle_addr_tmp); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, needle_ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, needle_ptr_local) catch return error.OutOfMemory; } // Load list ptr and len - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); const ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); const len_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; // result = 0 (not found) const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // idx = 0 const idx_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; // block { loop { - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // if idx >= len: br 1 (exit block) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // Load element at ptr + idx * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_byte_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_byte_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; const elem_ptr_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, elem_ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, elem_ptr_local) catch return error.OutOfMemory; // Compare with needle using layout-aware equality try self.compareFieldByLayout(elem_ptr_local, needle_ptr_local, 0, elem_byte_size, elem_layout_idx); // if equal: set result = 1, br 1 (exit) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // idx += 1 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, idx_local) catch return error.OutOfMemory; // br 0 (continue loop) - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // } } end loop, end block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Push result - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, .list_append_unsafe => { @@ -7831,124 +8187,124 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Generate list arg (pointer to {data_ptr, len, capacity}) try self.generateExpr(args[0]); const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; // Generate record arg (pointer to the config record) try self.generateExpr(args[1]); const rec_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rec_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rec_local) catch return error.OutOfMemory; // Load "len" field by original semantic index, wrap to i32 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rec_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rec_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, len_field_off); - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; const sub_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sub_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sub_len) catch return error.OutOfMemory; // Load "start" field by original semantic index, wrap to i32 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rec_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rec_local) catch return error.OutOfMemory; try self.emitLoadOp(.i64, start_field_off); - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; const start_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, start_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, start_local) catch return error.OutOfMemory; // Load old_len from list - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_len) catch return error.OutOfMemory; // actual_start = min(start, old_len) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, start_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_len) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, start_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_len) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.select) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, start_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, start_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.select) catch return error.OutOfMemory; const actual_start = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_start) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_start) catch return error.OutOfMemory; // remaining = old_len - actual_start - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, old_len) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_start) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, old_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_start) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; const remaining = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, remaining) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, remaining) catch return error.OutOfMemory; // actual_len = min(sub_len, remaining) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sub_len) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, remaining) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sub_len) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, remaining) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.select) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sub_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, remaining) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sub_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, remaining) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.select) catch return error.OutOfMemory; const actual_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_len) catch return error.OutOfMemory; // Allocate result RocList (12 bytes) const result_offset = try self.allocStackMemory(12, 4); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; const elem_size = self.getListElemSize(ll.ret_layout); // new_ptr = old_ptr + actual_start * elem_size - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, list_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, list_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 0); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_start) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_start) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 0); // new_len = actual_len - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, actual_len) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, actual_len) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 4); // Encode seamless-slice cap from the source allocation pointer. const encoded_cap = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitPrepareListSliceMetadata(list_local, self.listContainsRefcounted(ll.ret_layout), encoded_cap); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, encoded_cap) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, encoded_cap) catch return error.OutOfMemory; try self.emitStoreOp(.i32, 8); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, .str_count_utf8_bytes => { @@ -7958,59 +8314,57 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // For heap: length at offset 4 // We use the simplified approach: load byte 11, check SSO bit const str_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; // Load byte 11 (SSO tag byte) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; try self.emitLoadOpSized(.i32, 1, 11); const tag_byte = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, tag_byte) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, tag_byte) catch return error.OutOfMemory; // Check if SSO: high bit set - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x80) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x80) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; // if SSO: len = tag_byte & 0x7F; else: len = load i32 from offset 4 - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(ValType.i32)) catch return error.OutOfMemory; // SSO path - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, tag_byte) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x7F) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, tag_byte) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x7F) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; // Heap path - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; try self.emitLoadOp(.i32, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Result is length as i32. If ret_layout expects i64, extend. const ret_vt = self.resolveValType(ll.ret_layout); if (ret_vt == .i64) { - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; } }, .str_is_eq => { - // String equality via host function (handles both SSO and heap strings) - const import_idx = self.str_eq_import orelse unreachable; + // String equality via merged builtin (handles both SSO and heap strings) + const import_idx = self.builtin_syms.str_equal; try self.generateExpr(args[0]); const a = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(a); try self.generateExpr(args[1]); const b = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(b); - // Push both pointers and call host function - try self.emitLocalGet(a); - try self.emitLocalGet(b); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitPtrLenCapArgs(a); + try self.emitPtrLenCapArgs(b); + try self.emitCallBuiltin(import_idx); }, .str_concat => { // LowLevel str_concat: concatenate 2 strings @@ -8033,7 +8387,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { const total = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(total); // Allocate buffer @@ -8043,8 +8397,8 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Copy a bytes at offset 0 const zero = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero); try self.emitMemCopyLoop(buf, zero, a_ptr, a_len); @@ -8078,28 +8432,28 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .str_release_excess_capacity, => { const import_idx = switch (ll.op) { - .str_trim => self.str_trim_import orelse unreachable, - .str_trim_start => self.str_trim_start_import orelse unreachable, - .str_trim_end => self.str_trim_end_import orelse unreachable, - .str_with_ascii_lowercased => self.str_with_ascii_lowercased_import orelse unreachable, - .str_with_ascii_uppercased => self.str_with_ascii_uppercased_import orelse unreachable, - .str_release_excess_capacity => self.str_release_excess_capacity_import orelse unreachable, + .str_trim => self.builtin_syms.str_trim, + .str_trim_start => self.builtin_syms.str_trim_start, + .str_trim_end => self.builtin_syms.str_trim_end, + .str_with_ascii_lowercased => self.builtin_syms.str_with_ascii_lowercased, + .str_with_ascii_uppercased => self.builtin_syms.str_with_ascii_uppercased, + .str_release_excess_capacity => self.builtin_syms.str_release_excess_capacity, else => unreachable, }; try self.generateExpr(args[0]); const input = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(input); - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(input); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(input); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); }, .str_drop_prefix, .str_drop_suffix => { const import_idx = switch (ll.op) { - .str_drop_prefix => self.str_drop_prefix_import orelse unreachable, - .str_drop_suffix => self.str_drop_suffix_import orelse unreachable, + .str_drop_prefix => self.builtin_syms.str_drop_prefix, + .str_drop_suffix => self.builtin_syms.str_drop_suffix, else => unreachable, }; try self.generateExpr(args[0]); @@ -8108,38 +8462,50 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[1]); const b = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(b); - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(a); - try self.emitLocalGet(b); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(a); + try self.emitPtrLenCapArgs(b); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); }, - .str_split_on, .str_join_with => { - const import_idx = switch (ll.op) { - .str_split_on => self.str_split_import orelse unreachable, - .str_join_with => self.str_join_with_import orelse unreachable, - else => unreachable, - }; + .str_split_on => { + const import_idx = self.builtin_syms.str_split; try self.generateExpr(args[0]); const a = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(a); try self.generateExpr(args[1]); const b = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(b); - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(a); - try self.emitLocalGet(b); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(a); + try self.emitPtrLenCapArgs(b); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); + }, + .str_join_with => { + const import_idx = self.builtin_syms.str_join_with; + try self.generateExpr(args[0]); + const list_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalSet(list_local); + try self.generateExpr(args[1]); + const sep_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalSet(sep_local); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(list_local); + try self.emitPtrLenCapArgs(sep_local); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); }, .str_repeat, .str_reserve => { const import_idx = switch (ll.op) { - .str_repeat => self.str_repeat_import orelse unreachable, - .str_reserve => self.str_reserve_import orelse unreachable, + .str_repeat => self.builtin_syms.str_repeat, + .str_reserve => self.builtin_syms.str_reserve, else => unreachable, }; try self.generateExpr(args[0]); @@ -8147,67 +8513,132 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.emitLocalSet(str_local); try self.generateExpr(args[1]); const int_vt = self.exprValType(args[1]); - if (int_vt == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + if (int_vt == .i32) { + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; } - const int_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + const int_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; try self.emitLocalSet(int_local); - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(str_local); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(str_local); try self.emitLocalGet(int_local); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); }, .str_with_capacity => { - const import_idx = self.str_with_capacity_import orelse unreachable; + const import_idx = self.builtin_syms.str_with_capacity; try self.generateExpr(args[0]); const int_vt = self.exprValType(args[0]); - if (int_vt == .i64) { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + if (int_vt == .i32) { + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; } - const int_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + const int_local = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; try self.emitLocalSet(int_local); - const result_offset = try self.allocStackMemory(12, 4); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); try self.emitLocalGet(int_local); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); }, .str_caseless_ascii_equals => { - const import_idx = self.str_caseless_ascii_equals_import orelse unreachable; + const import_idx = self.builtin_syms.str_caseless_ascii_equals; try self.generateExpr(args[0]); const a = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(a); try self.generateExpr(args[1]); const b = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(b); - try self.emitLocalGet(a); - try self.emitLocalGet(b); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitPtrLenCapArgs(a); + try self.emitPtrLenCapArgs(b); + try self.emitCallBuiltin(import_idx); }, .str_from_utf8 => { + // str_from_utf8(List U8) -> Result Str [BadUtf8 {byte_index: U64, problem: Utf8ByteProblem}] + // + // The C builtin writes FromUtf8Try layout (wasm32): + // byte_index: u64 @0, string: RocStr @8 (12 bytes), is_ok: bool @20, problem_code: u8 @21 + // The Roc tag union layout is: + // Ok(1): Str (12 bytes) @0, discriminant @disc_offset + // Err(0): byte_index: u64 @0, problem: u8 @8, discriminant @disc_offset + // + // We call the builtin into a temp buffer, then convert to tag union layout. const ls = self.getLayoutStore(); const ret_layout_val = ls.getLayout(ll.ret_layout); if (ret_layout_val.tag != .tag_union) unreachable; const tu_data = ls.getTagUnionData(ret_layout_val.data.tag_union.idx); - const import_idx = self.str_from_utf8_import orelse unreachable; + const disc_offset: u32 = tu_data.discriminant_offset; + const disc_size: u32 = tu_data.discriminant_size; + const import_idx = self.builtin_syms.str_from_utf8; + try self.generateExpr(args[0]); const input = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(input); - const result_offset = try self.allocStackMemory(tu_data.size, 4); - try self.emitLocalGet(input); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(tu_data.size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(tu_data.discriminant_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + + // Allocate temp buffer for FromUtf8Try (24 bytes on wasm32) + const raw_local = try self.allocStackResultPtr(24, 8); + + // Call: roc_builtins_str_from_utf8(raw_ptr, list_bytes, list_len, list_cap, roc_ops) + try self.emitLocalGet(raw_local); + try self.emitPtrLenCapArgs(input); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + + // Allocate tag union result buffer + const result_local = try self.allocStackResultPtr(tu_data.size, self.layoutByteAlign(ll.ret_layout)); + + // Read is_ok from raw buffer (offset 20 on wasm32) + try self.emitLocalGet(raw_local); + try self.emitLoadOpSized(.i32, 1, 20); + + // if (is_ok) + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + + // OK branch: copy RocStr (12 bytes) from raw+8 to result+0 + // Copy bytes ptr (4 bytes) + try self.emitLocalGet(result_local); + try self.emitLocalGet(raw_local); + try self.emitLoadOp(.i32, 8); + try self.emitStoreOp(.i32, 0); + // Copy length (4 bytes) + try self.emitLocalGet(result_local); + try self.emitLocalGet(raw_local); + try self.emitLoadOp(.i32, 12); + try self.emitStoreOp(.i32, 4); + // Copy capacity (4 bytes) + try self.emitLocalGet(result_local); + try self.emitLocalGet(raw_local); + try self.emitLoadOp(.i32, 16); + try self.emitStoreOp(.i32, 8); + // Write discriminant = 1 (Ok) + try self.emitLocalGet(result_local); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + try self.emitStoreOpSized(.i32, disc_size, disc_offset); + + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + + // ERR branch: copy byte_index (8 bytes) from raw+0 to result+0 + try self.emitLocalGet(result_local); + try self.emitLocalGet(raw_local); + try self.emitLoadOp(.i64, 0); + try self.emitStoreOp(.i64, 0); + // Copy problem_code (1 byte) from raw+21 to result+8 + try self.emitLocalGet(result_local); + try self.emitLocalGet(raw_local); + try self.emitLoadOpSized(.i32, 1, 21); + try self.emitStoreOpSized(.i32, 1, 8); + // Write discriminant = 0 (Err) + try self.emitLocalGet(result_local); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + try self.emitStoreOpSized(.i32, disc_size, disc_offset); + + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + + try self.emitLocalGet(result_local); }, .num_from_str => { const ls = self.getLayoutStore(); @@ -8215,7 +8646,6 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { if (ret_layout_val.tag != .tag_union) unreachable; const tu_data = ls.getTagUnionData(ret_layout_val.data.tag_union.idx); const disc_offset: u32 = tu_data.discriminant_offset; - const result_offset = try self.allocStackMemory(tu_data.size, 4); var ok_payload_idx: ?layout.Idx = null; const variants = ls.getTagUnionVariants(tu_data); @@ -8247,28 +8677,27 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); const input = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(input); + const result_local = try self.allocStackResultPtr(tu_data.size, self.layoutByteAlign(ll.ret_layout)); if (ok_payload == .dec) { - const import_idx = self.dec_from_str_import orelse unreachable; - try self.emitLocalGet(input); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(disc_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.dec_from_str; + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(input); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(disc_offset)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } else if (ok_payload == .f32 or ok_payload == .f64) { - const import_idx = self.float_from_str_import orelse unreachable; + const import_idx = self.builtin_syms.float_from_str; const float_width: i32 = if (ok_payload == .f32) 4 else 8; - try self.emitLocalGet(input); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, float_width) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(disc_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(input); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, float_width) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(disc_offset)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } else { - const import_idx = self.int_from_str_import orelse unreachable; + const import_idx = self.builtin_syms.int_from_str; const int_width: i32 = switch (ok_payload) { .u8, .i8 => 1, .u16, .i16 => 2, @@ -8282,19 +8711,18 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { .u8, .u16, .u32, .u64, .u128 => 0, else => unreachable, }; - try self.emitLocalGet(input); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, int_width) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, is_signed) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(disc_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(input); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, int_width) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, is_signed) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(disc_offset)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } - try self.emitFpOffset(result_offset); + try self.emitLocalGet(result_local); }, .str_inspect, @@ -8347,63 +8775,63 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { while (offset + 4 <= value_size) : (offset += 4) { try self.emitLocalGet(box_ptr); try self.emitLocalGet(src_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; } while (offset < value_size) : (offset += 1) { try self.emitLocalGet(box_ptr); try self.emitLocalGet(src_ptr); - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, offset) catch return error.OutOfMemory; } } else { // Use a loop for larger values const i = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(i); - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // box_ptr[i] = src_ptr[i] try self.emitLocalGet(box_ptr); try self.emitLocalGet(i); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalGet(src_ptr); try self.emitLocalGet(i); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // i++ try self.emitLocalGet(i); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(i); // continue if i < size try self.emitLocalGet(i); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(value_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(value_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } } else { // Scalar type - store directly @@ -8454,100 +8882,100 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // gt_flag = (a > b) ? 1 : 0 const a = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const b = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; // gt_flag - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, if (is_unsigned) Op.i32_gt_u else Op.i32_gt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i32_gt_u else Op.i32_gt_s) catch return error.OutOfMemory; // lt_flag * 2 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, if (is_unsigned) Op.i32_lt_u else Op.i32_lt_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i32_lt_u else Op.i32_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; // result = gt_flag + lt_flag * 2 - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; }, .i64 => { const a = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; const b = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, if (is_unsigned) Op.i64_gt_u else Op.i64_gt_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, if (is_unsigned) Op.i64_lt_u else Op.i64_lt_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i64_gt_u else Op.i64_gt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i64_lt_u else Op.i64_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; }, .f32 => { const a = self.storage.allocAnonymousLocal(.f32) catch return error.OutOfMemory; const b = self.storage.allocAnonymousLocal(.f32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_gt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_lt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_gt) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_lt) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; }, .f64 => { const a = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; const b = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_gt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, a) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, b) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_lt) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_gt) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, a) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, b) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_lt) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; }, } }, // Crash .crash => { - self.body.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"unreachable") catch return error.OutOfMemory; }, // Integer try conversions — return Result(TargetInt, {}) tag union @@ -8557,183 +8985,183 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 1, 1); // Check: val >= -128 && val <= 127 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -128) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 127) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -128) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 127) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u8_to_i8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 1, 1); // u8 → i8: check val <= 127 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 127) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 127) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // Narrowing to u8 .i32_to_u8_try, .i16_to_u8_try, .u16_to_u8_try, .i8_to_u8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 255) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 255) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // Narrowing to i16 .i32_to_i16_try, .u32_to_i16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, -32768) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 32767) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, -32768) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 32767) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u16_to_i16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 32767) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 32767) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // Narrowing to u16 .i32_to_u16_try, .u32_to_u16_try, .i16_to_u16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 65535) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 65535) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // i32 <-> u32 try .i32_to_u32_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 4, 4); // i32 → u32: check val >= 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 4, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u32_to_i32_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 4, 4); // u32 → i32: check high bit is 0 (val <= 0x7FFFFFFF) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 4, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u32_to_i8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 127) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 127) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u32_to_u8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i32, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 255) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 255) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // i8/i16 → unsigned wider types (always succeed since value fits — but need sign check) .i8_to_u16_try, @@ -8749,283 +9177,283 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { const disc_offset: u32 = payload_size; if (target_is_i64) { // Extend to i64 first - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; const r = try self.emitIntTryResult(.i64, payload_size, disc_offset); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, payload_size, disc_offset); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; } else { const r = try self.emitIntTryResult(.i32, payload_size, disc_offset); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i32, payload_size, disc_offset); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; } }, // i64 → narrowing try conversions .i64_to_i8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, -128) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 127) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, -128) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 127) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i64_to_i16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, -32768) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32767) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, -32768) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32767) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i64_to_i32_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 4, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, -2147483648) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 2147483647) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, -2147483648) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 2147483647) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 4, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i64_to_u8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 255) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 255) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i64_to_u16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 65535) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 65535) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i64_to_u32_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 4, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 4294967295) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 4294967295) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 4, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .i64_to_u64_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 8, 8); // i64 → u64: check val >= 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 8, 8); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // u64 → narrowing try conversions .u64_to_i8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 127) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 127) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u64_to_i16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 32767) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 32767) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u64_to_i32_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 4, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 2147483647) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 2147483647) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 4, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u64_to_i64_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 8, 8); // u64 → i64: check high bit is 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 8, 8); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u64_to_u8_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 1, 1); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 255) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 255) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 1, 1); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u64_to_u16_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 2, 2); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 65535) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 65535) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 2, 2); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, .u64_to_u32_try => { try self.generateExpr(args[0]); const r = try self.emitIntTryResult(.i64, 4, 4); - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.val_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 4294967295) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.val_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 4294967295) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitIntTryOk(r.result_local, r.val_local, .i64, 4, 4); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, r.result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, r.result_local) catch return error.OutOfMemory; }, // 128-bit try conversions: narrowing from i128/u128 to smaller types .i128_to_i8_try => { @@ -9109,7 +9537,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Convert to i128, then check >= 0 const src_vt = self.exprValType(args[0]); if (src_vt == .i32) { - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; } // Now we have i64 on stack. Convert to i128 first. try self.emitIntToI128(true); @@ -9127,7 +9555,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { => { // Unsigned i32→i128: zero-extend i32 to i64, then to i128 try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; try self.emitIntToI128(false); }, .u64_to_i128, @@ -9143,7 +9571,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { => { // Signed i32→i128: sign-extend i32 to i64, then to i128 try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; try self.emitIntToI128(true); }, .i64_to_i128, @@ -9157,7 +9585,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { => { // Signed i32→u128 wrap: sign-extend to i64, then i128 try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; try self.emitIntToI128(true); }, .i64_to_u128_wrap, @@ -9175,10 +9603,10 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); // Load low i64, wrap to i32, mask to 8 bits try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0xFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0xFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; }, .i128_to_i16_wrap, .i128_to_u16_wrap, @@ -9187,10 +9615,10 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { => { try self.generateExpr(args[0]); try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0xFFFF) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0xFFFF) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; }, .i128_to_i32_wrap, .i128_to_u32_wrap, @@ -9199,7 +9627,7 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { => { try self.generateExpr(args[0]); try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; }, .i128_to_i64_wrap, .i128_to_u64_wrap, @@ -9220,167 +9648,151 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Approximate: convert low u64 to f64 + high i64 * 2^64 try self.generateExpr(args[0]); const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; // high_f64 = (f64)high * 2^64 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; // 2^64 - self.body.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; // 2^64 + self.code_builder.code.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; // low_f64 = (f64)(u64)low - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; // result = high_f64 + low_f64 - self.body.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; }, .u128_to_f64 => { // Same as i128 but high word is unsigned try self.generateExpr(args[0]); const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; }, .i128_to_f32 => { // Convert via f64 then demote try self.generateExpr(args[0]); const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; }, .u128_to_f32 => { try self.generateExpr(args[0]); const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 8); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([8]u8, @bitCast(@as(f64, 18446744073709551616.0)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; }, // float → i128/u128 truncating conversions .f64_to_i128_trunc => { try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; try self.emitF64ToI128(val, result_local, true); }, .f64_to_u128_trunc => { try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; try self.emitF64ToI128(val, result_local, false); }, .f32_to_i128_trunc => { // Promote f32 to f64, then use f64_to_i128 logic try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; try self.emitF64ToI128(val, result_local, true); }, .f32_to_u128_trunc => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(16, 8); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; try self.emitF64ToI128(val, result_local, false); }, // 128-bit → Dec conversions: multiply by 10^18, check overflow .u128_to_dec_try_unsafe, .i128_to_dec_try_unsafe => { const is_signed = ll.op == .i128_to_dec_try_unsafe; - const import_idx = if (is_signed) self.i128_to_dec_import else self.u128_to_dec_import; + const import_idx = if (is_signed) self.builtin_syms.i128_to_dec else self.builtin_syms.u128_to_dec; // Generate the 128-bit value (pointer to 16 bytes in stack memory) try self.generateExpr(args[0]); const val_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(val_ptr); - // Allocate result: { dec: i128 (16 bytes), success: bool (1 byte) } - // Align to 8 for the i128 - const result_offset = try self.allocStackMemory(17, 8); - const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - try self.emitLocalSet(result_local); - - // Call host function: (val_ptr, result_ptr) -> i32 (success) - try self.emitLocalGet(val_ptr); + const result_local = try self.allocStackResultPtr( + self.layoutByteSize(ll.ret_layout), + self.layoutByteAlign(ll.ret_layout), + ); try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx orelse unreachable) catch return error.OutOfMemory; - - // Store success flag at offset 16 - const success_flag = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(success_flag); - try self.emitLocalGet(result_local); - try self.emitLocalGet(success_flag); - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 16) catch return error.OutOfMemory; // offset - - // Push result pointer + try self.emitI128AbiArgs(val_ptr); + try self.emitCallBuiltin(import_idx); try self.emitLocalGet(result_local); }, @@ -9389,155 +9801,156 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { // Unsigned small int → Dec: zero-extend to i64, multiply by 10^18 try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const dec_factor = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_factor) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_factor) catch return error.OutOfMemory; try self.emitI64MulToI128(val, dec_factor); }, .u64_to_dec => { // u64 → Dec: already i64 try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const dec_factor = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_factor) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_factor) catch return error.OutOfMemory; try self.emitI64MulToI128(val, dec_factor); }, .i8_to_dec, .i16_to_dec, .i32_to_dec => { // Signed small int → Dec: sign-extend to i64, multiply by 10^18 try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const dec_factor = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_factor) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_factor) catch return error.OutOfMemory; try self.emitI64MulToI128(val, dec_factor); }, .i64_to_dec => { // i64 → Dec: already i64 try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const dec_factor = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_factor) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_factor) catch return error.OutOfMemory; try self.emitI64MulToI128(val, dec_factor); }, // Dec → integer truncating conversions (divide by 10^18, truncate) - .dec_to_i64_trunc => { + .dec_to_i64_trunc, + .dec_to_i32_trunc, + .dec_to_i16_trunc, + .dec_to_i8_trunc, + .dec_to_u64_trunc, + .dec_to_u32_trunc, + .dec_to_u16_trunc, + .dec_to_u8_trunc, + => { // Dec → i64: load low i64, divide by 10^18 try self.generateExpr(args[0]); - // The Dec value is a pointer to 16-byte i128 - // For values that fit in i64, low word / 10^18 gives the result - // (with sign from high word already encoded in the i128 representation) - const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; - // Load full i128 as two i64 parts, reconstruct the signed value, - // then divide. For most Dec values (< 2^63), the low word suffices. - // We use the simpler approach: load low word, signed divide. - // This works for Dec values representing integers that fit in i64. - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; - try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - }, - .dec_to_i32_trunc => { - try self.generateExpr(args[0]); - try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; - }, - .dec_to_i16_trunc, .dec_to_i8_trunc => { - try self.generateExpr(args[0]); - try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; - // Mask to target size - const mask: i32 = if (ll.op == .dec_to_i8_trunc) 0xFF else 0xFFFF; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, mask) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - }, - .dec_to_u64_trunc => { - try self.generateExpr(args[0]); - try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - }, - .dec_to_u32_trunc => { - try self.generateExpr(args[0]); - try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; - }, - .dec_to_u16_trunc, .dec_to_u8_trunc => { - try self.generateExpr(args[0]); + const dec_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalSet(dec_local); + + // Store 10^18 as i128 constant in stack memory + const divisor_offset = try self.allocStackMemory(16, 8); + const divisor_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitFpOffset(divisor_offset); + try self.emitLocalSet(divisor_local); + // low word = 10^18 + try self.emitLocalGet(divisor_local); + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + try self.emitStoreOp(.i64, 0); + // high word = 0 + try self.emitLocalGet(divisor_local); + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + try self.emitStoreOp(.i64, 8); + + // Call roc_i128_div_s(dec_ptr, divisor_ptr, result_ptr) + try self.emitI128HostBinOp(dec_local, divisor_local, self.builtin_syms.i128_div_s, true); + // Result is an i32 pointer to the 16-byte quotient; load low i64 try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; - const mask: i32 = if (ll.op == .dec_to_u8_trunc) 0xFF else 0xFFFF; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, mask) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + + // Truncate to target size + switch (ll.op) { + .dec_to_i64_trunc, .dec_to_u64_trunc => {}, + .dec_to_i32_trunc, .dec_to_u32_trunc => { + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + }, + .dec_to_i16_trunc, .dec_to_i8_trunc, .dec_to_u16_trunc, .dec_to_u8_trunc => { + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + const mask: i32 = switch (ll.op) { + .dec_to_i8_trunc, .dec_to_u8_trunc => 0xFF, + .dec_to_i16_trunc, .dec_to_u16_trunc => 0xFFFF, + else => unreachable, + }; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, mask) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + }, + else => unreachable, + } }, .dec_to_i128_trunc, .dec_to_u128_trunc => { // Dec → i128/u128: divide i128 by 10^18 try self.generateExpr(args[0]); - const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; - try self.emitI128DivByConst(src, 1_000_000_000_000_000_000); + const dec_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitLocalSet(dec_local); + + // Store 10^18 as i128 constant in stack memory + const divisor_offset = try self.allocStackMemory(16, 8); + const divisor_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; + try self.emitFpOffset(divisor_offset); + try self.emitLocalSet(divisor_local); + try self.emitLocalGet(divisor_local); + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + try self.emitStoreOp(.i64, 0); + try self.emitLocalGet(divisor_local); + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + try self.emitStoreOp(.i64, 8); + + try self.emitI128HostBinOp(dec_local, divisor_local, self.builtin_syms.i128_div_s, true); }, .dec_to_f64 => { // Dec → f64: load i128 as i64 (low word), convert to f64, divide by 10^18.0 try self.generateExpr(args[0]); try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; // 10^18 as f64 bytes (IEEE 754 double for 1e18) const dec_f64_bytes = @as([8]u8, @bitCast(@as(f64, 1_000_000_000_000_000_000.0))); - self.body.appendSlice(self.allocator, &dec_f64_bytes) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_div) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &dec_f64_bytes) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_div) catch return error.OutOfMemory; }, .dec_to_f32_wrap => { // Dec → f32: same approach as f64, then demote try self.generateExpr(args[0]); try self.emitLoadOp(.i64, 0); - self.body.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_convert_i64_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_const) catch return error.OutOfMemory; const dec_f64_bytes = @as([8]u8, @bitCast(@as(f64, 1_000_000_000_000_000_000.0))); - self.body.appendSlice(self.allocator, &dec_f64_bytes) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_div) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &dec_f64_bytes) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_div) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; }, // Dec try_unsafe conversions — return {val, is_int, in_range} record // Dec is i128 (fixed-point × 10^18). Check if remainder is 0 (is_int), @@ -9554,37 +9967,37 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { try self.generateExpr(args[0]); // Dec value is a pointer to 16-byte i128 const src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; // Load low i64 word - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src) catch return error.OutOfMemory; try self.emitLoadOp(.i64, 0); const dec_low = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_low) catch return error.OutOfMemory; // is_int = (dec_low % 10^18) == 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_rem_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_rem_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_eqz) catch return error.OutOfMemory; const is_int = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, is_int) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, is_int) catch return error.OutOfMemory; // int_val = dec_low / 10^18 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dec_low) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 1_000_000_000_000_000_000) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dec_low) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 1_000_000_000_000_000_000) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_div_s) catch return error.OutOfMemory; const int_val = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, int_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, int_val) catch return error.OutOfMemory; // Determine target range and value size const TryInfo = struct { val_size: u32, is_i64: bool, min_i: i64, max_i: i64 }; @@ -9601,20 +10014,20 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { }; // in_range = int_val >= min && int_val <= max - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, int_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, info.min_i) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, int_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, info.max_i) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, int_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, info.min_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, int_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, info.max_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_le_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; const in_range = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, in_range) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, in_range) catch return error.OutOfMemory; // Allocate result record const total_size: u32 = if (info.is_i64) 16 else 8; @@ -9622,249 +10035,209 @@ fn generateLowLevel(self: *Self, ll: anytype) Allocator.Error!void { const result_offset = try self.allocStackMemory(total_size, alignment); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Store value - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, int_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, int_val) catch return error.OutOfMemory; if (info.is_i64) { try self.emitStoreOp(.i64, 0); } else { - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, info.val_size, 0); } // Store is_int - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, is_int) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, is_int) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, info.val_size); // Store in_range - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, in_range) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, in_range) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, info.val_size + 1); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, // Dec → i128/u128: divide by 10^18 .dec_to_i128_try_unsafe, .dec_to_u128_try_unsafe => { - const is_signed = ll.op == .dec_to_i128_try_unsafe; - const import_idx = if (is_signed) self.dec_to_i128_import else self.dec_to_u128_import; + const import_idx = self.builtin_syms.dec_to_int_try_unsafe; + const is_signed: i32 = @intFromBool(ll.op == .dec_to_i128_try_unsafe); // Generate the Dec value (pointer to 16 bytes in stack memory) try self.generateExpr(args[0]); const val_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(val_ptr); - // Allocate result: { value: i128/u128 (16 bytes), success: bool (1 byte) } - const result_offset = try self.allocStackMemory(17, 8); - const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - try self.emitLocalSet(result_local); - - // Call host function: (val_ptr, result_ptr) -> i32 (success) - try self.emitLocalGet(val_ptr); - try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx orelse unreachable) catch return error.OutOfMemory; - - // Store success flag at offset 16 - const success_flag = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(success_flag); + const result_local = try self.allocStackResultPtr( + self.layoutByteSize(ll.ret_layout), + self.layoutByteAlign(ll.ret_layout), + ); try self.emitLocalGet(result_local); - try self.emitLocalGet(success_flag); - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 16) catch return error.OutOfMemory; // offset - - // Push result pointer + try self.emitI128AbiArgs(val_ptr); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 128) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, is_signed) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 16) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); try self.emitLocalGet(result_local); }, // Dec → f32: convert Dec to floating point .dec_to_f32_try_unsafe => { - const import_idx = self.dec_to_f32_import orelse unreachable; + const import_idx = self.builtin_syms.dec_to_f32; // Generate the Dec value (pointer to 16 bytes in stack memory) try self.generateExpr(args[0]); const val_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(val_ptr); - // Allocate result: { value: f32 (4 bytes), is_int: bool (1 byte), in_range: bool (1 byte) } - // Total 6 bytes, align to 4 - const result_offset = try self.allocStackMemory(8, 4); - const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - try self.emitLocalSet(result_local); - - // Call host function: (val_ptr) -> f32 - try self.emitLocalGet(val_ptr); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - const f32_val = self.storage.allocAnonymousLocal(.f32) catch return error.OutOfMemory; - try self.emitLocalSet(f32_val); - - // Store f32 value at offset 0 - try self.emitLocalGet(result_local); - try self.emitLocalGet(f32_val); - self.body.append(self.allocator, Op.f32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // offset - - // Store is_int = 1 at offset 4 (Dec values converted to f32 are always considered valid) - try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; // offset - - // Store in_range = 1 at offset 5 (Dec always in f32 range for practical values) + const result_local = try self.allocStackResultPtr( + self.layoutByteSize(ll.ret_layout), + self.layoutByteAlign(ll.ret_layout), + ); try self.emitLocalGet(result_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 5) catch return error.OutOfMemory; // offset - - // Push result pointer + try self.emitI128AbiArgs(val_ptr); + try self.emitCallBuiltin(import_idx); try self.emitLocalGet(result_local); }, // Float try_unsafe conversions — return {val, is_int, in_range} record .f32_to_i8_try_unsafe, .f64_to_i8_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_i8_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_i8_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(1, false, -128.0, 127.0); }, .f32_to_u8_try_unsafe, .f64_to_u8_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_u8_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_u8_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(1, false, 0.0, 255.0); }, .f32_to_i16_try_unsafe, .f64_to_i16_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_i16_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_i16_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(2, false, -32768.0, 32767.0); }, .f32_to_u16_try_unsafe, .f64_to_u16_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_u16_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_u16_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(2, false, 0.0, 65535.0); }, .f32_to_i32_try_unsafe, .f64_to_i32_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_i32_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_i32_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(4, false, -2147483648.0, 2147483647.0); }, .f32_to_u32_try_unsafe, .f64_to_u32_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_u32_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_u32_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(4, false, 0.0, 4294967295.0); }, .f32_to_i64_try_unsafe, .f64_to_i64_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_i64_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_i64_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(8, true, @as(f64, @floatFromInt(@as(i64, std.math.minInt(i64)))), @as(f64, @floatFromInt(@as(i64, std.math.maxInt(i64))))); }, .f32_to_u64_try_unsafe, .f64_to_u64_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_u64_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_u64_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToIntTryUnsafe(8, true, 0.0, @as(f64, @floatFromInt(@as(u64, std.math.maxInt(u64))))); }, // 128-bit float try_unsafe: return {val: i128, is_int: bool, in_range: bool} .f32_to_i128_try_unsafe, .f64_to_i128_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_i128_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_i128_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToI128TryUnsafe(true); }, .f32_to_u128_try_unsafe, .f64_to_u128_try_unsafe => { try self.generateExpr(args[0]); - if (ll.op == .f32_to_u128_try_unsafe) self.body.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; + if (ll.op == .f32_to_u128_try_unsafe) self.code_builder.code.append(self.allocator, Op.f64_promote_f32) catch return error.OutOfMemory; try self.emitFloatToI128TryUnsafe(false); }, .f64_to_f32_try_unsafe => { // Returns {val: F32, success: Bool} — 8 bytes try self.generateExpr(args[0]); const val = self.storage.allocAnonymousLocal(.f64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; const result_offset = try self.allocStackMemory(8, 4); const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // Convert f64 to f32 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_demote_f64) catch return error.OutOfMemory; const f32_val = self.storage.allocAnonymousLocal(.f32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, f32_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, f32_val) catch return error.OutOfMemory; // Store f32 at offset 0 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, f32_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, f32_val) catch return error.OutOfMemory; try self.emitStoreOp(.f32, 0); // success = !isInf(f32_val) && (!isNaN(val) || isNaN(f32_val)) // not_inf = abs(f32_val) != inf - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, f32_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_abs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; - self.body.appendSlice(self.allocator, &@as([4]u8, @bitCast(std.math.inf(f32)))) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, f32_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_const) catch return error.OutOfMemory; + self.code_builder.code.appendSlice(self.allocator, &@as([4]u8, @bitCast(std.math.inf(f32)))) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_ne) catch return error.OutOfMemory; // is_not_nan = (val == val) (NaN != NaN) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_eq) catch return error.OutOfMemory; // is_nan_f32 = (f32_val != f32_val) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, f32_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, f32_val) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, f32_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, f32_val) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_ne) catch return error.OutOfMemory; // is_not_nan OR is_nan_f32 - self.body.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; // not_inf AND (is_not_nan OR is_nan_f32) - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; // Store success at offset 4 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; // swap: need [addr, val] for store const success = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, success) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, success) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, success) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, success) catch return error.OutOfMemory; try self.emitStoreOpSized(.i32, 1, 4); // Push result pointer - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, result_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, result_local) catch return error.OutOfMemory; }, } } @@ -9901,7 +10274,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_add, .f64 => Op.f64_add, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_minus => { try self.generateExpr(args[0]); @@ -9912,7 +10285,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_sub, .f64 => Op.f64_sub, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_times => { try self.generateExpr(args[0]); @@ -9923,7 +10296,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_mul, .f64 => Op.f64_mul, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_div_by => { try self.generateExpr(args[0]); @@ -9935,7 +10308,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_div, .f64 => Op.f64_div, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_div_trunc_by => { try self.generateExpr(args[0]); @@ -9947,39 +10320,39 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_div, .f64 => Op.f64_div, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_rem_by => { try self.generateExpr(args[0]); try self.generateExpr(args[1]); const is_unsigned = isUnsignedLayout(ret_layout); switch (vt) { - .i32 => self.body.append(self.allocator, if (is_unsigned) Op.i32_rem_u else Op.i32_rem_s) catch return error.OutOfMemory, - .i64 => self.body.append(self.allocator, if (is_unsigned) Op.i64_rem_u else Op.i64_rem_s) catch return error.OutOfMemory, + .i32 => self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i32_rem_u else Op.i32_rem_s) catch return error.OutOfMemory, + .i64 => self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i64_rem_u else Op.i64_rem_s) catch return error.OutOfMemory, .f32, .f64 => try self.emitFloatMod(vt), } }, .num_negate => { switch (vt) { .i32 => { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; }, .i64 => { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; }, .f32 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f32_neg) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_neg) catch return error.OutOfMemory; }, .f64 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_neg) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_neg) catch return error.OutOfMemory; }, } }, @@ -9998,7 +10371,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_eq, .f64 => Op.f64_eq, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_is_gt => { try self.generateExpr(args[0]); @@ -10010,7 +10383,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_gt, .f64 => Op.f64_gt, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_is_gte => { try self.generateExpr(args[0]); @@ -10022,7 +10395,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_ge, .f64 => Op.f64_ge, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_is_lt => { try self.generateExpr(args[0]); @@ -10034,7 +10407,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_lt, .f64 => Op.f64_lt, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_is_lte => { try self.generateExpr(args[0]); @@ -10046,55 +10419,55 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .f32 => Op.f32_le, .f64 => Op.f64_le, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_abs => { switch (vt) { .f32 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f32_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_abs) catch return error.OutOfMemory; }, .f64 => { try self.generateExpr(args[0]); - self.body.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; }, .i32 => { // abs(x) = select(x, -x, x >= 0) try self.generateExpr(args[0]); const temp = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; // Stack: [x]. Compute -x. - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; // Stack: [x, -x]. Compute condition: x >= 0. - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_s) catch return error.OutOfMemory; // select(x, -x, x >= 0) — returns x if true, -x if false - self.body.append(self.allocator, Op.select) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.select) catch return error.OutOfMemory; }, .i64 => { try self.generateExpr(args[0]); const temp = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, temp) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.select) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, temp) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.select) catch return error.OutOfMemory; }, } }, @@ -10103,14 +10476,14 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re try self.generateExpr(args[1]); switch (vt) { .i32 => { - const import_idx = self.i32_mod_by_import orelse unreachable; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.i32_mod_by; + self.code_builder.code.append(self.allocator, Op.call) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, import_idx) catch return error.OutOfMemory; }, .i64 => { - const import_idx = self.i64_mod_by_import orelse unreachable; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.i64_mod_by; + self.code_builder.code.append(self.allocator, Op.call) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, import_idx) catch return error.OutOfMemory; }, .f32, .f64 => try self.emitFloatMod(vt), } @@ -10121,106 +10494,111 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re .i32 => { try self.generateExpr(args[0]); const lhs = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; try self.generateExpr(args[1]); const rhs = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - self.body.append(self.allocator, if (is_unsigned) Op.i32_ge_u else Op.i32_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i32_ge_u else Op.i32_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.i32)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .i64 => { try self.generateExpr(args[0]); const lhs = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; try self.generateExpr(args[1]); const rhs = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - self.body.append(self.allocator, if (is_unsigned) Op.i64_ge_u else Op.i64_ge_s) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.i64)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, rhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, lhs) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, if (is_unsigned) Op.i64_ge_u else Op.i64_ge_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.i64)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, rhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, lhs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .f32 => { try self.generateExpr(args[0]); try self.generateExpr(args[1]); - self.body.append(self.allocator, Op.f32_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f32_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f32_abs) catch return error.OutOfMemory; }, .f64 => { try self.generateExpr(args[0]); try self.generateExpr(args[1]); - self.body.append(self.allocator, Op.f64_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.f64_abs) catch return error.OutOfMemory; }, } }, .num_shift_left_by => { try self.generateExpr(args[0]); try self.generateExpr(args[1]); + // WASM shift instructions require both operands to have the same type. + // The shift amount (args[1]) is always U8 (i32), so extend it for i64 shifts. + if (vt == .i64) self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; const wasm_op: u8 = switch (vt) { .i32 => Op.i32_shl, .i64 => Op.i64_shl, .f32, .f64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_shift_right_by => { try self.generateExpr(args[0]); try self.generateExpr(args[1]); + if (vt == .i64) self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; const wasm_op: u8 = switch (vt) { .i32 => Op.i32_shr_s, .i64 => Op.i64_shr_s, .f32, .f64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, .num_shift_right_zf_by => { try self.generateExpr(args[0]); try self.generateExpr(args[1]); + if (vt == .i64) self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; const wasm_op: u8 = switch (vt) { .i32 => Op.i32_shr_u, .i64 => Op.i64_shr_u, .f32, .f64 => unreachable, }; - self.body.append(self.allocator, wasm_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, wasm_op) catch return error.OutOfMemory; }, else => unreachable, } @@ -10229,7 +10607,7 @@ fn generateNumericLowLevel(self: *Self, op: anytype, args: []const LirExprId, re /// Generate string equality comparison using roc_str_eq host function. /// Both lhs and rhs should produce i32 pointers to 12-byte RocStr values. fn generateStrEq(self: *Self, lhs: LirExprId, rhs: LirExprId, negate: bool) Allocator.Error!void { - const import_idx = self.str_eq_import orelse unreachable; + const import_idx = self.builtin_syms.str_equal; // Generate both string expressions, store to locals try self.generateExpr(lhs); @@ -10240,15 +10618,13 @@ fn generateStrEq(self: *Self, lhs: LirExprId, rhs: LirExprId, negate: bool) Allo const rhs_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(rhs_local); - // Call roc_str_eq(lhs_ptr, rhs_ptr) -> i32 - try self.emitLocalGet(lhs_local); - try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitPtrLenCapArgs(lhs_local); + try self.emitPtrLenCapArgs(rhs_local); + try self.emitCallBuiltin(import_idx); // If negate, flip the result if (negate) { - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; } } @@ -10277,11 +10653,10 @@ fn generateListEqWithElemLayout(self: *Self, lhs: LirExprId, rhs: LirExprId, ele // Determine which comparison to use based on element type if (elem_layout == .str) { // List of strings - use specialized host function - const import_idx = self.list_str_eq_import orelse unreachable; - try self.emitLocalGet(lhs_local); - try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.list_str_eq; + try self.emitPtrLenCapArgs(lhs_local); + try self.emitPtrLenCapArgs(rhs_local); + try self.emitCallBuiltin(import_idx); } else { const ls = self.getLayoutStore(); const elem_l = ls.getLayout(elem_layout); @@ -10289,33 +10664,31 @@ fn generateListEqWithElemLayout(self: *Self, lhs: LirExprId, rhs: LirExprId, ele // List of lists - use specialized host function with inner element size const inner_elem_layout = elem_l.data.list; const inner_elem_size = self.layoutByteSize(inner_elem_layout); - const import_idx = self.list_list_eq_import orelse unreachable; - try self.emitLocalGet(lhs_local); - try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(inner_elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + const import_idx = self.builtin_syms.list_list_eq; + try self.emitPtrLenCapArgs(lhs_local); + try self.emitPtrLenCapArgs(rhs_local); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(inner_elem_size)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } else if (ls.layoutContainsRefcounted(elem_l)) { // Composite elements with refcounted fields: inline structural loop const elem_size = self.layoutByteSize(elem_layout); try self.emitListEqLoop(lhs_local, rhs_local, elem_layout, elem_size); } else { // Simple scalar elements - byte-wise comparison - const import_idx = self.list_eq_import orelse unreachable; + const import_idx = self.builtin_syms.list_eq; const elem_size = self.layoutByteSize(elem_layout); - try self.emitLocalGet(lhs_local); - try self.emitLocalGet(rhs_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + try self.emitPtrLenCapArgs(lhs_local); + try self.emitPtrLenCapArgs(rhs_local); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + try self.emitCallBuiltin(import_idx); } } // If negate, flip the result if (negate) { - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; } } @@ -10336,34 +10709,34 @@ fn generateStrLiteral(self: *Self, str_idx: anytype) Allocator.Error!void { // Store string bytes inline in the 12-byte struct // First, zero out the 12 bytes (3 × i32.store) for (0..3) |i| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOp(.i32, base_offset + @as(u32, @intCast(i)) * 4); } // Store string bytes one at a time for (str_bytes, 0..) |byte, i| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(byte)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(byte)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; // alignment = 0 (byte-aligned) - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // offset - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + @as(u32, @intCast(i))) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + @as(u32, @intCast(i))) catch return error.OutOfMemory; } // Store SSO marker: byte 11 = len | 0x80 - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @as(i32, @intCast(len)) | @as(i32, 0x80)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + 11) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @as(i32, @intCast(len)) | @as(i32, 0x80)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + 11) catch return error.OutOfMemory; // offset } else { // Large string — place data in a data segment with static RC header. // Runtime RC ops read/write at data_ptr - 4, so static literals must reserve @@ -10376,34 +10749,34 @@ fn generateStrLiteral(self: *Self, str_idx: anytype) Allocator.Error!void { const data_offset = segment_offset + 4; // Store ptr (offset 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(data_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(data_offset)) catch return error.OutOfMemory; try self.emitStoreOp(.i32, base_offset); // Store len (offset 4) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(len)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(len)) catch return error.OutOfMemory; try self.emitStoreOp(.i32, base_offset + 4); // Store capacity (offset 8) — same as len for constants - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(len)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(len)) catch return error.OutOfMemory; try self.emitStoreOp(.i32, base_offset + 8); } // Push pointer to the RocStr on the stack - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; if (base_offset > 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(base_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(base_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; } } @@ -10411,7 +10784,7 @@ fn generateStrLiteral(self: *Self, str_idx: anytype) Allocator.Error!void { /// Each sub-expression produces a RocStr pointer (12 bytes: ptr/bytes, len/bytes, cap/bytes). fn generateStrConcat(self: *Self, span: anytype) Allocator.Error!void { const expr_ids = self.store.getExprSpan(span); - const import_idx = self.str_concat_import orelse unreachable; + const import_idx = self.builtin_syms.str_concat; if (expr_ids.len == 0) { try self.generateEmptyStr(); @@ -10430,13 +10803,13 @@ fn generateStrConcat(self: *Self, span: anytype) Allocator.Error!void { try self.generateExpr(expr_id); const rhs = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(rhs); - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(current); - try self.emitLocalGet(rhs); - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(current); + try self.emitPtrLenCapArgs(rhs); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); try self.emitLocalSet(current); } @@ -10450,21 +10823,21 @@ fn generateEmptyStr(self: *Self) Allocator.Error!void { // Zero out 12 bytes (3 x i32.store of 0) for (0..3) |i| { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitStoreOp(.i32, base_offset + @as(u32, @intCast(i)) * 4); } // Set byte 11 = 0x80 (SSO marker, length 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x80) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, base_offset + 11) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x80) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_offset + 11) catch return error.OutOfMemory; // offset // Push pointer to the result try self.emitFpOffset(base_offset); @@ -10475,62 +10848,62 @@ fn generateEmptyStr(self: *Self) Allocator.Error!void { /// Emits: if SSO { ptr=str_local, len=byte11&0x7F } else { ptr=*(str+0), len=*(str+4) } fn emitExtractStrPtrLen(self: *Self, str_local: u32, ptr_local: u32, len_local: u32) Allocator.Error!void { // Load byte 11 to check SSO bit - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 11) catch return error.OutOfMemory; // offset = 11 + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 11) catch return error.OutOfMemory; // offset = 11 const sso_marker = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sso_marker) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_tee) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sso_marker) catch return error.OutOfMemory; // Check if SSO: bit 7 set - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x80) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x80) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; // if (is_sso) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // SSO path: len = sso_marker & 0x7F, ptr = str_local (bytes inline) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, sso_marker) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x7F) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; - - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, sso_marker) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x7F) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; + + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; // else — heap path - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; // len = *(str_local + 4) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; // align = 2 - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; // offset = 4 - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; // align = 2 + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; // offset = 4 + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; // ptr = *(str_local + 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, str_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; // align = 2 - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // offset = 0 - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, str_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; // align = 2 + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // offset = 0 + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; // end if - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Emit a byte-by-byte copy loop: memcpy(dst_base + dst_offset, src_ptr, len). @@ -10539,70 +10912,70 @@ fn emitMemCopyLoop(self: *Self, dst_base_local: u32, dst_offset_local: u32, src_ const loop_i = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; // loop_i = 0 - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, loop_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, loop_i) catch return error.OutOfMemory; // block (void) - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // loop (void) - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // if loop_i >= len, break - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, loop_i) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; // break out of block + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, loop_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // break out of block // dst address = dst_base + dst_offset + loop_i - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, dst_offset_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, loop_i) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, dst_offset_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, loop_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; // src value = *(src_ptr + loop_i) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, src_ptr_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, loop_i) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align = 0 - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // offset = 0 + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, src_ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, loop_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align = 0 + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // offset = 0 // store byte - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align = 0 - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // offset = 0 + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align = 0 + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // offset = 0 // loop_i++ - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, loop_i) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, loop_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, loop_i) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, loop_i) catch return error.OutOfMemory; // br back to loop - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // continue loop + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // continue loop // end loop - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Build a heap-format RocStr on the stack frame from ptr and len locals. @@ -10612,24 +10985,24 @@ fn buildHeapRocStr(self: *Self, ptr_local: u32, len_local: u32) Allocator.Error! const base_local = self.fp_local; // Store ptr (offset 0) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, ptr_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, ptr_local) catch return error.OutOfMemory; try self.emitStoreOp(.i32, result_offset); // Store len (offset 4) - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; try self.emitStoreOp(.i32, result_offset + 4); // Store cap (offset 8) = len - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, base_local) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, len_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, base_local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, len_local) catch return error.OutOfMemory; try self.emitStoreOp(.i32, result_offset + 8); // Push pointer to result @@ -10653,27 +11026,27 @@ fn generateStrToUtf8(self: *Self, str_arg: LirExprId) Allocator.Error!void { // Read byte 11 to check SSO flag try self.emitLocalGet(str_ptr); - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 11) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 11) catch return error.OutOfMemory; // offset const last_byte = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(last_byte); // Check SSO flag: last_byte & 0x80 try self.emitLocalGet(last_byte); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x80) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x80) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; // if (is_sso) - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; { // SSO case: extract len = last_byte & 0x7F try self.emitLocalGet(last_byte); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x7F) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x7F) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_and) catch return error.OutOfMemory; const sso_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(sso_len); @@ -10684,8 +11057,8 @@ fn generateStrToUtf8(self: *Self, str_arg: LirExprId) Allocator.Error!void { // Copy SSO bytes from str_ptr to heap: memcpy(heap_ptr+0, str_ptr, sso_len) const zero = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero); try self.emitMemCopyLoop(heap_ptr, zero, str_ptr, sso_len); @@ -10704,14 +11077,14 @@ fn generateStrToUtf8(self: *Self, str_arg: LirExprId) Allocator.Error!void { try self.emitStoreOp(.i32, 8); } // else (non-SSO) - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; { // Non-SSO: RocStr {ptr, len, cap} has same layout as RocList(U8) // Copy 12 bytes from str_ptr to result_ptr try self.emitMemCopy(result_ptr, 0, str_ptr, 12); } // end if - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Leave result pointer on stack try self.emitLocalGet(result_ptr); @@ -10734,59 +11107,59 @@ fn generateStrFromUtf8Lossy(self: *Self, list_arg: LirExprId) Allocator.Error!vo // Read len from list struct (offset 4) try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; // align = 4-byte - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; // align = 4-byte + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; // offset const len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(len); // Check if len <= 11 (fits in SSO) try self.emitLocalGet(len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 12) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 12) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_u) catch return error.OutOfMemory; // if (len < 12) — SSO - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; { // Zero-initialize the 12-byte result (so unused SSO bytes are 0) try self.emitZeroInit(result_ptr, 12); // Read data_ptr from list struct (offset 0) try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // offset const data_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(data_ptr); // Copy len bytes from data_ptr to result_ptr: memcpy(result_ptr+0, data_ptr, len) const zero = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero); try self.emitMemCopyLoop(result_ptr, zero, data_ptr, len); // Set byte 11 = len | 0x80 (SSO marker) try self.emitLocalGet(result_ptr); try self.emitLocalGet(len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0x80) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 11) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0x80) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 11) catch return error.OutOfMemory; // offset } // else (non-SSO) - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; { // Non-SSO: RocList(U8) {ptr, len, cap} has same layout as RocStr // Copy 12 bytes from list_ptr to result_ptr try self.emitMemCopy(result_ptr, 0, list_ptr, 12); } // end if - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // Leave result pointer on stack try self.emitLocalGet(result_ptr); @@ -10824,38 +11197,38 @@ fn generateIntToStr(self: *Self, its: anytype) Allocator.Error!void { // is_neg = value < 0 try self.emitLocalGet(value_local); if (is_64bit) { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_lt_s) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_s) catch return error.OutOfMemory; } try self.emitLocalSet(is_neg_local); // if negative: value = 0 - value try self.emitLocalGet(is_neg_local); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; if (is_64bit) { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(value_local); - self.body.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_sub) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalGet(value_local); - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; } try self.emitLocalSet(value_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } else { // Not signed: is_neg = 0 - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(is_neg_local); } @@ -10866,25 +11239,25 @@ fn generateIntToStr(self: *Self, its: anytype) Allocator.Error!void { // pos = 20 (write position, rightmost) const pos_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 20) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 20) catch return error.OutOfMemory; try self.emitLocalSet(pos_local); // Do-while digit extraction loop - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // digit = value % 10 try self.emitLocalGet(value_local); if (is_64bit) { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 10) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_rem_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 10) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_rem_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_wrap_i64) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 10) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_rem_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 10) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_rem_u) catch return error.OutOfMemory; } const digit_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(digit_local); @@ -10892,76 +11265,76 @@ fn generateIntToStr(self: *Self, its: anytype) Allocator.Error!void { // value = value / 10 try self.emitLocalGet(value_local); if (is_64bit) { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 10) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_div_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 10) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_div_u) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 10) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_div_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 10) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_div_u) catch return error.OutOfMemory; } try self.emitLocalSet(value_local); // buffer[pos] = digit + '0' try self.emitLocalGet(buf_local); try self.emitLocalGet(pos_local); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalGet(digit_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '0') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // align - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; // offset + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '0') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // align + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // offset // pos-- try self.emitLocalGet(pos_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(pos_local); // if value > 0: continue loop try self.emitLocalGet(value_local); if (is_64bit) { - self.body.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI64(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_gt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI64(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_gt_u) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_gt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_gt_u) catch return error.OutOfMemory; } - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // end loop - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // If negative, prepend '-' if (is_signed) { try self.emitLocalGet(is_neg_local); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // buffer[pos] = '-' try self.emitLocalGet(buf_local); try self.emitLocalGet(pos_local); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '-') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '-') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // pos-- try self.emitLocalGet(pos_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(pos_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } // String starts at buf + pos + 1, length = 20 - pos @@ -10971,17 +11344,17 @@ fn generateIntToStr(self: *Self, its: anytype) Allocator.Error!void { // str_ptr = buf + pos + 1 try self.emitLocalGet(buf_local); try self.emitLocalGet(pos_local); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(str_ptr_local); // str_len = 20 - pos - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 20) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 20) catch return error.OutOfMemory; try self.emitLocalGet(pos_local); - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(str_len_local); // Build heap RocStr @@ -10990,100 +11363,101 @@ fn generateIntToStr(self: *Self, its: anytype) Allocator.Error!void { /// Generate float_to_str: convert a float to its string representation. /// Uses the same host-side formatter as the native backends to keep output stable. +/// +/// wasm32 ABI: roc_builtins_float_to_str returns a RocStr via sret convention: +/// (result_ptr: i32, val_bits: i64, is_f32: i32, roc_ops: i32) -> void +/// The result_ptr receives the 12-byte RocStr. fn generateFloatToStr(self: *Self, fts: anytype) Allocator.Error!void { - const import_idx = self.float_to_str_import orelse unreachable; + const import_idx = self.builtin_syms.float_to_str; const is_f32 = fts.float_precision == .f32; std.debug.assert(fts.float_precision != .dec); try self.generateExpr(fts.value); if (is_f32) { - self.body.append(self.allocator, Op.i32_reinterpret_f32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_reinterpret_f32) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_extend_i32_u) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i64_reinterpret_f64) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_reinterpret_f64) catch return error.OutOfMemory; } const val_bits = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; try self.emitLocalSet(val_bits); - try self.emitHeapAllocConst(48, 1); - const buf_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(buf_ptr); - + const result_ptr = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_ptr); try self.emitLocalGet(val_bits); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intFromBool(is_f32)) catch return error.OutOfMemory; - try self.emitLocalGet(buf_ptr); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intFromBool(is_f32)) catch return error.OutOfMemory; + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); - const str_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(str_len); - try self.buildHeapRocStr(buf_ptr, str_len); + // Leave result_ptr (pointer to 12-byte RocStr) on the wasm stack. + try self.emitLocalGet(result_ptr); } /// Generate i128/u128 to string: convert a 128-bit integer to its decimal string representation. /// Uses a host function import since wasm has no native 128-bit division. fn generateI128ToStr(self: *Self, value_expr: anytype, is_signed: bool) Allocator.Error!void { - const import_idx = if (is_signed) - self.i128_to_str_import orelse unreachable - else - self.u128_to_str_import orelse unreachable; + const import_idx = self.builtin_syms.int_to_str; // Generate the 128-bit value expression → pointer to 16-byte value in stack memory try self.generateExpr(value_expr); const val_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(val_ptr); - // Allocate a 48-byte buffer on the heap for the formatted string - // (max i128 string length is 40 bytes: 39 digits + sign) - try self.emitHeapAllocConst(48, 1); - const buf_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(buf_ptr); - - // Call roc_i128_to_str(val_ptr, buf_ptr) -> str_len - try self.emitLocalGet(val_ptr); - try self.emitLocalGet(buf_ptr); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - - // Result (str_len) is on the stack - const str_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(str_len); - - // Build a heap RocStr from buf_ptr and str_len - try self.buildHeapRocStr(buf_ptr, str_len); + const result_ptr = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_ptr); + try self.emitI128AbiArgs(val_ptr); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 16) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intFromBool(is_signed)) catch return error.OutOfMemory; + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_ptr); } /// Generate dec_to_str: convert a RocDec (i128 scaled by 10^18) to string. /// Uses a host function import to perform the formatting. +/// Generate dec_to_str: convert a Dec (i128) value to its decimal string representation. +/// +/// wasm32 ABI: roc_builtins_dec_to_str uses sret + i128 split: +/// (result_ptr: i32, dec_lo: i64, dec_hi: i64, roc_ops: i32) -> void +/// The result_ptr receives the 12-byte RocStr. fn generateDecToStr(self: *Self, dec_expr: anytype) Allocator.Error!void { - const import_idx = self.dec_to_str_import orelse unreachable; + const import_idx = self.builtin_syms.dec_to_str; // Generate the Dec expression → pointer to 16-byte Dec value in stack memory try self.generateExpr(dec_expr); const dec_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(dec_ptr); - // Allocate a 48-byte buffer on the heap for the formatted string - // (max Dec string length is 41 bytes: 39 digits + sign + decimal point) - try self.emitHeapAllocConst(48, 1); - const buf_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(buf_ptr); - - // Call roc_dec_to_str(dec_ptr, buf_ptr) -> str_len + // Load the two i64 halves of the Dec value from memory + const dec_lo = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; + const dec_hi = self.storage.allocAnonymousLocal(.i64) catch return error.OutOfMemory; + try self.emitLocalGet(dec_ptr); + self.code_builder.code.append(self.allocator, Op.i64_load) catch return error.OutOfMemory; + // alignment=3 (8 bytes), offset=0 + self.code_builder.code.append(self.allocator, 0x03) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x00) catch return error.OutOfMemory; + try self.emitLocalSet(dec_lo); try self.emitLocalGet(dec_ptr); - try self.emitLocalGet(buf_ptr); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_load) catch return error.OutOfMemory; + // alignment=3, offset=8 + self.code_builder.code.append(self.allocator, 0x03) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, 0x08) catch return error.OutOfMemory; + try self.emitLocalSet(dec_hi); - // Result (str_len) is on the stack - const str_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - try self.emitLocalSet(str_len); + const result_ptr = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_ptr); + try self.emitLocalGet(dec_lo); + try self.emitLocalGet(dec_hi); + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); - // Build a heap RocStr from buf_ptr and str_len - try self.buildHeapRocStr(buf_ptr, str_len); + // Leave result_ptr (pointer to 12-byte RocStr) on the wasm stack. + try self.emitLocalGet(result_ptr); } /// Generate str_escape_and_quote: surround string with quotes and escape special chars. @@ -11103,12 +11477,12 @@ fn generateStrEscapeAndQuote(self: *Self, quote_expr: anytype) Allocator.Error!v // For simplicity, allocate 2 * len + 2 (worst case: every char needs escaping) const buf_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(inner_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(buf_size); try self.emitHeapAlloc(buf_size, 1); @@ -11119,139 +11493,139 @@ fn generateStrEscapeAndQuote(self: *Self, quote_expr: anytype) Allocator.Error!v // Write opening '"' try self.emitLocalGet(buf_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '"') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '"') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalSet(out_pos); // Copy inner bytes with escaping: " -> \", \ -> \\, \n -> \\n, \r -> \\r, \t -> \\t // Loop over each byte, check if it needs escaping, write accordingly. const src_idx = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(src_idx); // block { loop { - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.void)) catch return error.OutOfMemory; // if (src_idx >= inner_len) br 1 (exit block) try self.emitLocalGet(src_idx); try self.emitLocalGet(inner_len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // Load current byte: byte = mem[inner_ptr + src_idx] const cur_byte = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(inner_ptr); try self.emitLocalGet(src_idx); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(cur_byte); // Check if byte needs escaping: " (34), \ (92), \n (10), \r (13), \t (9) // if (byte == '"' || byte == '\\') try self.emitLocalGet(cur_byte); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '"') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '"') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; try self.emitLocalGet(cur_byte); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '\\') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '\\') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eq) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_or) catch return error.OutOfMemory; // if (needs_escape) { write '\' + byte } else { write byte } - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(WasmModule.BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(WasmModule.BlockType.void)) catch return error.OutOfMemory; // Then: write '\\' at buf[out_pos] try self.emitLocalGet(buf_local); try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '\\') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '\\') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // out_pos++ try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(out_pos); // Then write the original byte try self.emitLocalGet(buf_local); try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalGet(cur_byte); - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // out_pos++ try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(out_pos); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; // Else: write byte directly try self.emitLocalGet(buf_local); try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalGet(cur_byte); - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // out_pos++ try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(out_pos); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end if + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end if // src_idx++ try self.emitLocalGet(src_idx); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(src_idx); // br 0 (continue loop) - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end loop - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end loop + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block // Write closing '"' try self.emitLocalGet(buf_local); try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, '"') catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, '"') catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store8) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // out_pos++ try self.emitLocalGet(out_pos); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(out_pos); // Build RocStr @@ -11260,14 +11634,14 @@ fn generateStrEscapeAndQuote(self: *Self, quote_expr: anytype) Allocator.Error!v /// Helper: emit local.get instruction fn emitLocalGet(self: *Self, local: u32) Allocator.Error!void { - self.body.append(self.allocator, Op.local_get) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_get) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local) catch return error.OutOfMemory; } /// Helper: emit local.set instruction fn emitLocalSet(self: *Self, local: u32) Allocator.Error!void { - self.body.append(self.allocator, Op.local_set) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, local) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.local_set) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, local) catch return error.OutOfMemory; } /// Emit float modulo: a % b = a - trunc(a / b) * b @@ -11287,13 +11661,13 @@ fn emitFloatMod(self: *Self, vt: ValType) Allocator.Error!void { // trunc(a / b) try self.emitLocalGet(a); try self.emitLocalGet(b); - self.body.append(self.allocator, div_op) catch return error.OutOfMemory; - self.body.append(self.allocator, trunc_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, div_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, trunc_op) catch return error.OutOfMemory; // * b try self.emitLocalGet(b); - self.body.append(self.allocator, mul_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, mul_op) catch return error.OutOfMemory; // a - ... - self.body.append(self.allocator, sub_op) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, sub_op) catch return error.OutOfMemory; } /// Get the element size for a list layout. @@ -11349,91 +11723,91 @@ fn generateLLStrSearch(self: *Self, args: anytype, mode: StrSearchMode) Allocato // ends_with: compare last b_len bytes of a with b // offset = a_len - b_len // If a_len < b_len, return false - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(result_local); // if a_len >= b_len try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // Compare b_len bytes starting at a_ptr + (a_len - b_len) vs b_ptr const offset_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(offset_local); const a_end_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_ptr); try self.emitLocalGet(offset_local); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(a_end_ptr); try self.emitBytewiseCompare(a_end_ptr, b_ptr, b_len, result_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; }, .contains => { // contains: search for b as a substring of a // Naive O(n*m): for each position i in [0..a_len-b_len], compare b_len bytes - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(result_local); // if b_len == 0, result = true (empty string is always contained) try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalSet(result_local); - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; // if a_len >= b_len, search try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // search_end = a_len - b_len + 1 const search_end = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(search_end); const search_i = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(search_i); // block { loop { - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // if search_i >= search_end: break try self.emitLocalGet(search_i); try self.emitLocalGet(search_end); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // Compare b_len bytes at a_ptr+search_i vs b_ptr const cand_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_ptr); try self.emitLocalGet(search_i); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(cand_ptr); const match_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; @@ -11441,31 +11815,31 @@ fn generateLLStrSearch(self: *Self, args: anytype, mode: StrSearchMode) Allocato // if match: result = 1, break try self.emitLocalGet(match_local); - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalSet(result_local); - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 3) catch return error.OutOfMemory; // break out of block (past loop + block) - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 3) catch return error.OutOfMemory; // break out of block (past loop + block) + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // search_i++ try self.emitLocalGet(search_i); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(search_i); // br loop - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end loop - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end loop + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end if a_len >= b_len - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end if b_len == 0 + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end if a_len >= b_len + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end if b_len == 0 }, } @@ -11477,99 +11851,102 @@ fn generateLLStrSearch(self: *Self, args: anytype, mode: StrSearchMode) Allocato /// Sets result_local to 1 if equal, 0 otherwise. fn emitStrPrefixCompare(self: *Self, a_ptr: u32, a_len: u32, b_ptr: u32, b_len: u32, result_local: u32) Allocator.Error!void { // Default result = 0 - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(result_local); // if a_len >= b_len try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; try self.emitBytewiseCompare(a_ptr, b_ptr, b_len, result_local); - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; } /// Compare len bytes at ptr_a vs ptr_b, store result (1=equal, 0=not) in result_local. fn emitBytewiseCompare(self: *Self, ptr_a: u32, ptr_b: u32, len: u32, result_local: u32) Allocator.Error!void { // Assume equal - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalSet(result_local); const cmp_i = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(cmp_i); // block { loop { - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; // if cmp_i >= len: break (all bytes matched) try self.emitLocalGet(cmp_i); try self.emitLocalGet(len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; // Load byte from a[cmp_i] try self.emitLocalGet(ptr_a); try self.emitLocalGet(cmp_i); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // Load byte from b[cmp_i] try self.emitLocalGet(ptr_b); try self.emitLocalGet(cmp_i); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load8_u) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; // If not equal: result = 0, break - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(result_local); - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 3) catch return error.OutOfMemory; // break out of block (skip if + loop + block) - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 3) catch return error.OutOfMemory; // break out of block (skip if + loop + block) + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // cmp_i++ try self.emitLocalGet(cmp_i); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(cmp_i); // continue loop - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end loop - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end loop + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; // end block } /// Generate LowLevel list_append: create new list with one element appended. +/// For ZST elements, uses the unsafe version (no allocation needed). +/// For non-ZST elements, uses the safe version which reserves capacity. fn generateLLListAppend(self: *Self, args: anytype, ret_layout: layout.Idx) Allocator.Error!void { + const ls = self.getLayoutStore(); + const ret_layout_val = ls.getLayout(ret_layout); const elem_size = self.getListElemSize(ret_layout); const elem_align = self.getListElemAlign(ret_layout); - const elem_layout_idx = switch (self.getLayoutStore().getLayout(ret_layout).tag) { - .list => self.getLayoutStore().getLayout(ret_layout).data.list, + const elem_layout_idx = switch (ret_layout_val.tag) { + .list => ret_layout_val.data.list, .list_of_zst => layout.Idx.zst, else => unreachable, }; - const import_idx = self.list_append_unsafe_import orelse unreachable; try self.generateExpr(args[0]); const list_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; @@ -11577,8 +11954,8 @@ fn generateLLListAppend(self: *Self, args: anytype, ret_layout: layout.Idx) Allo const elem_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; if (elem_size == 0) { - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(elem_ptr); } else { const target_is_composite = self.isCompositeLayout(elem_layout_idx); @@ -11612,17 +11989,36 @@ fn generateLLListAppend(self: *Self, args: anytype, ret_layout: layout.Idx) Allo } } - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(list_ptr); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(list_ptr); try self.emitLocalGet(elem_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_align)) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + + if (elem_size == 0) { + // ZST: use unsafe version (no capacity reservation needed) + // roc_builtins_list_append_unsafe(out, list_bytes, list_len, list_cap, element, element_width, roc_ops) + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(self.builtin_syms.list_append_unsafe); + } else { + // Non-ZST: use safe version which reserves capacity + // roc_builtins_list_append_safe(out, list_bytes, list_len, list_cap, element, alignment, element_width, elements_refcounted, roc_ops) + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_align)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + // elements_refcounted: bool (i32 0 or 1) + const elements_refcounted: bool = if (ret_layout_val.tag == .list) + ls.layoutContainsRefcounted(ls.getLayout(ret_layout_val.data.list)) + else + false; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, if (elements_refcounted) 1 else 0) catch return error.OutOfMemory; + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(self.builtin_syms.list_append_safe); + } + try self.emitLocalGet(result_local); } /// Generate LowLevel list_prepend: create new list with one element prepended. @@ -11642,31 +12038,31 @@ fn generateLLListPrepend(self: *Self, args: anytype, ret_layout: layout.Idx) All // Load list length const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(old_len); const old_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(old_data); const new_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(new_len); // Allocate new buffer const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -11678,21 +12074,21 @@ fn generateLLListPrepend(self: *Self, args: anytype, ret_layout: layout.Idx) All try self.emitLocalGet(new_data); try self.emitLocalGet(elem_val); if (elem_size <= 4) { - self.body.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; } else { - self.body.append(self.allocator, Op.i64_store) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 3) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i64_store) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 3) catch return error.OutOfMemory; } - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; } else { const zero2 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero2); const es = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; try self.emitLocalSet(es); try self.emitMemCopyLoop(new_data, zero2, elem_val, es); } @@ -11700,14 +12096,14 @@ fn generateLLListPrepend(self: *Self, args: anytype, ret_layout: layout.Idx) All // Copy old elements at offset elem_size const old_byte_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(old_byte_len); const dst_off = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; try self.emitLocalSet(dst_off); try self.emitMemCopyLoop(new_data, dst_off, old_data, old_byte_len); @@ -11732,42 +12128,42 @@ fn generateLLListConcat(self: *Self, args: anytype, ret_layout: layout.Idx) Allo const a_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const a_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(a_data); try self.emitLocalGet(a_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(a_len); const b_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; const b_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(b_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(b_data); try self.emitLocalGet(b_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(b_len); // new_len = a_len + b_len const new_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_len); try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(new_len); // Allocate new buffer const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -11777,14 +12173,14 @@ fn generateLLListConcat(self: *Self, args: anytype, ret_layout: layout.Idx) Allo // Copy a's bytes at offset 0 const a_bytes = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(a_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(a_bytes); const zero3 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero3); try self.emitMemCopyLoop(new_data, zero3, a_data, a_bytes); @@ -11792,9 +12188,9 @@ fn generateLLListConcat(self: *Self, args: anytype, ret_layout: layout.Idx) Allo // Copy b's bytes at offset a_bytes const b_bytes = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(b_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(b_bytes); try self.emitMemCopyLoop(new_data, a_bytes, b_data, b_bytes); @@ -11810,23 +12206,23 @@ fn generateLLListReverse(self: *Self, args: anytype, ret_layout: layout.Idx) All return; } - const import_idx = self.list_reverse_import orelse unreachable; + const import_idx = self.builtin_syms.list_reverse; const elem_align = self.getListElemAlign(ret_layout); try self.generateExpr(args[0]); const list_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalSet(list_ptr); - const result_offset = try self.allocStackMemory(12, 4); - try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_align)) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); - self.body.append(self.allocator, Op.call) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, import_idx) catch return error.OutOfMemory; - try self.emitFpOffset(result_offset); + const result_local = try self.allocStackResultPtr(12, 4); + try self.emitLocalGet(result_local); + try self.emitPtrLenCapArgs(list_ptr); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_align)) catch return error.OutOfMemory; + try self.emitLocalGet(self.roc_ops_local); + try self.emitCallBuiltin(import_idx); + try self.emitLocalGet(result_local); } /// Build a RocList struct on the stack frame from data ptr and length locals. @@ -11921,22 +12317,22 @@ fn generateLLListSortWith(self: *Self, ll: anytype, args: anytype, ret_layout: l const result_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_lt_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.@"if") catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_lt_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"if") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; { try self.emitLocalGet(list_ptr); try self.emitLocalSet(result_local); } - self.body.append(self.allocator, Op.@"else") catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.@"else") catch return error.OutOfMemory; { const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -11944,8 +12340,8 @@ fn generateLLListSortWith(self: *Self, ll: anytype, args: anytype, ret_layout: l try self.emitLocalSet(new_data); const zero = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero); try self.emitMemCopyLoop(new_data, zero, old_data, total_size); @@ -11955,28 +12351,28 @@ fn generateLLListSortWith(self: *Self, ll: anytype, args: anytype, ret_layout: l try self.emitLocalSet(temp_ptr); const i_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; try self.emitLocalSet(i_local); - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; { try self.emitLocalGet(i_local); try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - try WasmModule.leb128WriteU32(self.allocator, &self.body, 1); + self.code_builder.code.append(self.allocator, Op.i32_ge_u) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + try WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1); const elem_i_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_data); try self.emitLocalGet(i_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(elem_i_ptr); try self.emitMemCopy(temp_ptr, 0, elem_i_ptr, elem_size); @@ -11985,26 +12381,26 @@ fn generateLLListSortWith(self: *Self, ll: anytype, args: anytype, ret_layout: l try self.emitLocalGet(i_local); try self.emitLocalSet(j_local); - self.body.append(self.allocator, Op.block) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.loop_) catch return error.OutOfMemory; - self.body.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.block) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.loop_) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, @intFromEnum(BlockType.void)) catch return error.OutOfMemory; { try self.emitLocalGet(j_local); - self.body.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - try WasmModule.leb128WriteU32(self.allocator, &self.body, 1); + self.code_builder.code.append(self.allocator, Op.i32_eqz) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + try WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1); const prev_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_data); try self.emitLocalGet(j_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(prev_ptr); try self.emitLocalGet(self.roc_ops_local); @@ -12015,62 +12411,62 @@ fn generateLLListSortWith(self: *Self, ll: anytype, args: anytype, ret_layout: l try self.emitLocalSet(cmp_result); try self.emitLocalGet(cmp_result); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.br_if) catch return error.OutOfMemory; - try WasmModule.leb128WriteU32(self.allocator, &self.body, 1); + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_ne) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.br_if) catch return error.OutOfMemory; + try WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 1); const dst_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_data); try self.emitLocalGet(j_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(dst_ptr); try self.emitMemCopy(dst_ptr, 0, prev_ptr, elem_size); try self.emitLocalGet(j_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(j_local); - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - try WasmModule.leb128WriteU32(self.allocator, &self.body, 0); + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + try WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0); } - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; const insert_ptr = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_data); try self.emitLocalGet(j_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(insert_ptr); try self.emitMemCopy(insert_ptr, 0, temp_ptr, elem_size); try self.emitLocalGet(i_local); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(i_local); - self.body.append(self.allocator, Op.br) catch return error.OutOfMemory; - try WasmModule.leb128WriteU32(self.allocator, &self.body, 0); + self.code_builder.code.append(self.allocator, Op.br) catch return error.OutOfMemory; + try WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0); } - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; try self.buildRocList(new_data, old_len); try self.emitLocalSet(result_local); } - self.body.append(self.allocator, Op.end) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.end) catch return error.OutOfMemory; try self.emitLocalGet(result_local); } @@ -12089,9 +12485,9 @@ fn generateLLListWithCapacity(self: *Self, args: anytype, ret_layout: layout.Idx // Allocate cap * elem_size bytes on heap const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(cap); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -12100,8 +12496,8 @@ fn generateLLListWithCapacity(self: *Self, args: anytype, ret_layout: layout.Idx // len = 0 const len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(len); try self.buildRocListWithCap(new_data, len, cap); @@ -12132,24 +12528,24 @@ fn generateLLListSet(self: *Self, args: anytype, ret_layout: layout.Idx) Allocat // Load list fields const old_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(old_data); const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(old_len); // Allocate new data buffer (same size as old) const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -12158,8 +12554,8 @@ fn generateLLListSet(self: *Self, args: anytype, ret_layout: layout.Idx) Allocat // Copy all old data const zero2 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero2); try self.emitMemCopyLoop(new_data, zero2, old_data, total_size); @@ -12167,19 +12563,19 @@ fn generateLLListSet(self: *Self, args: anytype, ret_layout: layout.Idx) Allocat const dst = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_data); try self.emitLocalGet(index); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(dst); const bytes_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; try self.emitLocalSet(bytes_local); const zero3 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero3); try self.emitMemCopyLoop(dst, zero3, elem_val, bytes_local); @@ -12205,31 +12601,31 @@ fn generateLLListReserve(self: *Self, args: anytype, ret_layout: layout.Idx) All // Load list fields const old_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(old_data); const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(old_len); // new_cap = old_len + additional const new_cap = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); try self.emitLocalGet(additional); - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(new_cap); // Allocate new_cap * elem_size bytes const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(new_cap); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -12239,14 +12635,14 @@ fn generateLLListReserve(self: *Self, args: anytype, ret_layout: layout.Idx) All // Copy old data (old_len * elem_size bytes) const copy_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(copy_size); const zero4 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero4); try self.emitMemCopyLoop(new_data, zero4, old_data, copy_size); @@ -12266,24 +12662,24 @@ fn generateLLListReleaseExcessCapacity(self: *Self, args: anytype, ret_layout: l // Load list fields const old_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(old_data); const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(old_len); // Allocate new_len * elem_size bytes const total_size = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; try self.emitLocalSet(total_size); try self.emitHeapAlloc(total_size, elem_align); @@ -12292,8 +12688,8 @@ fn generateLLListReleaseExcessCapacity(self: *Self, args: anytype, ret_layout: l // Copy old data const zero5 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero5); try self.emitMemCopyLoop(new_data, zero5, old_data, total_size); @@ -12314,16 +12710,16 @@ fn generateLLListSplitFirst(self: *Self, args: anytype, ret_layout: layout.Idx) // Load list fields const old_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(old_data); const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(old_len); // Allocate result struct: { first: elem (elem_size bytes), rest: list (12 bytes) } @@ -12340,36 +12736,36 @@ fn generateLLListSplitFirst(self: *Self, args: anytype, ret_layout: layout.Idx) // Copy first element to result const bytes_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; try self.emitLocalSet(bytes_local); const first_dst = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(result_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(first_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(first_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(first_dst); const zero6 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero6); try self.emitMemCopyLoop(first_dst, zero6, old_data, bytes_local); // Build rest list (pointing to old_data + elem_size, len = old_len - 1) const rest_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_data); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rest_data); const rest_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(rest_len); const encoded_cap = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; @@ -12378,9 +12774,9 @@ fn generateLLListSplitFirst(self: *Self, args: anytype, ret_layout: layout.Idx) // Store rest list in result struct const rest_base = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(result_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(rest_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(rest_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rest_base); // ptr @@ -12397,6 +12793,78 @@ fn generateLLListSplitFirst(self: *Self, args: anytype, ret_layout: layout.Idx) try self.emitLocalGet(result_ptr); } +test "registerRocOpsFromModule — reuses canonical host callbacks and existing table entries" { + const allocator = std.testing.allocator; + + var callback_indices = [_]u32{0} ** 6; + const module = blk: { + var m = WasmModule.init(allocator); + errdefer m.deinit(); + + const roc_ops_type = try m.addFuncType(&.{ .i32, .i32 }, &.{}); + + _ = try m.addImport("env", "roc__main", roc_ops_type); + _ = try m.addImport("env", "roc_dbg", roc_ops_type); + _ = try m.addImport("env", "roc_expect_failed", roc_ops_type); + _ = try m.addImport("env", "roc_panic", roc_ops_type); + m.import_fn_count = @intCast(m.imports.items.len); + + try m.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 1 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 2 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 3 }, + }); + + m.enableTable(); + + const callback_names = [_][]const u8{ + "host.roc_alloc", + "host.roc_dealloc", + "host.roc_realloc", + "host.roc_dbg", + "host.roc_expect_failed", + "host.roc_crashed", + }; + + for (callback_names, 0..) |name, i| { + const func_idx = try m.addFunction(roc_ops_type); + callback_indices[i] = func_idx; + try m.linking.symbol_table.append(allocator, .{ + .kind = .function, + .flags = 0, + .name = name, + .index = func_idx, + }); + _ = try m.addTableElement(func_idx); + } + + break :blk m; + }; + + var codegen = Self.initWithHostModule(allocator, undefined, undefined, module, undefined); + defer codegen.deinit(); + + try std.testing.expectEqual(@as(usize, 4), codegen.module.imports.items.len); + try std.testing.expectEqual(@as(usize, 6), codegen.module.table_func_indices.items.len); + + try codegen.registerRocOpsFromModule(); + + try std.testing.expectEqual(@as(usize, 4), codegen.module.imports.items.len); + try std.testing.expectEqual(@as(usize, 6), codegen.module.table_func_indices.items.len); + + try std.testing.expectEqual(@as(u32, 0), codegen.roc_alloc_table_idx); + try std.testing.expectEqual(@as(u32, 1), codegen.roc_dealloc_table_idx); + try std.testing.expectEqual(@as(u32, 2), codegen.roc_realloc_table_idx); + try std.testing.expectEqual(@as(u32, 3), codegen.roc_dbg_table_idx); + try std.testing.expectEqual(@as(u32, 4), codegen.roc_expect_failed_table_idx); + try std.testing.expectEqual(@as(u32, 5), codegen.roc_crashed_table_idx); + + try std.testing.expectEqual(callback_indices[3], codegen.module.table_func_indices.items[3]); + try std.testing.expectEqual(callback_indices[4], codegen.module.table_func_indices.items[4]); + try std.testing.expectEqual(callback_indices[5], codegen.module.table_func_indices.items[5]); +} + /// Generate list_split_last: split list into rest and last element fn generateLLListSplitLast(self: *Self, args: anytype, ret_layout: layout.Idx) Allocator.Error!void { const elem_size = self.getListElemSize(ret_layout); @@ -12410,16 +12878,16 @@ fn generateLLListSplitLast(self: *Self, args: anytype, ret_layout: layout.Idx) A // Load list fields const old_data = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(old_data); const old_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(list_ptr); - self.body.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 2) catch return error.OutOfMemory; - WasmModule.leb128WriteU32(self.allocator, &self.body, 4) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_load) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 2) catch return error.OutOfMemory; + WasmModule.leb128WriteU32(self.allocator, &self.code_builder.code, 4) catch return error.OutOfMemory; try self.emitLocalSet(old_len); // Allocate result struct: { rest: list (12 bytes), last: elem (elem_size bytes) } @@ -12437,17 +12905,17 @@ fn generateLLListSplitLast(self: *Self, args: anytype, ret_layout: layout.Idx) A // rest_len = old_len - 1 const rest_len = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 1) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 1) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_sub) catch return error.OutOfMemory; try self.emitLocalSet(rest_len); // Store rest list in result struct (shares data ptr, but with reduced length) const rest_base = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(result_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(rest_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(rest_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(rest_base); // ptr (same as old_data) @@ -12469,27 +12937,27 @@ fn generateLLListSplitLast(self: *Self, args: anytype, ret_layout: layout.Idx) A const last_src = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(old_data); try self.emitLocalGet(rest_len); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_mul) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(last_src); const bytes_local = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(elem_size)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(elem_size)) catch return error.OutOfMemory; try self.emitLocalSet(bytes_local); const last_dst = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; try self.emitLocalGet(result_ptr); - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, @intCast(aligned_last_offset)) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, @intCast(aligned_last_offset)) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_add) catch return error.OutOfMemory; try self.emitLocalSet(last_dst); const zero7 = self.storage.allocAnonymousLocal(.i32) catch return error.OutOfMemory; - self.body.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; - WasmModule.leb128WriteI32(self.allocator, &self.body, 0) catch return error.OutOfMemory; + self.code_builder.code.append(self.allocator, Op.i32_const) catch return error.OutOfMemory; + WasmModule.leb128WriteI32(self.allocator, &self.code_builder.code, 0) catch return error.OutOfMemory; try self.emitLocalSet(zero7); try self.emitMemCopyLoop(last_dst, zero7, last_src, bytes_local); diff --git a/src/backend/wasm/WasmLinking.zig b/src/backend/wasm/WasmLinking.zig new file mode 100644 index 00000000000..53c7b2b7d53 --- /dev/null +++ b/src/backend/wasm/WasmLinking.zig @@ -0,0 +1,883 @@ +//! Data structures for WASM relocatable module metadata. +//! +//! These structures represent the `linking` and `reloc.*` custom sections +//! defined by the WebAssembly Tool Conventions: +//! https://github.com/WebAssembly/tool-conventions/blob/main/Linking.md +//! +//! They form the "map" that makes surgical linking possible: the symbol table +//! records what symbols exist and the relocation entries record where each +//! symbol is referenced so we can patch those sites in-place. + +const std = @import("std"); +const WasmModule = @import("WasmModule.zig"); +const Import = WasmModule.Import; + +// --- Relocation Types --- + +/// Index-based relocation types (no addend). +/// These patch indices in instructions like `call`, `global.get`, `call_indirect`. +pub const IndexRelocType = enum(u8) { + function_index_leb = 0, // R_WASM_FUNCTION_INDEX_LEB — function index in `call` + table_index_sleb = 1, // R_WASM_TABLE_INDEX_SLEB — signed table index in `i32.const` + table_index_i32 = 2, // R_WASM_TABLE_INDEX_I32 — table index as raw u32 in data + type_index_leb = 6, // R_WASM_TYPE_INDEX_LEB — type index in `call_indirect` + global_index_leb = 7, // R_WASM_GLOBAL_INDEX_LEB — global index in `global.get/set` + event_index_leb = 10, // R_WASM_EVENT_INDEX_LEB + table_index_rel_sleb = 12, // R_WASM_TABLE_INDEX_REL_SLEB — PIC relative table index + global_index_i32 = 13, // R_WASM_GLOBAL_INDEX_I32 + table_number_leb = 20, // R_WASM_TABLE_NUMBER_LEB +}; + +/// Offset-based relocation types (have an addend). +/// These patch memory addresses in load/store instructions and data segments. +pub const OffsetRelocType = enum(u8) { + memory_addr_leb = 3, // R_WASM_MEMORY_ADDR_LEB — unsigned addr in load/store + memory_addr_sleb = 4, // R_WASM_MEMORY_ADDR_SLEB — signed addr in `i32.const` + memory_addr_i32 = 5, // R_WASM_MEMORY_ADDR_I32 — raw u32 addr in data segment + function_offset_i32 = 8, // R_WASM_FUNCTION_OFFSET_I32 + section_offset_i32 = 9, // R_WASM_SECTION_OFFSET_I32 + memory_addr_rel_sleb = 11, // R_WASM_MEMORY_ADDR_REL_SLEB — PIC relative signed addr +}; + +comptime { + // Index and Offset relocation type discriminants must not overlap, + // since they share the same type byte in the binary format and are + // distinguished by whether the value is an index or an offset type. + for (std.meta.tags(IndexRelocType)) |idx_tag| { + for (std.meta.tags(OffsetRelocType)) |off_tag| { + std.debug.assert(@intFromEnum(idx_tag) != @intFromEnum(off_tag)); + } + } +} + +// --- Relocation Entry --- + +/// A single relocation entry. Describes one site in the code or data section +/// that references a symbol and needs patching when that symbol's value changes. +pub const RelocationEntry = union(enum) { + /// Index relocations: the value at `offset` is a symbol index (function, type, global). + /// No addend — the patched value is the symbol's resolved index directly. + index: struct { + type_id: IndexRelocType, + offset: u32, // byte offset within the target section body + symbol_index: u32, // index into the linking section's symbol table + /// For reloc.DATA entries, the data segment this site belongs to after normalization. + /// `maxInt(u32)` means "not normalized / not applicable". + data_segment_index: u32 = std.math.maxInt(u32), + }, + + /// Offset relocations: the value at `offset` is a memory address. + /// The patched value is the symbol's address + addend. + offset: struct { + type_id: OffsetRelocType, + offset: u32, + symbol_index: u32, + addend: i32, + /// For reloc.DATA entries, the data segment this site belongs to after normalization. + /// `maxInt(u32)` means "not normalized / not applicable". + data_segment_index: u32 = std.math.maxInt(u32), + }, + + pub fn getSymbolIndex(self: RelocationEntry) u32 { + return switch (self) { + .index => |i| i.symbol_index, + .offset => |o| o.symbol_index, + }; + } + + pub fn getOffset(self: RelocationEntry) u32 { + return switch (self) { + .index => |i| i.offset, + .offset => |o| o.offset, + }; + } + + /// Parse a single relocation entry from bytes at cursor position. + pub fn parse(bytes: []const u8, cursor: *usize) WasmModule.ParseError!RelocationEntry { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const type_byte = bytes[cursor.*]; + cursor.* += 1; + const offset = try WasmModule.readU32(bytes, cursor); + const symbol_index = try WasmModule.readU32(bytes, cursor); + + // Try index relocation types first (no addend) + if (std.meta.intToEnum(IndexRelocType, type_byte)) |type_id| { + return .{ .index = .{ + .type_id = type_id, + .offset = offset, + .symbol_index = symbol_index, + } }; + } else |_| {} + + // Try offset relocation types (with addend) + if (std.meta.intToEnum(OffsetRelocType, type_byte)) |type_id| { + const addend = try WasmModule.readI32(bytes, cursor); + return .{ .offset = .{ + .type_id = type_id, + .offset = offset, + .symbol_index = symbol_index, + .addend = addend, + } }; + } else |_| {} + + return error.InvalidSection; + } +}; + +// --- Symbol Info --- + +/// Flags for symbol table entries. +pub const SymFlag = struct { + pub const BINDING_WEAK: u32 = 0x01; + pub const BINDING_LOCAL: u32 = 0x02; + pub const VISIBILITY_HIDDEN: u32 = 0x04; + pub const UNDEFINED: u32 = 0x10; + pub const EXPORTED: u32 = 0x20; + pub const EXPLICIT_NAME: u32 = 0x40; + pub const NO_STRIP: u32 = 0x80; +}; + +/// Symbol kinds in the linking section's symbol table. +pub const SymKind = enum(u8) { + function = 0, + data = 1, + global = 2, + section = 3, + event = 4, + table = 5, +}; + +/// A symbol table entry. Each symbol has a kind, flags, and an index +/// into the relevant index space (function index, global index, etc.). +/// +/// Function/global symbols can be **explicitly named** (name stored in the linking +/// section) or **implicitly named** (undefined symbols that inherit their name from +/// the import section entry they reference). +/// +/// Parsing rule: a function/global symbol gets a name from the linking section if +/// `(flags & WASM_SYM_EXPLICIT_NAME) != 0` OR `(flags & WASM_SYM_UNDEFINED) == 0` +/// (i.e. defined symbols always have names). Undefined symbols without EXPLICIT_NAME +/// have `name = null` — their name must be looked up from the import section at the +/// symbol's `index`. +pub const SymInfo = struct { + kind: SymKind, + flags: u32, + /// Explicit name from the linking section, or null for implicitly-named + /// imported symbols (whose name comes from the import section). + name: ?[]const u8, + /// For function symbols: the function index (import or defined). + /// For global symbols: the global index. + /// For data symbols: segment index (stored here, offset/size stored separately). + index: u32, + /// Data symbols only: offset within segment. + data_offset: u32 = 0, + /// Data symbols only: size in bytes. + data_size: u32 = 0, + + pub fn isUndefined(self: SymInfo) bool { + return (self.flags & SymFlag.UNDEFINED) != 0; + } + + pub fn isImplicitlyNamed(self: SymInfo) bool { + return self.name == null; + } + + pub fn isLocal(self: SymInfo) bool { + return (self.flags & SymFlag.BINDING_LOCAL) != 0; + } + + pub fn isFunction(self: SymInfo) bool { + return self.kind == .function; + } + + /// Parse a single symbol table entry from the linking section. + pub fn parse(bytes: []const u8, cursor: *usize) WasmModule.ParseError!SymInfo { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const kind_byte = bytes[cursor.*]; + cursor.* += 1; + const kind = std.meta.intToEnum(SymKind, kind_byte) catch return error.InvalidSection; + const flags = try WasmModule.readU32(bytes, cursor); + + switch (kind) { + .function, .global, .event, .table => { + const index = try WasmModule.readU32(bytes, cursor); + const is_import = (flags & SymFlag.UNDEFINED) != 0; + const has_explicit_name = (flags & SymFlag.EXPLICIT_NAME) != 0; + // Defined symbols always have names; undefined ones only if EXPLICIT_NAME is set + const name: ?[]const u8 = if (!is_import or has_explicit_name) + try WasmModule.readString(bytes, cursor) + else + null; + return .{ .kind = kind, .flags = flags, .name = name, .index = index }; + }, + .data => { + const name = try WasmModule.readString(bytes, cursor); + if ((flags & SymFlag.UNDEFINED) != 0) { + // Imported data symbol — no segment info + return .{ .kind = kind, .flags = flags, .name = name, .index = 0 }; + } + const segment_index = try WasmModule.readU32(bytes, cursor); + const data_offset = try WasmModule.readU32(bytes, cursor); + const data_size = try WasmModule.readU32(bytes, cursor); + return .{ + .kind = kind, + .flags = flags, + .name = name, + .index = segment_index, + .data_offset = data_offset, + .data_size = data_size, + }; + }, + .section => { + const index = try WasmModule.readU32(bytes, cursor); + return .{ .kind = kind, .flags = flags, .name = null, .index = index }; + }, + } + } + + /// Resolve this symbol's name when one is available. + /// + /// Explicitly named symbols return their stored name. + /// Implicitly named undefined function/global/event/table symbols inherit + /// their name from the corresponding import array (function, global, or table). + /// Section symbols and other unnamed non-import symbols return null. + pub fn resolveName( + self: SymInfo, + fn_imports: []const Import, + global_imports: []const WasmModule.GlobalImport, + table_imports: []const WasmModule.TableImport, + ) ?[]const u8 { + if (self.name) |n| return n; + + if (!self.isUndefined()) return null; + + return switch (self.kind) { + .function => if (self.index < fn_imports.len) fn_imports[self.index].field_name else null, + .global => if (self.index < global_imports.len) global_imports[self.index].field_name else null, + .table => if (self.index < table_imports.len) table_imports[self.index].field_name else null, + .event => if (self.index < fn_imports.len) fn_imports[self.index].field_name else null, + else => null, + }; + } +}; + +// --- Relocation Section --- + +/// Holds all relocation entries for one section (either "reloc.CODE" or "reloc.DATA"). +pub const RelocationSection = struct { + /// Name of this reloc section (e.g. "reloc.CODE"). + name: []const u8, + /// Index of the target section these relocations apply to. + target_section_index: u32, + /// The relocation entries, sorted by offset. + entries: std.ArrayList(RelocationEntry), + + /// Parse a relocation section from its body bytes (after the section name has + /// been consumed). `name` is the section name (e.g. "reloc.CODE"). + pub fn parse( + allocator: std.mem.Allocator, + name: []const u8, + bytes: []const u8, + cursor: *usize, + section_end: usize, + ) WasmModule.ParseError!RelocationSection { + const target_section_index = try WasmModule.readU32(bytes, cursor); + const count = try WasmModule.readU32(bytes, cursor); + + var entries: std.ArrayList(RelocationEntry) = .empty; + errdefer entries.deinit(allocator); + try entries.ensureTotalCapacity(allocator, count); + + for (0..count) |_| { + if (cursor.* > section_end) return error.UnexpectedEnd; + const entry = try RelocationEntry.parse(bytes, cursor); + entries.appendAssumeCapacity(entry); + } + + return .{ + .name = name, + .target_section_index = target_section_index, + .entries = entries, + }; + } + + /// Patch all sites in `section_bytes` that reference `sym_index` with `value`. + /// This is the core surgical linking primitive. + pub fn applyRelocsU32( + self: *const RelocationSection, + section_bytes: []u8, + sym_index: u32, + value: u32, + ) void { + for (self.entries.items) |entry| { + if (entry.getSymbolIndex() != sym_index) continue; + switch (entry) { + .index => |idx| { + switch (idx.type_id) { + .function_index_leb, + .type_index_leb, + .global_index_leb, + .event_index_leb, + .table_number_leb, + => WasmModule.overwritePaddedU32(section_bytes, idx.offset, value), + .table_index_sleb, + .table_index_rel_sleb, + => WasmModule.overwritePaddedI32( + section_bytes, + idx.offset, + @as(i32, @intCast(value)), + ), + .table_index_i32, .global_index_i32 => { + const off: usize = @intCast(idx.offset); + std.mem.writeInt(u32, section_bytes[off..][0..4], value, .little); + }, + } + }, + .offset => |off| { + const patched = @as(i64, value) + @as(i64, off.addend); + switch (off.type_id) { + .memory_addr_leb => WasmModule.overwritePaddedU32( + section_bytes, + off.offset, + @intCast(patched), + ), + .memory_addr_sleb, + .memory_addr_rel_sleb, + => WasmModule.overwritePaddedI32( + section_bytes, + off.offset, + @intCast(patched), + ), + .memory_addr_i32, + .function_offset_i32, + .section_offset_i32, + => { + const o: usize = @intCast(off.offset); + std.mem.writeInt(u32, section_bytes[o..][0..4], @intCast(patched), .little); + }, + } + }, + } + } + } +}; + +// --- Linking Section --- + +/// Version of the linking metadata format (tool conventions v2). +pub const LINKING_VERSION: u32 = 2; + +/// Linking subsection types (within the "linking" custom section). +pub const LinkingSubsection = enum(u8) { + segment_info = 5, + init_funcs = 6, + comdat_info = 7, + symbol_table = 8, +}; + +/// Container for all linking metadata from a relocatable WASM module. +pub const LinkingSection = struct { + symbol_table: std.ArrayList(SymInfo), + segment_info: std.ArrayList(SegmentInfo), + init_funcs: std.ArrayList(InitFunc), + + /// Parse a linking section body (after the "linking" name has been consumed). + pub fn parse( + allocator: std.mem.Allocator, + bytes: []const u8, + cursor: *usize, + section_end: usize, + ) WasmModule.ParseError!LinkingSection { + // Version must be 2 + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const version = bytes[cursor.*]; + cursor.* += 1; + if (version != LINKING_VERSION) return error.InvalidLinkingVersion; + + var symbol_table: std.ArrayList(SymInfo) = .empty; + errdefer symbol_table.deinit(allocator); + var segment_info: std.ArrayList(SegmentInfo) = .empty; + errdefer segment_info.deinit(allocator); + var init_funcs: std.ArrayList(InitFunc) = .empty; + errdefer init_funcs.deinit(allocator); + + while (cursor.* < section_end) { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const subsection_id = bytes[cursor.*]; + cursor.* += 1; + const subsection_len = try WasmModule.readU32(bytes, cursor); + const subsection_end = cursor.* + subsection_len; + + if (std.meta.intToEnum(LinkingSubsection, subsection_id)) |sub| { + switch (sub) { + .symbol_table => { + const count = try WasmModule.readU32(bytes, cursor); + try symbol_table.ensureTotalCapacity(allocator, count); + for (0..count) |_| { + const sym = try SymInfo.parse(bytes, cursor); + symbol_table.appendAssumeCapacity(sym); + } + }, + .segment_info => { + const count = try WasmModule.readU32(bytes, cursor); + try segment_info.ensureTotalCapacity(allocator, count); + for (0..count) |_| { + const name = try WasmModule.readString(bytes, cursor); + const alignment = try WasmModule.readU32(bytes, cursor); + const flags = try WasmModule.readU32(bytes, cursor); + segment_info.appendAssumeCapacity(.{ + .name = name, + .alignment = alignment, + .flags = flags, + }); + } + }, + .init_funcs => { + const count = try WasmModule.readU32(bytes, cursor); + try init_funcs.ensureTotalCapacity(allocator, count); + for (0..count) |_| { + const priority = try WasmModule.readU32(bytes, cursor); + const symbol_index = try WasmModule.readU32(bytes, cursor); + init_funcs.appendAssumeCapacity(.{ + .priority = priority, + .symbol_index = symbol_index, + }); + } + }, + .comdat_info => { + // Skip comdat info for now + cursor.* = subsection_end; + }, + } + } else |_| { + // Unknown subsection, skip + cursor.* = subsection_end; + } + } + + return .{ + .symbol_table = symbol_table, + .segment_info = segment_info, + .init_funcs = init_funcs, + }; + } + + /// Find a symbol by name. For implicitly-named imported symbols, resolves + /// the name from the corresponding import section. Returns the symbol index, or null. + pub fn findSymbolByName( + self: *const LinkingSection, + name: []const u8, + fn_imports: []const Import, + global_imports: []const WasmModule.GlobalImport, + table_imports: []const WasmModule.TableImport, + ) ?u32 { + for (self.symbol_table.items, 0..) |sym, i| { + if (sym.resolveName(fn_imports, global_imports, table_imports)) |sym_name| { + if (std.mem.eql(u8, sym_name, name)) return @intCast(i); + } + } + return null; + } + + /// Find the symbol table index for an imported function at the given function index. + pub fn findImportedFnSymIndex(self: *const LinkingSection, fn_index: u32) ?u32 { + for (self.symbol_table.items, 0..) |sym, i| { + if (sym.kind == .function and sym.isUndefined() and sym.index == fn_index) { + return @intCast(i); + } + } + return null; + } + + /// Find the symbol for an imported function at `old_fn_index` and update it + /// to point to `new_fn_index`. Returns the symbol index. + pub fn findAndReindexImportedFn( + self: *LinkingSection, + old_fn_index: u32, + new_fn_index: u32, + ) ?u32 { + for (self.symbol_table.items, 0..) |*sym, i| { + if (sym.kind == .function and sym.isUndefined() and sym.index == old_fn_index) { + sym.index = new_fn_index; + return @intCast(i); + } + } + return null; + } +}; + +/// Metadata for a data segment from the linking section. +pub const SegmentInfo = struct { + name: []const u8, + alignment: u32, + flags: u32, +}; + +/// An initialization function entry from the linking section. +pub const InitFunc = struct { + priority: u32, + symbol_index: u32, +}; + +// --- Tests --- + +const testing = std.testing; + +test "RelocationSection.applyRelocsU32 — patches function_index_leb at correct offset" { + // Set up a 10-byte buffer with 5 zero bytes at offset 2 (the relocation site) + var buf = [_]u8{ 0xAA, 0xBB, 0x80, 0x80, 0x80, 0x80, 0x00, 0xCC, 0xDD, 0xEE }; + + var entries: std.ArrayList(RelocationEntry) = .empty; + defer entries.deinit(testing.allocator); + try entries.append(testing.allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 2, + .symbol_index = 0, + } }); + + const reloc = RelocationSection{ + .name = "reloc.CODE", + .target_section_index = 0, + .entries = entries, + }; + + reloc.applyRelocsU32(&buf, 0, 42); + + // Verify the 5 bytes at offset 2 encode 42 in padded LEB128 + var expected = [_]u8{0} ** 5; + WasmModule.overwritePaddedU32(&expected, 0, 42); + try testing.expectEqualSlices(u8, &expected, buf[2..7]); + + // Verify surrounding bytes are untouched + try testing.expectEqual(@as(u8, 0xAA), buf[0]); + try testing.expectEqual(@as(u8, 0xBB), buf[1]); + try testing.expectEqual(@as(u8, 0xCC), buf[7]); +} + +test "RelocationSection.applyRelocsU32 — patches multiple sites for same symbol" { + var buf = [_]u8{0} ** 15; + + var entries: std.ArrayList(RelocationEntry) = .empty; + defer entries.deinit(testing.allocator); + // Two relocation sites for the same symbol + try entries.append(testing.allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 0, + .symbol_index = 3, + } }); + try entries.append(testing.allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 10, + .symbol_index = 3, + } }); + + const reloc = RelocationSection{ + .name = "reloc.CODE", + .target_section_index = 0, + .entries = entries, + }; + + reloc.applyRelocsU32(&buf, 3, 99); + + // Both sites should be patched + var expected = [_]u8{0} ** 5; + WasmModule.overwritePaddedU32(&expected, 0, 99); + try testing.expectEqualSlices(u8, &expected, buf[0..5]); + try testing.expectEqualSlices(u8, &expected, buf[10..15]); +} + +test "RelocationSection.applyRelocsU32 — ignores entries for different symbols" { + var buf = [_]u8{0} ** 10; + + var entries: std.ArrayList(RelocationEntry) = .empty; + defer entries.deinit(testing.allocator); + try entries.append(testing.allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 0, + .symbol_index = 1, + } }); + try entries.append(testing.allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 5, + .symbol_index = 2, + } }); + + const reloc = RelocationSection{ + .name = "reloc.CODE", + .target_section_index = 0, + .entries = entries, + }; + + // Only patch symbol 1 + reloc.applyRelocsU32(&buf, 1, 77); + + var expected = [_]u8{0} ** 5; + WasmModule.overwritePaddedU32(&expected, 0, 77); + try testing.expectEqualSlices(u8, &expected, buf[0..5]); + + // Symbol 2's site should be untouched (all zeros) + try testing.expectEqualSlices(u8, &.{ 0, 0, 0, 0, 0 }, buf[5..10]); +} + +test "RelocationSection.applyRelocsU32 — memory_addr_leb adds addend correctly" { + var buf = [_]u8{0} ** 5; + + var entries: std.ArrayList(RelocationEntry) = .empty; + defer entries.deinit(testing.allocator); + try entries.append(testing.allocator, .{ .offset = .{ + .type_id = .memory_addr_leb, + .offset = 0, + .symbol_index = 0, + .addend = 16, + } }); + + const reloc = RelocationSection{ + .name = "reloc.CODE", + .target_section_index = 0, + .entries = entries, + }; + + // value=100, addend=16, patched address should be 116 + reloc.applyRelocsU32(&buf, 0, 100); + + var expected = [_]u8{0} ** 5; + WasmModule.overwritePaddedU32(&expected, 0, 116); + try testing.expectEqualSlices(u8, &expected, &buf); +} + +test "RelocationSection.applyRelocsU32 — memory_addr_sleb handles negative addend" { + var buf = [_]u8{0} ** 5; + + var entries: std.ArrayList(RelocationEntry) = .empty; + defer entries.deinit(testing.allocator); + try entries.append(testing.allocator, .{ .offset = .{ + .type_id = .memory_addr_sleb, + .offset = 0, + .symbol_index = 0, + .addend = -4, + } }); + + const reloc = RelocationSection{ + .name = "reloc.CODE", + .target_section_index = 0, + .entries = entries, + }; + + // value=100, addend=-4, patched address should be 96 + reloc.applyRelocsU32(&buf, 0, 100); + + var expected = [_]u8{0} ** 5; + WasmModule.overwritePaddedI32(&expected, 0, 96); + try testing.expectEqualSlices(u8, &expected, &buf); +} + +test "LinkingSection.findSymbolByName — finds existing symbol" { + var sym_table: std.ArrayList(SymInfo) = .empty; + defer sym_table.deinit(testing.allocator); + try sym_table.append(testing.allocator, .{ + .kind = .function, + .flags = 0, + .name = "foo", + .index = 0, + }); + try sym_table.append(testing.allocator, .{ + .kind = .function, + .flags = 0, + .name = "bar", + .index = 1, + }); + + const seg_info: std.ArrayList(SegmentInfo) = .empty; + const init_funcs: std.ArrayList(InitFunc) = .empty; + + const section = LinkingSection{ + .symbol_table = sym_table, + .segment_info = seg_info, + .init_funcs = init_funcs, + }; + + const imports: []const Import = &.{}; + const global_imports: []const WasmModule.GlobalImport = &.{}; + const table_imports: []const WasmModule.TableImport = &.{}; + try testing.expectEqual(@as(?u32, 1), section.findSymbolByName("bar", imports, global_imports, table_imports)); +} + +test "LinkingSection.findSymbolByName — returns null for missing symbol" { + var sym_table: std.ArrayList(SymInfo) = .empty; + defer sym_table.deinit(testing.allocator); + try sym_table.append(testing.allocator, .{ + .kind = .function, + .flags = 0, + .name = "foo", + .index = 0, + }); + + const seg_info: std.ArrayList(SegmentInfo) = .empty; + const init_funcs: std.ArrayList(InitFunc) = .empty; + + const section = LinkingSection{ + .symbol_table = sym_table, + .segment_info = seg_info, + .init_funcs = init_funcs, + }; + + const imports: []const Import = &.{}; + const global_imports: []const WasmModule.GlobalImport = &.{}; + const table_imports: []const WasmModule.TableImport = &.{}; + try testing.expectEqual(@as(?u32, null), section.findSymbolByName("missing", imports, global_imports, table_imports)); +} + +test "LinkingSection.findImportedFnSymIndex — finds undefined function symbol" { + var sym_table: std.ArrayList(SymInfo) = .empty; + defer sym_table.deinit(testing.allocator); + // A defined function + try sym_table.append(testing.allocator, .{ + .kind = .function, + .flags = 0, + .name = "defined_fn", + .index = 0, + }); + // An undefined (imported) function at fn_index 5 + try sym_table.append(testing.allocator, .{ + .kind = .function, + .flags = SymFlag.UNDEFINED, + .name = null, + .index = 5, + }); + + const seg_info: std.ArrayList(SegmentInfo) = .empty; + const init_funcs: std.ArrayList(InitFunc) = .empty; + + const section = LinkingSection{ + .symbol_table = sym_table, + .segment_info = seg_info, + .init_funcs = init_funcs, + }; + + try testing.expectEqual(@as(?u32, 1), section.findImportedFnSymIndex(5)); + try testing.expectEqual(@as(?u32, null), section.findImportedFnSymIndex(99)); +} + +test "LinkingSection.findAndReindexImportedFn — updates index and returns sym index" { + var sym_table: std.ArrayList(SymInfo) = .empty; + defer sym_table.deinit(testing.allocator); + try sym_table.append(testing.allocator, .{ + .kind = .function, + .flags = SymFlag.UNDEFINED, + .name = null, + .index = 3, + }); + + const seg_info: std.ArrayList(SegmentInfo) = .empty; + const init_funcs: std.ArrayList(InitFunc) = .empty; + + var section = LinkingSection{ + .symbol_table = sym_table, + .segment_info = seg_info, + .init_funcs = init_funcs, + }; + + const result = section.findAndReindexImportedFn(3, 10); + try testing.expectEqual(@as(?u32, 0), result); + // Verify the index was actually updated + try testing.expectEqual(@as(u32, 10), section.symbol_table.items[0].index); + // Old index should no longer be found + try testing.expectEqual(@as(?u32, null), section.findAndReindexImportedFn(3, 20)); +} + +// --- Parsing tests --- + +test "RelocationEntry.parse — parses index relocation (function_index_leb)" { + // type=0 (function_index_leb), offset=5, symbol_index=2 + var bytes: [3]u8 = .{ 0x00, 0x05, 0x02 }; + var cursor: usize = 0; + const entry = try RelocationEntry.parse(&bytes, &cursor); + try testing.expectEqual(@as(usize, 3), cursor); + switch (entry) { + .index => |idx| { + try testing.expectEqual(IndexRelocType.function_index_leb, idx.type_id); + try testing.expectEqual(@as(u32, 5), idx.offset); + try testing.expectEqual(@as(u32, 2), idx.symbol_index); + }, + .offset => unreachable, + } +} + +test "RelocationEntry.parse — parses offset relocation (memory_addr_leb) with addend" { + // type=3 (memory_addr_leb), offset=10, symbol_index=1, addend=16 + var bytes: [4]u8 = .{ 0x03, 0x0A, 0x01, 0x10 }; + var cursor: usize = 0; + const entry = try RelocationEntry.parse(&bytes, &cursor); + try testing.expectEqual(@as(usize, 4), cursor); + switch (entry) { + .offset => |off| { + try testing.expectEqual(OffsetRelocType.memory_addr_leb, off.type_id); + try testing.expectEqual(@as(u32, 10), off.offset); + try testing.expectEqual(@as(u32, 1), off.symbol_index); + try testing.expectEqual(@as(i32, 16), off.addend); + }, + .index => unreachable, + } +} + +test "SymInfo.parse — parses undefined function symbol (implicitly named)" { + // kind=0 (function), flags=0x10 (UNDEFINED), index=3 + var bytes: [3]u8 = .{ 0x00, 0x10, 0x03 }; + var cursor: usize = 0; + const sym = try SymInfo.parse(&bytes, &cursor); + try testing.expectEqual(SymKind.function, sym.kind); + try testing.expect(sym.isUndefined()); + try testing.expect(sym.isImplicitlyNamed()); + try testing.expectEqual(@as(u32, 3), sym.index); +} + +test "SymInfo.parse — parses defined function symbol (explicitly named)" { + // kind=0 (function), flags=0 (defined), index=1, name="my_func" (7 bytes) + const bytes = [_]u8{ 0x00, 0x00, 0x01, 0x07 } ++ "my_func".*; + var cursor: usize = 0; + const sym = try SymInfo.parse(&bytes, &cursor); + try testing.expectEqual(SymKind.function, sym.kind); + try testing.expect(!sym.isUndefined()); + try testing.expectEqualStrings("my_func", sym.name.?); + try testing.expectEqual(@as(u32, 1), sym.index); +} + +test "SymInfo.parse — parses data symbol with segment info" { + // kind=1 (data), flags=0 (defined), name="data_sym" (8 bytes), + // segment_index=0, data_offset=16, data_size=4 + const bytes = [_]u8{ 0x01, 0x00, 0x08 } ++ "data_sym".* ++ [_]u8{ 0x00, 0x10, 0x04 }; + var cursor: usize = 0; + const sym = try SymInfo.parse(&bytes, &cursor); + try testing.expectEqual(SymKind.data, sym.kind); + try testing.expectEqualStrings("data_sym", sym.name.?); + try testing.expectEqual(@as(u32, 0), sym.index); // segment index + try testing.expectEqual(@as(u32, 16), sym.data_offset); + try testing.expectEqual(@as(u32, 4), sym.data_size); +} + +test "RelocationSection.parse — parses section with multiple entries" { + // Build reloc section body: target_section=10, count=2, then 2 index relocs + var bytes_list: std.ArrayList(u8) = .empty; + defer bytes_list.deinit(testing.allocator); + try WasmModule.leb128WriteU32(testing.allocator, &bytes_list, 10); // target section + try WasmModule.leb128WriteU32(testing.allocator, &bytes_list, 2); // count + // Entry 1: function_index_leb, offset=5, sym=0 + try bytes_list.appendSlice(testing.allocator, &.{ 0x00, 0x05, 0x00 }); + // Entry 2: global_index_leb, offset=12, sym=1 + try bytes_list.appendSlice(testing.allocator, &.{ 0x07, 0x0C, 0x01 }); + + var cursor: usize = 0; + var reloc = try RelocationSection.parse( + testing.allocator, + "reloc.CODE", + bytes_list.items, + &cursor, + bytes_list.items.len, + ); + defer reloc.entries.deinit(testing.allocator); + + try testing.expectEqual(@as(u32, 10), reloc.target_section_index); + try testing.expectEqual(@as(usize, 2), reloc.entries.items.len); + try testing.expectEqual(@as(u32, 5), reloc.entries.items[0].getOffset()); + try testing.expectEqual(@as(u32, 12), reloc.entries.items[1].getOffset()); +} diff --git a/src/backend/wasm/WasmModule.zig b/src/backend/wasm/WasmModule.zig index d18e2110487..27329c968df 100644 --- a/src/backend/wasm/WasmModule.zig +++ b/src/backend/wasm/WasmModule.zig @@ -1,11 +1,12 @@ -//! Wasm binary format encoder. +//! Wasm binary format encoder and parser. //! -//! Builds a valid wasm module from type definitions, function bodies, -//! exports, and other sections. Produces the binary encoding with -//! proper LEB128 variable-length integers. +//! Builds valid wasm modules from type definitions, function bodies, +//! exports, and other sections. Also parses relocatable WASM objects +//! for surgical linking. const std = @import("std"); const Allocator = std.mem.Allocator; +const WasmLinking = @import("WasmLinking.zig"); const Self = @This(); @@ -26,7 +27,8 @@ pub const ExportKind = enum(u8) { }; /// Wasm section IDs -const SectionId = enum(u8) { +pub const SectionId = enum(u8) { + custom_section = 0, type_section = 1, import_section = 2, function_section = 3, @@ -34,9 +36,11 @@ const SectionId = enum(u8) { memory_section = 5, global_section = 6, export_section = 7, + start_section = 8, element_section = 9, code_section = 10, data_section = 11, + data_count_section = 12, }; /// Wasm opcodes @@ -248,12 +252,12 @@ pub const BlockType = enum(u8) { }; /// A function type (signature) -const FuncType = struct { +pub const FuncType = struct { params: []const ValType, }; /// An export entry -const Export = struct { +pub const Export = struct { name: []const u8, kind: ExportKind, idx: u32, @@ -264,22 +268,100 @@ const FuncBody = struct { body: []const u8, }; +/// A defined global variable (beyond the built-in __stack_pointer). +/// Used for PIC globals like __memory_base and __table_base that +/// resolve to constants in the final linked module. +pub const DefinedGlobal = struct { + val_type: u8, // 0x7F = i32, 0x7E = i64 + mutable: bool, + init_value: i32, +}; + /// A data segment placed in linear memory const DataSegment = struct { offset: u32, // offset in linear memory - data: []const u8, // bytes to place + data: []u8, // bytes to place + /// Byte offset of this segment's payload within the original data section body. + /// Used to normalize reloc.DATA entries during preload. + section_offset: u32 = 0, }; /// An imported function -const Import = struct { +pub const Import = struct { module_name: []const u8, field_name: []const u8, type_idx: u32, }; +/// An imported global (e.g. __stack_pointer, __memory_base). +/// PIC modules use these for position-independent addressing. +pub const GlobalImport = struct { + module_name: []const u8, + field_name: []const u8, + val_type: u8, // raw valtype byte (0x7F=i32, 0x7E=i64, etc.) + mutable: bool, +}; + +/// An imported table (e.g. __indirect_function_table). +/// PIC modules use this for indirect function calls. +pub const TableImport = struct { + module_name: []const u8, + field_name: []const u8, +}; + /// Wasm reference type for funcref tables const funcref: u8 = 0x70; +/// WASM32 layout of the RocOps struct in linear memory. +/// +/// On native 64-bit targets, RocOps is 72 bytes with 8-byte pointers and function +/// pointers. On wasm32, function pointers don't exist in linear memory — instead, +/// functions are referenced by u32 table indices for use with `call_indirect`. +/// This makes the WASM layout 36 bytes with all fields being i32. +/// +/// Two distinct `call_indirect` type signatures are used: +/// - RocOps callbacks (roc_alloc, etc.): 2-arg `(i32 args_struct_ptr, i32 env_ptr) → void` +/// - Hosted functions (RocCall ABI): 3-arg `(i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void` +pub const WasmRocOps = struct { + /// Host environment pointer (passed as second arg to all RocOps callbacks). + pub const env_ptr: u32 = 0; + /// Table index for roc_alloc: (args_ptr, env_ptr) → void. + pub const roc_alloc_table_idx: u32 = 4; + /// Table index for roc_dealloc: (args_ptr, env_ptr) → void. + pub const roc_dealloc_table_idx: u32 = 8; + /// Table index for roc_realloc: (args_ptr, env_ptr) → void. + pub const roc_realloc_table_idx: u32 = 12; + /// Table index for roc_dbg: (args_ptr, env_ptr) → void. + pub const roc_dbg_table_idx: u32 = 16; + /// Table index for roc_expect_failed: (args_ptr, env_ptr) → void. + pub const roc_expect_failed_table_idx: u32 = 20; + /// Table index for roc_crashed: (args_ptr, env_ptr) → void. + pub const roc_crashed_table_idx: u32 = 24; + /// Number of hosted functions provided by the platform. + pub const hosted_fns_count: u32 = 28; + /// Pointer to array of u32 table indices for hosted functions in linear memory. + pub const hosted_fns_ptr: u32 = 32; + /// Total size of the WasmRocOps struct in bytes. + pub const total_size: u32 = 36; + + comptime { + // Verify layout: 9 consecutive i32 fields at 4-byte stride = 36 bytes total. + std.debug.assert(total_size == 36); + std.debug.assert(hosted_fns_ptr + 4 == total_size); + + // All offsets must be 4-byte aligned and sequential. + std.debug.assert(env_ptr == 0); + std.debug.assert(roc_alloc_table_idx == env_ptr + 4); + std.debug.assert(roc_dealloc_table_idx == roc_alloc_table_idx + 4); + std.debug.assert(roc_realloc_table_idx == roc_dealloc_table_idx + 4); + std.debug.assert(roc_dbg_table_idx == roc_realloc_table_idx + 4); + std.debug.assert(roc_expect_failed_table_idx == roc_dbg_table_idx + 4); + std.debug.assert(roc_crashed_table_idx == roc_expect_failed_table_idx + 4); + std.debug.assert(hosted_fns_count == roc_crashed_table_idx + 4); + std.debug.assert(hosted_fns_ptr == hosted_fns_count + 4); + } +}; + /// Module state allocator: Allocator, func_types: std.ArrayList(FuncType), @@ -287,7 +369,12 @@ func_type_results: std.ArrayList(?ValType), // parallel to func_types func_type_indices: std.ArrayList(u32), // func_idx -> type_idx func_bodies: std.ArrayList(FuncBody), exports: std.ArrayList(Export), +/// Function imports (indices 0..import_fn_count-1 in the function index space). imports: std.ArrayList(Import), +/// Global imports (e.g. __stack_pointer, __memory_base for PIC modules). +global_imports: std.ArrayList(GlobalImport), +/// Table imports (e.g. __indirect_function_table for PIC modules). +table_imports: std.ArrayList(TableImport), data_segments: std.ArrayList(DataSegment), /// Next available offset for data placement in linear memory (grows up from 0). data_offset: u32, @@ -299,6 +386,37 @@ stack_pointer_init: u32, has_table: bool, /// Function indices to place in the table (element section). table_func_indices: std.ArrayList(u32), +/// Additional defined globals (beyond __stack_pointer at index 0). +/// Used for PIC globals like __memory_base, __table_base. +extra_globals: std.ArrayList(DefinedGlobal), + +// --- Fields for surgical linking (populated by preload) --- + +/// Raw bytes of all function bodies in the code section. +/// Relocation offsets refer to positions within this buffer. +code_bytes: std.ArrayList(u8), +/// Byte offset of each function body within code_bytes. +/// Length: func_type_indices.items.len (locally-defined functions only). +function_offsets: std.ArrayList(u32), +/// Number of dummy functions prepended during linking to maintain index stability. +dead_import_dummy_count: u32, +/// Number of function imports (may differ from imports.items.len after linking). +import_fn_count: u32, +/// Number of global imports parsed from the import section. +/// Tracked for validation — global imports are not stored in the imports array. +import_global_count: u32, +/// LEB128 byte size of the function count in the code section header. +/// Relocation offsets in reloc.CODE are relative to the section body (which +/// includes the function count), but code_bytes starts AFTER the count. +/// This delta must be subtracted from reloc offsets to index into code_bytes. +code_section_fn_count_leb_size: u32, + +/// Linking metadata (symbol table, segment info, init funcs). +linking: WasmLinking.LinkingSection, +/// Relocations for the code section. +reloc_code: WasmLinking.RelocationSection, +/// Relocations for the data section. +reloc_data: WasmLinking.RelocationSection, pub fn init(allocator: Allocator) Self { return .{ @@ -309,6 +427,8 @@ pub fn init(allocator: Allocator) Self { .func_bodies = .empty, .exports = .empty, .imports = .empty, + .global_imports = .empty, + .table_imports = .empty, .data_segments = .empty, .data_offset = 1024, // reserve first 1KB for future use .has_memory = false, @@ -317,6 +437,16 @@ pub fn init(allocator: Allocator) Self { .stack_pointer_init = 65536, .has_table = false, .table_func_indices = .empty, + .extra_globals = .empty, + .code_bytes = .empty, + .function_offsets = .empty, + .dead_import_dummy_count = 0, + .import_fn_count = 0, + .import_global_count = 0, + .code_section_fn_count_leb_size = 0, + .linking = .{ .symbol_table = .empty, .segment_info = .empty, .init_funcs = .empty }, + .reloc_code = .{ .name = "reloc.CODE", .target_section_index = 0, .entries = .empty }, + .reloc_data = .{ .name = "reloc.DATA", .target_section_index = 0, .entries = .empty }, }; } @@ -335,11 +465,21 @@ pub fn deinit(self: *Self) void { self.func_bodies.deinit(self.allocator); self.exports.deinit(self.allocator); self.imports.deinit(self.allocator); + self.global_imports.deinit(self.allocator); + self.table_imports.deinit(self.allocator); for (self.data_segments.items) |ds| { self.allocator.free(ds.data); } self.data_segments.deinit(self.allocator); self.table_func_indices.deinit(self.allocator); + self.extra_globals.deinit(self.allocator); + self.code_bytes.deinit(self.allocator); + self.function_offsets.deinit(self.allocator); + self.linking.symbol_table.deinit(self.allocator); + self.linking.segment_info.deinit(self.allocator); + self.linking.init_funcs.deinit(self.allocator); + self.reloc_code.entries.deinit(self.allocator); + self.reloc_data.entries.deinit(self.allocator); } /// Add an imported function. Returns the function index (imports come before regular functions). @@ -421,6 +561,7 @@ pub fn addDataSegment(self: *Self, data: []const u8, align_bytes: u32) !u32 { try self.data_segments.append(self.allocator, .{ .offset = offset, .data = data_copy, + .section_offset = 0, }); self.data_offset += @intCast(data.len); return offset; @@ -432,6 +573,20 @@ pub fn enableStackPointer(self: *Self, initial_value: u32) void { self.stack_pointer_init = initial_value; } +/// Add a defined global and return its index (accounting for __stack_pointer at 0). +/// Used to define PIC globals like __memory_base and __table_base with value 0. +pub fn addDefinedGlobal(self: *Self, val_type: u8, mutable: bool, init_value: i32) !u32 { + // Global 0 is __stack_pointer (when has_stack_pointer is true). + // Extra globals start at index 1. + const idx: u32 = 1 + @as(u32, @intCast(self.extra_globals.items.len)); + try self.extra_globals.append(self.allocator, .{ + .val_type = val_type, + .mutable = mutable, + .init_value = init_value, + }); + return idx; +} + /// Enable the funcref table for call_indirect. pub fn enableTable(self: *Self) void { self.has_table = true; @@ -444,294 +599,4710 @@ pub fn addTableElement(self: *Self, func_idx: u32) !u32 { return table_idx; } -/// Encode the module to a valid wasm binary. -pub fn encode(self: *Self, allocator: Allocator) ![]u8 { - var output: std.ArrayList(u8) = .empty; - errdefer output.deinit(allocator); - - // Magic number and version - try output.appendSlice(allocator, &.{ 0x00, 0x61, 0x73, 0x6D }); // \0asm - try output.appendSlice(allocator, &.{ 0x01, 0x00, 0x00, 0x00 }); // version 1 +/// Import a hosted function and add it to the funcref table. +/// +/// Hosted functions use the RocCall ABI: (i32 roc_ops_ptr, i32 ret_ptr, i32 args_ptr) → void. +/// The caller must provide the type index for this 3-arg signature (registered separately +/// from the 2-arg RocOps callback type). +/// +/// Returns the table index that can be used with `call_indirect` to invoke the function. +pub fn addHostedFunctionToTable(self: *Self, module_name: []const u8, fn_name: []const u8, roc_call_type_idx: u32) !u32 { + const func_idx = try self.addImport(module_name, fn_name, roc_call_type_idx); + return try self.addTableElement(func_idx); +} - // Type section - if (self.func_types.items.len > 0) { - try self.encodeTypeSection(allocator, &output); +/// Find an imported function's index by module and field name. +/// Returns null if no matching import exists. +pub fn findImportFuncIdx(self: *const Self, module_name: []const u8, field_name: []const u8) ?u32 { + for (self.imports.items, 0..) |imp, i| { + if (std.mem.eql(u8, imp.module_name, module_name) and std.mem.eql(u8, imp.field_name, field_name)) { + return @intCast(i); + } } + return null; +} - // Import section (must be between type and function sections) - if (self.imports.items.len > 0) { - try self.encodeImportSection(allocator, &output); +/// Find a function index by its resolved symbol/import name. +pub fn findFunctionIdxByName(self: *const Self, name: []const u8) ?u32 { + if (self.linking.findSymbolByName(name, self.imports.items, self.global_imports.items, self.table_imports.items)) |sym_idx| { + const sym = self.linking.symbol_table.items[sym_idx]; + if (sym.kind == .function) return sym.index; } - // Function section - if (self.func_type_indices.items.len > 0) { - try self.encodeFunctionSection(allocator, &output); + for (self.imports.items, 0..) |imp, i| { + if (std.mem.eql(u8, imp.field_name, name)) return @intCast(i); } - // Table section (between function and memory) - if (self.has_table) { - try self.encodeTableSection(allocator, &output); - } + return null; +} - // Memory section - if (self.has_memory) { - try self.encodeMemorySection(allocator, &output); +/// Find a defined function whose resolved name ends with `suffix`. +/// This intentionally ignores undefined/imported symbols so host callback lookups +/// do not accidentally bind raw platform imports like `roc_dbg`. +pub fn findFunctionIdxBySuffix(self: *const Self, suffix: []const u8) ?u32 { + for (self.linking.symbol_table.items) |sym| { + if (sym.kind != .function or sym.isUndefined()) continue; + const sym_name = sym.resolveName(self.imports.items, self.global_imports.items, self.table_imports.items) orelse continue; + if (std.mem.endsWith(u8, sym_name, suffix)) return sym.index; } + return null; +} - // Global section - if (self.has_stack_pointer) { - try self.encodeGlobalSection(allocator, &output); - } +/// Find or append a function in the element section. +pub fn ensureTableElement(self: *Self, func_idx: u32) !u32 { + return self.findTableIndex(func_idx) orelse try self.addTableElement(func_idx); +} - // Export section - if (self.exports.items.len > 0) { - try self.encodeExportSection(allocator, &output); - } +// --- Surgical Linking --- - // Element section (between export and code) - if (self.has_table and self.table_func_indices.items.len > 0) { - try self.encodeElementSection(allocator, &output); - } +/// Dummy function body: unreachable + end. Inserted to maintain function index +/// stability when an import is removed during surgical linking. +pub const DUMMY_FUNCTION = [3]u8{ + 0x00, // zero local variable declarations + Op.@"unreachable", // trap if called (means DCE was wrong) + Op.end, // end of function body +}; - // Code section - if (self.func_bodies.items.len > 0) { - try self.encodeCodeSection(allocator, &output); +/// Entry in the host-to-app linking map: maps an app function name +/// (which the host imports) to its defined function index. +pub const HostToAppEntry = struct { + name: []const u8, + fn_index: u32, +}; + +/// Perform surgical linking: for each (app_fn_name, app_fn_index) pair, +/// remove the host's import for that name and redirect all call sites to +/// the app-defined function at app_fn_index. +/// +/// The last function import is swapped into the vacated slot so that only +/// two symbols need relocation updates. A dummy function is prepended to +/// func_type_indices to keep the total function count stable. +pub fn linkHostToAppCalls(self: *Self, host_to_app_map: []const HostToAppEntry) !void { + for (host_to_app_map) |entry| { + const app_fn_name = entry.name; + const app_fn_index = entry.fn_index; + + // 1. Find the host import matching app_fn_name, and the last import (swap candidate). + // Since self.imports only contains function imports, import_index == fn_index. + var host_fn_index: ?u32 = null; + var last_fn_index: u32 = 0; + for (self.imports.items, 0..) |imp, i| { + last_fn_index = @intCast(i); + if (std.mem.eql(u8, imp.field_name, app_fn_name)) { + host_fn_index = @intCast(i); + } + } + + const host_idx = host_fn_index orelse { + // The host doesn't import this function — export the app's definition + // so it can be called from JS. + try self.exports.append(self.allocator, .{ + .name = app_fn_name, + .kind = .func, + .idx = app_fn_index, + }); + continue; + }; + + // 2. Swap: remove the last import and put it where the host import was. + // This keeps all other import indices stable — only the host and swap + // indices need relocation updates. + const swap_import = self.imports.items[last_fn_index]; + self.imports.items.len -= 1; + if (last_fn_index != host_idx) { + self.imports.items[host_idx] = swap_import; + } + + // 3. Update symbol table and apply relocations for the host function. + // The host import (at host_idx) is now a defined app function at app_fn_index. + if (self.linking.findAndReindexImportedFn(host_idx, app_fn_index)) |sym_index| { + self.reloc_code.applyRelocsU32(self.code_bytes.items, sym_index, app_fn_index); + } + + // 4. Update symbol table and apply relocations for the swapped function. + // The last import (at last_fn_index) moved to host_idx. + if (last_fn_index != host_idx) { + if (self.linking.findAndReindexImportedFn(last_fn_index, host_idx)) |swap_sym_index| { + self.reloc_code.applyRelocsU32(self.code_bytes.items, swap_sym_index, host_idx); + } + } + + // 5. Insert a dummy function to compensate for the removed import. + // This keeps defined-function indices unchanged: import_count decreases + // by 1, but one dummy is prepended to the code section, so the first + // real defined function stays at the same global index. + self.dead_import_dummy_count += 1; + try self.func_type_indices.insert(self.allocator, 0, 0); // dummy uses type signature 0 + + // 6. Track the decreased import count. + self.import_fn_count -= 1; } +} - // Data section - if (self.data_segments.items.len > 0) { - try self.encodeDataSection(allocator, &output); +// --- Phase 8a: Module Merging (Builtins) --- + +/// Result of mergeModule: maps from the source module's symbol indices +/// to the merged module's symbol indices. +pub const MergeResult = struct { + /// Maps source module symbol index → merged module symbol index. + /// Length equals source module's symbol_table.items.len. + symbol_remap: []u32, + allocator: Allocator, + + pub fn deinit(self: *MergeResult) void { + self.allocator.free(self.symbol_remap); } +}; - return output.toOwnedSlice(allocator); -} +// --- Phase 8b: Builtin Symbol Lookup --- -fn encodeTypeSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); +/// Maps builtin operations to their symbol indices in the merged module. +/// +/// After `mergeModule` incorporates `roc_builtins.o`, this struct is populated +/// by looking up each `roc_builtins_*` symbol name in the merged module's +/// symbol table. WasmCodeGen uses these symbol indices with +/// `emitRelocatableCall` to emit calls to builtins. +pub const BuiltinSymbols = struct { + // --- Decimal / i128 arithmetic --- + dec_mul: u32, // roc_builtins_dec_mul_saturated + dec_div: u32, // roc_builtins_dec_div + dec_div_trunc: u32, // roc_builtins_dec_div_trunc + dec_to_str: u32, // roc_builtins_dec_to_str + i128_div_s: u32, // roc_builtins_num_div_trunc_i128 + i128_mod_s: u32, // roc_builtins_num_rem_trunc_i128 + u128_div: u32, // roc_builtins_num_div_trunc_u128 + u128_mod: u32, // roc_builtins_num_rem_trunc_u128 - try leb128WriteU32(gpa, §ion_data, @intCast(self.func_types.items.len)); + // --- Numeric conversions --- + i128_to_dec: u32, // roc_builtins_i128_to_dec_try_unsafe + u128_to_dec: u32, // roc_builtins_u128_to_dec_try_unsafe + dec_to_int_try_unsafe: u32, // roc_builtins_dec_to_int_try_unsafe + dec_to_f32: u32, // roc_builtins_dec_to_f32_try_unsafe + float_to_str: u32, // roc_builtins_float_to_str + int_to_str: u32, // roc_builtins_int_to_str + int_from_str: u32, // roc_builtins_int_from_str + dec_from_str: u32, // roc_builtins_dec_from_str + float_from_str: u32, // roc_builtins_float_from_str - for (self.func_types.items, 0..) |ft, idx| { - try section_data.append(gpa, 0x60); // func type marker - try leb128WriteU32(gpa, §ion_data, @intCast(ft.params.len)); - for (ft.params) |p| { - try section_data.append(gpa, @intFromEnum(p)); + // --- String operations --- + str_equal: u32, // roc_builtins_str_equal + str_concat: u32, // roc_builtins_str_concat + str_repeat: u32, // roc_builtins_str_repeat + str_trim: u32, // roc_builtins_str_trim + str_trim_start: u32, // roc_builtins_str_trim_start + str_trim_end: u32, // roc_builtins_str_trim_end + str_split: u32, // roc_builtins_str_split + str_join_with: u32, // roc_builtins_str_join_with + str_reserve: u32, // roc_builtins_str_reserve + str_release_excess_capacity: u32, // roc_builtins_str_release_excess_capacity + str_with_capacity: u32, // roc_builtins_str_with_capacity + str_drop_prefix: u32, // roc_builtins_str_drop_prefix + str_drop_suffix: u32, // roc_builtins_str_drop_suffix + str_with_ascii_lowercased: u32, // roc_builtins_str_with_ascii_lowercased + str_with_ascii_uppercased: u32, // roc_builtins_str_with_ascii_uppercased + str_caseless_ascii_equals: u32, // roc_builtins_str_caseless_ascii_equals + str_from_utf8: u32, // roc_builtins_str_from_utf8 + + // --- List operations --- + list_append_unsafe: u32, // roc_builtins_list_append_unsafe + list_append_safe: u32, // roc_builtins_list_append_safe + list_sort_with: u32, // roc_builtins_list_sort_with + list_eq: u32, // roc_builtins_list_eq + list_str_eq: u32, // roc_builtins_list_str_eq + list_list_eq: u32, // roc_builtins_list_list_eq + list_reverse: u32, // roc_builtins_list_reverse + + // --- Memory management --- + allocate_with_refcount: u32, // roc_builtins_allocate_with_refcount + + // --- Integer modulo --- + i32_mod_by: u32, // roc_builtins_i32_mod_by + i64_mod_by: u32, // roc_builtins_i64_mod_by + + /// Name → field mapping used by `populate` to fill this struct. + const mapping = .{ + .{ "roc_builtins_dec_mul_saturated", "dec_mul" }, + .{ "roc_builtins_dec_div", "dec_div" }, + .{ "roc_builtins_dec_div_trunc", "dec_div_trunc" }, + .{ "roc_builtins_dec_to_str", "dec_to_str" }, + .{ "roc_builtins_num_div_trunc_i128", "i128_div_s" }, + .{ "roc_builtins_num_rem_trunc_i128", "i128_mod_s" }, + .{ "roc_builtins_num_div_trunc_u128", "u128_div" }, + .{ "roc_builtins_num_rem_trunc_u128", "u128_mod" }, + .{ "roc_builtins_i128_to_dec_try_unsafe", "i128_to_dec" }, + .{ "roc_builtins_u128_to_dec_try_unsafe", "u128_to_dec" }, + .{ "roc_builtins_dec_to_int_try_unsafe", "dec_to_int_try_unsafe" }, + .{ "roc_builtins_dec_to_f32_try_unsafe", "dec_to_f32" }, + .{ "roc_builtins_float_to_str", "float_to_str" }, + .{ "roc_builtins_int_to_str", "int_to_str" }, + .{ "roc_builtins_int_from_str", "int_from_str" }, + .{ "roc_builtins_dec_from_str", "dec_from_str" }, + .{ "roc_builtins_float_from_str", "float_from_str" }, + .{ "roc_builtins_str_equal", "str_equal" }, + .{ "roc_builtins_str_concat", "str_concat" }, + .{ "roc_builtins_str_repeat", "str_repeat" }, + .{ "roc_builtins_str_trim", "str_trim" }, + .{ "roc_builtins_str_trim_start", "str_trim_start" }, + .{ "roc_builtins_str_trim_end", "str_trim_end" }, + .{ "roc_builtins_str_split", "str_split" }, + .{ "roc_builtins_str_join_with", "str_join_with" }, + .{ "roc_builtins_str_reserve", "str_reserve" }, + .{ "roc_builtins_str_release_excess_capacity", "str_release_excess_capacity" }, + .{ "roc_builtins_str_with_capacity", "str_with_capacity" }, + .{ "roc_builtins_str_drop_prefix", "str_drop_prefix" }, + .{ "roc_builtins_str_drop_suffix", "str_drop_suffix" }, + .{ "roc_builtins_str_with_ascii_lowercased", "str_with_ascii_lowercased" }, + .{ "roc_builtins_str_with_ascii_uppercased", "str_with_ascii_uppercased" }, + .{ "roc_builtins_str_caseless_ascii_equals", "str_caseless_ascii_equals" }, + .{ "roc_builtins_str_from_utf8", "str_from_utf8" }, + .{ "roc_builtins_list_append_unsafe", "list_append_unsafe" }, + .{ "roc_builtins_list_append_safe", "list_append_safe" }, + .{ "roc_builtins_list_sort_with", "list_sort_with" }, + .{ "roc_builtins_list_eq", "list_eq" }, + .{ "roc_builtins_list_str_eq", "list_str_eq" }, + .{ "roc_builtins_list_list_eq", "list_list_eq" }, + .{ "roc_builtins_list_reverse", "list_reverse" }, + .{ "roc_builtins_allocate_with_refcount", "allocate_with_refcount" }, + .{ "roc_builtins_i32_mod_by", "i32_mod_by" }, + .{ "roc_builtins_i64_mod_by", "i64_mod_by" }, + }; + + pub const PopulateError = error{MissingBuiltinSymbol}; + + /// Populate this struct by looking up each builtin symbol name in the + /// module's merged symbol table. Returns the actual function index for + /// each builtin (from sym.index), not the symbol table index. + pub fn populate(module: *const Self) PopulateError!BuiltinSymbols { + var result: BuiltinSymbols = undefined; + inline for (mapping) |entry| { + const sym_name = entry[0]; + const field_name = entry[1]; + const sym_table_idx = module.linking.findSymbolByName( + sym_name, + module.imports.items, + module.global_imports.items, + module.table_imports.items, + ) orelse return error.MissingBuiltinSymbol; + @field(result, field_name) = module.linking.symbol_table.items[sym_table_idx].index; } - if (self.func_type_results.items[idx]) |r| { - try section_data.append(gpa, 1); // 1 result - try section_data.append(gpa, @intFromEnum(r)); + return result; + } +}; + +/// Merge a relocatable module (e.g. roc_builtins.o) into this module. +/// +/// This appends the source module's functions, code, data, symbols, and +/// relocations into self, resolving shared symbols (like roc_alloc) against +/// this module's existing imports. +/// +/// After merging, the source module's defined functions become defined +/// functions in self, and all relocation entries are remapped so that +/// the merged symbol table is consistent. +/// +/// Returns a MergeResult with the symbol index mapping, which callers +/// use to look up merged builtins by their original symbol indices. +pub fn mergeModule(self: *Self, source: *const Self) !MergeResult { + const gpa = self.allocator; + + // --- 1. Merge type section (with deduplication) --- + // Maps source type index → self type index. + const type_remap = try gpa.alloc(u32, source.func_types.items.len); + defer gpa.free(type_remap); + + for (source.func_types.items, source.func_type_results.items, 0..) |src_ft, src_result, src_idx| { + // Check if self already has an identical type signature. + var found: ?u32 = null; + for (self.func_types.items, self.func_type_results.items, 0..) |dst_ft, dst_result, dst_idx| { + if (src_result != dst_result) continue; + if (src_ft.params.len != dst_ft.params.len) continue; + if (std.mem.eql(ValType, src_ft.params, dst_ft.params)) { + found = @intCast(dst_idx); + break; + } + } + if (found) |idx| { + type_remap[src_idx] = idx; } else { - try section_data.append(gpa, 0); // 0 results + // Add new type to self. + type_remap[src_idx] = try self.addFuncType(src_ft.params, if (src_result) |r| &.{r} else &.{}); } } - try output.append(gpa, @intFromEnum(SectionId.type_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); -} + // --- 2. Compute function index mapping --- + // Source defined functions start at source.import_fn_count in source's index space. + // In self, they'll be appended after existing defined functions. + const source_defined_count: u32 = @intCast(source.func_type_indices.items.len); -fn encodeImportSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); + // func_remap: maps source global function index → self global function index. + // For source imports, we resolve them against self's imports by name. + // For source defined functions, they get sequential indices after self's existing functions. + const total_source_fns = source.import_fn_count + source_defined_count; + const func_remap = try gpa.alloc(u32, total_source_fns); + defer gpa.free(func_remap); - try leb128WriteU32(gpa, §ion_data, @intCast(self.imports.items.len)); - for (self.imports.items) |imp| { - // Module name - try leb128WriteU32(gpa, §ion_data, @intCast(imp.module_name.len)); - try section_data.appendSlice(gpa, imp.module_name); - // Field name - try leb128WriteU32(gpa, §ion_data, @intCast(imp.field_name.len)); - try section_data.appendSlice(gpa, imp.field_name); - // Import kind: 0x00 = function - try section_data.append(gpa, 0x00); - // Type index - try leb128WriteU32(gpa, §ion_data, imp.type_idx); + // Remap source imports → self imports (by name match). + // NOTE: This loop may add new imports to self, changing importCount(). + // We must compute self_defined_base AFTER this loop completes. + const old_import_count = self.importCount(); + for (source.imports.items, 0..) |src_imp, src_idx| { + // Find matching import in self by field_name. + var matched: ?u32 = null; + for (self.imports.items, 0..) |self_imp, self_idx| { + if (std.mem.eql(u8, self_imp.field_name, src_imp.field_name)) { + matched = @intCast(self_idx); + break; + } + } + func_remap[src_idx] = matched orelse { + // Source imports a function self doesn't have — add it as a new import. + const remapped_type = type_remap[src_imp.type_idx]; + func_remap[src_idx] = try self.addImport(src_imp.module_name, src_imp.field_name, remapped_type); + continue; + }; } - try output.append(gpa, @intFromEnum(SectionId.import_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); -} + // If new imports were added, all existing defined function indices in self + // shift up by the number of new imports. Update symbol table, element section, + // and exports to reflect the new indices. + const new_import_count = self.importCount(); + const import_delta = new_import_count - old_import_count; + if (import_delta > 0) { + // Shift defined function symbols. + for (self.linking.symbol_table.items) |*sym| { + if (sym.isFunction() and !sym.isUndefined() and sym.index >= old_import_count) { + sym.index += import_delta; + } + } + // Shift element section entries (table func indices). + for (self.table_func_indices.items) |*fi| { + if (fi.* >= old_import_count) fi.* += import_delta; + } + // Shift export entries referencing defined functions. + for (self.exports.items) |*exp| { + if (exp.kind == .func and exp.idx >= old_import_count) { + exp.idx += import_delta; + } + } + } -fn encodeFunctionSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); + // Compute defined function base AFTER imports are finalized, + // since addImport above may have increased importCount(). + const self_defined_base = self.importCount() + @as(u32, @intCast(self.func_type_indices.items.len)); - try leb128WriteU32(gpa, §ion_data, @intCast(self.func_type_indices.items.len)); - for (self.func_type_indices.items) |type_idx| { - try leb128WriteU32(gpa, §ion_data, type_idx); + // Remap source defined functions → new indices in self. + for (0..source_defined_count) |i| { + func_remap[source.import_fn_count + i] = self_defined_base + @as(u32, @intCast(i)); } - try output.append(gpa, @intFromEnum(SectionId.function_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); -} + // --- 3. Merge function section (remap type indices) --- + for (source.func_type_indices.items) |src_type_idx| { + try self.func_type_indices.append(gpa, type_remap[src_type_idx]); + } -fn encodeMemorySection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); + // --- 4. Merge code section (append bytes, track offsets) --- + const base_code_offset: u32 = @intCast(self.code_bytes.items.len); - try leb128WriteU32(gpa, §ion_data, 1); // 1 memory - try section_data.append(gpa, 0x00); // no max - try leb128WriteU32(gpa, §ion_data, self.memory_min_pages); + try self.code_bytes.appendSlice(gpa, source.code_bytes.items); - try output.append(gpa, @intFromEnum(SectionId.memory_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); -} + for (source.function_offsets.items) |src_offset| { + try self.function_offsets.append(gpa, base_code_offset + src_offset); + } -fn encodeGlobalSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); + // --- 5. Merge data section (adjust memory offsets) --- + // data_remap: maps source segment index → new data base address. + const data_remap = try gpa.alloc(u32, source.data_segments.items.len); + defer gpa.free(data_remap); + // data_segment_remap: maps source segment index → new segment index in self.data_segments. + const data_segment_remap = try gpa.alloc(u32, source.data_segments.items.len); + defer gpa.free(data_segment_remap); - try leb128WriteU32(gpa, §ion_data, 1); // 1 global + for (source.data_segments.items, 0..) |src_ds, i| { + // Find alignment from segment info if available, default to 1. + const alignment: u32 = if (i < source.linking.segment_info.items.len) + @as(u32, 1) << @intCast(source.linking.segment_info.items[i].alignment) + else + 1; + data_segment_remap[i] = @intCast(self.data_segments.items.len); + const new_offset = try self.addDataSegment(src_ds.data, alignment); + data_remap[i] = new_offset; + } - // Global 0: __stack_pointer (i32, mutable) - try section_data.append(gpa, @intFromEnum(ValType.i32)); - try section_data.append(gpa, 0x01); // mutable - try section_data.append(gpa, Op.i32_const); - try leb128WriteI32(gpa, §ion_data, @intCast(self.stack_pointer_init)); - try section_data.append(gpa, Op.end); + // --- 5b. Merge element section (remap function indices) --- + for (source.table_func_indices.items) |src_func_idx| { + const remapped = func_remap[src_func_idx]; + try self.table_func_indices.append(gpa, remapped); + self.has_table = true; + } - try output.append(gpa, @intFromEnum(SectionId.global_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); -} + // --- 6. Merge symbol table --- + // symbol_remap: maps source symbol index → self symbol index. + const source_sym_count = source.linking.symbol_table.items.len; + const symbol_remap = try gpa.alloc(u32, source_sym_count); + errdefer gpa.free(symbol_remap); -fn encodeExportSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); + for (source.linking.symbol_table.items, 0..) |src_sym, src_sym_idx| { + const src_name = src_sym.resolveName(source.imports.items, source.global_imports.items, source.table_imports.items); - try leb128WriteU32(gpa, §ion_data, @intCast(self.exports.items.len)); - for (self.exports.items) |exp| { - try leb128WriteU32(gpa, §ion_data, @intCast(exp.name.len)); - try section_data.appendSlice(gpa, exp.name); - try section_data.append(gpa, @intFromEnum(exp.kind)); - try leb128WriteU32(gpa, §ion_data, exp.idx); + switch (src_sym.kind) { + .function => { + if (src_sym.isUndefined()) { + // Undefined function in source — resolve against self's symbol table. + if (src_name) |name| { + if (self.linking.findSymbolByName(name, self.imports.items, self.global_imports.items, self.table_imports.items)) |existing| { + symbol_remap[src_sym_idx] = existing; + continue; + } + } + // Not found — add as new undefined symbol referencing the (possibly new) import. + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, .{ + .kind = .function, + .flags = src_sym.flags, + .name = src_name, + .index = func_remap[src_sym.index], + }); + symbol_remap[src_sym_idx] = new_sym_idx; + } else { + // Defined function in source — add as defined in self. + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, .{ + .kind = .function, + .flags = src_sym.flags, + .name = src_sym.name, + .index = func_remap[src_sym.index], + }); + symbol_remap[src_sym_idx] = new_sym_idx; + } + }, + .data => { + if (src_sym.isUndefined()) { + // Undefined data — resolve against self. + if (src_name) |name| { + if (self.linking.findSymbolByName(name, self.imports.items, self.global_imports.items, self.table_imports.items)) |existing| { + symbol_remap[src_sym_idx] = existing; + continue; + } + } + // Add as-is (undefined). + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, src_sym); + symbol_remap[src_sym_idx] = new_sym_idx; + } else { + // Defined data — rebase the symbol's absolute memory address. + // + // Preloaded relocatable modules normalize data_offset from + // segment-relative to absolute during parse. To merge a data + // symbol into self, first recover its offset within the + // source segment, then add that intra-segment offset to the + // new segment base in self. + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + const new_offset = if (src_sym.index < data_remap.len and src_sym.index < source.data_segments.items.len) blk: { + const source_segment_offset = source.data_segments.items[src_sym.index].offset; + const within_segment_offset = if (src_sym.data_offset >= source_segment_offset) + src_sym.data_offset - source_segment_offset + else + src_sym.data_offset; + break :blk data_remap[src_sym.index] + within_segment_offset; + } else src_sym.data_offset; + const new_segment_idx = if (src_sym.index < data_segment_remap.len) + data_segment_remap[src_sym.index] + else + src_sym.index; + try self.linking.symbol_table.append(gpa, .{ + .kind = .data, + .flags = src_sym.flags, + .name = src_sym.name, + .index = new_segment_idx, + .data_offset = new_offset, + .data_size = src_sym.data_size, + }); + symbol_remap[src_sym_idx] = new_sym_idx; + } + }, + .global => { + // Resolve globals (like __stack_pointer) against self. + if (src_sym.isUndefined()) { + if (src_name) |name| { + if (self.linking.findSymbolByName(name, self.imports.items, self.global_imports.items, self.table_imports.items)) |existing| { + symbol_remap[src_sym_idx] = existing; + continue; + } + // PIC globals: define them as constants in the final module. + // __memory_base and __table_base are 0 in statically-linked modules. + if (std.mem.eql(u8, name, "__memory_base") or + std.mem.eql(u8, name, "__table_base")) + { + const global_idx = try self.addDefinedGlobal(0x7F, false, 0); // i32, immutable, value=0 + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, .{ + .kind = .global, + .flags = 0, // defined (not undefined) + .name = name, + .index = global_idx, + }); + symbol_remap[src_sym_idx] = new_sym_idx; + continue; + } + } + } + // Add as-is if not resolved. + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, src_sym); + symbol_remap[src_sym_idx] = new_sym_idx; + }, + .table => { + // PIC: __indirect_function_table → just enable the module's table. + if (src_sym.isUndefined()) { + if (src_name) |name| { + if (std.mem.eql(u8, name, "__indirect_function_table")) { + self.has_table = true; + // Map to a symbol that references table 0. + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, .{ + .kind = .table, + .flags = 0, + .name = name, + .index = 0, + }); + symbol_remap[src_sym_idx] = new_sym_idx; + continue; + } + } + } + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, src_sym); + symbol_remap[src_sym_idx] = new_sym_idx; + }, + .section, .event => { + // Carry over as-is. + const new_sym_idx: u32 = @intCast(self.linking.symbol_table.items.len); + try self.linking.symbol_table.append(gpa, src_sym); + symbol_remap[src_sym_idx] = new_sym_idx; + }, + } } - try output.append(gpa, @intFromEnum(SectionId.export_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); + // --- 7. Merge relocation entries (remap symbol indices and offsets) --- + // Code relocations. + for (source.reloc_code.entries.items) |src_entry| { + switch (src_entry) { + .index => |idx| { + if (idx.type_id == .type_index_leb) { + // R_WASM_TYPE_INDEX_LEB: the placeholder in code_bytes is the + // SOURCE type index. Remap it immediately using type_remap rather + // than deferring — resolveCodeRelocations doesn't have type_remap. + const src_type_idx = readPaddedU32( + self.code_bytes.items, + base_code_offset + idx.offset, + ); + const remapped = if (src_type_idx < type_remap.len) + type_remap[src_type_idx] + else + src_type_idx; + overwritePaddedU32( + self.code_bytes.items, + base_code_offset + idx.offset, + remapped, + ); + // Don't add to reloc_code — already resolved. + continue; + } + try self.reloc_code.entries.append(gpa, .{ .index = .{ + .type_id = idx.type_id, + .offset = base_code_offset + idx.offset, + .symbol_index = symbol_remap[idx.symbol_index], + } }); + }, + .offset => |off| { + try self.reloc_code.entries.append(gpa, .{ .offset = .{ + .type_id = off.type_id, + .offset = base_code_offset + off.offset, + .symbol_index = symbol_remap[off.symbol_index], + .addend = off.addend, + } }); + }, + } + } + + // Data relocations. + for (source.reloc_data.entries.items) |src_entry| { + switch (src_entry) { + .index => |idx| { + const remapped_segment_idx = if (idx.data_segment_index < data_segment_remap.len) + data_segment_remap[idx.data_segment_index] + else + idx.data_segment_index; + try self.reloc_data.entries.append(gpa, .{ + .index = .{ + .type_id = idx.type_id, + .offset = idx.offset, + .symbol_index = symbol_remap[idx.symbol_index], + .data_segment_index = remapped_segment_idx, + }, + }); + }, + .offset => |off| { + const remapped_segment_idx = if (off.data_segment_index < data_segment_remap.len) + data_segment_remap[off.data_segment_index] + else + off.data_segment_index; + try self.reloc_data.entries.append(gpa, .{ .offset = .{ + .type_id = off.type_id, + .offset = off.offset, + .symbol_index = symbol_remap[off.symbol_index], + .addend = off.addend, + .data_segment_index = remapped_segment_idx, + } }); + }, + } + } + + // Update import_fn_count to reflect any new imports added during merge. + self.import_fn_count = @intCast(self.imports.items.len); + + return .{ + .symbol_remap = symbol_remap, + .allocator = gpa, + }; } -fn encodeCodeSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); +/// Find the table index (position in table_func_indices) for a given function index. +/// Returns null if the function is not in the table. +fn findTableIndex(self: *const Self, func_idx: u32) ?u32 { + for (self.table_func_indices.items, 0..) |tfi, i| { + if (tfi == func_idx) return @intCast(i); + } + return null; +} - try leb128WriteU32(gpa, §ion_data, @intCast(self.func_bodies.items.len)); - for (self.func_bodies.items) |fb| { - try leb128WriteU32(gpa, §ion_data, @intCast(fb.body.len)); - try section_data.appendSlice(gpa, fb.body); +fn patchResolvedRelocation(self: *const Self, target_bytes: []u8, entry: WasmLinking.RelocationEntry, patch_offset: u32) void { + const sym = self.linking.symbol_table.items[entry.getSymbolIndex()]; + switch (entry) { + .index => |idx| { + const value = sym.index; + switch (idx.type_id) { + .type_index_leb => { + // type_index_leb relocations are normally resolved during merge + // (when the type_remap is available). If one survives, use the + // resolved symbol index directly. + overwritePaddedU32(target_bytes, patch_offset, value); + }, + .function_index_leb, + .global_index_leb, + .event_index_leb, + .table_number_leb, + => overwritePaddedU32(target_bytes, patch_offset, value), + .table_index_sleb, + .table_index_rel_sleb, + => { + const table_idx = self.findTableIndex(value) orelse value; + overwritePaddedI32(target_bytes, patch_offset, @intCast(table_idx)); + }, + .table_index_i32 => { + const table_idx = self.findTableIndex(value) orelse value; + const off: usize = @intCast(patch_offset); + std.mem.writeInt(u32, target_bytes[off..][0..4], table_idx, .little); + }, + .global_index_i32 => { + const off: usize = @intCast(patch_offset); + std.mem.writeInt(u32, target_bytes[off..][0..4], value, .little); + }, + } + }, + .offset => |off| { + // For data symbols, the resolved address is the data_offset. + // For others, use the symbol's index as the base address. + const base: i64 = if (sym.kind == .data) + @intCast(sym.data_offset) + else + @intCast(sym.index); + const patched = base + @as(i64, off.addend); + switch (off.type_id) { + .memory_addr_leb => overwritePaddedU32( + target_bytes, + patch_offset, + @intCast(patched), + ), + .memory_addr_sleb, + .memory_addr_rel_sleb, + => overwritePaddedI32( + target_bytes, + patch_offset, + @intCast(patched), + ), + .memory_addr_i32, + .function_offset_i32, + .section_offset_i32, + => { + const o: usize = @intCast(patch_offset); + std.mem.writeInt(u32, target_bytes[o..][0..4], @intCast(patched), .little); + }, + } + }, } +} - try output.append(gpa, @intFromEnum(SectionId.code_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); +/// Resolve all code relocations in place. +/// +/// For each relocation entry in `reloc_code`, look up the symbol's resolved +/// value (function index, global index, or memory address) and patch the +/// corresponding site in `code_bytes`. +pub fn resolveCodeRelocations(self: *Self) void { + for (self.reloc_code.entries.items) |entry| { + self.patchResolvedRelocation(self.code_bytes.items, entry, entry.getOffset()); + } } -fn encodeDataSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); +/// Resolve all data relocations in place. +pub fn resolveDataRelocations(self: *Self) void { + // First pass: ensure functions referenced by table_index_* relocations + // are present in the element section. This is needed because data segments + // can store function pointers (e.g. hosted_function_ptrs) which need valid + // table indices, and the functions must be in the table for call_indirect. + for (self.reloc_data.entries.items) |entry| { + switch (entry) { + .index => |idx| { + if (idx.type_id == .table_index_i32 or + idx.type_id == .table_index_sleb or + idx.type_id == .table_index_rel_sleb) + { + const sym = self.linking.symbol_table.items[idx.symbol_index]; + if (sym.isFunction()) { + _ = self.ensureTableElement(sym.index) catch continue; + } + } + }, + .offset => {}, + } + } - try leb128WriteU32(gpa, §ion_data, @intCast(self.data_segments.items.len)); - for (self.data_segments.items) |ds| { - // Active segment for memory 0 - try leb128WriteU32(gpa, §ion_data, 0); // flags: active, memory 0 - // Offset expression: i32.const ; end - try section_data.append(gpa, Op.i32_const); - try leb128WriteI32(gpa, §ion_data, @intCast(ds.offset)); - try section_data.append(gpa, Op.end); - // Data bytes - try leb128WriteU32(gpa, §ion_data, @intCast(ds.data.len)); - try section_data.appendSlice(gpa, ds.data); + // Second pass: patch data bytes with resolved values. + for (self.reloc_data.entries.items) |entry| { + const segment_idx = switch (entry) { + .index => |idx| idx.data_segment_index, + .offset => |off| off.data_segment_index, + }; + std.debug.assert(segment_idx != std.math.maxInt(u32)); + std.debug.assert(segment_idx < self.data_segments.items.len); + + const segment = &self.data_segments.items[segment_idx]; + self.patchResolvedRelocation(segment.data, entry, entry.getOffset()); } +} - try output.append(gpa, @intFromEnum(SectionId.data_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); +/// Resolve both code and data relocations in place. +pub fn resolveRelocations(self: *Self) void { + self.resolveCodeRelocations(); + self.resolveDataRelocations(); } -fn encodeTableSection(_: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); +/// Transfer function bodies added via setFunctionBody into the code_bytes +/// representation. This makes app-generated functions compatible with +/// linkHostToAppCalls, resolveCodeRelocations, eliminateDeadCode, and +/// materializeFuncBodies. +/// +/// Must be called after all addFunction/setFunctionBody calls are complete +/// and before linkHostToAppCalls. +pub fn transferAppFunctions(self: *Self) !void { + const host_defined_count = self.function_offsets.items.len; + const total_defined_count = self.func_type_indices.items.len; - try leb128WriteU32(gpa, §ion_data, 1); // 1 table - try section_data.append(gpa, funcref); // element type: funcref - try section_data.append(gpa, 0x00); // limits: no max - try leb128WriteU32(gpa, §ion_data, 16); // min size (enough for RocOps functions) + if (total_defined_count <= host_defined_count) return; - try output.append(gpa, @intFromEnum(SectionId.table_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); + for (host_defined_count..total_defined_count) |i| { + if (i >= self.func_bodies.items.len) break; + const body = self.func_bodies.items[i].body; + if (body.len == 0) continue; + + const fn_offset: u32 = @intCast(self.code_bytes.items.len); + try self.function_offsets.append(self.allocator, fn_offset); + + // Write body length + body to code_bytes + try leb128WriteU32(self.allocator, &self.code_bytes, @intCast(body.len)); + try self.code_bytes.appendSlice(self.allocator, body); + } } -fn encodeElementSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { - var section_data: std.ArrayList(u8) = .empty; - defer section_data.deinit(gpa); +/// Convert code_bytes + function_offsets into func_bodies for encoding. +/// +/// After `resolveRelocations()` has patched all relocation sites, this method +/// splits the contiguous code_bytes buffer into individual function bodies +/// (skipping dummy functions from dead_import_dummy_count) and populates +/// func_bodies so that `encode()` can emit them. +pub fn materializeFuncBodies(self: *Self) !void { + const gpa = self.allocator; + const defined_count = self.func_type_indices.items.len; - try leb128WriteU32(gpa, §ion_data, 1); // 1 element segment - // Active segment for table 0 - try leb128WriteU32(gpa, §ion_data, 0); // flags: active, table 0 - // Offset expression: i32.const 0; end - try section_data.append(gpa, Op.i32_const); - try leb128WriteI32(gpa, §ion_data, 0); - try section_data.append(gpa, Op.end); - // Function indices - try leb128WriteU32(gpa, §ion_data, @intCast(self.table_func_indices.items.len)); - for (self.table_func_indices.items) |func_idx| { - try leb128WriteU32(gpa, §ion_data, func_idx); + // Clear existing func_bodies. + for (self.func_bodies.items) |fb| { + if (fb.body.len > 0) gpa.free(fb.body); } + self.func_bodies.clearRetainingCapacity(); - try output.append(gpa, @intFromEnum(SectionId.element_section)); - try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); - try output.appendSlice(gpa, section_data.items); + // First dead_import_dummy_count entries in func_type_indices are dummies. + for (0..self.dead_import_dummy_count) |_| { + const body_copy = try gpa.dupe(u8, &DUMMY_FUNCTION); + try self.func_bodies.append(gpa, .{ .body = body_copy }); + } + + // The real functions follow the dummies. + const real_count = defined_count - self.dead_import_dummy_count; + for (0..real_count) |i| { + const fn_offset = self.function_offsets.items[i]; + + // Parse the body length from the code_bytes (LEB128 encoded at fn_offset). + var cursor: usize = @intCast(fn_offset); + const body_len = readU32(self.code_bytes.items, &cursor) catch unreachable; + const body_start: usize = cursor; + const body_end: usize = body_start + body_len; + + const body_copy = try gpa.dupe(u8, self.code_bytes.items[body_start..body_end]); + try self.func_bodies.append(gpa, .{ .body = body_copy }); + } } -// --- LEB128 encoding utilities --- +// --- Phase 8e: Verification --- -/// Encode a u32 as unsigned LEB128 and append to the list. -pub fn leb128WriteU32(gpa: Allocator, output: *std.ArrayList(u8), value: u32) !void { - var val = value; - while (true) { - const byte: u8 = @truncate(val & 0x7F); - val >>= 7; - if (val == 0) { - try output.append(gpa, byte); - break; +/// Verify that no stale builtin roc_* imports remain in the final module. +/// `roc_panic` is also tolerated because current host platforms still import it +/// behind the `roc_crashed` wrapper, and verification runs before DCE. +pub fn verifyNoBuiltinImports(self: *const Self) !void { + const allowed = [_][]const u8{ + "roc_alloc", + "roc_dealloc", + "roc_realloc", + "roc_dbg", + "roc_expect_failed", + "roc_crashed", + "roc_panic", + }; + for (self.imports.items) |imp| { + var is_allowed = false; + for (allowed) |name| { + if (std.mem.eql(u8, imp.field_name, name)) { + is_allowed = true; + break; + } + } + if (!is_allowed and std.mem.startsWith(u8, imp.field_name, "roc_")) { + return error.UnresolvedBuiltinImport; + } + } +} + +// --- Phase 10: Dead Code Elimination --- + +/// Trace the call graph from exported/live functions and replace unreachable +/// function bodies with `unreachable; end` stubs. Dead imports are removed +/// entirely (so the host page doesn't need to provide dummy JS functions), +/// and `dead_import_dummy_count` is incremented accordingly. +/// +/// This must be called AFTER `resolveRelocations()` has patched all +/// relocation sites but BEFORE `materializeFuncBodies()`. +/// +/// `called_fns` is a bitset of function indices that are directly called +/// by the app (e.g. from codegen). It is combined with exports, init funcs, +/// and element section entries to seed the live set. +pub fn eliminateDeadCode(self: *Self, called_fns: []const bool) !void { + const gpa = self.allocator; + + const import_count = self.import_fn_count; + const fn_index_min = import_count + self.dead_import_dummy_count; + const fn_count = fn_index_min + @as(u32, @intCast(self.function_offsets.items.len)); + + // --- 1. Trace live functions --- + const live_flags = try self.traceLiveFunctions(called_fns, fn_index_min, fn_count); + defer gpa.free(live_flags); + + // --- 2. Remove all unused JS imports --- + // Track which live imports need relocation updates. + var live_import_fns: std.ArrayList(u32) = .empty; + defer live_import_fns.deinit(gpa); + try live_import_fns.ensureTotalCapacity(gpa, import_count); + + var fn_index: u32 = 0; + var eliminated_import_count: u32 = 0; + var write_idx: usize = 0; + for (self.imports.items) |imp| { + if (fn_index < import_count and live_flags[fn_index]) { + live_import_fns.appendAssumeCapacity(fn_index); + self.imports.items[write_idx] = imp; + write_idx += 1; + } else if (fn_index < import_count) { + eliminated_import_count += 1; + } + fn_index += 1; + } + self.imports.items.len = write_idx; + + // Update dead_import_dummy_count to account for removed imports. + self.dead_import_dummy_count += eliminated_import_count; + + // Insert function signatures for the new dummy functions. + // Dummies use type signature 0 (arbitrary — they never execute). + for (0..eliminated_import_count) |_| { + try self.func_type_indices.insert(gpa, 0, 0); + } + + // Relocate calls to remaining JS imports. + // This must happen before we rebuild the code section. + for (live_import_fns.items, 0..) |old_index, new_idx| { + if (new_idx == old_index) continue; + if (self.linking.findAndReindexImportedFn(old_index, @intCast(new_idx))) |sym_index| { + self.reloc_code.applyRelocsU32(self.code_bytes.items, sym_index, @intCast(new_idx)); + } + } + + // --- 3. Replace dead defined-function bodies with dummies --- + var buffer: std.ArrayList(u8) = .empty; + defer buffer.deinit(gpa); + try buffer.ensureTotalCapacity(gpa, self.code_bytes.items.len); + + const offsets = self.function_offsets.items; + for (offsets, 0..) |fn_offset, i| { + const global_fn_idx = fn_index_min + @as(u32, @intCast(i)); + if (live_flags[global_fn_idx]) { + // Copy the live function body verbatim. + const code_start: usize = fn_offset; + const code_end: usize = if (i + 1 < offsets.len) offsets[i + 1] else self.code_bytes.items.len; + buffer.appendSliceAssumeCapacity(self.code_bytes.items[code_start..code_end]); } else { - try output.append(gpa, byte | 0x80); + // Serialize dummy: body_size (LEB128) + body bytes. + // Body size = 3 (DUMMY_FUNCTION.len). + buffer.appendAssumeCapacity(DUMMY_FUNCTION.len); // single-byte LEB128 for 3 + buffer.appendSliceAssumeCapacity(&DUMMY_FUNCTION); + } + } + + // Replace code_bytes with the rebuilt buffer. + self.code_bytes.clearRetainingCapacity(); + try self.code_bytes.appendSlice(gpa, buffer.items); + + // Rebuild function_offsets. + var offset: u32 = 0; + for (offsets, 0..) |_, i| { + self.function_offsets.items[i] = offset; + const global_fn_idx = fn_index_min + @as(u32, @intCast(i)); + if (live_flags[global_fn_idx]) { + // Parse body length to advance offset. + var cursor: usize = offset; + const body_len = readU32(self.code_bytes.items, &cursor) catch unreachable; + offset = @intCast(cursor + body_len); + } else { + // Dummy: 1 byte LEB128 size + 3 bytes body = 4 bytes. + offset += 1 + DUMMY_FUNCTION.len; } } + + // Update import_fn_count to reflect removals. + self.import_fn_count -= eliminated_import_count; } -/// Encode an i32 as signed LEB128 and append to the list. -pub fn leb128WriteI32(gpa: Allocator, output: *std.ArrayList(u8), value: i32) !void { - var val = value; +/// Trace the call graph starting from called functions, exports, init funcs, +/// and element section entries. Returns a bool slice where `result[fn_index]` +/// is true if the function is reachable. +fn traceLiveFunctions( + self: *const Self, + called_fns: []const bool, + fn_index_min: u32, + fn_count: u32, +) ![]bool { + const gpa = self.allocator; + + // --- Categorize relocation entries --- + // We iterate the relocation entries directly in the inner loop rather + // than copying them, to avoid needing temporary ArrayLists of anonymous structs. + + // Build symbol_index → function_index lookup. + const sym_fn_indices = try gpa.alloc(u32, self.linking.symbol_table.items.len); + defer gpa.free(sym_fn_indices); + for (self.linking.symbol_table.items, 0..) |sym, i| { + sym_fn_indices[i] = if (sym.isFunction()) sym.index else std.math.maxInt(u32); + } + + // --- Iterative live-function tracing --- + const live_flags = try gpa.alloc(bool, fn_count); + @memset(live_flags, false); + + const current_pass = try gpa.alloc(bool, fn_count); + defer gpa.free(current_pass); + @memset(current_pass, false); + + const next_pass = try gpa.alloc(bool, fn_count); + defer gpa.free(next_pass); + @memset(next_pass, false); + + // Seed with called_fns. + for (called_fns, 0..) |is_called, i| { + if (is_called and i < fn_count) current_pass[i] = true; + } + + // Seed with exported functions. + for (self.exports.items) |exp| { + if (exp.kind == .func and exp.idx < fn_count) { + current_pass[exp.idx] = true; + } + } + + // Seed with init functions. + for (self.linking.init_funcs.items) |init_fn| { + const sym = self.linking.symbol_table.items[init_fn.symbol_index]; + if (sym.isFunction() and sym.index < fn_count) { + current_pass[sym.index] = true; + } + } + + // Seed with element section entries (indirect call targets). + for (self.table_func_indices.items) |fi| { + if (fi < fn_count) current_pass[fi] = true; + } + + // Iterate until no new functions are discovered. while (true) { - const byte: u8 = @truncate(@as(u32, @bitCast(val)) & 0x7F); - val >>= 7; - if ((val == 0 and (byte & 0x40) == 0) or (val == -1 and (byte & 0x40) != 0)) { - try output.append(gpa, byte); + const any_new = std.mem.indexOfScalar(bool, current_pass, true) != null; + if (!any_new) break; + + // Mark current pass as live. + for (current_pass, 0..) |is_current, i| { + if (is_current) live_flags[i] = true; + } + + // For each live function in this pass, find its callees. + for (current_pass, 0..) |is_current, fi| { + if (!is_current) continue; + if (fi < fn_index_min or fi >= fn_count) continue; + + // Find function body byte range. + const offset_index = fi - fn_index_min; + const code_start = self.function_offsets.items[offset_index]; + const code_end: u32 = if (offset_index + 1 < self.function_offsets.items.len) + self.function_offsets.items[offset_index + 1] + else + @intCast(self.code_bytes.items.len); + + // Scan relocation entries within this function body. + for (self.reloc_code.entries.items) |entry| { + switch (entry) { + .index => |idx| { + if (idx.offset > code_start and idx.offset < code_end) { + switch (idx.type_id) { + .function_index_leb => { + // Direct call: mark the callee as live. + const callee = sym_fn_indices[idx.symbol_index]; + if (callee < fn_count and !live_flags[callee]) { + next_pass[callee] = true; + } + }, + .type_index_leb => { + // Indirect call: conservatively mark all element-section + // functions with matching type signature as live. + const type_idx = self.linking.symbol_table.items[idx.symbol_index].index; + for (self.table_func_indices.items) |tfi| { + if (tfi >= fn_index_min and tfi < fn_count) { + const local = tfi - fn_index_min; + const tfi_type = self.func_type_indices.items[self.dead_import_dummy_count + local]; + if (tfi_type == type_idx and !live_flags[tfi]) { + next_pass[tfi] = true; + } + } + } + }, + else => {}, + } + } + }, + .offset => {}, + } + } + } + + // Swap passes. + @memcpy(current_pass, next_pass); + @memset(next_pass, false); + } + + return live_flags; +} + +// --- Phase 5: Memory, Table, and Stack Pointer Ownership --- + +/// Setup step (called after preload, before code generation): +/// Validate that memory and table ownership is correctly configured. +/// +/// In relocatable WASM objects, memory, table, and __stack_pointer are imported. +/// Our parser already strips non-function imports from the imports array — only +/// function imports are stored. Memory and table state is tracked via `has_memory` +/// and `has_table` flags, and will be emitted as defined sections (not imports) +/// when the module is encoded. +/// +/// The __stack_pointer global import is handled implicitly: it exists in the +/// symbol table for relocation resolution and will become a defined global +/// during `finalizeMemoryAndTable()`. +/// Promote globally-visible, defined function symbols from the linking section +/// to actual WASM exports. In relocatable objects, `export fn` in Zig generates +/// symbols with `binding=global vis=default`, but no Export section exists. +/// This must be called after preload so that the surgical linker pipeline can +/// see and preserve these exports. +pub fn exportGlobalSymbols(self: *Self) void { + for (self.linking.symbol_table.items) |sym| { + if (sym.kind != .function or sym.isUndefined() or sym.isLocal()) continue; + if ((sym.flags & WasmLinking.SymFlag.VISIBILITY_HIDDEN) != 0) continue; + const name = sym.name orelse continue; + // Skip roc__ symbols (handled by linkHostToAppCalls). + if (std.mem.startsWith(u8, name, "roc__")) continue; + // Avoid duplicate exports. + var already_exported = false; + for (self.exports.items) |exp| { + if (exp.kind == .func and std.mem.eql(u8, exp.name, name)) { + already_exported = true; + break; + } + } + if (!already_exported) { + self.addExport(name, .func, sym.index) catch {}; + } + } +} + +/// No-op: memory and table imports are already stored in separate lists +/// during parsing (has_memory, has_table flags). This method only asserts +/// that the host module declared memory. +pub fn removeMemoryAndTableImports(self: *Self) void { + // The parser separates function imports from memory/table/global imports. + // Non-function imports are NOT in self.imports, so import_fn_count is correct. + // Memory and table flags were set during parseImportSection. + // + // Assert the host module declared memory (required for any useful program). + std.debug.assert(self.has_memory); + // Note: has_table may not be set if the host doesn't use indirect calls yet. + // Table will be set during finalization if table_func_indices are populated. +} + +/// Finalization step (called after all code generation and surgical linking, +/// before encode): +/// +/// 1. Calculate memory layout from data segments and stack requirements +/// 2. Define __stack_pointer global at top of memory +/// 3. Configure table size based on actual element count +/// 4. Export memory as "memory" for host/runtime access +pub fn finalizeMemoryAndTable(self: *Self, stack_bytes: u32) !void { + // Calculate the highest data segment end address. + var data_end: u32 = self.data_offset; + for (self.data_segments.items) |ds| { + const seg_end = ds.offset + @as(u32, @intCast(ds.data.len)); + data_end = @max(data_end, seg_end); + } + + // Calculate memory pages: data + stack, rounded up to page boundary. + const total_bytes: u64 = @as(u64, data_end) + @as(u64, stack_bytes); + const page_size: u64 = 65536; + const pages: u32 = @intCast(@max(1, (total_bytes + page_size - 1) / page_size)); + self.memory_min_pages = pages; + + // Define __stack_pointer as a mutable i32 global. + // Initial value = top of memory (stack grows downward). + self.has_stack_pointer = true; + self.stack_pointer_init = pages * @as(u32, 65536); + + // Ensure memory is defined (not imported) in the final module. + self.has_memory = true; + + // Configure table if we have any function indices to place in it. + if (self.table_func_indices.items.len > 0) { + self.has_table = true; + } + + // Export memory as "memory" for host/runtime access. + try self.exports.append(self.allocator, .{ + .name = "memory", + .kind = .memory, + .idx = 0, + }); +} + +// --- Parsing (preload) --- + +const wasm_magic = "\x00asm"; +const wasm_version = 1; + +/// Parse a relocatable WASM binary into a WasmModule. +/// The input bytes must contain `linking` and `reloc.*` custom sections +/// if `require_relocatable` is true. +pub fn preload(allocator: Allocator, bytes: []const u8, require_relocatable: bool) ParseError!Self { + if (bytes.len < 8) return error.UnexpectedEnd; + if (!std.mem.eql(u8, bytes[0..4], wasm_magic)) return error.InvalidMagic; + if (std.mem.readInt(u32, bytes[4..8], .little) != wasm_version) return error.InvalidVersion; + + var module = Self.init(allocator); + errdefer module.deinit(); + + var cursor: usize = 8; + + // Parse standard sections in binary order. + // Each parser checks the section ID and returns early if it doesn't match. + try module.parseTypeSection(bytes, &cursor); + try module.parseImportSection(bytes, &cursor); + try module.parseFunctionSection(bytes, &cursor); + try module.parseTableSection_(bytes, &cursor); + try module.parseMemorySection_(bytes, &cursor); + try module.parseGlobalSection_(bytes, &cursor); + try module.parseExportSection(bytes, &cursor); + try module.parseStartSection(bytes, &cursor); + try module.parseElementSection_(bytes, &cursor); + try module.parseDataCountSection(bytes, &cursor); + try module.parseCodeSection(bytes, &cursor); + try module.parseDataSection_(bytes, &cursor); + + // Parse trailing custom sections (linking, reloc.CODE, reloc.DATA) + while (cursor < bytes.len) { + try module.parseCustomSection(bytes, &cursor); + } + + // Adjust reloc.CODE offsets: they are relative to the code section body + // (which includes the function count LEB128), but code_bytes starts after + // the count. Subtract the count's LEB128 size so offsets index into code_bytes. + if (module.code_section_fn_count_leb_size > 0) { + const delta = module.code_section_fn_count_leb_size; + for (module.reloc_code.entries.items) |*entry| { + switch (entry.*) { + .index => |*idx| { + std.debug.assert(idx.offset >= delta); + idx.offset -= delta; + }, + .offset => |*off| { + std.debug.assert(off.offset >= delta); + off.offset -= delta; + }, + } + } + module.code_section_fn_count_leb_size = 0; + } + + try module.normalizeDataRelocations(); + + // Convert data symbol offsets from (segment-relative) to absolute memory addresses. + // The linking section stores data_offset as the offset within the segment, but + // resolveCodeRelocations uses data_offset as the absolute address in linear memory. + for (module.linking.symbol_table.items) |*sym| { + if (sym.kind == .data and !sym.isUndefined()) { + if (sym.index < module.data_segments.items.len) { + sym.data_offset += module.data_segments.items[sym.index].offset; + } + } + } + + // Validate relocatable requirements + if (require_relocatable) { + if (module.linking.symbol_table.items.len == 0) + return error.MissingLinkingSection; + if (module.reloc_code.entries.items.len == 0) + return error.MissingRelocCode; + if (module.has_stack_pointer) + return error.HasInternalGlobals; + } + + module.import_fn_count = @intCast(module.imports.items.len); + return module; +} + +fn normalizeDataRelocations(self: *Self) ParseError!void { + for (self.reloc_data.entries.items) |*entry| { + const raw_offset = entry.getOffset(); + var matched = false; + + for (self.data_segments.items, 0..) |segment, seg_idx| { + const seg_start = segment.section_offset; + const seg_end = seg_start + @as(u32, @intCast(segment.data.len)); + if (raw_offset < seg_start or raw_offset >= seg_end) continue; + + const in_segment_offset = raw_offset - seg_start; + switch (entry.*) { + .index => |*idx| { + idx.offset = in_segment_offset; + idx.data_segment_index = @intCast(seg_idx); + }, + .offset => |*off| { + off.offset = in_segment_offset; + off.data_segment_index = @intCast(seg_idx); + }, + } + matched = true; break; + } + + if (!matched) return error.InvalidSection; + } +} + +/// Check if the byte at cursor matches the expected section ID. +/// If yes, read the section size and return it. If no, return null (section absent). +fn beginSection(bytes: []const u8, cursor: *usize, expected: SectionId) ParseError!?u32 { + if (cursor.* >= bytes.len) return null; + if (bytes[cursor.*] != @intFromEnum(expected)) return null; + cursor.* += 1; + return try readU32(bytes, cursor); +} + +fn parseTypeSection(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .type_section) orelse return; + const section_end = cursor.* + section_size; + const count = try readU32(bytes, cursor); + + for (0..count) |_| { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + _ = bytes[cursor.*]; // 0x60 func type marker + cursor.* += 1; + + // Parse params + const param_count = try readU32(bytes, cursor); + const params_start = cursor.*; + try skipBytes(bytes, cursor, param_count); + const param_bytes = bytes[params_start..cursor.*]; + const params = try self.allocator.alloc(ValType, param_count); + for (param_bytes, 0..) |b, i| { + params[i] = std.meta.intToEnum(ValType, b) catch return error.InvalidSection; + } + try self.func_types.append(self.allocator, .{ .params = params }); + + // Parse results + const result_count = try readU32(bytes, cursor); + if (result_count > 0) { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const result_type = std.meta.intToEnum(ValType, bytes[cursor.*]) catch return error.InvalidSection; + cursor.* += 1; + // Skip any additional results (multi-value) + if (result_count > 1) try skipBytes(bytes, cursor, result_count - 1); + try self.func_type_results.append(self.allocator, result_type); } else { - try output.append(gpa, byte | 0x80); + try self.func_type_results.append(self.allocator, null); } } + cursor.* = section_end; } -/// Encode an i64 as signed LEB128 and append to the list. -pub fn leb128WriteI64(gpa: Allocator, output: *std.ArrayList(u8), value: i64) !void { - var val = value; - while (true) { - const byte: u8 = @truncate(@as(u64, @bitCast(val)) & 0x7F); - val >>= 7; - if ((val == 0 and (byte & 0x40) == 0) or (val == -1 and (byte & 0x40) != 0)) { - try output.append(gpa, byte); - break; +fn parseImportSection(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .import_section) orelse return; + const section_end = cursor.* + section_size; + const count = try readU32(bytes, cursor); + + for (0..count) |_| { + const module_name = try readString(bytes, cursor); + const field_name = try readString(bytes, cursor); + + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const kind_byte = bytes[cursor.*]; + cursor.* += 1; + + switch (kind_byte) { + 0x00 => { // function import + const type_idx = try readU32(bytes, cursor); + try self.imports.append(self.allocator, .{ + .module_name = module_name, + .field_name = field_name, + .type_idx = type_idx, + }); + }, + 0x01 => { // table import + self.has_table = true; + _ = try readU32(bytes, cursor); // elem type (funcref) + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const limits_flag = bytes[cursor.*]; + cursor.* += 1; + _ = try readU32(bytes, cursor); // min + if (limits_flag == 0x01) _ = try readU32(bytes, cursor); // max + try self.table_imports.append(self.allocator, .{ + .module_name = module_name, + .field_name = field_name, + }); + }, + 0x02 => { // memory import + self.has_memory = true; + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const limits_flag = bytes[cursor.*]; + cursor.* += 1; + self.memory_min_pages = try readU32(bytes, cursor); + if (limits_flag == 0x01) _ = try readU32(bytes, cursor); // max + }, + 0x03 => { // global import (e.g. __stack_pointer) + const val_type_byte = try readU32(bytes, cursor); + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const mutability = bytes[cursor.*]; + cursor.* += 1; + try self.global_imports.append(self.allocator, .{ + .module_name = module_name, + .field_name = field_name, + .val_type = @intCast(val_type_byte), + .mutable = mutability == 0x01, + }); + self.import_global_count += 1; + }, + else => return error.InvalidSection, + } + } + cursor.* = section_end; +} + +fn parseFunctionSection(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .function_section) orelse return; + const section_end = cursor.* + section_size; + const count = try readU32(bytes, cursor); + + try self.func_type_indices.ensureTotalCapacity(self.allocator, count); + for (0..count) |_| { + const type_idx = try readU32(bytes, cursor); + self.func_type_indices.appendAssumeCapacity(type_idx); + } + cursor.* = section_end; +} + +fn parseTableSection_(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .table_section) orelse return; + const section_end = cursor.* + section_size; + self.has_table = true; + // Skip table section contents (we just note it exists) + cursor.* = section_end; +} + +fn parseMemorySection_(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .memory_section) orelse return; + const section_end = cursor.* + section_size; + self.has_memory = true; + const count = try readU32(bytes, cursor); + if (count > 0) { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const limits_flag = bytes[cursor.*]; + cursor.* += 1; + self.memory_min_pages = try readU32(bytes, cursor); + if (limits_flag == 0x01) _ = try readU32(bytes, cursor); // max + } + cursor.* = section_end; +} + +fn parseGlobalSection_(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .global_section) orelse return; + const section_end = cursor.* + section_size; + // For relocatable modules, globals should NOT be defined internally + // (the __stack_pointer comes from an import). Mark that we found them. + self.has_stack_pointer = true; + cursor.* = section_end; +} + +fn parseExportSection(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .export_section) orelse return; + const section_end = cursor.* + section_size; + const count = try readU32(bytes, cursor); + + for (0..count) |_| { + const name = try readString(bytes, cursor); + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const kind = std.meta.intToEnum(ExportKind, bytes[cursor.*]) catch return error.InvalidSection; + cursor.* += 1; + const idx = try readU32(bytes, cursor); + try self.exports.append(self.allocator, .{ .name = name, .kind = kind, .idx = idx }); + } + cursor.* = section_end; +} + +fn parseStartSection(_: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .start_section) orelse return; + cursor.* += section_size; // Skip start section +} + +fn parseElementSection_(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .element_section) orelse return; + const section_end = cursor.* + section_size; + const count = try readU32(bytes, cursor); + + for (0..count) |_| { + const seg_flags = try readU32(bytes, cursor); + + // Only handle flags=0 (active, table 0) — this is what LLVM/Zig emit for PIC. + // Skip other segment types (passive, declarative, etc.) gracefully. + if (seg_flags != 0) { + cursor.* = section_end; + return; + } + + // Parse init expression: i32.const ; end + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + cursor.* += 1; // skip i32.const opcode + _ = try readI32(bytes, cursor); // skip offset value + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + cursor.* += 1; // skip end opcode + + // Parse function indices + const elem_count = try readU32(bytes, cursor); + for (0..elem_count) |_| { + const func_idx = try readU32(bytes, cursor); + try self.table_func_indices.append(self.allocator, func_idx); + } + + self.has_table = true; + } + cursor.* = section_end; +} + +fn parseDataCountSection(_: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .data_count_section) orelse return; + cursor.* += section_size; // Consume and ignore +} + +fn parseCodeSection(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .code_section) orelse return; + const section_end = cursor.* + section_size; + + const before_fn_count = cursor.*; + const fn_count = try readU32(bytes, cursor); + + // Record how many bytes the function count LEB128 consumed. + // reloc.CODE offsets are relative to section body (including fn count), + // but code_bytes starts after it — this delta is needed to adjust offsets. + self.code_section_fn_count_leb_size = @intCast(cursor.* - before_fn_count); + + // Store raw bytes of the entire code section body (after the count). + // Record each function's byte offset within code_bytes. + const code_start = cursor.*; + try self.function_offsets.ensureTotalCapacity(self.allocator, fn_count); + + for (0..fn_count) |_| { + const fn_offset: u32 = @intCast(cursor.* - code_start); + self.function_offsets.appendAssumeCapacity(fn_offset); + const fn_size = try readU32(bytes, cursor); + try skipBytes(bytes, cursor, fn_size); + } + + // Copy the raw code bytes (from after count to end of section) + const code_len = section_end - code_start; + try self.code_bytes.ensureTotalCapacity(self.allocator, code_len); + self.code_bytes.appendSliceAssumeCapacity(bytes[code_start..section_end]); + + cursor.* = section_end; +} + +fn parseDataSection_(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + const section_size = try beginSection(bytes, cursor, .data_section) orelse return; + const section_end = cursor.* + section_size; + const section_body_start = cursor.*; + const count = try readU32(bytes, cursor); + + for (0..count) |_| { + // Segment flags per WASM spec: 0=active mem0, 1=passive, 2=active+memidx + const seg_flags = try readU32(bytes, cursor); + if (seg_flags == 2) { + // Skip explicit memory index + const mem_idx = try readU32(bytes, cursor); + if (mem_idx != 0) return error.InvalidSection; + } + if (seg_flags == 1) { + // Passive segment — no init expression, just data + const data_len = try readU32(bytes, cursor); + const data_start = cursor.*; + try skipBytes(bytes, cursor, data_len); + const data_copy = try self.allocator.dupe(u8, bytes[data_start .. data_start + data_len]); + try self.data_segments.append(self.allocator, .{ + .offset = 0, + .data = data_copy, + .section_offset = @intCast(data_start - section_body_start), + }); } else { - try output.append(gpa, byte | 0x80); + // Active segment — parse init expression: i32.const end + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + cursor.* += 1; // skip i32.const opcode + const offset: u32 = @bitCast(try readI32(bytes, cursor)); + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + cursor.* += 1; // skip end opcode + const data_len = try readU32(bytes, cursor); + const data_start = cursor.*; + try skipBytes(bytes, cursor, data_len); + const data_copy = try self.allocator.dupe(u8, bytes[data_start .. data_start + data_len]); + try self.data_segments.append(self.allocator, .{ + .offset = offset, + .data = data_copy, + .section_offset = @intCast(data_start - section_body_start), + }); + if (offset + data_len > self.data_offset) { + self.data_offset = offset + data_len; + } } } + cursor.* = section_end; +} + +fn parseCustomSection(self: *Self, bytes: []const u8, cursor: *usize) ParseError!void { + if (cursor.* >= bytes.len) return; + if (bytes[cursor.*] != @intFromEnum(SectionId.custom_section)) return error.InvalidSection; + cursor.* += 1; + + const section_size = try readU32(bytes, cursor); + const section_end = cursor.* + section_size; + const name = try readString(bytes, cursor); + + if (std.mem.eql(u8, name, "linking")) { + self.linking = try WasmLinking.LinkingSection.parse( + self.allocator, + bytes, + cursor, + section_end, + ); + } else if (std.mem.eql(u8, name, "reloc.CODE")) { + self.reloc_code = try WasmLinking.RelocationSection.parse( + self.allocator, + name, + bytes, + cursor, + section_end, + ); + } else if (std.mem.eql(u8, name, "reloc.DATA")) { + self.reloc_data = try WasmLinking.RelocationSection.parse( + self.allocator, + name, + bytes, + cursor, + section_end, + ); + } + // Skip any remaining bytes in this custom section (including unknown ones) + cursor.* = section_end; +} + +/// Encode the module to a valid wasm binary. +pub fn encode(self: *Self, allocator: Allocator) ![]u8 { + var output: std.ArrayList(u8) = .empty; + errdefer output.deinit(allocator); + + // Magic number and version + try output.appendSlice(allocator, &.{ 0x00, 0x61, 0x73, 0x6D }); // \0asm + try output.appendSlice(allocator, &.{ 0x01, 0x00, 0x00, 0x00 }); // version 1 + + // Type section + if (self.func_types.items.len > 0) { + try self.encodeTypeSection(allocator, &output); + } + + // Import section (must be between type and function sections) + if (self.imports.items.len > 0) { + try self.encodeImportSection(allocator, &output); + } + + // Function section + if (self.func_type_indices.items.len > 0) { + try self.encodeFunctionSection(allocator, &output); + } + + // Table section (between function and memory) + if (self.has_table) { + try self.encodeTableSection(allocator, &output); + } + + // Memory section + if (self.has_memory) { + try self.encodeMemorySection(allocator, &output); + } + + // Global section + if (self.has_stack_pointer) { + try self.encodeGlobalSection(allocator, &output); + } + + // Export section + if (self.exports.items.len > 0) { + try self.encodeExportSection(allocator, &output); + } + + // Element section (between export and code) + if (self.has_table and self.table_func_indices.items.len > 0) { + try self.encodeElementSection(allocator, &output); + } + + // Code section + if (self.func_bodies.items.len > 0) { + try self.encodeCodeSection(allocator, &output); + } + + // Data section + if (self.data_segments.items.len > 0) { + try self.encodeDataSection(allocator, &output); + } + + return output.toOwnedSlice(allocator); +} + +fn encodeTypeSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, @intCast(self.func_types.items.len)); + + for (self.func_types.items, 0..) |ft, idx| { + try section_data.append(gpa, 0x60); // func type marker + try leb128WriteU32(gpa, §ion_data, @intCast(ft.params.len)); + for (ft.params) |p| { + try section_data.append(gpa, @intFromEnum(p)); + } + if (self.func_type_results.items[idx]) |r| { + try section_data.append(gpa, 1); // 1 result + try section_data.append(gpa, @intFromEnum(r)); + } else { + try section_data.append(gpa, 0); // 0 results + } + } + + try output.append(gpa, @intFromEnum(SectionId.type_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeImportSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, @intCast(self.imports.items.len)); + for (self.imports.items) |imp| { + // Module name + try leb128WriteU32(gpa, §ion_data, @intCast(imp.module_name.len)); + try section_data.appendSlice(gpa, imp.module_name); + // Field name + try leb128WriteU32(gpa, §ion_data, @intCast(imp.field_name.len)); + try section_data.appendSlice(gpa, imp.field_name); + // Import kind: 0x00 = function + try section_data.append(gpa, 0x00); + // Type index + try leb128WriteU32(gpa, §ion_data, imp.type_idx); + } + + try output.append(gpa, @intFromEnum(SectionId.import_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeFunctionSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, @intCast(self.func_type_indices.items.len)); + for (self.func_type_indices.items) |type_idx| { + try leb128WriteU32(gpa, §ion_data, type_idx); + } + + try output.append(gpa, @intFromEnum(SectionId.function_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeMemorySection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, 1); // 1 memory + try section_data.append(gpa, 0x00); // no max + try leb128WriteU32(gpa, §ion_data, self.memory_min_pages); + + try output.append(gpa, @intFromEnum(SectionId.memory_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeGlobalSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + const global_count: u32 = 1 + @as(u32, @intCast(self.extra_globals.items.len)); + try leb128WriteU32(gpa, §ion_data, global_count); + + // Global 0: __stack_pointer (i32, mutable) + try section_data.append(gpa, @intFromEnum(ValType.i32)); + try section_data.append(gpa, 0x01); // mutable + try section_data.append(gpa, Op.i32_const); + try leb128WriteI32(gpa, §ion_data, @intCast(self.stack_pointer_init)); + try section_data.append(gpa, Op.end); + + // Extra globals (PIC: __memory_base=0, __table_base=0, etc.) + for (self.extra_globals.items) |g| { + try section_data.append(gpa, g.val_type); + try section_data.append(gpa, if (g.mutable) @as(u8, 0x01) else @as(u8, 0x00)); + try section_data.append(gpa, Op.i32_const); + try leb128WriteI32(gpa, §ion_data, g.init_value); + try section_data.append(gpa, Op.end); + } + + try output.append(gpa, @intFromEnum(SectionId.global_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeExportSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, @intCast(self.exports.items.len)); + for (self.exports.items) |exp| { + try leb128WriteU32(gpa, §ion_data, @intCast(exp.name.len)); + try section_data.appendSlice(gpa, exp.name); + try section_data.append(gpa, @intFromEnum(exp.kind)); + try leb128WriteU32(gpa, §ion_data, exp.idx); + } + + try output.append(gpa, @intFromEnum(SectionId.export_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeCodeSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, @intCast(self.func_bodies.items.len)); + for (self.func_bodies.items) |fb| { + try leb128WriteU32(gpa, §ion_data, @intCast(fb.body.len)); + try section_data.appendSlice(gpa, fb.body); + } + + try output.append(gpa, @intFromEnum(SectionId.code_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeDataSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, @intCast(self.data_segments.items.len)); + for (self.data_segments.items) |ds| { + // Active segment for memory 0 + try leb128WriteU32(gpa, §ion_data, 0); // flags: active, memory 0 + // Offset expression: i32.const ; end + try section_data.append(gpa, Op.i32_const); + try leb128WriteI32(gpa, §ion_data, @intCast(ds.offset)); + try section_data.append(gpa, Op.end); + // Data bytes + try leb128WriteU32(gpa, §ion_data, @intCast(ds.data.len)); + try section_data.appendSlice(gpa, ds.data); + } + + try output.append(gpa, @intFromEnum(SectionId.data_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeTableSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + // Table size = number of entries in the element section. + // Minimum 1 because table index 0 is reserved (null function reference). + const table_size: u32 = @max(1, @as(u32, @intCast(self.table_func_indices.items.len))); + + try leb128WriteU32(gpa, §ion_data, 1); // 1 table + try section_data.append(gpa, funcref); // element type: funcref + try section_data.append(gpa, 0x00); // limits: no max + try leb128WriteU32(gpa, §ion_data, table_size); + + try output.append(gpa, @intFromEnum(SectionId.table_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +fn encodeElementSection(self: *Self, gpa: Allocator, output: *std.ArrayList(u8)) !void { + var section_data: std.ArrayList(u8) = .empty; + defer section_data.deinit(gpa); + + try leb128WriteU32(gpa, §ion_data, 1); // 1 element segment + // Active segment for table 0 + try leb128WriteU32(gpa, §ion_data, 0); // flags: active, table 0 + // Offset expression: i32.const 0; end + try section_data.append(gpa, Op.i32_const); + try leb128WriteI32(gpa, §ion_data, 0); + try section_data.append(gpa, Op.end); + // Function indices + try leb128WriteU32(gpa, §ion_data, @intCast(self.table_func_indices.items.len)); + for (self.table_func_indices.items) |func_idx| { + try leb128WriteU32(gpa, §ion_data, func_idx); + } + + try output.append(gpa, @intFromEnum(SectionId.element_section)); + try leb128WriteU32(gpa, output, @intCast(section_data.items.len)); + try output.appendSlice(gpa, section_data.items); +} + +// --- LEB128 decoding utilities (for parsing WASM binaries) --- + +/// Errors that can occur when parsing a WASM binary module. +pub const ParseError = error{ + UnexpectedEnd, + Overflow, + InvalidMagic, + InvalidVersion, + InvalidSection, + MissingLinkingSection, + MissingRelocCode, + InvalidLinkingVersion, + HasInternalGlobals, + OutOfMemory, +}; + +/// Decode a u32 from unsigned LEB128 at `bytes[cursor.*]`, advancing cursor. +pub fn readU32(bytes: []const u8, cursor: *usize) ParseError!u32 { + var result: u32 = 0; + for (0..5) |i| { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + const byte = bytes[cursor.*]; + cursor.* += 1; + const shift: u5 = @intCast(i * 7); + result |= @as(u32, byte & 0x7f) << shift; + if ((byte & 0x80) == 0) return result; + } + return error.Overflow; +} + +/// Decode an i32 from signed LEB128 at `bytes[cursor.*]`, advancing cursor. +pub fn readI32(bytes: []const u8, cursor: *usize) ParseError!i32 { + var result: u32 = 0; + var shift: u6 = 0; + var byte: u8 = undefined; + for (0..5) |_| { + if (cursor.* >= bytes.len) return error.UnexpectedEnd; + byte = bytes[cursor.*]; + cursor.* += 1; + result |= @as(u32, byte & 0x7f) << @intCast(shift); + shift += 7; + if ((byte & 0x80) == 0) { + if (shift < 32 and (byte & 0x40) != 0) { + result |= @as(u32, 0xFFFFFFFF) << @intCast(shift); + } + return @bitCast(result); + } + } + return error.Overflow; +} + +/// Read a length-prefixed string from `bytes[cursor.*]`, advancing cursor. +/// Returns a slice into `bytes` (zero-copy, caller must keep bytes alive). +pub fn readString(bytes: []const u8, cursor: *usize) ParseError![]const u8 { + const len = try readU32(bytes, cursor); + const end = cursor.* + len; + if (end > bytes.len) return error.UnexpectedEnd; + const result = bytes[cursor.*..end]; + cursor.* = end; + return result; +} + +/// Skip `count` bytes, returning error if past end. +fn skipBytes(bytes: []const u8, cursor: *usize, count: u32) ParseError!void { + const end = cursor.* + count; + if (end > bytes.len) return error.UnexpectedEnd; + cursor.* = end; +} + +// --- LEB128 encoding utilities --- + +/// Encode a u32 as unsigned LEB128 and append to the list. +pub fn leb128WriteU32(gpa: Allocator, output: *std.ArrayList(u8), value: u32) !void { + var val = value; + while (true) { + const byte: u8 = @truncate(val & 0x7F); + val >>= 7; + if (val == 0) { + try output.append(gpa, byte); + break; + } else { + try output.append(gpa, byte | 0x80); + } + } +} + +/// Encode an i32 as signed LEB128 and append to the list. +pub fn leb128WriteI32(gpa: Allocator, output: *std.ArrayList(u8), value: i32) !void { + var val = value; + while (true) { + const byte: u8 = @truncate(@as(u32, @bitCast(val)) & 0x7F); + val >>= 7; + if ((val == 0 and (byte & 0x40) == 0) or (val == -1 and (byte & 0x40) != 0)) { + try output.append(gpa, byte); + break; + } else { + try output.append(gpa, byte | 0x80); + } + } +} + +/// Encode an i64 as signed LEB128 and append to the list. +pub fn leb128WriteI64(gpa: Allocator, output: *std.ArrayList(u8), value: i64) !void { + var val = value; + while (true) { + const byte: u8 = @truncate(@as(u64, @bitCast(val)) & 0x7F); + val >>= 7; + if ((val == 0 and (byte & 0x40) == 0) or (val == -1 and (byte & 0x40) != 0)) { + try output.append(gpa, byte); + break; + } else { + try output.append(gpa, byte | 0x80); + } + } +} + +// --- Padded LEB128 utilities for surgical linking --- + +/// Fixed size of a padded LEB128 value in bytes. +/// WASM relocatable objects use 5-byte padded LEB128 for all relocatable indices +/// so that values can be patched in-place without shifting surrounding bytes. +/// 5 = ceil(32/7) which is the maximum LEB128 encoding for a u32. +const padded_leb128_size: u32 = 5; + +comptime { + // A u32 LEB128 uses at most ceil(32/7) = 5 bytes. + std.debug.assert(padded_leb128_size == 5); + std.debug.assert(padded_leb128_size == (32 + 6) / 7); +} + +/// Read a u32 from a 5-byte padded LEB128 encoding in the buffer. +pub fn readPaddedU32(buffer: []const u8, offset: u32) u32 { + const off: usize = @intCast(offset); + std.debug.assert(off + padded_leb128_size <= buffer.len); + var result: u32 = 0; + for (0..padded_leb128_size) |i| { + result |= @as(u32, buffer[off + i] & 0x7f) << @intCast(7 * i); + } + return result; +} + +/// Overwrite a 5-byte padded LEB128 u32 at the given offset in the buffer. +pub fn overwritePaddedU32(buffer: []u8, offset: u32, value: u32) void { + var x = value; + const off: usize = @intCast(offset); + std.debug.assert(off + padded_leb128_size <= buffer.len); + for (0..4) |i| { + buffer[off + i] = @as(u8, @truncate(x & 0x7f)) | 0x80; + x >>= 7; + } + buffer[off + 4] = @as(u8, @truncate(x)); +} + +/// Overwrite 5 bytes with a signed i32 in padded LEB128. +/// Used for signed memory address relocations. +pub fn overwritePaddedI32(buffer: []u8, offset: u32, value: i32) void { + var x = value; + const off: usize = @intCast(offset); + std.debug.assert(off + padded_leb128_size <= buffer.len); + for (0..4) |i| { + buffer[off + i] = @as(u8, @truncate(@as(u32, @bitCast(x)) & 0x7f)) | 0x80; + x >>= 7; + } + buffer[off + 4] = @as(u8, @truncate(@as(u32, @bitCast(x)) & 0x7f)); +} + +/// Append a u32 as exactly 5 bytes of padded LEB128 to an output buffer. +/// Used when emitting new relocatable instructions (call, global.get/set). +pub fn appendPaddedU32(gpa: Allocator, output: *std.ArrayList(u8), value: u32) !void { + var x = value; + for (0..padded_leb128_size - 1) |_| { + try output.append(gpa, @as(u8, @truncate(x & 0x7f)) | 0x80); + x >>= 7; + } + try output.append(gpa, @as(u8, @truncate(x))); +} + +// --- Tests for padded LEB128 --- + +/// Decode a 5-byte padded unsigned LEB128 value (test helper). +fn decodePaddedU32(bytes: []const u8) u32 { + var result: u32 = 0; + for (0..5) |i| { + result |= @as(u32, bytes[i] & 0x7f) << @intCast(7 * i); + } + return result; +} + +/// Decode a 5-byte padded signed LEB128 value (test helper). +fn decodePaddedI32(bytes: []const u8) i32 { + var result: u32 = 0; + for (0..5) |i| { + result |= @as(u32, bytes[i] & 0x7f) << @intCast(7 * i); + } + return @bitCast(result); +} + +test "overwritePaddedU32 — value 0 encodes as [0x80, 0x80, 0x80, 0x80, 0x00]" { + var buf = [_]u8{0} ** 5; + overwritePaddedU32(&buf, 0, 0); + try std.testing.expectEqualSlices(u8, &.{ 0x80, 0x80, 0x80, 0x80, 0x00 }, &buf); +} + +test "overwritePaddedU32 — value 1 encodes as [0x81, 0x80, 0x80, 0x80, 0x00]" { + var buf = [_]u8{0} ** 5; + overwritePaddedU32(&buf, 0, 1); + try std.testing.expectEqualSlices(u8, &.{ 0x81, 0x80, 0x80, 0x80, 0x00 }, &buf); +} + +test "overwritePaddedU32 — value 0x7F encodes as [0xFF, 0x80, 0x80, 0x80, 0x00]" { + var buf = [_]u8{0} ** 5; + overwritePaddedU32(&buf, 0, 0x7F); + try std.testing.expectEqualSlices(u8, &.{ 0xFF, 0x80, 0x80, 0x80, 0x00 }, &buf); +} + +test "overwritePaddedU32 — value 128 encodes as [0x80, 0x81, 0x80, 0x80, 0x00]" { + var buf = [_]u8{0} ** 5; + overwritePaddedU32(&buf, 0, 128); + try std.testing.expectEqualSlices(u8, &.{ 0x80, 0x81, 0x80, 0x80, 0x00 }, &buf); +} + +test "overwritePaddedU32 — max u32 (0xFFFFFFFF) encodes correctly" { + var buf = [_]u8{0} ** 5; + overwritePaddedU32(&buf, 0, 0xFFFFFFFF); + try std.testing.expectEqualSlices(u8, &.{ 0xFF, 0xFF, 0xFF, 0xFF, 0x0F }, &buf); +} + +test "overwritePaddedU32 — round-trip: write then decode matches original value" { + const test_values = [_]u32{ 0, 1, 127, 128, 255, 256, 16383, 16384, 2097151, 2097152, 0x0FFFFFFF, 0xFFFFFFFF }; + for (test_values) |val| { + var buf = [_]u8{0} ** 5; + overwritePaddedU32(&buf, 0, val); + try std.testing.expectEqual(val, decodePaddedU32(&buf)); + } +} + +test "overwritePaddedI32 — negative value (-1) encodes correctly" { + var buf = [_]u8{0} ** 5; + overwritePaddedI32(&buf, 0, -1); + // -1 in signed padded LEB128: all 7-bit groups are 0x7F, last byte keeps sign bit + try std.testing.expectEqualSlices(u8, &.{ 0xFF, 0xFF, 0xFF, 0xFF, 0x7F }, &buf); +} + +test "overwritePaddedI32 — positive value round-trips correctly" { + const test_values = [_]i32{ 0, 1, -1, 127, -128, 32767, -32768, std.math.maxInt(i32), std.math.minInt(i32) }; + for (test_values) |val| { + var buf = [_]u8{0} ** 5; + overwritePaddedI32(&buf, 0, val); + try std.testing.expectEqual(val, decodePaddedI32(&buf)); + } +} + +test "appendPaddedU32 — appends exactly 5 bytes" { + var output: std.ArrayList(u8) = .empty; + defer output.deinit(std.testing.allocator); + try appendPaddedU32(std.testing.allocator, &output, 42); + try std.testing.expectEqual(@as(usize, 5), output.items.len); +} + +test "appendPaddedU32 — output is decodable as standard LEB128" { + const test_values = [_]u32{ 0, 1, 127, 128, 16384, 0xFFFFFFFF }; + for (test_values) |val| { + var output: std.ArrayList(u8) = .empty; + defer output.deinit(std.testing.allocator); + try appendPaddedU32(std.testing.allocator, &output, val); + try std.testing.expectEqual(val, decodePaddedU32(output.items)); + } +} + +// --- Tests for LEB128 decoding --- + +test "readU32 — decodes single-byte value" { + const bytes = [_]u8{42}; + var cursor: usize = 0; + try std.testing.expectEqual(@as(u32, 42), try readU32(&bytes, &cursor)); + try std.testing.expectEqual(@as(usize, 1), cursor); +} + +test "readU32 — decodes multi-byte value" { + const bytes = [_]u8{ 0x80, 0x01 }; // 128 + var cursor: usize = 0; + try std.testing.expectEqual(@as(u32, 128), try readU32(&bytes, &cursor)); + try std.testing.expectEqual(@as(usize, 2), cursor); +} + +test "readU32 — decodes max u32 padded" { + const bytes = [_]u8{ 0xFF, 0xFF, 0xFF, 0xFF, 0x0F }; + var cursor: usize = 0; + try std.testing.expectEqual(@as(u32, 0xFFFFFFFF), try readU32(&bytes, &cursor)); +} + +test "readI32 — decodes negative value" { + // -1 in signed LEB128 = 0x7F + const bytes = [_]u8{0x7F}; + var cursor: usize = 0; + try std.testing.expectEqual(@as(i32, -1), try readI32(&bytes, &cursor)); +} + +test "readI32 — decodes positive value" { + const bytes = [_]u8{42}; + var cursor: usize = 0; + try std.testing.expectEqual(@as(i32, 42), try readI32(&bytes, &cursor)); +} + +test "readString — reads length-prefixed string" { + const bytes = [_]u8{ 3, 'f', 'o', 'o' }; + var cursor: usize = 0; + const s = try readString(&bytes, &cursor); + try std.testing.expectEqualStrings("foo", s); + try std.testing.expectEqual(@as(usize, 4), cursor); +} + +// --- Tests for preload --- + +/// Build a minimal relocatable WASM binary for testing. +/// Contains: 1 function import, 1 defined function, 1 export, linking + reloc sections. +fn buildTestRelocatableModule(allocator: Allocator) ![]u8 { + var out: std.ArrayList(u8) = .empty; + errdefer out.deinit(allocator); + + // Magic + version + try out.appendSlice(allocator, "\x00asm"); + try out.appendSlice(allocator, &[_]u8{ 0x01, 0x00, 0x00, 0x00 }); + + // Type section: 1 type () -> () + try out.append(allocator, @intFromEnum(SectionId.type_section)); + try writeSectionBody(allocator, &out, &.{ + 0x01, // 1 type + 0x60, // func + 0x00, // 0 params + 0x00, // 0 results + }); + + // Import section: 1 function import "env" "roc__main_exposed" + { + var import_data: std.ArrayList(u8) = .empty; + defer import_data.deinit(allocator); + try leb128WriteU32(allocator, &import_data, 1); // 1 import + // module name "env" + try leb128WriteU32(allocator, &import_data, 3); + try import_data.appendSlice(allocator, "env"); + // field name "roc__main_exposed" + try leb128WriteU32(allocator, &import_data, 17); + try import_data.appendSlice(allocator, "roc__main_exposed"); + // function import, type 0 + try import_data.append(allocator, 0x00); + try leb128WriteU32(allocator, &import_data, 0); + + try out.append(allocator, @intFromEnum(SectionId.import_section)); + try leb128WriteU32(allocator, &out, @intCast(import_data.items.len)); + try out.appendSlice(allocator, import_data.items); + } + + // Function section: 1 function with type 0 + try out.append(allocator, @intFromEnum(SectionId.function_section)); + try writeSectionBody(allocator, &out, &.{ + 0x01, // 1 function + 0x00, // type index 0 + }); + + // Memory section: 1 memory, min 1 page + try out.append(allocator, @intFromEnum(SectionId.memory_section)); + try writeSectionBody(allocator, &out, &.{ + 0x01, // 1 memory + 0x00, // no max + 0x01, // min 1 page + }); + + // Export section: 1 export "_start" -> function 1 + { + var export_data: std.ArrayList(u8) = .empty; + defer export_data.deinit(allocator); + try leb128WriteU32(allocator, &export_data, 1); // 1 export + try leb128WriteU32(allocator, &export_data, 6); // name length + try export_data.appendSlice(allocator, "_start"); + try export_data.append(allocator, 0x00); // func export + try leb128WriteU32(allocator, &export_data, 1); // func index 1 + + try out.append(allocator, @intFromEnum(SectionId.export_section)); + try leb128WriteU32(allocator, &out, @intCast(export_data.items.len)); + try out.appendSlice(allocator, export_data.items); + } + + // Code section: 1 function body + // Body: call import_0 (padded LEB128), end + { + var code_section: std.ArrayList(u8) = .empty; + defer code_section.deinit(allocator); + try leb128WriteU32(allocator, &code_section, 1); // 1 function + + // Function body: [size] [0 locals] [call 0 (padded)] [end] + const body = [_]u8{ + 0x00, // 0 local declarations + Op.call, + 0x80, 0x80, 0x80, 0x80, 0x00, // padded LEB128 for function index 0 + Op.end, + }; + try leb128WriteU32(allocator, &code_section, body.len); + try code_section.appendSlice(allocator, &body); + + try out.append(allocator, @intFromEnum(SectionId.code_section)); + try leb128WriteU32(allocator, &out, @intCast(code_section.items.len)); + try out.appendSlice(allocator, code_section.items); + } + + // Custom section: "linking" + { + var linking_body: std.ArrayList(u8) = .empty; + defer linking_body.deinit(allocator); + + // Section name "linking" + try leb128WriteU32(allocator, &linking_body, 7); + try linking_body.appendSlice(allocator, "linking"); + + // Version 2 + try linking_body.append(allocator, 2); + + // Subsection: symbol table (ID 8) + { + var sym_data: std.ArrayList(u8) = .empty; + defer sym_data.deinit(allocator); + + try leb128WriteU32(allocator, &sym_data, 2); // 2 symbols + + // Symbol 0: undefined function import "roc__main_exposed" (index 0) + try sym_data.append(allocator, @intFromEnum(WasmLinking.SymKind.function)); + try leb128WriteU32(allocator, &sym_data, WasmLinking.SymFlag.UNDEFINED); // flags + try leb128WriteU32(allocator, &sym_data, 0); // function index + + // Symbol 1: defined function "_start" (index 1) + try sym_data.append(allocator, @intFromEnum(WasmLinking.SymKind.function)); + try leb128WriteU32(allocator, &sym_data, 0); // flags (defined, not undefined) + try leb128WriteU32(allocator, &sym_data, 1); // function index + try leb128WriteU32(allocator, &sym_data, 6); // name length + try sym_data.appendSlice(allocator, "_start"); + + try linking_body.append(allocator, @intFromEnum(WasmLinking.LinkingSubsection.symbol_table)); + try leb128WriteU32(allocator, &linking_body, @intCast(sym_data.items.len)); + try linking_body.appendSlice(allocator, sym_data.items); + } + + try out.append(allocator, @intFromEnum(SectionId.custom_section)); + try leb128WriteU32(allocator, &out, @intCast(linking_body.items.len)); + try out.appendSlice(allocator, linking_body.items); + } + + // Custom section: "reloc.CODE" + { + var reloc_body: std.ArrayList(u8) = .empty; + defer reloc_body.deinit(allocator); + + // Section name + try leb128WriteU32(allocator, &reloc_body, 10); + try reloc_body.appendSlice(allocator, "reloc.CODE"); + + // Target section index (code section) + try leb128WriteU32(allocator, &reloc_body, @intFromEnum(SectionId.code_section)); + + // 1 relocation entry + try leb128WriteU32(allocator, &reloc_body, 1); + + // R_WASM_FUNCTION_INDEX_LEB, offset=2 (after size+locals), symbol_index=0 + try reloc_body.append(allocator, @intFromEnum(WasmLinking.IndexRelocType.function_index_leb)); + try leb128WriteU32(allocator, &reloc_body, 2); // offset within code_bytes + try leb128WriteU32(allocator, &reloc_body, 0); // symbol index + + try out.append(allocator, @intFromEnum(SectionId.custom_section)); + try leb128WriteU32(allocator, &out, @intCast(reloc_body.items.len)); + try out.appendSlice(allocator, reloc_body.items); + } + + return out.toOwnedSlice(allocator); +} + +/// Helper: write a section body with known bytes. +fn writeSectionBody(allocator: Allocator, out: *std.ArrayList(u8), body: []const u8) !void { + try leb128WriteU32(allocator, out, @intCast(body.len)); + try out.appendSlice(allocator, body); +} + +test "preload — rejects bytes without WASM magic number" { + const bad_bytes = [_]u8{ 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00 }; + try std.testing.expectError(error.InvalidMagic, preload(std.testing.allocator, &bad_bytes, false)); +} + +test "preload — rejects wrong version" { + const bad_bytes = [_]u8{ 0x00, 0x61, 0x73, 0x6D, 0x02, 0x00, 0x00, 0x00 }; + try std.testing.expectError(error.InvalidVersion, preload(std.testing.allocator, &bad_bytes, false)); +} + +test "preload — rejects too-short input" { + const bad_bytes = [_]u8{ 0x00, 0x61, 0x73, 0x6D }; + try std.testing.expectError(error.UnexpectedEnd, preload(std.testing.allocator, &bad_bytes, false)); +} + +test "preload — parses minimal valid module (magic + version only)" { + const bytes = [_]u8{ 0x00, 0x61, 0x73, 0x6D, 0x01, 0x00, 0x00, 0x00 }; + var module = try preload(std.testing.allocator, &bytes, false); + defer module.deinit(); + try std.testing.expectEqual(@as(usize, 0), module.func_types.items.len); + try std.testing.expectEqual(@as(usize, 0), module.imports.items.len); +} + +test "preload — parses type section with multiple signatures" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + // We built 1 type: () -> () + try std.testing.expectEqual(@as(usize, 1), module.func_types.items.len); + try std.testing.expectEqual(@as(usize, 0), module.func_types.items[0].params.len); + try std.testing.expectEqual(@as(?ValType, null), module.func_type_results.items[0]); +} + +test "preload — parses import section with function import" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + try std.testing.expectEqual(@as(usize, 1), module.imports.items.len); + try std.testing.expectEqualStrings("env", module.imports.items[0].module_name); + try std.testing.expectEqualStrings("roc__main_exposed", module.imports.items[0].field_name); + try std.testing.expectEqual(@as(u32, 0), module.imports.items[0].type_idx); + try std.testing.expectEqual(@as(u32, 1), module.import_fn_count); +} + +test "preload — records correct function_offsets for code section" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + // 1 locally-defined function + try std.testing.expectEqual(@as(usize, 1), module.function_offsets.items.len); + try std.testing.expectEqual(@as(usize, 1), module.func_type_indices.items.len); + // Function offset 0 = start of first function body within code_bytes + try std.testing.expectEqual(@as(u32, 0), module.function_offsets.items[0]); + // code_bytes should contain the function body + try std.testing.expect(module.code_bytes.items.len > 0); +} + +test "preload — parses linking section symbol table" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + try std.testing.expectEqual(@as(usize, 2), module.linking.symbol_table.items.len); + + // Symbol 0: undefined function import + const sym0 = module.linking.symbol_table.items[0]; + try std.testing.expectEqual(WasmLinking.SymKind.function, sym0.kind); + try std.testing.expect(sym0.isUndefined()); + try std.testing.expectEqual(@as(u32, 0), sym0.index); + // Implicitly named (no explicit name for undefined import without EXPLICIT_NAME flag) + try std.testing.expect(sym0.isImplicitlyNamed()); + + // Symbol 1: defined function "_start" + const sym1 = module.linking.symbol_table.items[1]; + try std.testing.expectEqual(WasmLinking.SymKind.function, sym1.kind); + try std.testing.expect(!sym1.isUndefined()); + try std.testing.expectEqualStrings("_start", sym1.name.?); + try std.testing.expectEqual(@as(u32, 1), sym1.index); +} + +test "preload — parses reloc.CODE section entries" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + try std.testing.expectEqual(@as(usize, 1), module.reloc_code.entries.items.len); + const entry = module.reloc_code.entries.items[0]; + switch (entry) { + .index => |idx| { + try std.testing.expectEqual(WasmLinking.IndexRelocType.function_index_leb, idx.type_id); + // Original binary offset was 2 (relative to code section body start, + // including fn count LEB128). preload adjusts by subtracting the fn + // count LEB size (1 byte), so the stored offset is 1. + try std.testing.expectEqual(@as(u32, 1), idx.offset); + try std.testing.expectEqual(@as(u32, 0), idx.symbol_index); + }, + .offset => unreachable, + } +} + +test "preload — require_relocatable rejects module without linking section" { + // A minimal valid module with no custom sections + const bytes = [_]u8{ 0x00, 0x61, 0x73, 0x6D, 0x01, 0x00, 0x00, 0x00 }; + try std.testing.expectError(error.MissingLinkingSection, preload(std.testing.allocator, &bytes, true)); +} + +test "preload — parsed module has correct function count" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + // 1 import + 1 defined = 2 total functions + try std.testing.expectEqual(@as(u32, 1), module.import_fn_count); + try std.testing.expectEqual(@as(usize, 1), module.func_type_indices.items.len); + // Total function count = import_fn_count + func_type_indices.len + try std.testing.expectEqual( + @as(usize, 2), + @as(usize, module.import_fn_count) + module.func_type_indices.items.len, + ); +} + +test "preload — parses export section" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + try std.testing.expectEqual(@as(usize, 1), module.exports.items.len); + try std.testing.expectEqualStrings("_start", module.exports.items[0].name); + try std.testing.expectEqual(ExportKind.func, module.exports.items[0].kind); + try std.testing.expectEqual(@as(u32, 1), module.exports.items[0].idx); +} + +test "preload — parses memory section" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + try std.testing.expect(module.has_memory); + try std.testing.expectEqual(@as(u32, 1), module.memory_min_pages); +} + +test "preload — symbol name resolution from imports" { + const allocator = std.testing.allocator; + const wasm_bytes = try buildTestRelocatableModule(allocator); + defer allocator.free(wasm_bytes); + + var module = try preload(allocator, wasm_bytes, false); + defer module.deinit(); + + // Symbol 0 is implicitly named — resolve via imports + const sym0 = module.linking.symbol_table.items[0]; + const resolved_name = sym0.resolveName(module.imports.items, module.global_imports.items, module.table_imports.items); + try std.testing.expect(resolved_name != null); + try std.testing.expectEqualStrings("roc__main_exposed", resolved_name.?); + + // findSymbolByName should find it + const found = module.linking.findSymbolByName("roc__main_exposed", module.imports.items, module.global_imports.items, module.table_imports.items); + try std.testing.expectEqual(@as(?u32, 0), found); +} + +// --- Tests for linkHostToAppCalls --- + +/// Build a WasmModule in memory for testing linkHostToAppCalls. +/// +/// Function index space: +/// 0: js_foo (import, env) +/// 1: roc__main_exposed (import, env) — the app function to link +/// 2: js_bar (import, env) +/// 3: defined_0 — body calls roc__main_exposed (fn 1) +/// 4: defined_1 — body calls js_bar (fn 2) +/// +/// Each function body in code_bytes: +/// [body_size] [0x00 locals] [Op.call] [padded_leb128 fn_index] [Op.end] +/// = 1 + 8 = 9 bytes per function (body_size=8, encoded in 1 byte) +/// +/// Relocation entries (offsets into code_bytes): +/// fn3 call operand at offset 3 → symbol 1 (roc__main_exposed) +/// fn4 call operand at offset 12 → symbol 2 (js_bar) +fn buildLinkingTestModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + + // Type 0: () -> () + _ = try module.addFuncType(&.{}, &.{}); + + // 3 function imports + _ = try module.addImport("env", "js_foo", 0); + _ = try module.addImport("env", "roc__main_exposed", 0); + _ = try module.addImport("env", "js_bar", 0); + module.import_fn_count = 3; + + // 2 defined functions (type 0) + try module.func_type_indices.append(allocator, 0); // defined_0 (global index 3) + try module.func_type_indices.append(allocator, 0); // defined_1 (global index 4) + + // Build code_bytes: two function bodies. + // fn3 body: call fn 1 (roc__main_exposed) + // offset 0: body_size=8 + // offset 1: 0x00 (no locals) + // offset 2: Op.call + // offset 3..7: padded LEB128(1) + // offset 8: Op.end + // fn4 body: call fn 2 (js_bar) + // offset 9: body_size=8 + // offset 10: 0x00 (no locals) + // offset 11: Op.call + // offset 12..16: padded LEB128(2) + // offset 17: Op.end + try module.code_bytes.appendSlice(allocator, &.{0x08}); // body size = 8 + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 1); // call fn 1 + try module.code_bytes.append(allocator, Op.end); + + try module.code_bytes.appendSlice(allocator, &.{0x08}); // body size = 8 + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 2); // call fn 2 + try module.code_bytes.append(allocator, Op.end); + + try module.function_offsets.append(allocator, 0); // fn3 at offset 0 + try module.function_offsets.append(allocator, 9); // fn4 at offset 9 + + // Symbol table: + // sym 0: undefined function index 0 (js_foo) — implicitly named + // sym 1: undefined function index 1 (roc__main_exposed) — implicitly named + // sym 2: undefined function index 2 (js_bar) — implicitly named + // sym 3: defined function index 3 (defined_0) + // sym 4: defined function index 4 (defined_1) + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 1 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 2 }, + .{ .kind = .function, .flags = 0, .name = "defined_0", .index = 3 }, + .{ .kind = .function, .flags = 0, .name = "defined_1", .index = 4 }, + }); + + // Relocation entries for code section: + // fn3's call operand at code_bytes offset 3 → sym 1 (roc__main_exposed) + // fn4's call operand at code_bytes offset 12 → sym 2 (js_bar) + try module.reloc_code.entries.appendSlice(allocator, &.{ + .{ .index = .{ .type_id = .function_index_leb, .offset = 3, .symbol_index = 1 } }, + .{ .index = .{ .type_id = .function_index_leb, .offset = 12, .symbol_index = 2 } }, + }); + + return module; +} + +test "linkHostToAppCalls — single app function: import removed, dummy inserted" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: 3 imports, 2 defined, 0 dummies + try std.testing.expectEqual(@as(usize, 3), module.imports.items.len); + try std.testing.expectEqual(@as(usize, 2), module.func_type_indices.items.len); + try std.testing.expectEqual(@as(u32, 0), module.dead_import_dummy_count); + + // Link roc__main_exposed → app function at index 5 + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: 2 imports, 3 func_type_indices (1 dummy + 2 original), 1 dummy + try std.testing.expectEqual(@as(usize, 2), module.imports.items.len); + try std.testing.expectEqual(@as(usize, 3), module.func_type_indices.items.len); + try std.testing.expectEqual(@as(u32, 1), module.dead_import_dummy_count); +} + +test "linkHostToAppCalls — verifies call instruction patched to app function index" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: fn3 calls fn 1 (roc__main_exposed) — LEB128 at code_bytes[3..8] + try std.testing.expectEqual(@as(u32, 1), decodePaddedU32(module.code_bytes.items[3..8])); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: fn3's call should be patched to fn 5 (the app function) + try std.testing.expectEqual(@as(u32, 5), decodePaddedU32(module.code_bytes.items[3..8])); +} + +test "linkHostToAppCalls — last import swapped into vacated slot" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // js_bar (was at index 2) should now be at index 1 (the vacated slot) + try std.testing.expectEqualStrings("js_foo", module.imports.items[0].field_name); + try std.testing.expectEqualStrings("js_bar", module.imports.items[1].field_name); +} + +test "linkHostToAppCalls — swap import's call sites updated to new index" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: fn4 calls fn 2 (js_bar) — LEB128 at code_bytes[12..17] + try std.testing.expectEqual(@as(u32, 2), decodePaddedU32(module.code_bytes.items[12..17])); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: fn4's call should be patched to fn 1 (js_bar's new position) + try std.testing.expectEqual(@as(u32, 1), decodePaddedU32(module.code_bytes.items[12..17])); +} + +test "linkHostToAppCalls — multiple app functions linked in sequence" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Link two app functions sequentially + try module.linkHostToAppCalls(&.{ + .{ .name = "roc__main_exposed", .fn_index = 5 }, + .{ .name = "js_foo", .fn_index = 6 }, + }); + + // 2 imports removed → 1 remaining, 2 dummies + try std.testing.expectEqual(@as(usize, 1), module.imports.items.len); + try std.testing.expectEqual(@as(u32, 2), module.dead_import_dummy_count); + try std.testing.expectEqual(@as(usize, 4), module.func_type_indices.items.len); // 2 dummies + 2 original +} + +test "linkHostToAppCalls — dead_import_dummy_count incremented correctly" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + try std.testing.expectEqual(@as(u32, 0), module.dead_import_dummy_count); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + try std.testing.expectEqual(@as(u32, 1), module.dead_import_dummy_count); + + try module.linkHostToAppCalls(&.{.{ .name = "js_foo", .fn_index = 6 }}); + try std.testing.expectEqual(@as(u32, 2), module.dead_import_dummy_count); +} + +test "linkHostToAppCalls — func_type_indices has dummy signature at position 0" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: func_type_indices = [0, 0] (two defined functions, both type 0) + try std.testing.expectEqual(@as(usize, 2), module.func_type_indices.items.len); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: func_type_indices = [0, 0, 0] — dummy at position 0 + try std.testing.expectEqual(@as(usize, 3), module.func_type_indices.items.len); + try std.testing.expectEqual(@as(u32, 0), module.func_type_indices.items[0]); // dummy type signature +} + +test "linkHostToAppCalls — total function count unchanged after linking" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: 3 imports + 2 defined = 5 total + const total_before = module.imports.items.len + module.func_type_indices.items.len; + try std.testing.expectEqual(@as(usize, 5), total_before); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: 2 imports + 3 func_type_indices (1 dummy + 2 original) = 5 total + const total_after = module.imports.items.len + module.func_type_indices.items.len; + try std.testing.expectEqual(@as(usize, 5), total_after); +} + +test "linkHostToAppCalls — unfound import exports app function instead" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + const exports_before = module.exports.items.len; + + // Link a function name that doesn't exist in imports + try module.linkHostToAppCalls(&.{.{ .name = "roc__nonexistent", .fn_index = 7 }}); + + // No imports removed, but an export was added + try std.testing.expectEqual(@as(usize, 3), module.imports.items.len); + try std.testing.expectEqual(exports_before + 1, module.exports.items.len); + const new_export = module.exports.items[module.exports.items.len - 1]; + try std.testing.expectEqualStrings("roc__nonexistent", new_export.name); + try std.testing.expectEqual(ExportKind.func, new_export.kind); + try std.testing.expectEqual(@as(u32, 7), new_export.idx); +} + +test "linkHostToAppCalls — import_fn_count decremented" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + try std.testing.expectEqual(@as(u32, 3), module.import_fn_count); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + try std.testing.expectEqual(@as(u32, 2), module.import_fn_count); +} + +test "linkHostToAppCalls — symbol table updated for linked function" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: sym 1 (roc__main_exposed) has index 1 + try std.testing.expectEqual(@as(u32, 1), module.linking.symbol_table.items[1].index); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: sym 1 should now point to app function index 5 + try std.testing.expectEqual(@as(u32, 5), module.linking.symbol_table.items[1].index); +} + +test "linkHostToAppCalls — symbol table updated for swapped function" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Before: sym 2 (js_bar) has index 2 + try std.testing.expectEqual(@as(u32, 2), module.linking.symbol_table.items[2].index); + + try module.linkHostToAppCalls(&.{.{ .name = "roc__main_exposed", .fn_index = 5 }}); + + // After: sym 2 (js_bar) should now have index 1 (swapped into roc__main_exposed's slot) + try std.testing.expectEqual(@as(u32, 1), module.linking.symbol_table.items[2].index); +} + +test "linkHostToAppCalls — linking last import is a no-op swap" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Link js_bar (the last import) — swap_fn_index == host_fn_index, no swap needed + try module.linkHostToAppCalls(&.{.{ .name = "js_bar", .fn_index = 5 }}); + + // js_bar removed, js_foo and roc__main_exposed remain + try std.testing.expectEqual(@as(usize, 2), module.imports.items.len); + try std.testing.expectEqualStrings("js_foo", module.imports.items[0].field_name); + try std.testing.expectEqualStrings("roc__main_exposed", module.imports.items[1].field_name); + + // fn4's call should be patched from 2 to 5 + try std.testing.expectEqual(@as(u32, 5), decodePaddedU32(module.code_bytes.items[12..17])); + + // No swap relocation needed — sym 1 (roc__main_exposed) should be unchanged + try std.testing.expectEqual(@as(u32, 1), module.linking.symbol_table.items[1].index); +} + +// --- Tests for loading a real relocatable host module --- + +test "preload — parses real Zig-compiled wasm host object" { + const allocator = std.testing.allocator; + const host_bytes = try std.fs.cwd().readFileAlloc( + allocator, + "test/wasm/platform/targets/wasm32/host.wasm", + 10 * 1024 * 1024, // 10 MB max + ); + defer allocator.free(host_bytes); + + var module = try preload(allocator, host_bytes, true); + defer module.deinit(); + + // The host should have function imports (extern fn declarations in host.zig) + try std.testing.expect(module.imports.items.len > 0); + try std.testing.expect(module.import_fn_count > 0); + + // Should have locally-defined functions + try std.testing.expect(module.func_type_indices.items.len > 0); + + // Should have code bytes and matching function offsets + try std.testing.expect(module.code_bytes.items.len > 0); + try std.testing.expectEqual(module.func_type_indices.items.len, module.function_offsets.items.len); + + // Should have a populated symbol table + try std.testing.expect(module.linking.symbol_table.items.len > 0); + + // Should have relocation entries for code + try std.testing.expect(module.reloc_code.entries.items.len > 0); + + // The host imports roc__main — verify we can find it by name + var found_roc_main = false; + for (module.imports.items) |imp| { + if (std.mem.eql(u8, imp.field_name, "roc__main")) { + found_roc_main = true; + break; + } + } + try std.testing.expect(found_roc_main); + + // Verify total function count is consistent + const total_fns = @as(usize, module.import_fn_count) + module.func_type_indices.items.len; + try std.testing.expect(total_fns > 3); // at least imports + a few defined functions +} + +// --- Phase 5 tests: Memory, Table, and Stack Pointer Ownership --- + +/// Build a test module simulating a parsed relocatable host with memory, table, +/// and __stack_pointer global imports (as produced by clang/zig for wasm32). +fn buildPhase5TestModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + + // Type 0: () -> () + _ = try module.addFuncType(&.{}, &.{}); + + // Function imports + _ = try module.addImport("env", "roc__main", 0); + _ = try module.addImport("env", "roc_panic", 0); + module.import_fn_count = 2; + + // Simulate that the parser found memory and table imports + // (these are NOT stored in the imports array, just flagged) + module.has_memory = true; + module.memory_min_pages = 1; + module.has_table = true; + + // Simulate a __stack_pointer global import + module.import_global_count = 1; + + // One defined function + try module.func_type_indices.append(allocator, 0); + try module.code_bytes.append(allocator, 0x02); // body size + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.end); + try module.function_offsets.append(allocator, 0); + + // Add a data segment at offset 1024 (after the reserved area) + const data = try allocator.dupe(u8, "Hello, WASM!"); + try module.data_segments.append(allocator, .{ .offset = 1024, .data = data }); + + // Symbol table with __stack_pointer global symbol + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 1 }, + .{ .kind = .global, .flags = WasmLinking.SymFlag.UNDEFINED, .name = "__stack_pointer", .index = 0 }, + .{ .kind = .function, .flags = 0, .name = "wasm_main", .index = 2 }, + }); + + // Relocation for global.get __stack_pointer + try module.reloc_code.entries.append(allocator, .{ + .index = .{ .type_id = .global_index_leb, .offset = 1, .symbol_index = 2 }, + }); + + return module; +} + +test "setup — memory and table imports removed from host module" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + module.removeMemoryAndTableImports(); + + // After setup, the imports array should only contain function imports + try std.testing.expectEqual(@as(usize, 2), module.imports.items.len); + try std.testing.expectEqualStrings("roc__main", module.imports.items[0].field_name); + try std.testing.expectEqualStrings("roc_panic", module.imports.items[1].field_name); + + // Memory and table flags should still be set (they'll be defined sections) + try std.testing.expect(module.has_memory); + try std.testing.expect(module.has_table); +} + +test "setup — import_fn_count unchanged after removing non-function imports" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + const fn_count_before = module.import_fn_count; + module.removeMemoryAndTableImports(); + + // import_fn_count should be unchanged — it only counts function imports + try std.testing.expectEqual(fn_count_before, module.import_fn_count); + try std.testing.expectEqual(@as(u32, 2), module.import_fn_count); +} + +test "setup — __stack_pointer global defined with correct initial value" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + module.removeMemoryAndTableImports(); + try module.finalizeMemoryAndTable(1024); // 1KB stack + + // __stack_pointer should be defined (not imported) + try std.testing.expect(module.has_stack_pointer); + + // Initial value = memory_pages * 65536 (top of memory) + try std.testing.expectEqual(module.memory_min_pages * 65536, module.stack_pointer_init); +} + +test "setup — memory section has correct minimum pages" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + module.removeMemoryAndTableImports(); + + // Data segment: 12 bytes at offset 1024 → data_end = 1036 + // data_offset is 1024 (init default), data_end = max(1024, 1036) = 1036 + // With 1KB stack: total = 1036 + 1024 = 2060 bytes → 1 page + try module.finalizeMemoryAndTable(1024); + try std.testing.expectEqual(@as(u32, 1), module.memory_min_pages); + + // With a larger stack that pushes past one page: + // total = 1036 + 65000 = 66036 → 2 pages + module.memory_min_pages = 1; // reset + try module.finalizeMemoryAndTable(65000); + // Recalculate: data_end stays 1036, total = 1036 + 65000 = 66036 + // 66036 / 65536 = 1.007... → 2 pages + try std.testing.expectEqual(@as(u32, 2), module.memory_min_pages); +} + +test "setup — table size matches element count after finalization" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + // Add some function indices to the table + try module.table_func_indices.append(allocator, 2); // wasm_main + try module.table_func_indices.append(allocator, 3); // another fn + try module.table_func_indices.append(allocator, 4); // another fn + + module.removeMemoryAndTableImports(); + try module.finalizeMemoryAndTable(1024); + + // Encode and verify the table section uses the correct size + const encoded = try module.encode(allocator); + defer allocator.free(encoded); + + // Parse the encoded output to verify table section + var decoded = try preload(allocator, encoded, false); + defer decoded.deinit(); + + // The table should exist + try std.testing.expect(decoded.has_table); +} + +test "setup — memory exported as 'memory'" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + const exports_before = module.exports.items.len; + module.removeMemoryAndTableImports(); + try module.finalizeMemoryAndTable(1024); + + // Should have one more export than before + try std.testing.expectEqual(exports_before + 1, module.exports.items.len); + + // Find the memory export + var found_memory_export = false; + for (module.exports.items) |exp| { + if (std.mem.eql(u8, exp.name, "memory") and exp.kind == .memory) { + found_memory_export = true; + try std.testing.expectEqual(@as(u32, 0), exp.idx); + } + } + try std.testing.expect(found_memory_export); +} + +test "setup — global import count tracked correctly" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + // The test module simulates 1 global import (__stack_pointer) + try std.testing.expectEqual(@as(u32, 1), module.import_global_count); +} + +test "setup — finalized module encodes and re-parses as valid WASM" { + const allocator = std.testing.allocator; + var module = try buildPhase5TestModule(allocator); + defer module.deinit(); + + // Add table entries for encoding + try module.table_func_indices.append(allocator, 2); + + module.removeMemoryAndTableImports(); + try module.finalizeMemoryAndTable(4096); + + // Encode to final WASM binary + const encoded = try module.encode(allocator); + defer allocator.free(encoded); + + // Verify it's valid WASM (magic + version) + try std.testing.expectEqualSlices(u8, &.{ 0x00, 0x61, 0x73, 0x6D }, encoded[0..4]); + try std.testing.expectEqual(@as(u32, 1), std.mem.readInt(u32, encoded[4..8], .little)); + + // Should be parseable as a non-relocatable module + var decoded = try preload(allocator, encoded, false); + defer decoded.deinit(); + + // Verify memory is defined + try std.testing.expect(decoded.has_memory); + + // Verify exports include "memory" + var found_memory = false; + for (decoded.exports.items) |exp| { + if (std.mem.eql(u8, exp.name, "memory")) { + found_memory = true; + } + } + try std.testing.expect(found_memory); + + // Verify __stack_pointer global is defined + try std.testing.expect(decoded.has_stack_pointer); +} + +test "phase5 — real host module: removeMemoryAndTableImports preserves function imports" { + const allocator = std.testing.allocator; + const host_bytes = try std.fs.cwd().readFileAlloc( + allocator, + "test/wasm/platform/targets/wasm32/host.wasm", + 10 * 1024 * 1024, + ); + defer allocator.free(host_bytes); + + var module = try preload(allocator, host_bytes, true); + defer module.deinit(); + + const fn_count_before = module.import_fn_count; + const imports_before = module.imports.items.len; + + module.removeMemoryAndTableImports(); + + // Function imports should be completely unchanged + try std.testing.expectEqual(fn_count_before, module.import_fn_count); + try std.testing.expectEqual(imports_before, module.imports.items.len); + + // Memory flag should be set (host imports memory) + try std.testing.expect(module.has_memory); +} + +test "phase5 — real host module: full setup and finalization produces valid WASM" { + const allocator = std.testing.allocator; + const host_bytes = try std.fs.cwd().readFileAlloc( + allocator, + "test/wasm/platform/targets/wasm32/host.wasm", + 10 * 1024 * 1024, + ); + defer allocator.free(host_bytes); + + var module = try preload(allocator, host_bytes, true); + defer module.deinit(); + + // Phase 5 setup + module.removeMemoryAndTableImports(); + + // Phase 5 finalization with 64KB stack + try module.finalizeMemoryAndTable(65536); + + // Verify state after finalization + try std.testing.expect(module.has_memory); + try std.testing.expect(module.has_stack_pointer); + try std.testing.expect(module.memory_min_pages >= 1); + try std.testing.expectEqual(module.memory_min_pages * 65536, module.stack_pointer_init); + + // Verify memory export was added + var found_memory_export = false; + for (module.exports.items) |exp| { + if (std.mem.eql(u8, exp.name, "memory") and exp.kind == .memory) { + found_memory_export = true; + } + } + try std.testing.expect(found_memory_export); +} + +// --- Phase 6 tests: WASM Function Pointer Representation & RocOps Layout --- + +test "RocOps struct — correct field offsets for wasm32 (36 bytes total)" { + const W = Self.WasmRocOps; + try std.testing.expectEqual(@as(u32, 0), W.env_ptr); + try std.testing.expectEqual(@as(u32, 4), W.roc_alloc_table_idx); + try std.testing.expectEqual(@as(u32, 8), W.roc_dealloc_table_idx); + try std.testing.expectEqual(@as(u32, 12), W.roc_realloc_table_idx); + try std.testing.expectEqual(@as(u32, 16), W.roc_dbg_table_idx); + try std.testing.expectEqual(@as(u32, 20), W.roc_expect_failed_table_idx); + try std.testing.expectEqual(@as(u32, 24), W.roc_crashed_table_idx); + try std.testing.expectEqual(@as(u32, 28), W.hosted_fns_count); + try std.testing.expectEqual(@as(u32, 32), W.hosted_fns_ptr); + try std.testing.expectEqual(@as(u32, 36), W.total_size); + // Each field is 4 bytes (i32 on wasm32), 9 fields total + try std.testing.expectEqual(@as(u32, 9 * 4), W.total_size); +} + +test "call_indirect — roc_alloc uses 2-arg callback type, not RocCall type" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + // Register the 2-arg RocOps callback type: (i32, i32) -> void + const roc_ops_type = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + // Register the 3-arg RocCall type: (i32, i32, i32) -> void + const roc_call_type = try module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); + + // They must be distinct type indices + try std.testing.expect(roc_ops_type != roc_call_type); + + // Import roc_alloc with the 2-arg type + module.enableTable(); + const roc_alloc_idx = try module.addImport("env", "roc_alloc", roc_ops_type); + + // Verify the import's type index is the 2-arg type, not the 3-arg type + try std.testing.expectEqual(roc_ops_type, module.imports.items[roc_alloc_idx].type_idx); + try std.testing.expect(module.imports.items[roc_alloc_idx].type_idx != roc_call_type); +} + +test "call_indirect — hosted function uses 3-arg RocCall type" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + // Register both type signatures + const roc_ops_type = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + const roc_call_type = try module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); + + module.enableTable(); + + // Add a hosted function using the convenience method + const table_idx = try module.addHostedFunctionToTable("env", "hosted_fn_0", roc_call_type); + + // The hosted function import should use the 3-arg type + try std.testing.expectEqual(roc_call_type, module.imports.items[0].type_idx); + try std.testing.expect(module.imports.items[0].type_idx != roc_ops_type); + + // It should have a valid table entry + try std.testing.expectEqual(@as(u32, 0), table_idx); + try std.testing.expectEqual(@as(u32, 0), module.table_func_indices.items[0]); +} + +test "call_indirect — mismatched type index would trap (validate type separation)" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + // Register both type signatures + const roc_ops_type = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + const roc_call_type = try module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); + + // Verify they are stored as distinct entries in func_types + try std.testing.expectEqual(@as(usize, 2), module.func_types.items.len); + try std.testing.expect(roc_ops_type != roc_call_type); + + // The 2-arg type has 2 params + try std.testing.expectEqual(@as(usize, 2), module.func_types.items[roc_ops_type].params.len); + try std.testing.expectEqual(ValType.i32, module.func_types.items[roc_ops_type].params[0]); + try std.testing.expectEqual(ValType.i32, module.func_types.items[roc_ops_type].params[1]); + + // The 3-arg type has 3 params + try std.testing.expectEqual(@as(usize, 3), module.func_types.items[roc_call_type].params.len); + try std.testing.expectEqual(ValType.i32, module.func_types.items[roc_call_type].params[0]); + try std.testing.expectEqual(ValType.i32, module.func_types.items[roc_call_type].params[1]); + try std.testing.expectEqual(ValType.i32, module.func_types.items[roc_call_type].params[2]); + + // Both return void (no result) + try std.testing.expectEqual(@as(?ValType, null), module.func_type_results.items[roc_ops_type]); + try std.testing.expectEqual(@as(?ValType, null), module.func_type_results.items[roc_call_type]); +} + +test "function table — all RocOps functions have valid table entries after linking" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + const roc_ops_type = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + module.enableTable(); + + // Import all 6 RocOps callbacks and add them to the table + const roc_alloc_idx = try module.addImport("env", "roc_alloc", roc_ops_type); + const roc_alloc_table = try module.addTableElement(roc_alloc_idx); + + const roc_dealloc_idx = try module.addImport("env", "roc_dealloc", roc_ops_type); + const roc_dealloc_table = try module.addTableElement(roc_dealloc_idx); + + const roc_realloc_idx = try module.addImport("env", "roc_realloc", roc_ops_type); + const roc_realloc_table = try module.addTableElement(roc_realloc_idx); + + const roc_dbg_idx = try module.addImport("env", "roc_dbg", roc_ops_type); + const roc_dbg_table = try module.addTableElement(roc_dbg_idx); + + const roc_expect_failed_idx = try module.addImport("env", "roc_expect_failed", roc_ops_type); + const roc_expect_failed_table = try module.addTableElement(roc_expect_failed_idx); + + const roc_crashed_idx = try module.addImport("env", "roc_crashed", roc_ops_type); + const roc_crashed_table = try module.addTableElement(roc_crashed_idx); + + // Verify all 6 have sequential table indices + try std.testing.expectEqual(@as(u32, 0), roc_alloc_table); + try std.testing.expectEqual(@as(u32, 1), roc_dealloc_table); + try std.testing.expectEqual(@as(u32, 2), roc_realloc_table); + try std.testing.expectEqual(@as(u32, 3), roc_dbg_table); + try std.testing.expectEqual(@as(u32, 4), roc_expect_failed_table); + try std.testing.expectEqual(@as(u32, 5), roc_crashed_table); + + // Verify table_func_indices maps back to the correct function indices + try std.testing.expectEqual(@as(usize, 6), module.table_func_indices.items.len); + try std.testing.expectEqual(roc_alloc_idx, module.table_func_indices.items[0]); + try std.testing.expectEqual(roc_dealloc_idx, module.table_func_indices.items[1]); + try std.testing.expectEqual(roc_realloc_idx, module.table_func_indices.items[2]); + try std.testing.expectEqual(roc_dbg_idx, module.table_func_indices.items[3]); + try std.testing.expectEqual(roc_expect_failed_idx, module.table_func_indices.items[4]); + try std.testing.expectEqual(roc_crashed_idx, module.table_func_indices.items[5]); + + // All imports should use the 2-arg type + for (module.imports.items) |import| { + try std.testing.expectEqual(roc_ops_type, import.type_idx); + } +} + +test "function table — hosted functions added to table with correct indices" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + const roc_ops_type = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + const roc_call_type = try module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); + module.enableTable(); + + // Add RocOps callbacks first (as the codegen does) + const roc_alloc_idx = try module.addImport("env", "roc_alloc", roc_ops_type); + _ = try module.addTableElement(roc_alloc_idx); + const roc_dealloc_idx = try module.addImport("env", "roc_dealloc", roc_ops_type); + _ = try module.addTableElement(roc_dealloc_idx); + + // Now add hosted functions — they follow the RocOps entries in the table + const hosted_0_table = try module.addHostedFunctionToTable("env", "hosted_fn_0", roc_call_type); + const hosted_1_table = try module.addHostedFunctionToTable("env", "hosted_fn_1", roc_call_type); + const hosted_2_table = try module.addHostedFunctionToTable("env", "hosted_fn_2", roc_call_type); + + // Hosted functions follow RocOps entries (indices 0, 1 are alloc/dealloc) + try std.testing.expectEqual(@as(u32, 2), hosted_0_table); + try std.testing.expectEqual(@as(u32, 3), hosted_1_table); + try std.testing.expectEqual(@as(u32, 4), hosted_2_table); + + // Total table size: 2 RocOps + 3 hosted = 5 + try std.testing.expectEqual(@as(usize, 5), module.table_func_indices.items.len); + + // Verify hosted function imports use the 3-arg type + // Imports: [roc_alloc, roc_dealloc, hosted_fn_0, hosted_fn_1, hosted_fn_2] + try std.testing.expectEqual(roc_ops_type, module.imports.items[0].type_idx); + try std.testing.expectEqual(roc_ops_type, module.imports.items[1].type_idx); + try std.testing.expectEqual(roc_call_type, module.imports.items[2].type_idx); + try std.testing.expectEqual(roc_call_type, module.imports.items[3].type_idx); + try std.testing.expectEqual(roc_call_type, module.imports.items[4].type_idx); + + // Verify table entries point to correct function indices + try std.testing.expectEqual(@as(u32, 2), module.table_func_indices.items[2]); // hosted_fn_0 + try std.testing.expectEqual(@as(u32, 3), module.table_func_indices.items[3]); // hosted_fn_1 + try std.testing.expectEqual(@as(u32, 4), module.table_func_indices.items[4]); // hosted_fn_2 +} + +test "findFunctionIdxBySuffix — ignores imported symbols and finds defined host callback" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + const type_idx = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + _ = try module.addImport("env", "roc_dbg", type_idx); + module.import_fn_count = 1; + + const callback_idx = try module.addFunction(type_idx); + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = 0, .name = "host.roc_dbg", .index = callback_idx }, + }); + + try std.testing.expectEqual(callback_idx, module.findFunctionIdxBySuffix("roc_dbg").?); +} + +test "ensureTableElement — reuses existing table entry" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + const type_idx = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + module.enableTable(); + + const func_idx = try module.addFunction(type_idx); + const first = try module.ensureTableElement(func_idx); + const second = try module.ensureTableElement(func_idx); + + try std.testing.expectEqual(@as(u32, 0), first); + try std.testing.expectEqual(first, second); + try std.testing.expectEqual(@as(usize, 1), module.table_func_indices.items.len); +} + +// --- mergeModule tests --- + +/// Build a "host" module for merge testing. +/// +/// Function index space: +/// 0: roc_alloc (import, env) +/// 1: roc_dealloc (import, env) +/// 2: host_fn_0 (defined) — body calls roc_alloc (fn 0) +/// +/// Types: +/// 0: (i32, i32) → void (roc_alloc signature) +/// 1: () → void (simple void function) +/// +/// Symbol table: +/// sym 0: undefined function index 0 (roc_alloc) — implicitly named +/// sym 1: undefined function index 1 (roc_dealloc) — implicitly named +/// sym 2: defined function index 2 (host_fn_0) +fn buildMergeHostModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + + // Type 0: (i32, i32) -> void + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + // Type 1: () -> void + _ = try module.addFuncType(&.{}, &.{}); + + // 2 imports + _ = try module.addImport("env", "roc_alloc", 0); + _ = try module.addImport("env", "roc_dealloc", 0); + module.import_fn_count = 2; + + // 1 defined function (type 1: () -> void) + try module.func_type_indices.append(allocator, 1); // host_fn_0 (global index 2) + + // Code: host_fn_0 calls roc_alloc (fn 0) + // offset 0: body_size=8 + // offset 1: 0x00 (no locals) + // offset 2: Op.call + // offset 3..7: padded LEB128(0) + // offset 8: Op.end + try module.code_bytes.appendSlice(allocator, &.{0x08}); // body size = 8 + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 0); // call fn 0 (roc_alloc) + try module.code_bytes.append(allocator, Op.end); + + try module.function_offsets.append(allocator, 0); // host_fn_0 at offset 0 + + // Symbol table + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 1 }, + .{ .kind = .function, .flags = 0, .name = "host_fn_0", .index = 2 }, + }); + + // Relocation: host_fn_0's call at offset 3 → sym 0 (roc_alloc) + try module.reloc_code.entries.append(allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 3, + .symbol_index = 0, + } }); + + return module; +} + +/// Build a "builtins" module for merge testing. +/// +/// Function index space: +/// 0: roc_alloc (import, env) — shared with host +/// 1: builtin_fn_0 (defined) — body calls roc_alloc (fn 0) +/// 2: builtin_fn_1 (defined) — body is just Op.end +/// +/// Types: +/// 0: (i32, i32) → void (roc_alloc — same as host type 0) +/// 1: (i32) → i32 (new type, not in host) +/// +/// Data segment: +/// Segment 0: 4 bytes "DATA" at offset 0 +/// +/// Symbol table: +/// sym 0: undefined function index 0 (roc_alloc) — implicitly named +/// sym 1: defined function index 1 (roc_builtins_str_trim) +/// sym 2: defined function index 2 (roc_builtins_str_concat) +/// sym 3: defined data segment 0, offset 0, size 4 +fn buildMergeBuiltinsModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + module.data_offset = 0; // builtins start data at 0 + + // Type 0: (i32, i32) -> void (same as host type 0) + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + // Type 1: (i32) -> i32 (new type) + _ = try module.addFuncType(&.{.i32}, &.{.i32}); + + // 1 import (roc_alloc — shared with host) + _ = try module.addImport("env", "roc_alloc", 0); + module.import_fn_count = 1; + + // 2 defined functions + try module.func_type_indices.append(allocator, 0); // builtin_fn_0 uses type 0 + try module.func_type_indices.append(allocator, 1); // builtin_fn_1 uses type 1 + + // Code: builtin_fn_0 calls roc_alloc (fn 0) + // offset 0: body_size=8 + // offset 1: 0x00 + // offset 2: Op.call + // offset 3..7: padded LEB128(0) + // offset 8: Op.end + try module.code_bytes.appendSlice(allocator, &.{0x08}); + try module.code_bytes.append(allocator, 0x00); + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 0); // call fn 0 + try module.code_bytes.append(allocator, Op.end); + + // Code: builtin_fn_1 (no calls, just return) + // offset 9: body_size=2 + // offset 10: 0x00 + // offset 11: Op.end + try module.code_bytes.appendSlice(allocator, &.{0x02}); + try module.code_bytes.append(allocator, 0x00); + try module.code_bytes.append(allocator, Op.end); + + try module.function_offsets.append(allocator, 0); // builtin_fn_0 at offset 0 + try module.function_offsets.append(allocator, 9); // builtin_fn_1 at offset 9 + + // Data segment: 4 bytes "DATA" + _ = try module.addDataSegment("DATA", 4); + + // Symbol table + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = 0, .name = "roc_builtins_str_trim", .index = 1 }, + .{ .kind = .function, .flags = 0, .name = "roc_builtins_str_concat", .index = 2 }, + .{ .kind = .data, .flags = 0, .name = ".rodata", .index = 0, .data_offset = 0, .data_size = 4 }, + }); + + // Relocation: builtin_fn_0's call at offset 3 → sym 0 (roc_alloc) + try module.reloc_code.entries.append(allocator, .{ .index = .{ + .type_id = .function_index_leb, + .offset = 3, + .symbol_index = 0, + } }); + + return module; +} + +fn buildMergeDataRelocModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + module.data_offset = 0; + + // Segment 0: relocation patch site (4-byte placeholder) + _ = try module.addDataSegment(&[_]u8{ 0, 0, 0, 0 }, 4); + // Segment 1: relocation target + const target_offset = try module.addDataSegment("DATA", 4); + + try module.linking.symbol_table.append(allocator, .{ + .kind = .data, + .flags = 0, + .name = ".rodata.target", + .index = 1, + .data_offset = target_offset, // absolute address before merge + .data_size = 4, + }); + + try module.reloc_data.entries.append(allocator, .{ .offset = .{ + .type_id = .memory_addr_i32, + .offset = 0, + .symbol_index = 0, + .addend = 0, + .data_segment_index = 0, + } }); + + return module; +} + +test "mergeModule — type deduplication: identical signatures share index" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + const builtins = try buildMergeBuiltinsModule(allocator); + defer @constCast(&builtins).deinit(); + + const host_types_before = host.func_types.items.len; + + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + // Host had 2 types: (i32,i32)->void, ()->void + // Builtins had 2 types: (i32,i32)->void (dup), (i32)->i32 (new) + // After merge: 3 types total (one deduplicated) + try std.testing.expectEqual(host_types_before + 1, host.func_types.items.len); +} + +test "mergeModule — function indices remapped correctly" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + const builtins = try buildMergeBuiltinsModule(allocator); + defer @constCast(&builtins).deinit(); + + // Host has: 2 imports + 1 defined = 3 total functions + // After merge: 2 imports + 1 host_defined + 2 builtins_defined = 5 total + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + try std.testing.expectEqual(@as(usize, 3), host.func_type_indices.items.len); // 1 host + 2 builtins + // Builtins defined functions should start at global index 3 (2 imports + 1 host_defined) + // Check symbol for roc_builtins_str_trim (was source global index 1) + const trim_sym_idx = result.symbol_remap[1]; // src sym 1 → host sym + const trim_sym = host.linking.symbol_table.items[trim_sym_idx]; + try std.testing.expectEqual(@as(u32, 3), trim_sym.index); // global fn index 3 + try std.testing.expectEqualStrings("roc_builtins_str_trim", trim_sym.name.?); + + // Check roc_builtins_str_concat (was source global index 2) + const concat_sym_idx = result.symbol_remap[2]; + const concat_sym = host.linking.symbol_table.items[concat_sym_idx]; + try std.testing.expectEqual(@as(u32, 4), concat_sym.index); // global fn index 4 + try std.testing.expectEqualStrings("roc_builtins_str_concat", concat_sym.name.?); +} + +test "mergeModule — code bytes appended at correct offset" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + const builtins = try buildMergeBuiltinsModule(allocator); + defer @constCast(&builtins).deinit(); + + const host_code_len = host.code_bytes.items.len; + const builtins_code_len = builtins.code_bytes.items.len; + + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + // Code bytes should be concatenated + try std.testing.expectEqual(host_code_len + builtins_code_len, host.code_bytes.items.len); + + // Function offsets for builtins should be shifted by host's code length + // Host had 1 function_offset at 0. Builtins had 2 at 0, 9. + // After merge: offsets [0, host_code_len+0, host_code_len+9] + try std.testing.expectEqual(@as(usize, 3), host.function_offsets.items.len); + try std.testing.expectEqual(@as(u32, 0), host.function_offsets.items[0]); // host_fn_0 + try std.testing.expectEqual(@as(u32, @intCast(host_code_len)), host.function_offsets.items[1]); // builtin_fn_0 + try std.testing.expectEqual(@as(u32, @intCast(host_code_len + 9)), host.function_offsets.items[2]); // builtin_fn_1 +} + +test "mergeModule — undefined symbol in builtins resolved to host's roc_alloc import" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + const builtins = try buildMergeBuiltinsModule(allocator); + defer @constCast(&builtins).deinit(); + + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + // Builtins sym 0 was undefined roc_alloc. It should resolve to host sym 0. + try std.testing.expectEqual(@as(u32, 0), result.symbol_remap[0]); + + // No new import should be added (roc_alloc already exists in host) + try std.testing.expectEqual(@as(usize, 2), host.imports.items.len); +} + +test "mergeModule — relocation offsets shifted by base_code_offset" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + const builtins = try buildMergeBuiltinsModule(allocator); + defer @constCast(&builtins).deinit(); + + const host_code_len: u32 = @intCast(host.code_bytes.items.len); + const host_reloc_count = host.reloc_code.entries.items.len; + + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + // Host had 1 relocation, builtins had 1 → total 2 + try std.testing.expectEqual(host_reloc_count + 1, host.reloc_code.entries.items.len); + + // Host's original relocation at offset 3 should be unchanged + try std.testing.expectEqual(@as(u32, 3), host.reloc_code.entries.items[0].getOffset()); + + // Builtins' relocation was at offset 3, should now be at host_code_len + 3 + try std.testing.expectEqual(host_code_len + 3, host.reloc_code.entries.items[1].getOffset()); + + // The builtins relocation's symbol should be remapped to host's roc_alloc symbol + try std.testing.expectEqual(@as(u32, 0), host.reloc_code.entries.items[1].getSymbolIndex()); +} + +test "mergeModule — data segment merged with adjusted offset" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + const builtins = try buildMergeBuiltinsModule(allocator); + defer @constCast(&builtins).deinit(); + + const host_data_count = host.data_segments.items.len; + + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + // Builtins had 1 data segment + try std.testing.expectEqual(host_data_count + 1, host.data_segments.items.len); + + // The new data segment should have the "DATA" content + const new_ds = host.data_segments.items[host.data_segments.items.len - 1]; + try std.testing.expectEqualStrings("DATA", new_ds.data); + + // Offset should be >= host's data_offset (1024 default) + try std.testing.expect(new_ds.offset >= 1024); +} + +test "mergeModule + resolveDataRelocations — patches merged data segment bytes" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + var source = try buildMergeDataRelocModule(allocator); + defer source.deinit(); + + _ = try host.addDataSegment("HOST", 4); + + var result = try host.mergeModule(&source); + defer result.deinit(); + + try std.testing.expectEqual(@as(usize, 3), host.data_segments.items.len); + try std.testing.expectEqual(@as(usize, 1), host.reloc_data.entries.items.len); + + host.resolveDataRelocations(); + + const patch_segment = host.data_segments.items[1]; + const target_segment = host.data_segments.items[2]; + const patched = std.mem.readInt(u32, patch_segment.data[0..4], .little); + + try std.testing.expectEqual(target_segment.offset, patched); +} + +test "mergeModule — element section entries remapped and appended" { + const allocator = std.testing.allocator; + var host = try buildMergeHostModule(allocator); + defer host.deinit(); + var builtins = try buildMergeBuiltinsModule(allocator); + defer builtins.deinit(); + + // Host has 1 table entry: func_idx=2 (host_fn_0) + _ = try host.addTableElement(2); + + // Builtins have 1 table entry: func_idx=1 (builtin_fn_0, source index space) + try builtins.table_func_indices.append(allocator, 1); + + var result = try host.mergeModule(&builtins); + defer result.deinit(); + + // Host had 1 + builtins had 1 → total 2 + try std.testing.expectEqual(@as(usize, 2), host.table_func_indices.items.len); + // Host's entry unchanged + try std.testing.expectEqual(@as(u32, 2), host.table_func_indices.items[0]); + // Builtins' entry remapped: source fn 1 → self fn 3 + // (host has 2 imports + 1 defined = base 3, source defined fn 0 maps to 3) + try std.testing.expectEqual(@as(u32, 3), host.table_func_indices.items[1]); + try std.testing.expect(host.has_table); +} + +test "resolveCodeRelocations — table_index_sleb resolves to table index not function index" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + // Type 0: (i32, i32) -> void + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + // Type 1: () -> void + _ = try module.addFuncType(&.{}, &.{}); + + // 2 imports + _ = try module.addImport("env", "roc_alloc", 0); + _ = try module.addImport("env", "roc_dealloc", 0); + module.import_fn_count = 2; + + // 3 defined functions (global indices 2, 3, 4) + try module.func_type_indices.append(allocator, 0); + try module.func_type_indices.append(allocator, 0); + try module.func_type_indices.append(allocator, 1); + + // Table: fn 0 at table idx 0, fn 3 at table idx 1, fn 4 at table idx 2 + _ = try module.addTableElement(0); + _ = try module.addTableElement(3); + _ = try module.addTableElement(4); + + // Code: a function body with i32.const + try module.code_bytes.appendSlice(allocator, &.{0x08}); // body size + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.i32_const); + try appendPaddedU32(allocator, &module.code_bytes, 0); // placeholder at offset 3 + try module.code_bytes.append(allocator, Op.end); + try module.function_offsets.append(allocator, 0); + + // Symbol: sym 0 = function at global index 4 (which is table index 2) + try module.linking.symbol_table.append(allocator, .{ + .kind = .function, + .flags = 0, + .name = "my_fn", + .index = 4, + }); + + // Relocation: table_index_sleb at offset 3 → sym 0 + try module.reloc_code.entries.append(allocator, .{ .index = .{ + .type_id = .table_index_sleb, + .offset = 3, + .symbol_index = 0, + } }); + + module.resolveCodeRelocations(); + + // Should be patched to table index 2 (position in table_func_indices), NOT function index 4 + var expected = [_]u8{0} ** 5; + overwritePaddedI32(&expected, 0, 2); + try std.testing.expectEqualSlices(u8, &expected, module.code_bytes.items[3..8]); +} + +test "parseElementSection_ — parses function indices into table_func_indices" { + const allocator = std.testing.allocator; + + // Build a module with known table entries, encode it, then re-parse. + var source = Self.init(allocator); + defer source.deinit(); + + _ = try source.addFuncType(&.{}, &.{}); + _ = try source.addImport("env", "fn0", 0); + source.import_fn_count = 1; + + try source.func_type_indices.append(allocator, 0); // fn 1 + try source.func_type_indices.append(allocator, 0); // fn 2 + + // Minimal function bodies + try source.func_bodies.append(allocator, .{ .body = try allocator.dupe(u8, &.{ 0x00, Op.end }) }); + try source.func_bodies.append(allocator, .{ .body = try allocator.dupe(u8, &.{ 0x00, Op.end }) }); + + _ = try source.addTableElement(1); // table idx 0 → fn 1 + _ = try source.addTableElement(2); // table idx 1 → fn 2 + source.has_table = true; + + source.has_memory = true; + + // Encode to binary + const encoded = try source.encode(allocator); + defer allocator.free(encoded); + + // Re-parse from binary + var parsed = try Self.preload(allocator, encoded, false); + defer parsed.deinit(); + + // Verify element section was parsed + try std.testing.expectEqual(@as(usize, 2), parsed.table_func_indices.items.len); + try std.testing.expectEqual(@as(u32, 1), parsed.table_func_indices.items[0]); + try std.testing.expectEqual(@as(u32, 2), parsed.table_func_indices.items[1]); + try std.testing.expect(parsed.has_table); +} + +test "BuiltinSymbols — all symbols found after merge" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + // Type 0: () -> void + _ = try module.addFuncType(&.{}, &.{}); + // 1 import (roc_alloc) + _ = try module.addImport("env", "roc_alloc", 0); + module.import_fn_count = 1; + + // Add a defined function symbol for each builtin that BuiltinSymbols expects. + const names = comptime blk: { + var result: [BuiltinSymbols.mapping.len][]const u8 = undefined; + for (BuiltinSymbols.mapping, 0..) |entry, i| { + result[i] = entry[0]; + } + break :blk result; + }; + + for (names, 0..) |name, i| { + try module.linking.symbol_table.append(allocator, .{ + .kind = .function, + .flags = 0, + .name = name, + .index = @as(u32, @intCast(i)) + 1, // function index after imports + }); + } + + const syms = BuiltinSymbols.populate(&module) catch |err| { + std.debug.print("populate failed: {}\n", .{err}); + return err; + }; + + // Spot check a few fields (populate returns function index = i + 1, since index 0 is the import) + try std.testing.expectEqual(@as(u32, 1), syms.dec_mul); // function index 1 (first defined fn after import) + try std.testing.expectEqual(@as(u32, 21), syms.str_trim); // function index 21 +} + +test "BuiltinSymbols — fails when symbol missing" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + // Empty symbol table — should fail + const result = BuiltinSymbols.populate(&module); + try std.testing.expectError(error.MissingBuiltinSymbol, result); +} + +test "resolveCodeRelocations — patches function call in code_bytes" { + const allocator = std.testing.allocator; + var module = try buildMergeHostModule(allocator); + defer module.deinit(); + + // Before resolution, the call operand at offset 3 is padded LEB128(0). + // Symbol 0 is roc_alloc at function index 0. + // After resolution it should still be 0 — but let's change the symbol's index + // to verify the patch actually happens. + + // Change roc_alloc's symbol to point to function index 42. + module.linking.symbol_table.items[0].index = 42; + + module.resolveCodeRelocations(); + + // Read the patched value at offset 3 (5-byte padded LEB128). + var expected = [_]u8{0} ** 5; + overwritePaddedU32(&expected, 0, 42); + try std.testing.expectEqualSlices(u8, &expected, module.code_bytes.items[3..8]); +} + +test "materializeFuncBodies — produces correct function bodies from code_bytes" { + const allocator = std.testing.allocator; + var module = try buildMergeHostModule(allocator); + defer module.deinit(); + + try module.materializeFuncBodies(); + + // Host has 1 defined function, no dummies. + try std.testing.expectEqual(@as(usize, 1), module.func_bodies.items.len); + + // The body should be 8 bytes: [0x00 (locals), Op.call, 5 bytes LEB128, Op.end] + try std.testing.expectEqual(@as(usize, 8), module.func_bodies.items[0].body.len); +} + +test "materializeFuncBodies — includes dummy functions for dead imports" { + const allocator = std.testing.allocator; + var module = try buildLinkingTestModule(allocator); + defer module.deinit(); + + // Simulate one dead import dummy + module.dead_import_dummy_count = 1; + try module.func_type_indices.insert(allocator, 0, 0); + + try module.materializeFuncBodies(); + + // Should have 1 dummy + 2 real = 3 func bodies + try std.testing.expectEqual(@as(usize, 3), module.func_bodies.items.len); + + // First should be the dummy (unreachable + end) + try std.testing.expectEqualSlices(u8, &DUMMY_FUNCTION, module.func_bodies.items[0].body); +} + +test "verifyNoBuiltinImports — passes when only RocOps imports remain" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + _ = try module.addImport("env", "roc_alloc", 0); + _ = try module.addImport("env", "roc_dealloc", 0); + _ = try module.addImport("env", "roc_realloc", 0); + _ = try module.addImport("env", "roc_dbg", 0); + _ = try module.addImport("env", "roc_expect_failed", 0); + _ = try module.addImport("env", "roc_crashed", 0); + + try module.verifyNoBuiltinImports(); +} + +test "verifyNoBuiltinImports — fails if roc_str_trim import still present" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + _ = try module.addImport("env", "roc_alloc", 0); + _ = try module.addImport("env", "roc_str_trim", 0); // stale builtin import + + const result = module.verifyNoBuiltinImports(); + try std.testing.expectError(error.UnresolvedBuiltinImport, result); +} + +test "verifyNoBuiltinImports — allows non-roc imports" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + _ = try module.addImport("env", "roc_alloc", 0); + _ = try module.addImport("env", "custom_platform_fn", 0); // non-roc import is fine + + try module.verifyNoBuiltinImports(); +} + +test "verifyNoBuiltinImports — allows roc_panic platform import" { + const allocator = std.testing.allocator; + var module = Self.init(allocator); + defer module.deinit(); + + _ = try module.addFuncType(&.{ .i32, .i32 }, &.{}); + _ = try module.addImport("env", "roc_panic", 0); + + try module.verifyNoBuiltinImports(); +} + +// --- Dead Code Elimination Tests --- + +/// Build a test module for DCE tests. +/// +/// Layout: +/// import 0: js_log (type 0) +/// import 1: js_unused (type 0) +/// import 2: js_helper (type 0) +/// defined 3: main_fn — calls js_log (import 0) and helper_fn (defined 4) +/// defined 4: helper_fn — calls js_helper (import 2) +/// defined 5: dead_fn — calls js_unused (import 1) +/// +/// Exports: main_fn (index 3) as "main" +/// +/// Symbol table: +/// sym 0: undefined fn 0 (js_log) +/// sym 1: undefined fn 1 (js_unused) +/// sym 2: undefined fn 2 (js_helper) +/// sym 3: defined fn 3 (main_fn) +/// sym 4: defined fn 4 (helper_fn) +/// sym 5: defined fn 5 (dead_fn) +/// +/// Relocation entries: +/// fn3 body: call sym 0 (js_log) at offset 3, call sym 4 (helper_fn) at offset 10 +/// fn4 body: call sym 2 (js_helper) at offset 21 +/// fn5 body: call sym 1 (js_unused) at offset 30 +fn buildDCETestModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + + // Type 0: () -> () + _ = try module.addFuncType(&.{}, &.{}); + + // 3 function imports + _ = try module.addImport("env", "js_log", 0); + _ = try module.addImport("env", "js_unused", 0); + _ = try module.addImport("env", "js_helper", 0); + module.import_fn_count = 3; + + // 3 defined functions (type 0) + try module.func_type_indices.append(allocator, 0); // main_fn (global index 3) + try module.func_type_indices.append(allocator, 0); // helper_fn (global index 4) + try module.func_type_indices.append(allocator, 0); // dead_fn (global index 5) + + // Build code_bytes: three function bodies, each 9 bytes. + // fn3 (main_fn): call js_log + call helper_fn + // offset 0: body_size=16 + // offset 1: 0x00 (no locals) + // offset 2: Op.call + // offset 3..7: padded LEB128 → sym 0 (js_log, fn 0) + // offset 8: Op.call (second call) + // offset 9..13: padded LEB128 → sym 4 (helper_fn, fn 4) + // offset 14..16: nop nop end + try module.code_bytes.appendSlice(allocator, &.{16}); // body size = 16 + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 0); // call fn 0 (placeholder) + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 4); // call fn 4 (placeholder) + try module.code_bytes.append(allocator, Op.nop); + try module.code_bytes.append(allocator, Op.nop); + try module.code_bytes.append(allocator, Op.end); + // fn3 total: 1 (size) + 16 (body) = 17 bytes, offsets 0..16 + + // fn4 (helper_fn): call js_helper + // offset 17: body_size=8 + // offset 18: 0x00 (no locals) + // offset 19: Op.call + // offset 20..24: padded LEB128 → sym 2 (js_helper, fn 2) + // offset 25: Op.end + try module.code_bytes.appendSlice(allocator, &.{8}); // body size = 8 + try module.code_bytes.append(allocator, 0x00); + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 2); // call fn 2 (placeholder) + try module.code_bytes.append(allocator, Op.end); + // fn4 total: 1 (size) + 8 (body) = 9 bytes, offsets 17..25 + + // fn5 (dead_fn): call js_unused + // offset 26: body_size=8 + // offset 27: 0x00 (no locals) + // offset 28: Op.call + // offset 29..33: padded LEB128 → sym 1 (js_unused, fn 1) + // offset 34: Op.end + try module.code_bytes.appendSlice(allocator, &.{8}); // body size = 8 + try module.code_bytes.append(allocator, 0x00); + try module.code_bytes.append(allocator, Op.call); + try appendPaddedU32(allocator, &module.code_bytes, 1); // call fn 1 (placeholder) + try module.code_bytes.append(allocator, Op.end); + // fn5 total: 1 (size) + 8 (body) = 9 bytes, offsets 26..34 + + try module.function_offsets.append(allocator, 0); // fn3 at offset 0 + try module.function_offsets.append(allocator, 17); // fn4 at offset 17 + try module.function_offsets.append(allocator, 26); // fn5 at offset 26 + + // Symbol table + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, // sym 0: js_log + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 1 }, // sym 1: js_unused + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 2 }, // sym 2: js_helper + .{ .kind = .function, .flags = 0, .name = "main_fn", .index = 3 }, // sym 3: main_fn + .{ .kind = .function, .flags = 0, .name = "helper_fn", .index = 4 }, // sym 4: helper_fn + .{ .kind = .function, .flags = 0, .name = "dead_fn", .index = 5 }, // sym 5: dead_fn + }); + + // Relocation entries for code section + try module.reloc_code.entries.appendSlice(allocator, &.{ + .{ .index = .{ .type_id = .function_index_leb, .offset = 3, .symbol_index = 0 } }, // fn3 calls js_log + .{ .index = .{ .type_id = .function_index_leb, .offset = 10, .symbol_index = 4 } }, // fn3 calls helper_fn (sym 4) + .{ .index = .{ .type_id = .function_index_leb, .offset = 21, .symbol_index = 2 } }, // fn4 calls js_helper + .{ .index = .{ .type_id = .function_index_leb, .offset = 30, .symbol_index = 1 } }, // fn5 calls js_unused + }); + + // Export main_fn + try module.exports.append(allocator, .{ .name = "main", .kind = .func, .idx = 3 }); + + return module; +} + +test "eliminateDeadCode — exported function and its callees are preserved" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // No extra called_fns — only exports seed the live set. + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + // main_fn (3) is exported → live. + // helper_fn (4) is called by main_fn → live. + // Both should have non-dummy bodies after DCE. + // Materialize to check. + try module.materializeFuncBodies(); + + // func_type_indices should have: 1 dummy (for eliminated import) + 3 original = 4. + // The first dummy entry + 3 defined functions. + // Check that main_fn and helper_fn bodies are NOT dummy. + const dummy_count = module.dead_import_dummy_count; + // main_fn is at func_type_indices[dummy_count + 0], helper_fn at [dummy_count + 1] + const main_body = module.func_bodies.items[dummy_count + 0].body; + const helper_body = module.func_bodies.items[dummy_count + 1].body; + + // Live bodies should be longer than the 3-byte dummy. + try std.testing.expect(main_body.len > DUMMY_FUNCTION.len); + try std.testing.expect(helper_body.len > DUMMY_FUNCTION.len); +} + +test "eliminateDeadCode — unreachable function body replaced with unreachable stub" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + try module.materializeFuncBodies(); + + // dead_fn (5) is not exported and not called by any live function. + // Its body should be the dummy stub. + const dummy_count = module.dead_import_dummy_count; + const dead_body = module.func_bodies.items[dummy_count + 2].body; + try std.testing.expectEqualSlices(u8, &DUMMY_FUNCTION, dead_body); +} + +test "eliminateDeadCode — dead import removed, dead_import_dummy_count incremented" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + const orig_import_count = module.imports.items.len; + const orig_dummy_count = module.dead_import_dummy_count; + + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + // js_unused (import 1) is only called by dead_fn which is dead. + // It should be removed. + try std.testing.expect(module.imports.items.len < orig_import_count); + try std.testing.expect(module.dead_import_dummy_count > orig_dummy_count); + + // js_log and js_helper should still be present (called by live functions). + var has_js_log = false; + var has_js_helper = false; + for (module.imports.items) |imp| { + if (std.mem.eql(u8, imp.field_name, "js_log")) has_js_log = true; + if (std.mem.eql(u8, imp.field_name, "js_helper")) has_js_helper = true; + } + try std.testing.expect(has_js_log); + try std.testing.expect(has_js_helper); +} + +test "eliminateDeadCode — non-function imports are preserved" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // Non-function imports (memory, table, global) are NOT stored in module.imports + // (the parser strips them). So we just verify that module.has_memory and + // module.has_table are not touched by DCE. + module.has_memory = true; + module.has_table = true; + + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + try std.testing.expect(module.has_memory); + try std.testing.expect(module.has_table); +} + +test "eliminateDeadCode — indirect call targets (element section) preserved" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // Add dead_fn (5) to the element section (indirect call target). + // Even though nothing directly calls it, it should stay live because + // it's in the table. + try module.table_func_indices.append(allocator, 5); + + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + try module.materializeFuncBodies(); + + // dead_fn should now be live (its body is NOT the dummy). + const dummy_count = module.dead_import_dummy_count; + const dead_body = module.func_bodies.items[dummy_count + 2].body; + try std.testing.expect(dead_body.len > DUMMY_FUNCTION.len); +} + +test "eliminateDeadCode — transitive callees preserved (A calls B calls C → all live)" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // main_fn (3) → helper_fn (4) → js_helper (import 2) + // All three should be live. js_helper is an import that's called by + // helper_fn which is called by exported main_fn. + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + // js_helper should still be in imports. + var found_js_helper = false; + for (module.imports.items) |imp| { + if (std.mem.eql(u8, imp.field_name, "js_helper")) found_js_helper = true; + } + try std.testing.expect(found_js_helper); +} + +test "eliminateDeadCode — init functions preserved" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // Remove the export so main_fn would normally be dead. + module.exports.items.len = 0; + + // But mark dead_fn (sym 5) as an init function — it should stay live. + try module.linking.init_funcs.append(allocator, .{ .priority = 0, .symbol_index = 5 }); + + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + try module.materializeFuncBodies(); + + // dead_fn (index 5, third defined function) should be live. + const dummy_count = module.dead_import_dummy_count; + const init_body = module.func_bodies.items[dummy_count + 2].body; + try std.testing.expect(init_body.len > DUMMY_FUNCTION.len); + + // main_fn and helper_fn should now be dead (no export, no callers). + const main_body = module.func_bodies.items[dummy_count + 0].body; + const helper_body = module.func_bodies.items[dummy_count + 1].body; + try std.testing.expectEqualSlices(u8, &DUMMY_FUNCTION, main_body); + try std.testing.expectEqualSlices(u8, &DUMMY_FUNCTION, helper_body); +} + +test "eliminateDeadCode — call_indirect conservatively keeps matching-signature functions" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // Add a type 1: (i32) -> () + _ = try module.addFuncType(&.{.i32}, &.{}); + + // Change dead_fn (index 5) to type 1 so it has a unique signature. + // func_type_indices[2] = type 1 (dead_fn is the 3rd defined function). + module.func_type_indices.items[2] = 1; + + // Add dead_fn to the element section so it's an indirect call target. + try module.table_func_indices.append(allocator, 5); + + // Add a call_indirect (type_index_leb) relocation in main_fn's body + // pointing to a symbol with type index 1 (matching dead_fn's signature). + // We need a symbol for type 1. Add it to the symbol table. + try module.linking.symbol_table.append(allocator, .{ + .kind = .function, // type_index_leb relocs use function symbols in some impls, + // but the index field carries the type index. + // For our implementation, we use the symbol's index as the type index. + .flags = 0, + .name = "type1_sig", + .index = 1, // type index 1 + }); + const type_sym_idx: u32 = @intCast(module.linking.symbol_table.items.len - 1); + + // Add a type_index_leb reloc inside main_fn's body range (offset 0..17). + try module.reloc_code.entries.append(allocator, .{ + .index = .{ + .type_id = .type_index_leb, + .offset = 14, // within main_fn's byte range + .symbol_index = type_sym_idx, + }, + }); + + // Remove the direct call to helper_fn (reloc at offset 10) so the only + // reason dead_fn stays live is the indirect call. + // Actually, let's keep things simple: just verify dead_fn is live. + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + try module.materializeFuncBodies(); + + // dead_fn (type 1) is in element section AND there's a call_indirect + // with matching type 1 in a live function → should be live. + const dummy_count = module.dead_import_dummy_count; + const dead_body = module.func_bodies.items[dummy_count + 2].body; + try std.testing.expect(dead_body.len > DUMMY_FUNCTION.len); +} + +test "eliminateDeadCode — function indices unchanged after elimination" { + const allocator = std.testing.allocator; + var module = try buildDCETestModule(allocator); + defer module.deinit(); + + // Record original function count. + const orig_defined_count = module.function_offsets.items.len; + + var called_fns = [_]bool{false} ** 6; + try module.eliminateDeadCode(&called_fns); + + // The number of function_offsets entries should be unchanged + // (dead functions are stubbed, not removed). + try std.testing.expectEqual(orig_defined_count, module.function_offsets.items.len); + + // func_type_indices should grow by the number of eliminated imports + // (dummies are prepended), but the defined function entries are unchanged. + // The total should be: eliminated_imports + original_defined_count. + try std.testing.expectEqual( + module.dead_import_dummy_count + @as(u32, @intCast(orig_defined_count)), + @as(u32, @intCast(module.func_type_indices.items.len)), + ); +} + +// --- Phase 11: Serialization Tests --- + +/// Build a module simulating the post-surgical-linking state: +/// - Has code_bytes and function_offsets (from preload) +/// - Has dead_import_dummy_count > 0 (from linkHostToAppCalls) +/// - Has linking and reloc sections (from preload, should NOT appear in output) +/// - Has memory, exports, and data segments +fn buildEncodeTestModule(allocator: Allocator) !Self { + var module = Self.init(allocator); + errdefer module.deinit(); + + // Type 0: () -> () + _ = try module.addFuncType(&.{}, &.{}); + // Type 1: (i32) -> i32 + _ = try module.addFuncType(&.{.i32}, &.{.i32}); + + // One remaining import (e.g. roc_alloc) + _ = try module.addImport("env", "roc_alloc", 0); + module.import_fn_count = 1; + + // Simulate dead_import_dummy_count = 1 (one import was removed during linking) + module.dead_import_dummy_count = 1; + + // Two defined functions: dummy type + real type + // func_type_indices[0] = type 0 (dummy placeholder) + // func_type_indices[1] = type 0 (real func: main) + // func_type_indices[2] = type 1 (real func: helper) + try module.func_type_indices.append(allocator, 0); // dummy + try module.func_type_indices.append(allocator, 0); // main + try module.func_type_indices.append(allocator, 1); // helper + + // Build code_bytes with two real function bodies (LEB128 size prefix + body). + // Function 0 (main): body = [0x00 (no locals), Op.nop, Op.end] → size = 3 + try module.function_offsets.append(allocator, @intCast(module.code_bytes.items.len)); + try module.code_bytes.append(allocator, 0x03); // LEB128 body size = 3 + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.nop); + try module.code_bytes.append(allocator, Op.end); + + // Function 1 (helper): body = [0x00 (no locals), Op.nop, Op.nop, Op.end] → size = 4 + try module.function_offsets.append(allocator, @intCast(module.code_bytes.items.len)); + try module.code_bytes.append(allocator, 0x04); // LEB128 body size = 4 + try module.code_bytes.append(allocator, 0x00); // no locals + try module.code_bytes.append(allocator, Op.nop); + try module.code_bytes.append(allocator, Op.nop); + try module.code_bytes.append(allocator, Op.end); + + // Memory and stack pointer + module.enableMemory(2); + module.enableStackPointer(131072); + + // Export: memory and main function + try module.addExport("memory", .memory, 0); + try module.addExport("_start", .func, 2); // func idx 2 = import(1) + dummy(0) + main(local 1) + + // Data segment + const data = try allocator.dupe(u8, "test data"); + try module.data_segments.append(allocator, .{ .offset = 1024, .data = data }); + + // Linking section (should NOT appear in output) + try module.linking.symbol_table.appendSlice(allocator, &.{ + .{ .kind = .function, .flags = WasmLinking.SymFlag.UNDEFINED, .name = null, .index = 0 }, + .{ .kind = .function, .flags = 0, .name = "main", .index = 2 }, + .{ .kind = .function, .flags = 0, .name = "helper", .index = 3 }, + }); + + // Reloc.CODE section (should NOT appear in output) + try module.reloc_code.entries.append(allocator, .{ + .index = .{ .type_id = .function_index_leb, .offset = 1, .symbol_index = 1 }, + }); + + return module; +} + +test "encode — output is valid WASM (magic, version, section ordering)" { + const allocator = std.testing.allocator; + var module = try buildEncodeTestModule(allocator); + defer module.deinit(); + + try module.materializeFuncBodies(); + const output = try module.encode(allocator); + defer allocator.free(output); + + // Check magic number: \0asm + try std.testing.expectEqualSlices(u8, &.{ 0x00, 0x61, 0x73, 0x6D }, output[0..4]); + // Check version: 1 + try std.testing.expectEqualSlices(u8, &.{ 0x01, 0x00, 0x00, 0x00 }, output[4..8]); + + // Walk through sections and verify ordering + var pos: usize = 8; + var prev_section_id: u8 = 0; + while (pos < output.len) { + const section_id = output[pos]; + pos += 1; + + // Custom sections (id=0) can appear anywhere; standard sections must be ordered + if (section_id != 0) { + try std.testing.expect(section_id > prev_section_id); + prev_section_id = section_id; + } + + // Read section size (LEB128) + var cursor: usize = pos; + const section_size = readU32(output, &cursor) catch unreachable; + pos = cursor + section_size; + } + + // Should have consumed exactly all bytes + try std.testing.expectEqual(output.len, pos); +} + +test "encode — code section function count includes dummies" { + const allocator = std.testing.allocator; + var module = try buildEncodeTestModule(allocator); + defer module.deinit(); + + try module.materializeFuncBodies(); + const output = try module.encode(allocator); + defer allocator.free(output); + + // Find code section (id = 10) + var pos: usize = 8; + var code_section_start: ?usize = null; + while (pos < output.len) { + const section_id = output[pos]; + pos += 1; + var cursor: usize = pos; + const section_size = readU32(output, &cursor) catch unreachable; + if (section_id == @intFromEnum(SectionId.code_section)) { + code_section_start = cursor; + break; + } + pos = cursor + section_size; + } + + try std.testing.expect(code_section_start != null); + + // Read function count from the code section + var cursor = code_section_start.?; + const func_count = readU32(output, &cursor) catch unreachable; + + // Should be dummies (1) + real functions (2) = 3 + try std.testing.expectEqual(@as(u32, 3), func_count); +} + +test "encode — dummy functions prepended before real functions in code section" { + const allocator = std.testing.allocator; + var module = try buildEncodeTestModule(allocator); + defer module.deinit(); + + try module.materializeFuncBodies(); + const output = try module.encode(allocator); + defer allocator.free(output); + + // Find code section + var pos: usize = 8; + var code_body_start: ?usize = null; + while (pos < output.len) { + const section_id = output[pos]; + pos += 1; + var cursor: usize = pos; + const section_size = readU32(output, &cursor) catch unreachable; + if (section_id == @intFromEnum(SectionId.code_section)) { + // Skip the function count + _ = readU32(output, &cursor) catch unreachable; + code_body_start = cursor; + break; + } + pos = cursor + section_size; + } + + try std.testing.expect(code_body_start != null); + var cursor = code_body_start.?; + + // First function should be the dummy: body size = 3, body = DUMMY_FUNCTION + const dummy_size = readU32(output, &cursor) catch unreachable; + try std.testing.expectEqual(@as(u32, DUMMY_FUNCTION.len), dummy_size); + try std.testing.expectEqualSlices(u8, &DUMMY_FUNCTION, output[cursor .. cursor + dummy_size]); + cursor += dummy_size; + + // Second function (main): body size = 3, body = [0x00, nop, end] + const main_size = readU32(output, &cursor) catch unreachable; + try std.testing.expectEqual(@as(u32, 3), main_size); + try std.testing.expectEqual(@as(u8, 0x00), output[cursor]); // no locals + try std.testing.expectEqual(Op.nop, output[cursor + 1]); + try std.testing.expectEqual(Op.end, output[cursor + 2]); + cursor += main_size; + + // Third function (helper): body size = 4, body = [0x00, nop, nop, end] + const helper_size = readU32(output, &cursor) catch unreachable; + try std.testing.expectEqual(@as(u32, 4), helper_size); + try std.testing.expectEqual(@as(u8, 0x00), output[cursor]); // no locals + try std.testing.expectEqual(Op.nop, output[cursor + 1]); + try std.testing.expectEqual(Op.nop, output[cursor + 2]); + try std.testing.expectEqual(Op.end, output[cursor + 3]); +} + +test "encode — linking section NOT present in output" { + const allocator = std.testing.allocator; + var module = try buildEncodeTestModule(allocator); + defer module.deinit(); + + // Verify the module actually has linking data (precondition) + try std.testing.expect(module.linking.symbol_table.items.len > 0); + + try module.materializeFuncBodies(); + const output = try module.encode(allocator); + defer allocator.free(output); + + // Scan all sections — no custom section should have name "linking" + var pos: usize = 8; + while (pos < output.len) { + const section_id = output[pos]; + pos += 1; + var cursor: usize = pos; + const section_size = readU32(output, &cursor) catch unreachable; + const section_end = cursor + section_size; + + if (section_id == @intFromEnum(SectionId.custom_section)) { + // Read custom section name + const name_len = readU32(output, &cursor) catch unreachable; + const name = output[cursor .. cursor + name_len]; + try std.testing.expect(!std.mem.eql(u8, name, "linking")); + } + + pos = section_end; + } +} + +test "encode — reloc.CODE section NOT present in output" { + const allocator = std.testing.allocator; + var module = try buildEncodeTestModule(allocator); + defer module.deinit(); + + // Verify the module actually has reloc data (precondition) + try std.testing.expect(module.reloc_code.entries.items.len > 0); + + try module.materializeFuncBodies(); + const output = try module.encode(allocator); + defer allocator.free(output); + + // Scan all sections — no custom section should have name "reloc.CODE" + var pos: usize = 8; + while (pos < output.len) { + const section_id = output[pos]; + pos += 1; + var cursor: usize = pos; + const section_size = readU32(output, &cursor) catch unreachable; + const section_end = cursor + section_size; + + if (section_id == @intFromEnum(SectionId.custom_section)) { + // Read custom section name + const name_len = readU32(output, &cursor) catch unreachable; + const name = output[cursor .. cursor + name_len]; + try std.testing.expect(!std.mem.eql(u8, name, "reloc.CODE")); + } + + pos = section_end; + } +} + +test "preload + merge + encode roundtrip with real builtins" { + const allocator = std.testing.allocator; + + // Load the pre-built wasm32 builtins object + const wasm32_builtins = @import("wasm32_builtins"); + var builtins_module = try preload(allocator, wasm32_builtins.bytes, false); + defer builtins_module.deinit(); + + // Create an app module with standard RocOps imports + var app_module = Self.init(allocator); + + const roc_ops_type_idx = try app_module.addFuncType(&.{ .i32, .i32 }, &.{}); + for ([_][]const u8{ "roc_alloc", "roc_dealloc", "roc_realloc", "roc_dbg", "roc_expect_failed", "roc_crashed" }) |name| { + _ = try app_module.addImport("env", name, roc_ops_type_idx); + } + + // Merge builtins into app module + var merge_result = try app_module.mergeModule(&builtins_module); + merge_result.deinit(); + + // Populate builtin symbols + _ = try BuiltinSymbols.populate(&app_module); + + // Resolve relocations and materialize function bodies + app_module.resolveRelocations(); + try app_module.materializeFuncBodies(); + + // Enable memory + stack pointer + table (as generateModule does) + app_module.enableMemory(2); + app_module.enableStackPointer(131072); + app_module.enableTable(); + app_module.addExport("memory", .memory, 0) catch unreachable; + + // Add RocCall function: (i32, i32, i32) -> void + const roc_call_type_idx = try app_module.addFuncType(&.{ .i32, .i32, .i32 }, &.{}); + const roc_call_fn_idx = try app_module.addFunction(roc_call_type_idx); + const roc_call_body = [_]u8{ 0x00, Op.end }; + try app_module.setFunctionBody(roc_call_fn_idx, &roc_call_body); + app_module.addExport("roc__main_for_host_1_exposed", .func, roc_call_fn_idx) catch unreachable; + + // Add eval wrapper: (i32) -> i32 + const eval_type_idx = try app_module.addFuncType(&.{.i32}, &.{.i32}); + const eval_fn_idx = try app_module.addFunction(eval_type_idx); + const eval_body = [_]u8{ 0x00, Op.i32_const, 42, Op.end }; + try app_module.setFunctionBody(eval_fn_idx, &eval_body); + app_module.addExport("main", .func, eval_fn_idx) catch unreachable; + + // Encode the module + const encoded = try app_module.encode(allocator); + defer allocator.free(encoded); + app_module.deinit(); + + // Verify bytebox can decode it + const bytebox = @import("bytebox"); + var arena_impl = std.heap.ArenaAllocator.init(allocator); + defer arena_impl.deinit(); + const arena = arena_impl.allocator(); + + var module_def = try bytebox.createModuleDefinition(arena, .{}); + module_def.decode(encoded) catch |err| { + std.debug.print("bytebox decode failed: {}\n", .{err}); + return err; + }; } diff --git a/src/backend/wasm/mod.zig b/src/backend/wasm/mod.zig index ce6719fd7cb..858d75809b8 100644 --- a/src/backend/wasm/mod.zig +++ b/src/backend/wasm/mod.zig @@ -7,4 +7,6 @@ pub const WasmModule = @import("WasmModule.zig"); pub const WasmCodeGen = @import("WasmCodeGen.zig"); pub const WasmLayout = @import("WasmLayout.zig"); +pub const WasmLinking = @import("WasmLinking.zig"); pub const Storage = @import("Storage.zig"); +pub const CodeBuilder = @import("CodeBuilder.zig"); diff --git a/src/builtins/dec.zig b/src/builtins/dec.zig index 43de3e3599a..d446529c053 100644 --- a/src/builtins/dec.zig +++ b/src/builtins/dec.zig @@ -263,7 +263,20 @@ pub const RocDec = extern struct { buf[position] = '.'; position += 1; - const trailing_zeros: u6 = count_trailing_zeros_base10(num); + // Count trailing base-10 zeros directly from the digit characters. + // This avoids i128 modulo arithmetic which produces incorrect results on wasm32. + var trailing_zeros: u6 = 0; + { + var i = num_digits; + while (i > 0) { + i -= 1; + if (digit_bytes[i] == '0') { + trailing_zeros += 1; + } else { + break; + } + } + } if (trailing_zeros >= decimal_places) { // add just a single zero if all decimal digits are zero buf[position] = '0'; @@ -750,27 +763,6 @@ pub const RocDec = extern struct { } }; -// A number has `k` trailing zeros if `10^k` divides into it cleanly -inline fn count_trailing_zeros_base10(input: i128) u6 { - if (input == 0) { - // this should not happen in practice - return 0; - } - - var count: u6 = 0; - var k: i128 = 1; - - while (true) { - if (i128h.mod_i128(input, i128h.pow10_i128(@intCast(k))) == 0) { - count += 1; - k += 1; - } else { - break; - } - } - - return count; -} fn mul_and_decimalize(a: u128, b: u128) WithOverflow(i128) { const answer_u256 = mul_u128(a, b); diff --git a/src/builtins/dev_wrappers.zig b/src/builtins/dev_wrappers.zig index 30d43eef8c2..b0233f2ebdc 100644 --- a/src/builtins/dev_wrappers.zig +++ b/src/builtins/dev_wrappers.zig @@ -1192,3 +1192,85 @@ pub fn roc_builtins_float_from_str( else => unreachable, } } + +// ── List equality and reverse wrappers ── + +/// Compare two lists of flat (non-refcounted) elements for equality. +/// Elements are compared byte-by-byte using the element width. +pub fn roc_builtins_list_eq(a_bytes: ?[*]u8, a_len: usize, _: usize, b_bytes: ?[*]u8, b_len: usize, _: usize, elem_width: usize) callconv(.c) bool { + if (a_len != b_len) return false; + if (a_len == 0) return true; + if (a_bytes == b_bytes) return true; + const a = a_bytes orelse return b_bytes == null; + const b = b_bytes orelse return false; + return std.mem.eql(u8, a[0 .. a_len * elem_width], b[0 .. b_len * elem_width]); +} + +/// Compare two lists of strings for equality. +pub fn roc_builtins_list_str_eq(a_bytes: ?[*]u8, a_len: usize, _: usize, b_bytes: ?[*]u8, b_len: usize, _: usize) callconv(.c) bool { + if (a_len != b_len) return false; + if (a_len == 0) return true; + if (a_bytes == b_bytes) return true; + const a = a_bytes orelse return b_bytes == null; + const b = b_bytes orelse return false; + const str_size = @sizeOf(RocStr); + for (0..a_len) |i| { + const a_str: *const RocStr = @ptrCast(@alignCast(a + i * str_size)); + const b_str: *const RocStr = @ptrCast(@alignCast(b + i * str_size)); + if (!strEqual(a_str.*, b_str.*)) return false; + } + return true; +} + +/// Compare two lists of lists for equality (inner elements are flat). +pub fn roc_builtins_list_list_eq(a_bytes: ?[*]u8, a_len: usize, _: usize, b_bytes: ?[*]u8, b_len: usize, _: usize, inner_elem_width: usize) callconv(.c) bool { + if (a_len != b_len) return false; + if (a_len == 0) return true; + if (a_bytes == b_bytes) return true; + const a = a_bytes orelse return b_bytes == null; + const b = b_bytes orelse return false; + const list_size = @sizeOf(RocList); + for (0..a_len) |i| { + const a_list: *const RocList = @ptrCast(@alignCast(a + i * list_size)); + const b_list: *const RocList = @ptrCast(@alignCast(b + i * list_size)); + if (a_list.length != b_list.length) return false; + if (a_list.length == 0) continue; + if (a_list.bytes == b_list.bytes) continue; + const ab = a_list.bytes orelse return b_list.bytes == null; + const bb = b_list.bytes orelse return false; + if (!std.mem.eql(u8, ab[0 .. a_list.length * inner_elem_width], bb[0 .. b_list.length * inner_elem_width])) return false; + } + return true; +} + +/// Reverse a list of flat elements. +pub fn roc_builtins_list_reverse(out: *RocList, list_bytes: ?[*]u8, list_len: usize, _: usize, elem_width: usize, alignment: u32, roc_ops: *RocOps) callconv(.c) void { + if (list_len == 0 or elem_width == 0) { + out.* = RocList{ .bytes = null, .length = 0, .capacity_or_alloc_ptr = 0 }; + return; + } + const src = list_bytes orelse { + out.* = RocList{ .bytes = null, .length = 0, .capacity_or_alloc_ptr = 0 }; + return; + }; + const total_bytes = list_len * elem_width; + const dest = allocateWithRefcountC(total_bytes, alignment, false, roc_ops); + // Copy elements in reverse order + var i: usize = 0; + while (i < list_len) : (i += 1) { + const src_offset = (list_len - 1 - i) * elem_width; + const dst_offset = i * elem_width; + @memcpy(dest[dst_offset .. dst_offset + elem_width], src[src_offset .. src_offset + elem_width]); + } + out.* = RocList{ .bytes = dest, .length = list_len, .capacity_or_alloc_ptr = list_len }; +} + +/// i32 modulo (floored division mod, not truncated remainder) +pub fn roc_builtins_i32_mod_by(a: i32, b: i32) callconv(.c) i32 { + return @mod(a, b); +} + +/// i64 modulo (floored division mod, not truncated remainder) +pub fn roc_builtins_i64_mod_by(a: i64, b: i64) callconv(.c) i64 { + return @mod(a, b); +} diff --git a/src/builtins/static_lib.zig b/src/builtins/static_lib.zig index f7c7c19c105..0a62d2d1272 100644 --- a/src/builtins/static_lib.zig +++ b/src/builtins/static_lib.zig @@ -110,4 +110,12 @@ comptime { @export(&dw.roc_builtins_int_from_str, .{ .name = "roc_builtins_int_from_str" }); @export(&dw.roc_builtins_dec_from_str, .{ .name = "roc_builtins_dec_from_str" }); @export(&dw.roc_builtins_float_from_str, .{ .name = "roc_builtins_float_from_str" }); + // List equality and reverse wrappers + @export(&dw.roc_builtins_list_eq, .{ .name = "roc_builtins_list_eq" }); + @export(&dw.roc_builtins_list_str_eq, .{ .name = "roc_builtins_list_str_eq" }); + @export(&dw.roc_builtins_list_list_eq, .{ .name = "roc_builtins_list_list_eq" }); + @export(&dw.roc_builtins_list_reverse, .{ .name = "roc_builtins_list_reverse" }); + // Integer modulo wrappers + @export(&dw.roc_builtins_i32_mod_by, .{ .name = "roc_builtins_i32_mod_by" }); + @export(&dw.roc_builtins_i64_mod_by, .{ .name = "roc_builtins_i64_mod_by" }); } diff --git a/src/cli/main.zig b/src/cli/main.zig index 5b13bd1658e..84ff83373af 100644 --- a/src/cli/main.zig +++ b/src/cli/main.zig @@ -4151,7 +4151,8 @@ fn rocBuildNative(ctx: *CliContext, args: cli_args.BuildArgs) !void { else null; - var layout_store = layout.Store.init(all_module_envs, builtin_str, ctx.gpa, base.target.TargetUsize.native) catch { + const target_usize: base.target.TargetUsize = if (target_arch == .wasm32) .u32 else base.target.TargetUsize.native; + var layout_store = layout.Store.init(all_module_envs, builtin_str, ctx.gpa, target_usize) catch { std.log.err("Failed to create layout store", .{}); return error.LayoutStoreFailed; }; @@ -4416,126 +4417,334 @@ fn rocBuildNative(ctx: *CliContext, args: cli_args.BuildArgs) !void { // Get procedures from the LIR store const procs = lir_store.getProcSpecs(); - // Compile to object file - std.log.debug("Generating native code...", .{}); - var object_compiler = backend.ObjectFileCompiler.init(ctx.gpa); + if (target_arch == .wasm32) { + // ---- WASM32 Surgical Linking Pipeline ---- + const WasmModule = backend.wasm.WasmModule; + const WasmCodeGen = backend.wasm.WasmCodeGen; - ensureCompilerCacheDirExists(build_cache_dir) catch |err| { - std.log.err("Failed to create compiler build cache dir {s}: {}", .{ build_cache_dir, err }); - return err; - }; + std.log.debug("WASM32 surgical linking pipeline...", .{}); - const obj_filename = try std.fmt.allocPrint(ctx.arena, "roc_app_{s}.o", .{@tagName(target)}); - const obj_path = try std.fs.path.join(ctx.arena, &.{ build_cache_dir, obj_filename }); + // Step 1: Get host.wasm path from link spec + const target_name = @tagName(target); + const link_spec = targets_config.getLinkSpec(target, link_type) orelse { + return ctx.fail(.{ .linker_failed = .{ + .err = error.UnsupportedTarget, + .target = target_name, + } }); + }; - object_compiler.compileToObjectFileAndWrite( - &lir_store, - &layout_store, - entrypoints.items, - procs, - target, - obj_path, - ) catch |err| { - std.log.err("Native compilation failed: {}", .{err}); - return error.NativeCompilationFailed; - }; + const files_dir = targets_config.files_dir orelse "targets"; + var host_wasm_path: ?[]const u8 = null; + for (link_spec.items) |item| { + switch (item) { + .file_path => |path| { + const full_path = try std.fs.path.join(ctx.arena, &.{ platform_dir, files_dir, target_name, path }); + std.fs.cwd().access(full_path, .{}) catch { + const result = platform_validation.targets_validator.ValidationResult{ + .missing_target_file = .{ + .target = target, + .link_type = link_type, + .file_path = path, + .expected_full_path = full_path, + }, + }; + _ = platform_validation.renderValidationError(ctx.gpa, result, ctx.io.stderr()); + return error.MissingTargetFile; + }; + // For wasm32, the first file_path before `app` is the host.wasm + if (host_wasm_path == null) { + host_wasm_path = full_path; + } + }, + .app, .win_gui => {}, + } + } - std.log.debug("Object file generated: {s}", .{obj_path}); + const host_path = host_wasm_path orelse { + const stderr = ctx.io.stderr(); + try stderr.print("Error: No host.wasm found in platform's wasm32 target configuration.\n", .{}); + return error.MissingTargetFile; + }; + std.log.debug("Host WASM: {s}", .{host_path}); - // If --no-link, we're done - if (args.no_link) { - const stdout = ctx.io.stdout(); - try stdout.print("Object file generated: {s}\n", .{obj_path}); - return; - } + // Step 2: Read and parse host module + const host_bytes = std.fs.cwd().readFileAlloc(ctx.gpa, host_path, 50 * 1024 * 1024) catch |err| { + std.log.err("Failed to read host WASM file {s}: {}", .{ host_path, err }); + return error.MissingTargetFile; + }; + defer ctx.gpa.free(host_bytes); - // Get link spec and build file lists - const target_name = @tagName(target); - const link_spec = targets_config.getLinkSpec(target, link_type) orelse { - return ctx.fail(.{ .linker_failed = .{ - .err = error.UnsupportedTarget, - .target = target_name, - } }); - }; + var host_module = WasmModule.preload(ctx.gpa, host_bytes, true) catch |err| { + std.log.err("Failed to parse host WASM module: {}", .{err}); + return error.NativeCompilationFailed; + }; - const files_dir = targets_config.files_dir orelse "targets"; - var platform_files_pre = try std.array_list.Managed([]const u8).initCapacity(ctx.arena, 8); - var platform_files_post = try std.array_list.Managed([]const u8).initCapacity(ctx.arena, 8); - var hit_app = false; + // Step 3: Export global host symbols and remove memory/table imports + host_module.exportGlobalSymbols(); + host_module.removeMemoryAndTableImports(); - for (link_spec.items) |item| { - switch (item) { - .file_path => |path| { - const full_path = try std.fs.path.join(ctx.arena, &.{ platform_dir, files_dir, target_name, path }); + // Step 4: Merge builtins + const builtins_obj_bytes = BuiltinsObjects.forTarget(target); + var builtins_module = WasmModule.preload(ctx.gpa, builtins_obj_bytes, true) catch |err| { + std.log.err("Failed to parse builtins WASM module: {}", .{err}); + return error.NativeCompilationFailed; + }; + defer builtins_module.deinit(); - std.fs.cwd().access(full_path, .{}) catch { - const result = platform_validation.targets_validator.ValidationResult{ - .missing_target_file = .{ - .target = target, - .link_type = link_type, - .file_path = path, - .expected_full_path = full_path, - }, - }; - _ = platform_validation.renderValidationError(ctx.gpa, result, ctx.io.stderr()); - return error.MissingTargetFile; - }; + var merge_result = host_module.mergeModule(&builtins_module) catch |err| { + std.log.err("Failed to merge builtins into host module: {}", .{err}); + return error.NativeCompilationFailed; + }; + merge_result.deinit(); + // Step 5: Build BuiltinSymbols lookup + const builtin_syms = WasmModule.BuiltinSymbols.populate(&host_module) catch |err| { + std.log.err("Failed to populate builtin symbols: {}", .{err}); + return error.NativeCompilationFailed; + }; - if (!hit_app) { - try platform_files_pre.append(full_path); - } else { - try platform_files_post.append(full_path); - } - }, - .app => { - hit_app = true; - }, - .win_gui => {}, + // Step 6: Create code generator with host module + var codegen = WasmCodeGen.initWithHostModule(ctx.gpa, &lir_store, &layout_store, host_module, builtin_syms); + defer codegen.deinit(); + + // Step 7: Register RocOps callbacks from the host module + codegen.registerRocOpsFromModule() catch |err| { + std.log.err("Failed to register RocOps callbacks: {}", .{err}); + return error.NativeCompilationFailed; + }; + + // Record host+builtin function count before app compilation for DCE seeding + const host_defined_fn_count = codegen.module.function_offsets.items.len; + + // Step 8: Compile all procedures + codegen.compileAllProcSpecs(procs) catch |err| { + std.log.err("WASM proc compilation failed: {}", .{err}); + return error.NativeCompilationFailed; + }; + + // Step 9: Generate entrypoint wrappers and build host-to-app map + var host_to_app_map = try std.ArrayList(WasmModule.HostToAppEntry).initCapacity(ctx.gpa, entrypoints.items.len); + defer host_to_app_map.deinit(ctx.gpa); + + for (entrypoints.items) |ep| { + const proc_spec = lir_store.getProcSpec(ep.proc); + const wrapper_idx = codegen.generateEntrypointWrapper( + proc_spec, + ep.symbol_name, + ep.arg_layouts, + ep.ret_layout, + ) catch |err| { + std.log.err("Failed to generate entrypoint wrapper for {s}: {}", .{ ep.symbol_name, err }); + return error.NativeCompilationFailed; + }; + + try host_to_app_map.append(ctx.gpa, .{ + .name = ep.symbol_name, + .fn_index = wrapper_idx, + }); } - } - // Extract builtins object file for the target and add to link inputs - const builtins_bytes = BuiltinsObjects.forTarget(target); - const builtins_filename = BuiltinsObjects.filename(target); - const builtins_path = try std.fs.path.join(ctx.arena, &.{ build_cache_dir, builtins_filename }); + // Step 10: Transfer app functions to code_bytes representation + codegen.module.transferAppFunctions() catch |err| { + std.log.err("Failed to transfer app functions: {}", .{err}); + return error.NativeCompilationFailed; + }; + + // Step 11: Surgical linking — redirect host imports to app functions + codegen.module.linkHostToAppCalls(host_to_app_map.items) catch |err| { + std.log.err("Surgical linking failed: {}", .{err}); + return error.NativeCompilationFailed; + }; - // Write builtins object to cache - std.fs.cwd().writeFile(.{ - .sub_path = builtins_path, - .data = builtins_bytes, - }) catch |err| { - std.log.err("Failed to write builtins object file: {}", .{err}); - return error.BuiltinsExtractionFailed; - }; - std.log.debug("Builtins object file: {s}", .{builtins_path}); + // Step 12: Resolve relocations (patches builtin call sites and data references) + codegen.module.resolveRelocations(); - // Link the object file with platform files - var object_files = try std.array_list.Managed([]const u8).initCapacity(ctx.arena, 4); - try object_files.append(obj_path); - try object_files.append(builtins_path); + // Step 13: Finalize memory and table + const wasm_stack_bytes: u32 = 1024 * 1024; // 1MB stack + codegen.module.finalizeMemoryAndTable(wasm_stack_bytes) catch |err| { + std.log.err("WASM finalization failed: {}", .{err}); + return error.NativeCompilationFailed; + }; - std.log.debug("Linking: {} pre-files, {} object files, {} post-files", .{ - platform_files_pre.items.len, - object_files.items.len, - platform_files_post.items.len, - }); + // Step 14: Verify no stale builtin imports + codegen.module.verifyNoBuiltinImports() catch |err| { + std.log.err("Stale builtin imports found: {}", .{err}); + return error.NativeCompilationFailed; + }; - linker.link(ctx, .{ - .target_format = linker.TargetFormat.detectFromOs(target_os), - .target_abi = linker.TargetAbi.fromRocTarget(target), - .target_os = target_os, - .target_arch = target_arch, - .output_path = final_output_path, - .object_files = object_files.items, - .platform_files_pre = platform_files_pre.items, - .platform_files_post = platform_files_post.items, - .extra_args = &.{}, - }) catch |err| { - return ctx.fail(.{ .linker_failed = .{ - .err = err, - .target = target_name, - } }); - }; + // Step 15: Dead code elimination + const total_fns = codegen.module.import_fn_count + codegen.module.dead_import_dummy_count + @as(u32, @intCast(codegen.module.function_offsets.items.len)); + var called_fns = try ctx.gpa.alloc(bool, total_fns); + defer ctx.gpa.free(called_fns); + @memset(called_fns, false); + + // Mark all exported functions as live + for (codegen.module.exports.items) |exp| { + if (exp.kind == .func and exp.idx < total_fns) { + called_fns[exp.idx] = true; + } + } + + // Mark all app-compiled functions as live. + // App code uses direct call instructions (no relocation entries), so + // the DCE's relocation-based call tracing can't follow them. + const fn_index_min = codegen.module.import_fn_count + codegen.module.dead_import_dummy_count; + for (host_defined_fn_count..codegen.module.function_offsets.items.len) |i| { + const fn_idx = fn_index_min + @as(u32, @intCast(i)); + if (fn_idx < total_fns) { + called_fns[fn_idx] = true; + } + } + + codegen.module.eliminateDeadCode(called_fns) catch |err| { + std.log.err("Dead code elimination failed: {}", .{err}); + return error.NativeCompilationFailed; + }; + + // Step 16: Materialize function bodies from code_bytes + codegen.module.materializeFuncBodies() catch |err| { + std.log.err("Failed to materialize function bodies: {}", .{err}); + return error.NativeCompilationFailed; + }; + + // Step 17: Serialize + const final_bytes = codegen.module.encode(ctx.gpa) catch |err| { + std.log.err("WASM encoding failed: {}", .{err}); + return error.NativeCompilationFailed; + }; + defer ctx.gpa.free(final_bytes); + + // Step 15: Write output + std.fs.cwd().writeFile(.{ + .sub_path = final_output_path, + .data = final_bytes, + }) catch |err| { + std.log.err("Failed to write WASM output {s}: {}", .{ final_output_path, err }); + return error.NativeCompilationFailed; + }; + + std.log.debug("WASM output: {s} ({} bytes)", .{ final_output_path, final_bytes.len }); + } else { + // ---- Native Compilation + Linking Pipeline ---- + std.log.debug("Generating native code...", .{}); + var object_compiler = backend.ObjectFileCompiler.init(ctx.gpa); + + ensureCompilerCacheDirExists(build_cache_dir) catch |err| { + std.log.err("Failed to create compiler build cache dir {s}: {}", .{ build_cache_dir, err }); + return err; + }; + + const obj_filename = try std.fmt.allocPrint(ctx.arena, "roc_app_{s}.o", .{@tagName(target)}); + const obj_path = try std.fs.path.join(ctx.arena, &.{ build_cache_dir, obj_filename }); + + object_compiler.compileToObjectFileAndWrite( + &lir_store, + &layout_store, + entrypoints.items, + procs, + target, + obj_path, + ) catch |err| { + std.log.err("Native compilation failed: {}", .{err}); + return error.NativeCompilationFailed; + }; + + std.log.debug("Object file generated: {s}", .{obj_path}); + + // If --no-link, we're done + if (args.no_link) { + const stdout = ctx.io.stdout(); + try stdout.print("Object file generated: {s}\n", .{obj_path}); + return; + } + + // Get link spec and build file lists + const target_name = @tagName(target); + const link_spec = targets_config.getLinkSpec(target, link_type) orelse { + return ctx.fail(.{ .linker_failed = .{ + .err = error.UnsupportedTarget, + .target = target_name, + } }); + }; + + const files_dir = targets_config.files_dir orelse "targets"; + var platform_files_pre = try std.array_list.Managed([]const u8).initCapacity(ctx.arena, 8); + var platform_files_post = try std.array_list.Managed([]const u8).initCapacity(ctx.arena, 8); + var hit_app = false; + + for (link_spec.items) |item| { + switch (item) { + .file_path => |path| { + const full_path = try std.fs.path.join(ctx.arena, &.{ platform_dir, files_dir, target_name, path }); + + std.fs.cwd().access(full_path, .{}) catch { + const result = platform_validation.targets_validator.ValidationResult{ + .missing_target_file = .{ + .target = target, + .link_type = link_type, + .file_path = path, + .expected_full_path = full_path, + }, + }; + _ = platform_validation.renderValidationError(ctx.gpa, result, ctx.io.stderr()); + return error.MissingTargetFile; + }; + + if (!hit_app) { + try platform_files_pre.append(full_path); + } else { + try platform_files_post.append(full_path); + } + }, + .app => { + hit_app = true; + }, + .win_gui => {}, + } + } + + // Extract builtins object file for the target and add to link inputs + const builtins_bytes = BuiltinsObjects.forTarget(target); + const builtins_filename = BuiltinsObjects.filename(target); + const builtins_path = try std.fs.path.join(ctx.arena, &.{ build_cache_dir, builtins_filename }); + + // Write builtins object to cache + std.fs.cwd().writeFile(.{ + .sub_path = builtins_path, + .data = builtins_bytes, + }) catch |err| { + std.log.err("Failed to write builtins object file: {}", .{err}); + return error.BuiltinsExtractionFailed; + }; + std.log.debug("Builtins object file: {s}", .{builtins_path}); + + // Link the object file with platform files + var object_files = try std.array_list.Managed([]const u8).initCapacity(ctx.arena, 4); + try object_files.append(obj_path); + try object_files.append(builtins_path); + + std.log.debug("Linking: {} pre-files, {} object files, {} post-files", .{ + platform_files_pre.items.len, + object_files.items.len, + platform_files_post.items.len, + }); + + linker.link(ctx, .{ + .target_format = linker.TargetFormat.detectFromOs(target_os), + .target_abi = linker.TargetAbi.fromRocTarget(target), + .target_os = target_os, + .target_arch = target_arch, + .output_path = final_output_path, + .object_files = object_files.items, + .platform_files_pre = platform_files_pre.items, + .platform_files_post = platform_files_post.items, + .extra_args = &.{}, + }) catch |err| { + return ctx.fail(.{ .linker_failed = .{ + .err = err, + .target = target_name, + } }); + }; + } const elapsed_ns = timer.read(); const elapsed_ms = @as(f64, @floatFromInt(elapsed_ns)) / 1_000_000.0; diff --git a/src/eval/mod.zig b/src/eval/mod.zig index 3fc708c4982..ac1664de8bf 100644 --- a/src/eval/mod.zig +++ b/src/eval/mod.zig @@ -48,6 +48,7 @@ pub const LlvmEvaluator = @import("llvm_evaluator.zig").LlvmEvaluator; /// WebAssembly-based evaluator for wasm code generation const wasm_evaluator_mod = @import("wasm_evaluator.zig"); pub const WasmEvaluator = wasm_evaluator_mod.WasmEvaluator; +pub const WasmCodeResult = wasm_evaluator_mod.WasmCodeResult; test "eval tests" { std.testing.refAllDecls(@This()); diff --git a/src/eval/test/eval_test.zig b/src/eval/test/eval_test.zig index 79435d5367a..4909540cc93 100644 --- a/src/eval/test/eval_test.zig +++ b/src/eval/test/eval_test.zig @@ -3727,6 +3727,14 @@ test "Str.join_with" { , "hello world", .no_trace); } +test "list of strings length" { + try runExpectI64("[\"hello\", \"world\"].len()", 2, .no_trace); +} + +test "Str.join_with empty list" { + try runExpectStr("Str.join_with([], \",\")", "", .no_trace); +} + // Note: List.contains is implemented as List.any(list, |x| x == needle) in the builtins, // which goes through closure + higher-order function paths rather than the list_contains // low-level. The DevEvaluator doesn't currently support List.any with variable-capturing diff --git a/src/eval/test/helpers.zig b/src/eval/test/helpers.zig index 83fc3fa98ce..b040b0c52cf 100644 --- a/src/eval/test/helpers.zig +++ b/src/eval/test/helpers.zig @@ -618,7 +618,7 @@ pub fn wasmEvaluatorStr(allocator: std.mem.Allocator, module_env: *ModuleEnv, ex // Keep module order aligned with resolveImports/getResolvedModule indices. const all_module_envs = [_]*ModuleEnv{ @constCast(builtin_module_env), module_env }; - var wasm_result = wasm_eval.generateWasm(module_env, expr_idx, &all_module_envs) catch { + var wasm_result = wasm_eval.generateWasm(module_env, expr_idx, &all_module_envs, WasmEvaluator.default_entrypoint_name) catch { return error.WasmGenerateCodeFailed; }; defer wasm_result.deinit(); @@ -756,6 +756,14 @@ pub fn wasmEvaluatorStr(allocator: std.mem.Allocator, module_env: *ModuleEnv, ex return error.WasmExecFailed; }; + // Compiler-rt intrinsics needed by merged builtins + env_imports.addHostFunction("__multi3", &[_]bytebox.ValType{ .I32, .I64, .I64, .I64, .I64 }, &[_]bytebox.ValType{}, hostMulti3, null) catch { + return error.WasmExecFailed; + }; + env_imports.addHostFunction("__muloti4", &[_]bytebox.ValType{ .I32, .I64, .I64, .I64, .I64, .I32 }, &[_]bytebox.ValType{}, hostMuloti4, null) catch { + return error.WasmExecFailed; + }; + // i128/u128 division and modulo: (lhs_ptr, rhs_ptr, result_ptr) -> void env_imports.addHostFunction( "roc_i128_div_s", @@ -1836,6 +1844,41 @@ fn hostRocCrashed(_: ?*anyopaque, module: *bytebox.ModuleInstance, params: [*]co } } +/// __multi3: 128-bit signed multiply. result_ptr = a * b (truncating to 128 bits). +fn hostMulti3(_: ?*anyopaque, module: *bytebox.ModuleInstance, params: [*]const bytebox.Val, _: [*]bytebox.Val) error{}!void { + const buffer = module.store.getMemory(0).buffer(); + const result_ptr: usize = @intCast(params[0].I32); + const a_lo: u64 = @bitCast(params[1].I64); + const a_hi: u64 = @bitCast(params[2].I64); + const b_lo: u64 = @bitCast(params[3].I64); + const b_hi: u64 = @bitCast(params[4].I64); + const a: i128 = @bitCast(@as(u128, a_hi) << 64 | @as(u128, a_lo)); + const b: i128 = @bitCast(@as(u128, b_hi) << 64 | @as(u128, b_lo)); + const result = i128h.mul_i128(a, b); + const result_u128: u128 = @bitCast(result); + std.mem.writeInt(u64, buffer[result_ptr..][0..8], @truncate(result_u128), .little); + std.mem.writeInt(u64, buffer[result_ptr + 8 ..][0..8], @truncate(result_u128 >> 64), .little); +} + +/// __muloti4: 128-bit signed multiply with overflow detection. +fn hostMuloti4(_: ?*anyopaque, module: *bytebox.ModuleInstance, params: [*]const bytebox.Val, _: [*]bytebox.Val) error{}!void { + const buffer = module.store.getMemory(0).buffer(); + const result_ptr: usize = @intCast(params[0].I32); + const a_lo: u64 = @bitCast(params[1].I64); + const a_hi: u64 = @bitCast(params[2].I64); + const b_lo: u64 = @bitCast(params[3].I64); + const b_hi: u64 = @bitCast(params[4].I64); + const overflow_ptr: usize = @intCast(params[5].I32); + const a: i128 = @bitCast(@as(u128, a_hi) << 64 | @as(u128, a_lo)); + const b: i128 = @bitCast(@as(u128, b_hi) << 64 | @as(u128, b_lo)); + const result = i128h.mul_i128(a, b); + const overflow: i32 = if (b != 0 and @divTrunc(result, b) != a) 1 else 0; + const result_u128: u128 = @bitCast(result); + std.mem.writeInt(u64, buffer[result_ptr..][0..8], @truncate(result_u128), .little); + std.mem.writeInt(u64, buffer[result_ptr + 8 ..][0..8], @truncate(result_u128 >> 64), .little); + std.mem.writeInt(i32, buffer[overflow_ptr..][0..4], overflow, .little); +} + // --- String operation host function helpers --- fn readWasmStr(buffer: []u8, str_ptr: usize) struct { data: [*]const u8, len: usize } { @@ -7473,3 +7516,143 @@ test "parse diagnostic reporting crashes if module name is uninitialized" { defer report.deinit(); } } + +// Phase 7a: Entrypoint ABI Migration — Structural Tests +// +// These verify the generated wasm module has the correct exports and +// function signatures. Behavioral correctness is validated by the +// 1289 eval tests which exercise the eval wrapper ("main") end-to-end. +// +// Full integration tests (calling roc__main_for_host_1_exposed from a +// surgically-linked host) will be added in Phase 12 when the eval +// pipeline switches to surgical linking. + +const WasmModule = backend.wasm.WasmModule; +const ExportKind = WasmModule.ExportKind; + +/// Generate wasm bytes for a source expression and parse the module structure. +fn generateAndParseWasmModule(source: []const u8) !struct { + module: WasmModule, + wasm_result: eval_mod.WasmCodeResult, + wasm_eval: eval_mod.WasmEvaluator, + parse_resources: ParsedExprResources, + + fn deinit(self: *@This()) void { + self.module.deinit(); + self.wasm_result.deinit(); + self.wasm_eval.deinit(); + cleanupParseAndCanonical(test_allocator, self.parse_resources); + } +} { + const resources = try parseAndCanonicalizeExpr(test_allocator, source); + errdefer cleanupParseAndCanonical(test_allocator, resources); + + var wasm_eval = WasmEvaluator.init(test_allocator) catch return error.WasmEvaluatorInitFailed; + errdefer wasm_eval.deinit(); + + // Wrap in Str.inspect for consistency with eval pipeline + const str_inspect_idx = try wrapInStrInspect(resources.module_env, resources.expr_idx); + const all_module_envs = [_]*ModuleEnv{ @constCast(resources.builtin_module.env), resources.module_env }; + + var wasm_result = wasm_eval.generateWasm(resources.module_env, str_inspect_idx, &all_module_envs, WasmEvaluator.default_entrypoint_name) catch { + return error.WasmGenerateCodeFailed; + }; + errdefer wasm_result.deinit(); + + if (wasm_result.wasm_bytes.len == 0) return error.WasmGenerateCodeFailed; + + // Re-parse the encoded bytes to inspect module structure + const module = WasmModule.preload(test_allocator, @constCast(wasm_result.wasm_bytes), false) catch { + return error.WasmGenerateCodeFailed; + }; + + return .{ + .module = module, + .wasm_result = wasm_result, + .wasm_eval = wasm_eval, + .parse_resources = resources, + }; +} + +fn findExport(exports: []const WasmModule.Export, name: []const u8) ?WasmModule.Export { + for (exports) |exp| { + if (std.mem.eql(u8, exp.name, name)) return exp; + } + return null; +} + +test "app entrypoint — exports both RocCall entrypoint and main" { + var ctx = try generateAndParseWasmModule("42"); + defer ctx.deinit(); + + const exports = ctx.module.exports.items; + + // RocCall entrypoint must exist as a function export + const roc_call_export = findExport(exports, WasmEvaluator.default_entrypoint_name); + try std.testing.expect(roc_call_export != null); + try std.testing.expectEqual(ExportKind.func, roc_call_export.?.kind); + + // main (eval wrapper) must exist as a function export + const main_export = findExport(exports, "main"); + try std.testing.expect(main_export != null); + try std.testing.expectEqual(ExportKind.func, main_export.?.kind); + + // memory must be exported + const mem_export = findExport(exports, "memory"); + try std.testing.expect(mem_export != null); + try std.testing.expectEqual(ExportKind.memory, mem_export.?.kind); +} + +test "app entrypoint — RocCall entrypoint has type (i32, i32, i32) → void" { + var ctx = try generateAndParseWasmModule("42"); + defer ctx.deinit(); + + const roc_call_export = findExport(ctx.module.exports.items, WasmEvaluator.default_entrypoint_name).?; + + // Function index in export points past imports into defined functions. + // Look up its type index. + const import_count = ctx.module.importCount(); + const local_fn_idx = roc_call_export.idx - import_count; + const type_idx = ctx.module.func_type_indices.items[local_fn_idx]; + + // Verify RocCall signature: 3 params (i32, i32, i32), no results + const func_type = ctx.module.func_types.items[type_idx]; + try std.testing.expectEqual(@as(usize, 3), func_type.params.len); + try std.testing.expectEqual(WasmModule.ValType.i32, func_type.params[0]); + try std.testing.expectEqual(WasmModule.ValType.i32, func_type.params[1]); + try std.testing.expectEqual(WasmModule.ValType.i32, func_type.params[2]); + // Return type: void (null) + const result_vt = ctx.module.func_type_results.items[type_idx]; + try std.testing.expectEqual(@as(?WasmModule.ValType, null), result_vt); +} + +test "app entrypoint — main (eval wrapper) returns a value, not void" { + var ctx = try generateAndParseWasmModule("42"); + defer ctx.deinit(); + + const main_export = findExport(ctx.module.exports.items, "main").?; + + const import_count = ctx.module.importCount(); + const local_fn_idx = main_export.idx - import_count; + const type_idx = ctx.module.func_type_indices.items[local_fn_idx]; + + // Eval wrapper takes 1 param (env_ptr: i32) + const func_type = ctx.module.func_types.items[type_idx]; + try std.testing.expectEqual(@as(usize, 1), func_type.params.len); + try std.testing.expectEqual(WasmModule.ValType.i32, func_type.params[0]); + + // Returns a value (not void) — Str.inspect result is i32 pointer + const result_vt = ctx.module.func_type_results.items[type_idx]; + try std.testing.expect(result_vt != null); +} + +test "app entrypoint — roc__main and main are distinct functions" { + var ctx = try generateAndParseWasmModule("42"); + defer ctx.deinit(); + + const roc_call = findExport(ctx.module.exports.items, WasmEvaluator.default_entrypoint_name).?; + const main = findExport(ctx.module.exports.items, "main").?; + + // They must be different function indices + try std.testing.expect(roc_call.idx != main.idx); +} diff --git a/src/eval/wasm_evaluator.zig b/src/eval/wasm_evaluator.zig index 0a8b88b0e2d..16499af3112 100644 --- a/src/eval/wasm_evaluator.zig +++ b/src/eval/wasm_evaluator.zig @@ -35,6 +35,11 @@ const LirExprStore = lir.LirExprStore; const LirExprId = lir.LirExprId; const LirExpr = lir.LirExpr; const WasmCodeGen = backend.wasm.WasmCodeGen; +const WasmModule = backend.wasm.WasmModule; + +/// Pre-built wasm32 builtins object (roc_builtins.o cross-compiled for wasm32-freestanding). +/// Provided by the build system via the "wasm32_builtins" module import. +const wasm32_builtins = @import("wasm32_builtins"); /// Extract the result layout from a LIR expression. /// Mirrors the logic in dev_evaluator.zig. @@ -180,12 +185,58 @@ pub const WasmEvaluator = struct { return resolver; } + /// The default entrypoint name used by the eval/REPL pipeline. + /// Real builds derive this from the platform's `provides` section. + pub const default_entrypoint_name = "roc__main_for_host_1_exposed"; + + /// Prepare a WasmModule with merged builtins and populated BuiltinSymbols. + /// The builtins are resolved from code_bytes into func_bodies so that + /// subsequent addFunction/setFunctionBody calls from the codegen append + /// correctly and encode() produces a valid module. + fn prepareModuleWithBuiltins(self: *WasmEvaluator) Error!struct { module: WasmModule, syms: WasmModule.BuiltinSymbols } { + var builtins_module = WasmModule.preload(self.allocator, wasm32_builtins.bytes, false) catch + return error.RuntimeError; + defer builtins_module.deinit(); + + var app_module = WasmModule.init(self.allocator); + + // Add RocOps function imports BEFORE merging builtins. + // mergeModule computes func_remap from the current import count, so all + // imports must exist before merge for defined function indices to be correct. + const roc_ops_type_idx = app_module.addFuncType( + &.{ .i32, .i32 }, + &.{}, + ) catch return error.RuntimeError; + + for ([_][]const u8{ "roc_alloc", "roc_dealloc", "roc_realloc", "roc_dbg", "roc_expect_failed", "roc_crashed" }) |name| { + _ = app_module.addImport("env", name, roc_ops_type_idx) catch return error.RuntimeError; + } + + var merge_result = app_module.mergeModule(&builtins_module) catch + return error.RuntimeError; + merge_result.deinit(); + + const syms = WasmModule.BuiltinSymbols.populate(&app_module) catch + return error.RuntimeError; + + // Resolve all builtin-to-builtin relocations, + // then materialize into func_bodies so encode() can serialize them. + app_module.resolveRelocations(); + app_module.materializeFuncBodies() catch + return error.RuntimeError; + + return .{ .module = app_module, .syms = syms }; + } + /// Generate wasm bytes for a CIR expression. + /// `entrypoint_name` is the RocCall export name, derived from the platform's + /// `provides` section. Use `default_entrypoint_name` for eval/REPL. pub fn generateWasm( self: *WasmEvaluator, module_env: *ModuleEnv, expr_idx: CIR.Expr.Idx, all_module_envs: []const *ModuleEnv, + entrypoint_name: []const u8, ) Error!WasmCodeResult { // Other evaluators may have resolved this module's imports against a // different module ordering. Refresh them here so CIR external lookups @@ -272,12 +323,17 @@ pub const WasmEvaluator = struct { else 1; - // Generate wasm module - var codegen = WasmCodeGen.init(self.allocator, &lir_store, layout_store_ptr); + // Merge wasm32 builtins and resolve them into func_bodies so the + // codegen can append app functions and encode() produces a valid binary. + const prepared = self.prepareModuleWithBuiltins() catch { + return error.RuntimeError; + }; + + var codegen = WasmCodeGen.initWithHostModule(self.allocator, &lir_store, layout_store_ptr, prepared.module, prepared.syms); codegen.wasm_stack_bytes = self.wasm_stack_bytes; defer codegen.deinit(); - const gen_result = codegen.generateModule(final_expr_id, result_layout) catch { + const gen_result = codegen.generateModule(final_expr_id, result_layout, entrypoint_name) catch { return error.RuntimeError; }; diff --git a/src/interpreter_shim/main.zig b/src/interpreter_shim/main.zig index 023e40d7f93..2ec4c40a8c1 100644 --- a/src/interpreter_shim/main.zig +++ b/src/interpreter_shim/main.zig @@ -73,7 +73,7 @@ var tracy_allocator: tracy.TracyAllocator(null) = undefined; var wrapped_allocator: std.mem.Allocator = undefined; var allocator_initialized: bool = false; -// Wasm32 allocator - uses roc_alloc from host +// Wasm32 allocator - uses the host module's raw allocation exports const wasm_allocator = if (is_wasm32) std.mem.Allocator{ .ptr = undefined, .vtable = &.{ @@ -86,9 +86,9 @@ const wasm_allocator = if (is_wasm32) std.mem.Allocator{ // Wasm32 allocator vtable implementation fn wasmAlloc(_: *anyopaque, len: usize, alignment: std.mem.Alignment, _: usize) ?[*]u8 { - // Pass the actual requested alignment to roc_alloc + // Pass the actual requested alignment to roc_alloc_raw const align_bytes: u32 = @intCast(alignment.toByteUnits()); - const ptr = roc_alloc(len, align_bytes); + const ptr = roc_alloc_raw(len, align_bytes); return if (ptr) |p| @ptrCast(p) else null; } @@ -102,13 +102,13 @@ fn wasmRemap(_: *anyopaque, _: []u8, _: std.mem.Alignment, _: usize, _: usize) ? fn wasmFree(_: *anyopaque, buf: []u8, alignment: std.mem.Alignment, _: usize) void { const align_bytes: u32 = @intCast(alignment.toByteUnits()); - roc_dealloc(@ptrCast(buf.ptr), align_bytes); + roc_dealloc_raw(@ptrCast(buf.ptr), align_bytes); } // Host-provided allocation functions (for wasm32) -extern fn roc_alloc(size: usize, alignment: u32) callconv(.c) ?*anyopaque; -extern fn roc_realloc(ptr: *anyopaque, new_size: usize, old_size: usize, alignment: u32) callconv(.c) ?*anyopaque; -extern fn roc_dealloc(ptr: *anyopaque, alignment: u32) callconv(.c) void; +extern fn roc_alloc_raw(size: usize, alignment: u32) callconv(.c) ?*anyopaque; +extern fn roc_realloc_raw(ptr: *anyopaque, new_size: usize, old_size: usize, alignment: u32) callconv(.c) ?*anyopaque; +extern fn roc_dealloc_raw(ptr: *anyopaque, alignment: u32) callconv(.c) void; // Static empty import mapping for shim (no type name resolution needed) // Lazy-initialized to use the properly wrapped allocator diff --git a/src/repl/wasm_runner.zig b/src/repl/wasm_runner.zig index 9161aa9044d..d8542330645 100644 --- a/src/repl/wasm_runner.zig +++ b/src/repl/wasm_runner.zig @@ -31,7 +31,7 @@ pub fn wasmEvaluatorStr(allocator: std.mem.Allocator, module_env: *ModuleEnv, ex defer wasm_eval.deinit(); const all_module_envs = [_]*ModuleEnv{ @constCast(builtin_module_env), module_env }; - var wasm_result = wasm_eval.generateWasm(module_env, expr_idx, &all_module_envs) catch return error.WasmGenerateCodeFailed; + var wasm_result = wasm_eval.generateWasm(module_env, expr_idx, &all_module_envs, WasmEvaluator.default_entrypoint_name) catch return error.WasmGenerateCodeFailed; defer wasm_result.deinit(); if (wasm_result.wasm_bytes.len == 0) return error.WasmGenerateCodeFailed; @@ -55,6 +55,10 @@ pub fn wasmEvaluatorStr(allocator: std.mem.Allocator, module_env: *ModuleEnv, ex env_imports.addHostFunction("roc_str_eq", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{.I32}, hostStrEq, null) catch return error.WasmExecFailed; env_imports.addHostFunction("roc_list_eq", &[_]bytebox.ValType{ .I32, .I32, .I32 }, &[_]bytebox.ValType{.I32}, hostListEq, null) catch return error.WasmExecFailed; + // Compiler-rt intrinsics needed by merged builtins + env_imports.addHostFunction("__multi3", &[_]bytebox.ValType{ .I32, .I64, .I64, .I64, .I64 }, &[_]bytebox.ValType{}, hostMulti3, null) catch return error.WasmExecFailed; + env_imports.addHostFunction("__muloti4", &[_]bytebox.ValType{ .I32, .I64, .I64, .I64, .I64, .I32 }, &[_]bytebox.ValType{}, hostMuloti4, null) catch return error.WasmExecFailed; + env_imports.addHostFunction("roc_alloc", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, hostRocAlloc, null) catch return error.WasmExecFailed; env_imports.addHostFunction("roc_dealloc", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, hostRocDealloc, null) catch return error.WasmExecFailed; env_imports.addHostFunction("roc_realloc", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, hostRocRealloc, null) catch return error.WasmExecFailed; @@ -1087,3 +1091,42 @@ fn writeFloatParseResult(comptime T: type, buffer: []u8, out_ptr: usize, disc_of @memcpy(buffer[out_ptr..][0..value_bytes.len], value_bytes); buffer[out_ptr + disc_offset] = 1 - r.errorcode; } + +// --- Compiler-rt intrinsics --- + +/// __multi3: 128-bit signed multiply. result_ptr = a * b (truncating to 128 bits). +fn hostMulti3(_: ?*anyopaque, module: *bytebox.ModuleInstance, params: [*]const bytebox.Val, _: [*]bytebox.Val) error{}!void { + const buffer = module.store.getMemory(0).buffer(); + const result_ptr: usize = @intCast(params[0].I32); + const a_lo: u64 = @bitCast(params[1].I64); + const a_hi: u64 = @bitCast(params[2].I64); + const b_lo: u64 = @bitCast(params[3].I64); + const b_hi: u64 = @bitCast(params[4].I64); + const a: i128 = @bitCast(@as(u128, a_hi) << 64 | @as(u128, a_lo)); + const b: i128 = @bitCast(@as(u128, b_hi) << 64 | @as(u128, b_lo)); + const result = i128h.mul_i128(a, b); + const result_u128: u128 = @bitCast(result); + std.mem.writeInt(u64, buffer[result_ptr..][0..8], @truncate(result_u128), .little); + std.mem.writeInt(u64, buffer[result_ptr + 8 ..][0..8], @truncate(result_u128 >> 64), .little); +} + +/// __muloti4: 128-bit signed multiply with overflow detection. +fn hostMuloti4(_: ?*anyopaque, module: *bytebox.ModuleInstance, params: [*]const bytebox.Val, _: [*]bytebox.Val) error{}!void { + const buffer = module.store.getMemory(0).buffer(); + const result_ptr: usize = @intCast(params[0].I32); + const a_lo: u64 = @bitCast(params[1].I64); + const a_hi: u64 = @bitCast(params[2].I64); + const b_lo: u64 = @bitCast(params[3].I64); + const b_hi: u64 = @bitCast(params[4].I64); + const overflow_ptr: usize = @intCast(params[5].I32); + const a: i128 = @bitCast(@as(u128, a_hi) << 64 | @as(u128, a_lo)); + const b: i128 = @bitCast(@as(u128, b_hi) << 64 | @as(u128, b_lo)); + // Use widening multiply to detect overflow + const result = i128h.mul_i128(a, b); + // Overflow if result / b != a (when b != 0) + const overflow: i32 = if (b != 0 and @divTrunc(result, b) != a) 1 else 0; + const result_u128: u128 = @bitCast(result); + std.mem.writeInt(u64, buffer[result_ptr..][0..8], @truncate(result_u128), .little); + std.mem.writeInt(u64, buffer[result_ptr + 8 ..][0..8], @truncate(result_u128 >> 64), .little); + std.mem.writeInt(i32, buffer[overflow_ptr..][0..4], overflow, .little); +} diff --git a/test/wasm/app.roc b/test/wasm/app.roc index 1f32341cdd1..bd63b8e5bed 100644 --- a/test/wasm/app.roc +++ b/test/wasm/app.roc @@ -1,6 +1,8 @@ app [main!] { pf: platform "./platform/main.roc" } +import pf.Stdout + main! = || { - _boxed = Box.box("test") + Stdout.line!("Hello from Roc WASM!") "Hello from Roc WASM!" } diff --git a/test/wasm/index.html b/test/wasm/index.html index 03c0222be5d..8afbdc22b86 100644 --- a/test/wasm/index.html +++ b/test/wasm/index.html @@ -85,6 +85,11 @@

Roc WASM Static Library Test

const msg = decodeString(wasmModule.exports.memory, ptr, len); log(`[EXPECT FAILED] ${msg}`); document.getElementById('debug').textContent += `Expect failed: ${msg}\n`; + }, + echo: (ptr, len) => { + const msg = decodeString(wasmModule.exports.memory, ptr, len); + console.log(msg); + document.getElementById('debug').textContent += msg + '\n'; } } }; @@ -117,23 +122,13 @@

Roc WASM Static Library Test

// Call the main function const resultPtr = wasmModule.exports.wasm_main(); - const heapUsed = wasmModule.exports.wasm_heap_used(); + const resultLen = wasmModule.exports.wasm_result_len(); - log(`Result pointer: ${resultPtr}`); - log(`Heap used: ${heapUsed} bytes`); + log(`Result pointer: ${resultPtr}, length: ${resultLen}`); // Read the result string from memory - // For RocStr, we need to read the string data - // The pointer points to the string bytes - if (resultPtr) { - // Try to read as a null-terminated string or fixed length - const memory = new Uint8Array(wasmModule.exports.memory.buffer); - let len = 0; - const maxLen = 1024; - while (len < maxLen && memory[resultPtr + len] !== 0) { - len++; - } - const resultStr = decodeString(wasmModule.exports.memory, resultPtr, len); + if (resultPtr && resultLen > 0) { + const resultStr = decodeString(wasmModule.exports.memory, resultPtr, resultLen); resultDiv.textContent = resultStr || '(empty result)'; resultDiv.className = 'output success'; log(`Result: "${resultStr}"`); diff --git a/test/wasm/main.zig b/test/wasm/main.zig index 51f07cd85ad..8778ebb2469 100644 --- a/test/wasm/main.zig +++ b/test/wasm/main.zig @@ -86,6 +86,16 @@ const HostContext = struct { const msg = self.readString(ptr, len); std.debug.print("[EXPECT FAILED] {s}\n", .{msg}); } + + /// Called by Stdout.line! hosted effect + pub fn echo(ctx: ?*anyopaque, module: *bytebox.ModuleInstance, params: [*]const bytebox.Val, _: [*]bytebox.Val) error{}!void { + _ = module; + const self: *HostContext = @ptrCast(@alignCast(ctx)); + const ptr = params[0].I32; + const len = params[1].I32; + const msg = self.readString(ptr, len); + std.debug.print("{s}\n", .{msg}); + } }; // Global context for host imports (needed because bytebox stores pointer, not value) @@ -113,6 +123,7 @@ fn setupWasm(gpa: std.mem.Allocator, arena: std.mem.Allocator, wasm_path: []cons try env_imports.addHostFunction("roc_panic", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, HostContext.roc_panic, &global_host_context); try env_imports.addHostFunction("roc_dbg", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, HostContext.roc_dbg, &global_host_context); try env_imports.addHostFunction("roc_expect_failed", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, HostContext.roc_expect_failed, &global_host_context); + try env_imports.addHostFunction("echo", &[_]bytebox.ValType{ .I32, .I32 }, &[_]bytebox.ValType{}, HostContext.echo, &global_host_context); // Use a reasonable stack for the interpreter (256KB - same as playground tests) const imports = [_]bytebox.ModuleImportPackage{env_imports}; diff --git a/test/wasm/platform/Stdout.roc b/test/wasm/platform/Stdout.roc new file mode 100644 index 00000000000..cf4ec55b1a1 --- /dev/null +++ b/test/wasm/platform/Stdout.roc @@ -0,0 +1,3 @@ +Stdout := [].{ + line! : Str => {} +} diff --git a/test/wasm/platform/host.zig b/test/wasm/platform/host.zig index 9146b865ac3..adec402d795 100644 --- a/test/wasm/platform/host.zig +++ b/test/wasm/platform/host.zig @@ -28,22 +28,25 @@ const RocDbg = builtins.host_abi.RocDbg; const RocExpectFailed = builtins.host_abi.RocExpectFailed; const RocCrashed = builtins.host_abi.RocCrashed; -// Import functions from the host environment -extern "env" fn roc_panic(ptr: [*]const u8, len: usize) noreturn; -extern "env" fn roc_dbg(ptr: [*]const u8, len: usize) void; -extern "env" fn roc_expect_failed(ptr: [*]const u8, len: usize) void; +// Import functions from the host environment. +const env_imports = struct { + extern "env" fn roc_panic(ptr: [*]const u8, len: usize) noreturn; + extern "env" fn roc_dbg(ptr: [*]const u8, len: usize) void; + extern "env" fn roc_expect_failed(ptr: [*]const u8, len: usize) void; + extern "env" fn echo(ptr: [*]const u8, len: usize) void; +}; // Use Zig's standard WASM allocator for proper memory management const wasm_allocator = std.heap.wasm_allocator; -// Direct exports for the interpreter shim's memory allocation +// Raw exports for the interpreter shim's memory allocation // (The interpreter shim's wasmAlloc calls these directly) // // IMPORTANT: Since roc_dealloc doesn't receive the allocation size, but Zig's WasmAllocator // needs it to determine the correct size class, we store the size at the beginning of each // allocation and return a pointer past it. This adds @sizeOf(usize) overhead per allocation. -export fn roc_alloc(size: usize, alignment: u32) callconv(.c) ?*anyopaque { +export fn roc_alloc_raw(size: usize, alignment: u32) callconv(.c) ?*anyopaque { const align_log2: std.mem.Alignment = @enumFromInt(std.math.log2_int(usize, alignment)); // Header size must be at least alignment to ensure returned pointer is properly aligned @@ -61,7 +64,7 @@ export fn roc_alloc(size: usize, alignment: u32) callconv(.c) ?*anyopaque { return @ptrCast(result + header_size); } -export fn roc_dealloc(ptr: *anyopaque, alignment: u32) callconv(.c) void { +export fn roc_dealloc_raw(ptr: *anyopaque, alignment: u32) callconv(.c) void { const align_log2: std.mem.Alignment = @enumFromInt(std.math.log2_int(usize, alignment)); // Calculate header size (must match roc_alloc) @@ -80,7 +83,7 @@ export fn roc_dealloc(ptr: *anyopaque, alignment: u32) callconv(.c) void { wasm_allocator.rawFree(base_ptr[0..total_size], align_log2, @returnAddress()); } -export fn roc_realloc(ptr: *anyopaque, new_size: usize, old_size: usize, alignment: u32) callconv(.c) ?*anyopaque { +export fn roc_realloc_raw(ptr: *anyopaque, new_size: usize, old_size: usize, alignment: u32) callconv(.c) ?*anyopaque { const align_log2: std.mem.Alignment = @enumFromInt(std.math.log2_int(usize, alignment)); // Calculate header size (must match roc_alloc) @@ -110,11 +113,11 @@ export fn roc_realloc(ptr: *anyopaque, new_size: usize, old_size: usize, alignme return @ptrCast(new_user_ptr); } -// RocOps callback implementations -// These use the same size-header approach as the exported roc_alloc/roc_dealloc, +// Canonical RocOps callback implementations. +// These use the same size-header approach as the raw roc_alloc/dealloc exports, // because RocDealloc doesn't provide the length (by design for seamless slices). -fn rocAllocFn(alloc_req: *RocAlloc, _: *anyopaque) callconv(.c) void { +fn roc_alloc(alloc_req: *RocAlloc, _: *anyopaque) callconv(.c) void { const alignment: u32 = @intCast(alloc_req.alignment); const align_log2: std.mem.Alignment = @enumFromInt(std.math.log2_int(usize, alignment)); @@ -132,14 +135,14 @@ fn rocAllocFn(alloc_req: *RocAlloc, _: *anyopaque) callconv(.c) void { alloc_req.answer = @ptrCast(result + header_size); } -fn rocDeallocFn(dealloc_req: *RocDealloc, _: *anyopaque) callconv(.c) void { +fn roc_dealloc(dealloc_req: *RocDealloc, _: *anyopaque) callconv(.c) void { const alignment: u32 = @intCast(dealloc_req.alignment); const align_log2: std.mem.Alignment = @enumFromInt(std.math.log2_int(usize, alignment)); - // Calculate header size (must match rocAllocFn) + // Calculate header size (must match roc_alloc) const header_size = @max(alignment, @sizeOf(usize)); - // Get the base pointer (before the header we stored in rocAllocFn) + // Get the base pointer (before the header we stored in roc_alloc) const byte_ptr: [*]u8 = @ptrCast(dealloc_req.ptr); const base_ptr = byte_ptr - header_size; @@ -152,7 +155,7 @@ fn rocDeallocFn(dealloc_req: *RocDealloc, _: *anyopaque) callconv(.c) void { wasm_allocator.rawFree(base_ptr[0..total_size], align_log2, @returnAddress()); } -fn rocReallocFn(realloc_req: *RocRealloc, _: *anyopaque) callconv(.c) void { +fn roc_realloc(realloc_req: *RocRealloc, _: *anyopaque) callconv(.c) void { // RocRealloc provides new_length but we need to allocate with size header const alignment: u32 = @intCast(realloc_req.alignment); const align_log2: std.mem.Alignment = @enumFromInt(std.math.log2_int(usize, alignment)); @@ -171,27 +174,35 @@ fn rocReallocFn(realloc_req: *RocRealloc, _: *anyopaque) callconv(.c) void { realloc_req.answer = @ptrCast(result + header_size); } -fn rocDbgFn(roc_dbg_arg: *const RocDbg, _: *anyopaque) callconv(.c) void { - roc_dbg(roc_dbg_arg.utf8_bytes, roc_dbg_arg.len); +fn roc_dbg(roc_dbg_arg: *const RocDbg, _: *anyopaque) callconv(.c) void { + env_imports.roc_dbg(roc_dbg_arg.utf8_bytes, roc_dbg_arg.len); +} + +fn roc_expect_failed(roc_expect: *const RocExpectFailed, _: *anyopaque) callconv(.c) void { + env_imports.roc_expect_failed(roc_expect.utf8_bytes, roc_expect.len); } -fn rocExpectFailedFn(roc_expect: *const RocExpectFailed, _: *anyopaque) callconv(.c) void { - roc_expect_failed(roc_expect.utf8_bytes, roc_expect.len); +fn roc_crashed(crash_args: *const RocCrashed, _: *anyopaque) callconv(.c) noreturn { + env_imports.roc_panic(crash_args.utf8_bytes, crash_args.len); } -fn rocCrashedFn(roc_crashed: *const RocCrashed, _: *anyopaque) callconv(.c) noreturn { - roc_panic(roc_crashed.utf8_bytes, roc_crashed.len); +// Hosted function: Stdout.line! (index 0) +// Follows RocCall ABI: (ops, ret_ptr, args_ptr) +fn hostedStdoutLine(_: *anyopaque, _: *anyopaque, args: *const extern struct { str: RocStr }) callconv(.c) void { + const s = args.str.asSlice(); + env_imports.echo(s.ptr, s.len); } +const hosted_function_ptrs = [_]builtins.host_abi.HostedFn{ + builtins.host_abi.hostedFn(&hostedStdoutLine), // Stdout.line! (index 0) +}; + // External Roc entrypoint extern fn roc__main(ops: *RocOps, ret_ptr: *anyopaque, arg_ptr: ?*anyopaque) callconv(.c) void; // Dummy env for RocOps (not used in WASM) var dummy_env: u8 = 0; -// Empty hosted functions array (this platform has no hosted effects) -const empty_hosted_fns = [_]builtins.host_abi.HostedFn{}; - // Store the last result for wasm_result_len() var last_result: RocStr = undefined; @@ -200,15 +211,15 @@ var last_result: RocStr = undefined; export fn wasm_main() [*]const u8 { var roc_ops = RocOps{ .env = @ptrCast(&dummy_env), - .roc_alloc = rocAllocFn, - .roc_dealloc = rocDeallocFn, - .roc_realloc = rocReallocFn, - .roc_dbg = rocDbgFn, - .roc_expect_failed = rocExpectFailedFn, - .roc_crashed = rocCrashedFn, + .roc_alloc = roc_alloc, + .roc_dealloc = roc_dealloc, + .roc_realloc = roc_realloc, + .roc_dbg = roc_dbg, + .roc_expect_failed = roc_expect_failed, + .roc_crashed = roc_crashed, .hosted_fns = .{ - .count = 0, - .fns = @constCast(&empty_hosted_fns), + .count = hosted_function_ptrs.len, + .fns = @constCast(&hosted_function_ptrs), }, }; diff --git a/test/wasm/platform/main.roc b/test/wasm/platform/main.roc index 4a05a4fe23a..b74aae5097f 100644 --- a/test/wasm/platform/main.roc +++ b/test/wasm/platform/main.roc @@ -1,14 +1,16 @@ platform "" requires {} { main! : () => Str } - exposes [] + exposes [Stdout] packages {} provides { main_for_host!: "main" } targets: { files: "targets/", - static_lib: { - wasm32: ["libhost.a", app], + exe: { + wasm32: ["host.wasm", app], } } +import Stdout + main_for_host! : () => Str main_for_host! = main!