llvm(py): introduce BuildCtx + trace hub; refactor if-merge prepass + PHI wiring into module; unify logs; ctx-enable compare/ret/call/boxcall/externcall/typeop/newbox/safepoint; curated smoke option for if-merge; README updates; keep behavior stable

Selfhosting Dev
2025-09-17 16:11:01 +09:00
parent 2720884a20
commit a5054a271b
58 changed files with 2002 additions and 311 deletions
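For orientation, a minimal sketch of the BuildCtx shape implied by the ctx-enabled lowerers below. The field names mirror what the call sites read via getattr; the actual class definition in the builder may carry more than this.

    from dataclasses import dataclass
    from typing import Any, Dict, Optional

    @dataclass
    class BuildCtx:
        # Fields inferred from getattr() usage in the lowering functions in this
        # commit; anything beyond these is not assumed.
        resolver: Optional[Any] = None                      # cross-block value resolver
        preds: Optional[Dict[int, Any]] = None              # block id -> predecessor ids
        block_end_values: Optional[Dict[int, Any]] = None   # block id -> {vid: value}
        bb_map: Optional[Dict[int, Any]] = None             # block id -> ir.Block
        vmap: Optional[Dict[int, Any]] = None               # value id -> ir.Value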

View File

@ -7,8 +7,9 @@ Each instruction has its own file, following Rust structure
from .const import lower_const
from .binop import lower_binop
from .compare import lower_compare
from .jump import lower_jump
from .branch import lower_branch
# controlflow
from .controlflow.jump import lower_jump
from .controlflow.branch import lower_branch
from .ret import lower_return
from .phi import lower_phi
from .call import lower_call
@ -29,4 +30,4 @@ __all__ = [
'lower_externcall', 'lower_typeop', 'lower_safepoint',
'lower_barrier', 'lower_newbox',
'LoopFormContext', 'lower_while_loopform'
]
]

View File

@ -5,6 +5,7 @@ Handles +, -, *, /, %, &, |, ^, <<, >>
import llvmlite.ir as ir
from typing import Dict, Optional, Any
from utils.values import resolve_i64_strict
from .compare import lower_compare
import llvmlite.ir as ir
@ -38,12 +39,12 @@ def lower_binop(
"""
# Resolve operands as i64 (using resolver when available)
# For now, simple vmap lookup
if resolver is not None and preds is not None and block_end_values is not None:
lhs_val = resolver.resolve_i64(lhs, current_block, preds, block_end_values, vmap, bb_map)
rhs_val = resolver.resolve_i64(rhs, current_block, preds, block_end_values, vmap, bb_map)
else:
lhs_val = vmap.get(lhs, ir.Constant(ir.IntType(64), 0))
rhs_val = vmap.get(rhs, ir.Constant(ir.IntType(64), 0))
lhs_val = resolve_i64_strict(resolver, lhs, current_block, preds, block_end_values, vmap, bb_map)
rhs_val = resolve_i64_strict(resolver, rhs, current_block, preds, block_end_values, vmap, bb_map)
if lhs_val is None:
lhs_val = ir.Constant(ir.IntType(64), 0)
if rhs_val is None:
rhs_val = ir.Constant(ir.IntType(64), 0)
# Relational/equality operators delegate to compare
if op in ('==','!=','<','>','<=','>='):
@ -60,6 +61,7 @@ def lower_binop(
preds=preds,
block_end_values=block_end_values,
bb_map=bb_map,
ctx=getattr(resolver, 'ctx', None),
)
return
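The binop, compare, branch, and copy paths now route operand lookup through utils.values.resolve_i64_strict. Its implementation is not part of this diff; the sketch below captures the contract the call sites rely on: prefer same-block SSA from vmap, fall back to the resolver when all of its inputs are present, and return None rather than a default constant.

    from typing import Any, Dict, Optional

    def resolve_i64_strict(resolver, vid, current_block, preds,
                           block_end_values, vmap: Dict[int, Any], bb_map) -> Optional[Any]:
        # Same-block SSA wins when present.
        val = vmap.get(vid) if vmap is not None else None
        if val is not None:
            return val
        # Cross-block resolution only when the resolver has everything it needs.
        if resolver is not None and current_block is not None \
                and preds is not None and block_end_values is not None:
            try:
                return resolver.resolve_i64(vid, current_block, preds,
                                            block_end_values, vmap, bb_map)
            except Exception:
                return None
        return None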

View File

@ -4,7 +4,7 @@ Core of Nyash's "Everything is Box" philosophy
"""
import llvmlite.ir as ir
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Any
def _declare(module: ir.Module, name: str, ret, args):
for f in module.functions:
@ -47,7 +47,8 @@ def lower_boxcall(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower MIR BoxCall instruction
@ -68,10 +69,43 @@ def lower_boxcall(
i8 = ir.IntType(8)
i8p = i8.as_pointer()
# Short-hands with ctx (backward-compatible fallback)
r = resolver
p = preds
bev = block_end_values
bbm = bb_map
if ctx is not None:
try:
r = getattr(ctx, 'resolver', r)
p = getattr(ctx, 'preds', p)
bev = getattr(ctx, 'block_end_values', bev)
bbm = getattr(ctx, 'bb_map', bbm)
except Exception:
pass
def _res_i64(vid: int):
if r is not None and p is not None and bev is not None and bbm is not None:
try:
return r.resolve_i64(vid, builder.block, p, bev, vmap, bbm)
except Exception:
return None
return vmap.get(vid)
# If BuildCtx is provided, prefer its maps for consistency.
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
# Receiver value
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
recv_val = resolver.resolve_i64(box_vid, builder.block, preds, block_end_values, vmap, bb_map)
else:
recv_val = _res_i64(box_vid)
if recv_val is None:
recv_val = vmap.get(box_vid, ir.Constant(i64, 0))
# Minimal method bridging for strings and console
@ -96,11 +130,11 @@ def lower_boxcall(
if method_name == "substring":
# substring(start, end)
# If receiver is a handle (i64), use handle-based helper; else pointer-based API
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
s = resolver.resolve_i64(args[0], builder.block, preds, block_end_values, vmap, bb_map) if args else ir.Constant(i64, 0)
e = resolver.resolve_i64(args[1], builder.block, preds, block_end_values, vmap, bb_map) if len(args) > 1 else ir.Constant(i64, 0)
else:
s = _res_i64(args[0]) if args else ir.Constant(i64, 0)
if s is None:
s = vmap.get(args[0], ir.Constant(i64, 0)) if args else ir.Constant(i64, 0)
e = _res_i64(args[1]) if len(args) > 1 else ir.Constant(i64, 0)
if e is None:
e = vmap.get(args[1], ir.Constant(i64, 0)) if len(args) > 1 else ir.Constant(i64, 0)
if hasattr(recv_val, 'type') and isinstance(recv_val.type, ir.IntType):
# handle-based
@ -191,9 +225,8 @@ def lower_boxcall(
# ArrayBox.get(index) → nyash.array.get_h(handle, idx)
# MapBox.get(key) → nyash.map.get_hh(handle, key_any)
recv_h = _ensure_handle(builder, module, recv_val)
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
k = resolver.resolve_i64(args[0], builder.block, preds, block_end_values, vmap, bb_map) if args else ir.Constant(i64, 0)
else:
k = _res_i64(args[0]) if args else ir.Constant(i64, 0)
if k is None:
k = vmap.get(args[0], ir.Constant(i64, 0)) if args else ir.Constant(i64, 0)
callee_map = _declare(module, "nyash.map.get_hh", i64, [i64, i64])
res = builder.call(callee_map, [recv_h, k], name="map_get_hh")
@ -204,9 +237,8 @@ def lower_boxcall(
if method_name == "push":
# ArrayBox.push(val) → nyash.array.push_h(handle, val)
recv_h = _ensure_handle(builder, module, recv_val)
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
v0 = resolver.resolve_i64(args[0], builder.block, preds, block_end_values, vmap, bb_map) if args else ir.Constant(i64, 0)
else:
v0 = _res_i64(args[0]) if args else ir.Constant(i64, 0)
if v0 is None:
v0 = vmap.get(args[0], ir.Constant(i64, 0)) if args else ir.Constant(i64, 0)
callee = _declare(module, "nyash.array.push_h", i64, [i64, i64])
res = builder.call(callee, [recv_h, v0], name="arr_push_h")
@ -217,11 +249,11 @@ def lower_boxcall(
if method_name == "set":
# MapBox.set(key, val) → nyash.map.set_hh(handle, key_any, val_any)
recv_h = _ensure_handle(builder, module, recv_val)
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
k = resolver.resolve_i64(args[0], builder.block, preds, block_end_values, vmap, bb_map) if len(args) > 0 else ir.Constant(i64, 0)
v = resolver.resolve_i64(args[1], builder.block, preds, block_end_values, vmap, bb_map) if len(args) > 1 else ir.Constant(i64, 0)
else:
k = _res_i64(args[0]) if len(args) > 0 else ir.Constant(i64, 0)
if k is None:
k = vmap.get(args[0], ir.Constant(i64, 0)) if len(args) > 0 else ir.Constant(i64, 0)
v = _res_i64(args[1]) if len(args) > 1 else ir.Constant(i64, 0)
if v is None:
v = vmap.get(args[1], ir.Constant(i64, 0)) if len(args) > 1 else ir.Constant(i64, 0)
callee = _declare(module, "nyash.map.set_hh", i64, [i64, i64, i64])
res = builder.call(callee, [recv_h, k, v], name="map_set_hh")
@ -232,9 +264,8 @@ def lower_boxcall(
if method_name == "has":
# MapBox.has(key) → nyash.map.has_hh(handle, key_any)
recv_h = _ensure_handle(builder, module, recv_val)
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
k = resolver.resolve_i64(args[0], builder.block, preds, block_end_values, vmap, bb_map) if args else ir.Constant(i64, 0)
else:
k = _res_i64(args[0]) if args else ir.Constant(i64, 0)
if k is None:
k = vmap.get(args[0], ir.Constant(i64, 0)) if args else ir.Constant(i64, 0)
callee = _declare(module, "nyash.map.has_hh", i64, [i64, i64])
res = builder.call(callee, [recv_h, k], name="map_has_hh")
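The "prefer BuildCtx maps for consistency" preamble recurs in boxcall, call, compare, externcall, return, typeop, and convert. A follow-up could fold it into one helper; the function below is a hypothetical consolidation, not part of this commit.

    def _apply_ctx(ctx, resolver=None, preds=None, block_end_values=None, bb_map=None):
        """Fill missing maps from a BuildCtx, keeping explicitly passed ones."""
        if ctx is not None:
            try:
                if getattr(ctx, 'resolver', None) is not None:
                    resolver = ctx.resolver
                if preds is None and getattr(ctx, 'preds', None) is not None:
                    preds = ctx.preds
                if block_end_values is None and getattr(ctx, 'block_end_values', None) is not None:
                    block_end_values = ctx.block_end_values
                if bb_map is None and getattr(ctx, 'bb_map', None) is not None:
                    bb_map = ctx.bb_map
            except Exception:
                pass
        return resolver, preds, block_end_values, bb_map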

View File

@ -4,7 +4,8 @@ Handles regular function calls (not BoxCall or ExternCall)
"""
import llvmlite.ir as ir
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Any
from trace import debug as trace_debug
def lower_call(
builder: ir.IRBuilder,
@ -16,7 +17,8 @@ def lower_call(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower MIR Call instruction
@ -30,6 +32,50 @@ def lower_call(
vmap: Value map
resolver: Optional resolver for type handling
"""
# If BuildCtx is provided, prefer its maps for consistency.
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
# Short-hands with ctx (backward-compatible fallback)
r = resolver
p = preds
bev = block_end_values
bbm = bb_map
if ctx is not None:
try:
r = getattr(ctx, 'resolver', r)
p = getattr(ctx, 'preds', p)
bev = getattr(ctx, 'block_end_values', bev)
bbm = getattr(ctx, 'bb_map', bbm)
except Exception:
pass
# Resolver helpers (prefer resolver when available)
def _res_i64(vid: int):
if r is not None and p is not None and bev is not None and bbm is not None:
try:
return r.resolve_i64(vid, builder.block, p, bev, vmap, bbm)
except Exception:
return None
return vmap.get(vid)
def _res_ptr(vid: int):
if r is not None and p is not None and bev is not None:
try:
return r.resolve_ptr(vid, builder.block, p, bev, vmap)
except Exception:
return None
return vmap.get(vid)
# Resolve function: accepts string name or value-id referencing a string literal
actual_name = func_name
if not isinstance(func_name, str):
@ -58,11 +104,10 @@ def lower_call(
arg_val = None
if i < len(func.args):
expected_type = func.args[i].type
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
if hasattr(expected_type, 'is_pointer') and expected_type.is_pointer:
arg_val = resolver.resolve_ptr(arg_id, builder.block, preds, block_end_values, vmap)
else:
arg_val = resolver.resolve_i64(arg_id, builder.block, preds, block_end_values, vmap, bb_map)
if hasattr(expected_type, 'is_pointer') and expected_type.is_pointer:
arg_val = _res_ptr(arg_id)
else:
arg_val = _res_i64(arg_id)
if arg_val is None:
arg_val = vmap.get(arg_id)
if arg_val is None:
@ -88,13 +133,8 @@ def lower_call(
# Make the call
result = builder.call(func, call_args, name=f"call_{func_name}")
# Optional trace for final debugging
try:
import os
if os.environ.get('NYASH_LLVM_TRACE_FINAL') == '1' and isinstance(actual_name, str):
if actual_name in ("Main.node_json/3", "Main.esc_json/1", "main"):
print(f"[TRACE] call {actual_name} args={len(call_args)}", flush=True)
except Exception:
pass
if isinstance(actual_name, str) and actual_name in ("Main.node_json/3", "Main.esc_json/1", "main"):
trace_debug(f"[TRACE] call {actual_name} args={len(call_args)}")
# Store result if needed
if dst_vid is not None:

View File

@ -5,7 +5,9 @@ Handles comparison operations (<, >, <=, >=, ==, !=)
import llvmlite.ir as ir
from typing import Dict, Optional, Any
from utils.values import resolve_i64_strict
from .externcall import lower_externcall
from trace import values as trace_values
def lower_compare(
builder: ir.IRBuilder,
@ -20,6 +22,7 @@ def lower_compare(
block_end_values=None,
bb_map=None,
meta: Optional[Dict[str, Any]] = None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower MIR Compare instruction
@ -32,15 +35,23 @@ def lower_compare(
dst: Destination value ID
vmap: Value map
"""
# If BuildCtx is provided, prefer its maps for consistency.
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
# Get operands
# Prefer same-block SSA from vmap; fallback to resolver for cross-block dominance
lhs_val = vmap.get(lhs)
rhs_val = vmap.get(rhs)
if (lhs_val is None or rhs_val is None) and resolver is not None and preds is not None and block_end_values is not None and current_block is not None:
if lhs_val is None:
lhs_val = resolver.resolve_i64(lhs, current_block, preds, block_end_values, vmap, bb_map)
if rhs_val is None:
rhs_val = resolver.resolve_i64(rhs, current_block, preds, block_end_values, vmap, bb_map)
lhs_val = resolve_i64_strict(resolver, lhs, current_block, preds, block_end_values, vmap, bb_map)
rhs_val = resolve_i64_strict(resolver, rhs, current_block, preds, block_end_values, vmap, bb_map)
i64 = ir.IntType(64)
i8p = ir.IntType(8).as_pointer()
@ -63,12 +74,7 @@ def lower_compare(
except Exception:
pass
if force_string or lhs_tag or rhs_tag:
try:
import os
if os.environ.get('NYASH_LLVM_TRACE_VALUES') == '1':
print(f"[compare] string-eq path: lhs={lhs} rhs={rhs} force={force_string} tagL={lhs_tag} tagR={rhs_tag}", flush=True)
except Exception:
pass
trace_values(f"[compare] string-eq path: lhs={lhs} rhs={rhs} force={force_string} tagL={lhs_tag} tagR={rhs_tag}")
# Prefer same-block SSA (vmap) since string handles are produced in-place; fallback to resolver
lh = lhs_val if lhs_val is not None else (
resolver.resolve_i64(lhs, current_block, preds, block_end_values, vmap, bb_map)
@ -78,14 +84,7 @@ def lower_compare(
resolver.resolve_i64(rhs, current_block, preds, block_end_values, vmap, bb_map)
if (resolver is not None and preds is not None and block_end_values is not None and current_block is not None) else ir.Constant(i64, 0)
)
try:
import os
if os.environ.get('NYASH_LLVM_TRACE_VALUES') == '1':
lz = isinstance(lh, ir.Constant) and getattr(getattr(lh,'constant',None),'constant',None) == 0
rz = isinstance(rh, ir.Constant) and getattr(getattr(rh,'constant',None),'constant',None) == 0
print(f"[compare] string-eq args: lh_is_const={isinstance(lh, ir.Constant)} rh_is_const={isinstance(rh, ir.Constant)}", flush=True)
except Exception:
pass
trace_values(f"[compare] string-eq args: lh_is_const={isinstance(lh, ir.Constant)} rh_is_const={isinstance(rh, ir.Constant)}")
eqf = None
for f in builder.module.functions:
if f.name == 'nyash.string.eq_hh':
@ -117,12 +116,11 @@ def lower_compare(
# Perform signed comparison using canonical predicates ('<','>','<=','>=','==','!=')
pred = op if op in ('<','>','<=','>=','==','!=') else '=='
cmp_result = builder.icmp_signed(pred, lhs_val, rhs_val, name=f"cmp_{dst}")
# Convert i1 to i64 (0 or 1)
result = builder.zext(cmp_result, i64, name=f"cmp_i64_{dst}")
# Store result
vmap[dst] = result
# Store the canonical i1 compare result. Consumers that require i64
# should explicitly cast at their use site (e.g., via resolver or
# instruction-specific lowering) to avoid emitting casts after
# terminators when used as branch conditions.
vmap[dst] = cmp_result
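Under the new contract a Compare leaves an i1 in vmap; branch lowering consumes it directly (see controlflow/branch.py below), and a consumer that still needs an i64 widens at its own use site. A minimal sketch of such a use-site cast, assuming a builder positioned in the consuming block:

    import llvmlite.ir as ir

    def as_i64(builder: ir.IRBuilder, val: ir.Value) -> ir.Value:
        # Widen a canonical i1 compare result; pass other values through unchanged.
        if isinstance(val.type, ir.IntType) and val.type.width == 1:
            return builder.zext(val, ir.IntType(64), name="cmp_i64")
        return val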
def lower_fcmp(
builder: ir.IRBuilder,

View File

@ -5,6 +5,7 @@ Conditional branch based on condition value
import llvmlite.ir as ir
from typing import Dict
from utils.values import resolve_i64_strict
def lower_branch(
builder: ir.IRBuilder,
@ -28,22 +29,22 @@ def lower_branch(
vmap: Value map
bb_map: Block map
"""
# Get condition value
if resolver is not None and preds is not None and block_end_values is not None:
cond = resolver.resolve_i64(cond_vid, builder.block, preds, block_end_values, vmap, bb_map)
else:
cond = vmap.get(cond_vid)
if not cond:
# Get condition value with preference to same-block SSA
cond = resolve_i64_strict(resolver, cond_vid, builder.block, preds, block_end_values, vmap, bb_map)
if cond is None:
# Default to false if missing
cond = ir.Constant(ir.IntType(1), 0)
# Convert to i1 if needed
if hasattr(cond, 'type'):
if cond.type == ir.IntType(64):
# If we already have an i1 (canonical compare result), use it directly.
if isinstance(cond.type, ir.IntType) and cond.type.width == 1:
pass
elif isinstance(cond.type, ir.IntType) and cond.type.width == 64:
# i64 to i1: compare != 0
zero = ir.Constant(ir.IntType(64), 0)
cond = builder.icmp_unsigned('!=', cond, zero, name="cond_i1")
elif cond.type == ir.IntType(8).as_pointer():
elif isinstance(cond.type, ir.PointerType):
# Pointer to i1: compare != null
null = ir.Constant(cond.type, None)
cond = builder.icmp_unsigned('!=', cond, null, name="cond_p1")

View File

@ -21,4 +21,5 @@ def lower_jump(
"""
target_bb = bb_map.get(target_bid)
if target_bb:
builder.branch(target_bb)
builder.branch(target_bb)

View File

@ -0,0 +1,80 @@
"""
Lowering helpers for regular (structured) while control flow
"""
from typing import List, Dict, Any
import llvmlite.ir as ir
def lower_while_regular(
builder: ir.IRBuilder,
func: ir.Function,
cond_vid: int,
body_insts: List[Dict[str, Any]],
loop_id: int,
vmap: Dict[int, ir.Value],
bb_map: Dict[int, ir.Block],
resolver,
preds,
block_end_values,
):
"""Create a minimal while in IR: cond -> body -> cond, with exit.
The body instructions are lowered using the caller's dispatcher.
"""
i1 = ir.IntType(1)
i64 = ir.IntType(64)
# Create basic blocks: cond -> body -> cond, and exit
cond_bb = func.append_basic_block(name=f"while{loop_id}_cond")
body_bb = func.append_basic_block(name=f"while{loop_id}_body")
exit_bb = func.append_basic_block(name=f"while{loop_id}_exit")
# Jump from current to cond
builder.branch(cond_bb)
# Cond block
cbuild = ir.IRBuilder(cond_bb)
try:
# Resolve against the condition block to localize dominance
cond_val = resolver.resolve_i64(cond_vid, cond_bb, preds, block_end_values, vmap, bb_map)
except Exception:
cond_val = vmap.get(cond_vid)
if cond_val is None:
cond_val = ir.Constant(i1, 0)
# Normalize to i1
if hasattr(cond_val, 'type'):
if isinstance(cond_val.type, ir.IntType) and cond_val.type.width == 64:
zero64 = ir.Constant(i64, 0)
cond_val = cbuild.icmp_unsigned('!=', cond_val, zero64, name="while_cond_i1")
elif isinstance(cond_val.type, ir.PointerType):
nullp = ir.Constant(cond_val.type, None)
cond_val = cbuild.icmp_unsigned('!=', cond_val, nullp, name="while_cond_p1")
elif isinstance(cond_val.type, ir.IntType) and cond_val.type.width == 1:
# already i1
pass
else:
# Fallback: treat as false
cond_val = ir.Constant(i1, 0)
else:
cond_val = ir.Constant(i1, 0)
cbuild.cbranch(cond_val, body_bb, exit_bb)
# Body block
bbuild = ir.IRBuilder(body_bb)
# The caller must provide a dispatcher for body_insts; we dispatch each one inline here.
# The dispatcher is expected to have the signature lower_instruction(builder, inst, func).
lower_instruction = getattr(resolver, '_owner_lower_instruction', None)
if lower_instruction is None:
raise RuntimeError('resolver._owner_lower_instruction not set (needs NyashLLVMBuilder.lower_instruction)')
for sub in body_insts:
if bbuild.block.terminator is not None:
cont = func.append_basic_block(name=f"cont_bb_{bbuild.block.name}")
bbuild.position_at_end(cont)
lower_instruction(bbuild, sub, func)
# Ensure terminator: if not terminated, branch back to cond
if bbuild.block.terminator is None:
bbuild.branch(cond_bb)
# Continue at exit
builder.position_at_end(exit_bb)
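Usage sketch for the new helper: the owner wires its instruction dispatcher onto the resolver before delegating, matching the error message above. The wrapper name emit_while and anything about NyashLLVMBuilder beyond lower_instruction are assumptions, and the import path for lower_while_regular is not shown in this view.

    def emit_while(owner, builder, func, cond_vid, body_insts, loop_id,
                   vmap, bb_map, resolver, preds, block_end_values):
        # The owner exposes lower_instruction(builder, inst, func) as the dispatcher.
        resolver._owner_lower_instruction = owner.lower_instruction
        lower_while_regular(builder, func, cond_vid, body_insts, loop_id,
                            vmap, bb_map, resolver, preds, block_end_values)
        # On return, `builder` is positioned at the loop's exit block.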

View File

@ -0,0 +1,46 @@
"""
Copy instruction lowering
MIR13 PHI-off uses explicit copies along edges/blocks to model merges.
"""
import llvmlite.ir as ir
from typing import Dict, Optional, Any
from utils.values import resolve_i64_strict
def lower_copy(
builder: ir.IRBuilder,
dst: int,
src: int,
vmap: Dict[int, ir.Value],
resolver=None,
current_block=None,
preds=None,
block_end_values=None,
bb_map=None,
ctx: Optional[Any] = None,
):
"""Lower a copy by mapping dst to src value in the current block scope.
Prefer same-block SSA from vmap; fallback to resolver to preserve
dominance and to localize values across predecessors.
"""
# If BuildCtx is provided, prefer its maps for consistency.
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'vmap', None) is not None and vmap is None:
vmap = ctx.vmap
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
# Prefer local SSA; resolve otherwise to preserve dominance
val = resolve_i64_strict(resolver, src, current_block, preds, block_end_values, vmap, bb_map)
if val is None:
val = ir.Constant(ir.IntType(64), 0)
vmap[dst] = val
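For context, the PHI-off merge shape this handles: each arm of an if copies into the same destination value id, and the join block reads that id without a Phi. The value ids below are made up; the call mirrors the signature above.

    # bb1 (then): Copy v10 <- v7    bb2 (else): Copy v10 <- v8    bb3 (join): uses v10
    lower_copy(builder, dst=10, src=7, vmap=vmap, resolver=resolver,
               current_block=builder.block, preds=preds,
               block_end_values=block_end_values, bb_map=bb_map)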

View File

Minimal mapping for NyRT-exported symbols (console/log family, etc.)
"""
import llvmlite.ir as ir
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Any
def lower_externcall(
builder: ir.IRBuilder,
@ -16,7 +16,8 @@ def lower_externcall(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower MIR ExternCall instruction
@ -30,6 +31,19 @@ def lower_externcall(
vmap: Value map
resolver: Optional resolver for type handling
"""
# If BuildCtx is provided, prefer its maps for consistency.
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
# Accept full symbol names (e.g., "nyash.console.log", "nyash.string.len_h").
llvm_name = func_name
@ -83,13 +97,17 @@ def lower_externcall(
call_args: List[ir.Value] = []
for i, arg_id in enumerate(args):
orig_arg_id = arg_id
# Prefer resolver
# Prefer resolver/ctx
aval = None
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
if len(func.args) > i and isinstance(func.args[i].type, ir.PointerType):
aval = resolver.resolve_ptr(arg_id, builder.block, preds, block_end_values, vmap)
else:
aval = resolver.resolve_i64(arg_id, builder.block, preds, block_end_values, vmap, bb_map)
else:
try:
if len(func.args) > i and isinstance(func.args[i].type, ir.PointerType):
aval = resolver.resolve_ptr(arg_id, builder.block, preds, block_end_values, vmap)
else:
aval = resolver.resolve_i64(arg_id, builder.block, preds, block_end_values, vmap, bb_map)
except Exception:
aval = None
if aval is None:
aval = vmap.get(arg_id)
if aval is None:
# Default guess

View File

@ -123,7 +123,10 @@ def lower_while_loopform(
lf.tag_phi = tag_phi
lf.payload_phi = payload_phi
if os.environ.get('NYASH_CLI_VERBOSE') == '1':
print(f"[LoopForm] Created loop structure (id={loop_id})")
try:
from trace import debug as trace_debug
trace_debug(f"[LoopForm] Created loop structure (id={loop_id})")
except Exception:
pass
return True

View File

@ -4,7 +4,7 @@ Handles box creation (new StringBox(), new IntegerBox(), etc.)
"""
import llvmlite.ir as ir
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Any
def lower_newbox(
builder: ir.IRBuilder,
@ -13,7 +13,8 @@ def lower_newbox(
args: List[int],
dst_vid: int,
vmap: Dict[int, ir.Value],
resolver=None
resolver=None,
ctx: Optional[Any] = None
) -> None:
"""
Lower MIR NewBox instruction

View File

@ -134,12 +134,15 @@ def lower_phi(
import os
if used_default_zero and os.environ.get('NYASH_LLVM_PHI_STRICT') == '1':
raise RuntimeError(f"[LLVM_PY] PHI dst={dst_vid} used synthesized zero; check preds/incoming")
if os.environ.get('NYASH_LLVM_TRACE_PHI') == '1':
try:
from trace import phi as trace_phi
try:
blkname = str(current_block.name)
except Exception:
blkname = '<blk>'
print(f"[PHI] {blkname} v{dst_vid} incoming={len(incoming_pairs)} zero={1 if used_default_zero else 0}")
trace_phi(f"[PHI] {blkname} v{dst_vid} incoming={len(incoming_pairs)} zero={1 if used_default_zero else 0}")
except Exception:
pass
# Propagate string-ness: if any incoming value-id is tagged string-ish, mark dst as string-ish.
try:
if resolver is not None and hasattr(resolver, 'is_stringish') and hasattr(resolver, 'mark_string'):

View File

@ -4,7 +4,7 @@ Handles void and value returns
"""
import llvmlite.ir as ir
from typing import Dict, Optional
from typing import Dict, Optional, Any
def lower_return(
builder: ir.IRBuilder,
@ -14,7 +14,8 @@ def lower_return(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower MIR Return instruction
@ -25,6 +26,19 @@ def lower_return(
vmap: Value map
return_type: Expected return type
"""
# Prefer BuildCtx maps if provided
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
if value_id is None:
# Void return
builder.ret_void()
@ -33,6 +47,53 @@ def lower_return(
ret_val = None
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
try:
# If this block has a declared PHI for the return value, force using the
# local PHI placeholder to ensure dominance and let finalize_phis wire it.
try:
block_name = builder.block.name
cur_bid = int(str(block_name).replace('bb',''))
except Exception:
cur_bid = -1
try:
bm = getattr(resolver, 'block_phi_incomings', {}) or {}
except Exception:
bm = {}
if isinstance(value_id, int) and isinstance(bm.get(cur_bid), dict) and value_id in bm.get(cur_bid):
# Reuse predeclared ret-phi when available
cur = None
try:
rp = getattr(resolver, 'ret_phi_map', {}) or {}
key = (int(cur_bid), int(value_id))
if key in rp:
cur = rp[key]
except Exception:
cur = None
if cur is None:
btop = ir.IRBuilder(builder.block)
try:
btop.position_at_start(builder.block)
except Exception:
pass
# Reuse existing local phi if present; otherwise create
cur = vmap.get(value_id)
need_new = True
try:
need_new = not (cur is not None and hasattr(cur, 'add_incoming') and getattr(getattr(cur, 'basic_block', None), 'name', None) == builder.block.name)
except Exception:
need_new = True
if need_new:
cur = btop.phi(ir.IntType(64), name=f"phi_ret_{value_id}")
# Bind to maps
vmap[value_id] = cur
try:
if hasattr(resolver, 'global_vmap') and isinstance(resolver.global_vmap, dict):
resolver.global_vmap[value_id] = cur
except Exception:
pass
ret_val = cur
if ret_val is not None:
builder.ret(ret_val)
return
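Condensed, the new ret path does the following when the current block declares PHI incomings for the returned value. This is a sketch: ret_phi_map, global_vmap, and finalize_phis are taken from this commit, while the helper name below is not.

    import llvmlite.ir as ir

    def _ret_phi_placeholder(builder: ir.IRBuilder, resolver, cur_bid: int,
                             value_id: int, vmap) -> ir.Value:
        # Reuse a predeclared ret-phi for (block, value) when one exists...
        key = (int(cur_bid), int(value_id))
        phi = (getattr(resolver, 'ret_phi_map', {}) or {}).get(key)
        if phi is None:
            top = ir.IRBuilder(builder.block)
            top.position_at_start(builder.block)
            phi = top.phi(ir.IntType(64), name=f"phi_ret_{value_id}")
        # ...bind it so finalize_phis can wire the incomings, then return through it.
        vmap[value_id] = phi
        return phi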
if isinstance(return_type, ir.PointerType):
ret_val = resolver.resolve_ptr(value_id, builder.block, preds, block_end_values, vmap)
else:

View File

@ -4,7 +4,7 @@ GC safepoints where runtime can safely collect garbage
"""
import llvmlite.ir as ir
from typing import Dict, List, Optional
from typing import Dict, List, Optional, Any
def lower_safepoint(
builder: ir.IRBuilder,
@ -15,7 +15,8 @@ def lower_safepoint(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None
) -> None:
"""
Lower MIR Safepoint instruction
@ -53,8 +54,18 @@ def lower_safepoint(
# Store each live value
for i, vid in enumerate(live_values):
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
val = resolver.resolve_i64(vid, builder.block, preds, block_end_values, vmap, bb_map)
# Prefer BuildCtx if provided
r = resolver; p = preds; bev = block_end_values; bbm = bb_map
if ctx is not None:
try:
r = getattr(ctx, 'resolver', r)
p = getattr(ctx, 'preds', p)
bev = getattr(ctx, 'block_end_values', bev)
bbm = getattr(ctx, 'bb_map', bbm)
except Exception:
pass
if r is not None and p is not None and bev is not None and bbm is not None:
val = r.resolve_i64(vid, builder.block, p, bev, vmap, bbm)
else:
val = vmap.get(vid, ir.Constant(i64, 0))

View File

@ -4,7 +4,7 @@ Handles type conversions and type checks
"""
import llvmlite.ir as ir
from typing import Dict, Optional
from typing import Dict, Optional, Any
def lower_typeop(
builder: ir.IRBuilder,
@ -16,7 +16,8 @@ def lower_typeop(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower MIR TypeOp instruction
@ -35,6 +36,19 @@ def lower_typeop(
vmap: Value map
resolver: Optional resolver for type handling
"""
# Prefer BuildCtx maps when provided
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
src_val = resolver.resolve_i64(src_vid, builder.block, preds, block_end_values, vmap, bb_map)
else:
@ -83,7 +97,8 @@ def lower_convert(
resolver=None,
preds=None,
block_end_values=None,
bb_map=None
bb_map=None,
ctx: Optional[Any] = None,
) -> None:
"""
Lower type conversion between primitive types
@ -96,6 +111,18 @@ def lower_convert(
to_type: Target type
vmap: Value map
"""
if ctx is not None:
try:
if getattr(ctx, 'resolver', None) is not None:
resolver = ctx.resolver
if getattr(ctx, 'preds', None) is not None and preds is None:
preds = ctx.preds
if getattr(ctx, 'block_end_values', None) is not None and block_end_values is None:
block_end_values = ctx.block_end_values
if getattr(ctx, 'bb_map', None) is not None and bb_map is None:
bb_map = ctx.bb_map
except Exception:
pass
if resolver is not None and preds is not None and block_end_values is not None and bb_map is not None:
# Choose resolution based on from_type
if from_type == "ptr":