- Add HAKO_TRACE_EXECUTION to trace executor route
  - Rust hv1_inline: stderr "[trace] executor: hv1_inline (rust)"
  - Hakovm dispatcher: stdout "[trace] executor: hakovm (hako)"
  - test_runner: trace lines for hv1_inline/core/hakovm routes
- Add HAKO_VERIFY_SHOW_LOGS and HAKO_DEBUG=1 (enables both)
  - verify_v1_inline_file(): log passthrough with numeric rc extraction
  - test_runner exports via HAKO_DEBUG
- Canary expansion under phase2170 (state spec)
  - Array: push ×5/10 → size, len/length alias, per-recv/global, flow across blocks
  - Map: set dup-key non-increment, value_state get/has
  - run_all.sh: unify, remove SKIPs; all PASS
- Docs
  - ENV_VARS.md: add Debug/Tracing toggles and examples
  - PLAN.md/CURRENT_TASK.md: mark 21.7 green, add Quickstart lines

All changes gated by env vars; default behavior unchanged.
131 lines · 5.2 KiB · Plaintext
// tools/hako_parser/parser_core.hako - HakoParserCoreBox (token-based MVP)
using tools.hako_parser.tokenizer as HakoTokenizerBox

static box HakoParserCoreBox {
    // Parse .hako source text into a minimal AST map:
    // {
    //   uses:     Array<String>                        (module names from `using "mod"`),
    //   aliases:  Array<{name, alias}>                 (from `using "mod" as Alias`),
    //   boxes:    Array<{name, is_static, span_line,
    //                    methods: Array<{name, arity, span}>}>,
    //   includes: Array<String>                        (include line numbers, stringified)
    // }
    // Single-pass, tolerant parser: malformed constructs are skipped, never fatal.
    parse(text) {
        local ast = new MapBox()
        ast.set("uses", new ArrayBox())
        ast.set("aliases", new ArrayBox())
        ast.set("boxes", new ArrayBox())
        ast.set("includes", new ArrayBox())
        // null input yields an empty (but fully-shaped) AST
        if text == null { return ast }

        local toks = HakoTokenizerBox.tokenize(text)
        local p = 0
        local N = toks.size()

        // Parse stream (single pass, tolerant)
        while p < N {
            local t = me._peek(toks, p, N)

            if me._eq(t, "USING") == 1 {
                // using "mod" (as Alias)?
                p = me._advance(p, N)
                local t1 = me._peek(toks, p, N)
                if me._eq(t1, "STRING") == 1 {
                    local mod_name = t1.get("lexeme"); ast.get("uses").push(mod_name); p = me._advance(p, N)
                    // optional: as Alias
                    local t2 = me._peek(toks, p, N)
                    if me._eq(t2, "AS") == 1 {
                        p = me._advance(p, N)
                        local t3 = me._peek(toks, p, N)
                        if me._eq(t3, "IDENT") == 1 {
                            local alias = t3.get("lexeme"); p = me._advance(p, N)
                            local rec = new MapBox(); rec.set("name", mod_name); rec.set("alias", alias)
                            ast.get("aliases").push(rec)
                        }
                    }
                } else {
                    // tolerate malformed using; skip token (p already moved past USING)
                }
                continue
            }

            if me._eq(t, "INCLUDE") == 1 {
                // include "path" (record line for diagnostics as string to keep emitter simple)
                p = me._advance(p, N)
                local s = me._peek(toks, p, N)
                if me._eq(s, "STRING") == 1 {
                    local ln = s.get("line"); if ln == null { ln = 0 }
                    ast.get("includes").push(me._itoa(ln)); p = me._advance(p, N)
                }
                continue
            }

            if me._eq(t, "STATIC") == 1 {
                // static box Name { methods }
                // expected token shape: STATIC BOX IDENT LBRACE ... RBRACE
                local save = p
                local static_tok = t
                p = me._advance(p, N) // past STATIC
                local tb = me._peek(toks, p, N)
                // not "static box": rewind to the token after STATIC and keep scanning
                if me._eq(tb, "BOX") == 0 { p = save + 1; continue }
                p = me._advance(p, N)
                local tn = me._peek(toks, p, N)
                if me._eq(tn, "IDENT") == 0 { continue }
                local box_name = tn.get("lexeme")
                local box_line = tn.get("line")
                // fall back to the STATIC keyword's line if the name token has none
                if box_line == null { box_line = static_tok.get("line") }
                p = me._advance(p, N)
                // expect '{'
                local tl = me._peek(toks, p, N)
                if me._eq(tl, "LBRACE") == 0 { continue }
                p = me._advance(p, N)
                // register box (bool is_static, and span_line metadata)
                local b = new MapBox(); b.set("name", box_name); b.set("is_static", true); b.set("span_line", box_line); b.set("methods", new ArrayBox())
                ast.get("boxes").push(b)
                // scan until matching RBRACE (flat, tolerate nested braces count)
                local depth = 1
                while p < N && depth > 0 {
                    local tk = me._peek(toks, p, N)
                    if me._eq(tk, "LBRACE") == 1 { depth = depth + 1; p = me._advance(p, N); continue }
                    if me._eq(tk, "RBRACE") == 1 { depth = depth - 1; p = me._advance(p, N); if depth == 0 { break } else { continue } }
                    // method declaration: METHOD IDENT LPAREN params RPAREN
                    if me._eq(tk, "METHOD") == 1 {
                        local mline = tk.get("line"); p = me._advance(p, N)
                        local mid = me._peek(toks, p, N); if me._eq(mid, "IDENT") == 0 { continue }
                        local mname = mid.get("lexeme"); p = me._advance(p, N)
                        // params
                        local lp = me._peek(toks, p, N); if me._eq(lp, "LPAREN") == 0 { continue } p = me._advance(p, N)
                        // count commas until RPAREN (no nesting inside params for MVP)
                        local arity = 0; local any = 0
                        while p < N {
                            local tt = me._peek(toks, p, N)
                            if me._eq(tt, "RPAREN") == 1 { p = me._advance(p, N); break }
                            if me._eq(tt, "COMMA") == 1 { arity = arity + 1; p = me._advance(p, N); any = 1; continue }
                            // consume any token inside params
                            p = me._advance(p, N); any = 1
                        }
                        // arity = comma count + 1 (if non-empty)
                        if any == 1 { arity = arity + 1 }
                        // record method
                        local m = new MapBox(); m.set("name", mname); m.set("arity", arity); m.set("span", mline)
                        b.get("methods").push(m)
                        continue
                    }
                    p = me._advance(p, N)
                }
                continue
            }

            // skip unhandled token
            p = me._advance(p, N)
        }
        return ast
    }

    // Token at idx, or null once idx runs past the end of the stream.
    _peek(toks, idx, N) { if idx >= N { return null } return toks.get(idx) }

    // 1 when token t exists and its "type" equals kind, else 0 (null-safe).
    _eq(t, kind) { if t == null { return 0 } if t.get("type") == kind { return 1 } return 0 }

    // Advance the cursor by one, clamped at N so it never overruns.
    _advance(p, N) { if p < N { return p + 1 } return p }

    // Integer -> decimal string. Fixed: negative input previously fell through
    // the digit loop and returned ""; it now renders a leading '-'.
    _itoa(n) {
        local v = 0 + n
        if v == 0 { return "0" }
        local neg = 0
        if v < 0 { neg = 1; v = 0 - v }
        local digits = "0123456789"; local tmp = ""
        while v > 0 { local d = v % 10; tmp = digits.substring(d, d+1) + tmp; v = v / 10 }
        if neg == 1 { return "-" + tmp }
        return tmp
    }
}
// CLI entry-point shim: parsing is exercised via HakoParserCoreBox.parse;
// main itself does nothing and reports success (0).
static box HakoParserCoreMain {
    method main(args) {
        return 0
    }
}