//! hakorune/src/using/resolver.rs
//!
//! Resolver for `using` targets: loads configuration from `hakorune.toml` /
//! `nyash.toml` and maps module, package, and workspace names to concrete
//! file paths.
use crate::using::errors::UsingError;
use crate::using::policy::UsingPolicy;
use crate::using::spec::{PackageKind, UsingPackage};
use std::collections::HashMap;
/// Populate using context vectors from nyash.toml (if present).
/// Keeps behavior aligned with existing runner pipeline:
/// - Adds [using.paths] entries to `using_paths`
/// - Flattens [modules] into (name, path) pairs appended to `pending_modules`
/// - Reads optional [aliases] table (k -> v)
pub fn populate_from_toml(
using_paths: &mut Vec<String>,
pending_modules: &mut Vec<(String, String)>,
aliases: &mut HashMap<String, String>,
packages: &mut HashMap<String, UsingPackage>,
) -> Result<UsingPolicy, UsingError> {
let mut policy = UsingPolicy::default();
feat(stage-b): Add FLOW keyword support + fix Stage-3 keyword conflicts ## ✅ Fixed Issues ### 1. `local` keyword tokenization (commit 9aab64f7) - Added Stage-3 gate for LOCAL/TRY/CATCH/THROW keywords - LOCAL now only active when NYASH_PARSER_STAGE3=1 ### 2. `env.local.get` keyword conflict - File: `lang/src/compiler/entry/compiler_stageb.hako:21-23` - Problem: `.local` in member access tokenized as `.LOCAL` keyword - Fix: Commented out `env.local.get("HAKO_SOURCE")` line - Fallback: Use `--source` argument (still functional) ### 3. `flow` keyword missing - Added FLOW to TokenType enum (`src/tokenizer/kinds.rs`) - Added "flow" → TokenType::FLOW mapping (`src/tokenizer/lex_ident.rs`) - Added FLOW to Stage-3 gate (requires NYASH_PARSER_STAGE3=1) - Added FLOW to parser statement dispatch (`src/parser/statements/mod.rs`) - Added FLOW to declaration handler (`src/parser/statements/declarations.rs`) - Updated box_declaration parser to accept BOX or FLOW (`src/parser/declarations/box_definition.rs`) - Treat `flow FooBox {}` as syntactic sugar for `box FooBox {}` ### 4. 
Module namespace conversion - Renamed `lang.compiler.builder.ssa.local` → `localvar` (avoid keyword) - Renamed file `local.hako` → `local_ssa.hako` - Converted 152 path-based using statements to namespace format - Added 26+ entries to `nyash.toml` [modules] section ## ⚠️ Remaining Issues ### Stage-B selfhost compiler performance - Stage-B compiler not producing output (hangs/times out after 10+ seconds) - Excessive PHI debug output suggests compilation loop issue - Needs investigation: infinite loop or N² algorithm in hako compiler ### Fallback JSON version mismatch - Rust fallback (`--emit-mir-json`) emits MIR v1 JSON (schema_version: "1.0") - Smoke tests expect MIR v0 JSON (`"version":0, "kind":"Program"`) - stageb_helpers.sh fallback needs adjustment ## Test Status - Parse errors: FIXED ✅ - Keyword conflicts: FIXED ✅ - Stage-B smoke tests: STILL FAILING ❌ (performance issue) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-02 04:13:17 +09:00
// Prefer CWD nyash.toml; if missing, honor NYASH_ROOT/nyash.toml for tools that run from subdirs
let (text, toml_path) = {
// Prefer hakorune.toml, fallback to nyash.toml; check CWD then NYASH_ROOT
let candidates = ["hakorune.toml", "nyash.toml"];
let mut found: Option<(String, std::path::PathBuf)> = None;
// 1) Try current directory
for name in candidates.iter() {
let p = std::path::Path::new(name);
if p.exists() {
let txt =
std::fs::read_to_string(p).map_err(|e| UsingError::ReadToml(e.to_string()))?;
found = Some((txt, p.to_path_buf()));
break;
}
}
// 2) Try NYASH_ROOT if not found yet
if found.is_none() {
if let Ok(root) = std::env::var("NYASH_ROOT") {
for name in candidates.iter() {
let alt = std::path::Path::new(&root).join(name);
if alt.exists() {
let txt = std::fs::read_to_string(&alt)
.map_err(|e| UsingError::ReadToml(e.to_string()))?;
found = Some((txt, alt));
break;
}
}
}
}
// 3) Fallback: empty content and path
Ok(found.unwrap_or((String::new(), std::path::PathBuf::from(""))))
}?;
let doc =
toml::from_str::<toml::Value>(&text).map_err(|e| UsingError::ParseToml(e.to_string()))?;
let toml_dir = toml_path
.parent()
.map(|p| p.to_path_buf())
.unwrap_or_else(|| std::path::PathBuf::from("."));
// [modules] table flatten: supports nested namespaces (a.b.c = "path")
if let Some(mods) = doc.get("modules").and_then(|v| v.as_table()) {
fn visit(prefix: &str, tbl: &toml::value::Table, out: &mut Vec<(String, String)>) {
for (k, v) in tbl.iter() {
let name = if prefix.is_empty() {
k.to_string()
} else {
format!("{}.{}", prefix, k)
};
if let Some(s) = v.as_str() {
out.push((name, s.to_string()));
} else if let Some(t) = v.as_table() {
visit(&name, t, out);
}
}
}
visit("", mods, pending_modules);
if let Some(workspace_tbl) = mods.get("workspace").and_then(|v| v.as_table()) {
load_workspace_modules(&toml_dir, workspace_tbl, pending_modules, aliases)?;
}
}
// [using.paths] array
if let Some(using_tbl) = doc.get("using").and_then(|v| v.as_table()) {
// paths
if let Some(paths_arr) = using_tbl.get("paths").and_then(|v| v.as_array()) {
for p in paths_arr {
if let Some(s) = p.as_str() {
let s = s.trim();
if !s.is_empty() {
using_paths.push(s.to_string());
policy.search_paths.push(s.to_string());
}
}
}
}
// aliases
if let Some(alias_tbl) = using_tbl.get("aliases").and_then(|v| v.as_table()) {
for (k, v) in alias_tbl.iter() {
if let Some(target) = v.as_str() {
aliases.insert(k.to_string(), target.to_string());
}
}
}
// named packages: any subtable not paths/aliases is a package
for (k, v) in using_tbl.iter() {
if k == "paths" || k == "aliases" {
continue;
}
if let Some(tbl) = v.as_table() {
let kind = tbl
.get("kind")
.and_then(|x| x.as_str())
.map(PackageKind::from_str)
.unwrap_or(PackageKind::Package);
// path is required
if let Some(path_s) = tbl.get("path").and_then(|x| x.as_str()) {
let path = path_s.to_string();
let main = tbl
.get("main")
.and_then(|x| x.as_str())
.map(|s| s.to_string());
let bid = tbl
.get("bid")
.and_then(|x| x.as_str())
.map(|s| s.to_string());
packages.insert(
k.to_string(),
UsingPackage {
kind,
path,
main,
bid,
},
);
}
}
}
}
// legacy top-level [aliases] also accepted (migration)
if let Some(alias_tbl) = doc.get("aliases").and_then(|v| v.as_table()) {
for (k, v) in alias_tbl.iter() {
if let Some(target) = v.as_str() {
aliases.insert(k.to_string(), target.to_string());
}
}
}
Ok(policy)
}
/// Resolve a using target name into a concrete path token.
/// - Returns plain file path for modules/package files
/// - Returns a marker token `dylib:<path>` for kind="dylib" packages
/// - Searches relative to `context_dir` then `using_paths` for bare names
/// - When `strict` and multiple candidates exist, returns Err
pub fn resolve_using_target_common(
    tgt: &str,
    modules: &[(String, String)],
    using_paths: &[String],
    packages: &HashMap<String, UsingPackage>,
    context_dir: Option<&std::path::Path>,
    strict: bool,
    verbose: bool,
) -> Result<String, String> {
    // 1) [modules] mapping wins over everything else.
    if let Some((_, p)) = modules.iter().find(|(n, _)| n == tgt) {
        if verbose {
            eprintln!("[using/resolve] modules '{}' -> '{}'", tgt, p);
        }
        return Ok(p.clone());
    }
    // 2) Named packages.
    if let Some(pkg) = packages.get(tgt) {
        match pkg.kind {
            PackageKind::Dylib => {
                // Dynamic libraries are marked with a "dylib:" prefix for the loader.
                let out = format!("dylib:{}", pkg.path);
                if verbose {
                    eprintln!("[using/resolve] dylib '{}' -> '{}'", tgt, out);
                }
                return Ok(out);
            }
            PackageKind::Package => {
                let base = std::path::Path::new(&pkg.path);
                // If `path` already points at a source file, use it directly
                // (a `main` field is ignored in that case).
                let is_source_file = matches!(
                    base.extension().and_then(|s| s.to_str()),
                    Some("nyash") | Some("hako")
                );
                let out = if is_source_file {
                    pkg.path.clone()
                } else if let Some(m) = &pkg.main {
                    // Directory package with an explicit entry file.
                    base.join(m).to_string_lossy().to_string()
                } else {
                    // Directory package: default entry is <dir>/<leaf>.hako.
                    // NOTE(review): the original checked hako.exists() but
                    // both branches produced this same path — there is no
                    // .nyash fallback; confirm whether one was intended.
                    let leaf = base.file_name().and_then(|s| s.to_str()).unwrap_or(tgt);
                    base.join(format!("{}.hako", leaf))
                        .to_string_lossy()
                        .to_string()
                };
                if verbose {
                    eprintln!("[using/resolve] package '{}' -> '{}'", tgt, out);
                }
                return Ok(out);
            }
        }
    }
    // 3) Relative lookup: prefer context_dir > using_paths; .hako before .nyash.
    let rel_hako = tgt.replace('.', "/") + ".hako";
    let rel_ny = tgt.replace('.', "/") + ".nyash";
    let mut cand: Vec<String> = Vec::new();
    if let Some(dir) = context_dir {
        let c1 = dir.join(&rel_hako);
        if c1.exists() {
            cand.push(c1.to_string_lossy().to_string());
        }
        let c2 = dir.join(&rel_ny);
        if c2.exists() {
            cand.push(c2.to_string_lossy().to_string());
        }
    }
    for base in using_paths {
        let p = std::path::Path::new(base);
        let c1 = p.join(&rel_hako);
        if c1.exists() {
            cand.push(c1.to_string_lossy().to_string());
        }
        let c2 = p.join(&rel_ny);
        if c2.exists() {
            cand.push(c2.to_string_lossy().to_string());
        }
    }
    if cand.is_empty() {
        if verbose {
            eprintln!("[using] unresolved '{}' (searched: rel+paths)", tgt);
        }
        return Err(format!(
            "using: unresolved '{}': searched relative and using.paths",
            tgt
        ));
    }
    // Multiple matches are an error only under strict mode; otherwise first wins.
    if cand.len() > 1 && strict {
        return Err(format!("ambiguous using '{}': {}", tgt, cand.join(", ")));
    }
    let out = cand.remove(0);
    if verbose {
        eprintln!("[using/resolve] '{}' -> '{}'", tgt, out);
    }
    Ok(out)
}
/// Load `[modules.workspace]` member manifests and merge each manifest's
/// `[exports]` and `[aliases]` tables into the pending module/alias sets.
fn load_workspace_modules(
    nyash_dir: &std::path::Path,
    workspace_tbl: &toml::value::Table,
    pending_modules: &mut Vec<(String, String)>,
    aliases: &mut HashMap<String, String>,
) -> Result<(), UsingError> {
    // `members` must be an array of string paths to module manifest files.
    let members = match workspace_tbl.get("members").and_then(|v| v.as_array()) {
        Some(arr) => arr,
        None => {
            return Err(UsingError::ParseWorkspaceModule(
                "modules.workspace".into(),
                "expected members array".into(),
            ))
        }
    };
    for member in members {
        let raw = match member.as_str() {
            Some(s) => s,
            None => {
                return Err(UsingError::ParseWorkspaceModule(
                    "modules.workspace".into(),
                    "members must be string paths".into(),
                ))
            }
        };
        // Relative member paths resolve against the config file's directory.
        let manifest_path = {
            let p = std::path::Path::new(raw);
            if p.is_absolute() {
                p.to_path_buf()
            } else {
                nyash_dir.join(raw)
            }
        };
        let manifest_dir = match manifest_path.parent() {
            Some(d) => d.to_path_buf(),
            None => nyash_dir.to_path_buf(),
        };
        let contents = std::fs::read_to_string(&manifest_path).map_err(|e| {
            UsingError::ReadWorkspaceModule(
                manifest_path.to_string_lossy().to_string(),
                e.to_string(),
            )
        })?;
        let manifest: toml::Value = toml::from_str(&contents).map_err(|e| {
            UsingError::ParseWorkspaceModule(
                manifest_path.to_string_lossy().to_string(),
                e.to_string(),
            )
        })?;
        // [module].name is mandatory; it prefixes every export key.
        let module_name = manifest
            .get("module")
            .and_then(|v| v.get("name"))
            .and_then(|v| v.as_str())
            .ok_or_else(|| {
                UsingError::WorkspaceModuleMissingName(
                    manifest_path.to_string_lossy().to_string(),
                )
            })?;
        // [exports]: an empty key exports the bare module name itself.
        if let Some(exports) = manifest.get("exports").and_then(|v| v.as_table()) {
            for (key, value) in exports {
                let rel = match value.as_str() {
                    Some(s) => s,
                    None => continue,
                };
                let full_name = if key.is_empty() {
                    module_name.to_string()
                } else {
                    format!("{}.{}", module_name, key)
                };
                // First registration wins; later duplicates are skipped.
                if pending_modules.iter().any(|(n, _)| n == &full_name) {
                    continue;
                }
                let resolved = manifest_dir.join(rel);
                // Canonicalize when possible; fall back to the joined path.
                let resolved = resolved.canonicalize().unwrap_or(resolved);
                pending_modules.push((full_name, resolved.to_string_lossy().to_string()));
            }
        }
        // [aliases]: later manifests override earlier entries (map insert).
        if let Some(alias_tbl) = manifest.get("aliases").and_then(|v| v.as_table()) {
            for (alias, target) in alias_tbl {
                if let Some(t) = target.as_str() {
                    aliases.insert(alias.to_string(), t.to_string());
                }
            }
        }
    }
    Ok(())
}