Phase 25.1 completed deliverables:
- ✅ LoopForm v2 tests, documentation, and comments complete
  - Full test coverage for all 4 cases (A/B/C/D)
  - Minimal reproduction case created (for the SSA bug investigation)
  - SSOT document written (loopform_ssot.md)
  - [LoopForm] comment tags added across all sources
- ✅ Stage-1 CLI debugging environment set up
  - stage1_cli.hako implemented
  - stage1_bridge.rs bridge implemented
  - Debug tools created (stage1_debug.sh / stage1_minimal.sh)
  - Architecture improvement proposal document
- ✅ Environment-variable reduction plan drafted
  - Full survey and classification of the 25 variables
  - Six-stage reduction roadmap (25 → 5, an 80% reduction)
  - Variables identified for immediate removal (NYASH_CONFIG / NYASH_DEBUG)

Cumulative changes since Phase 26-D:
- PHI implementation improvements (ExitPhiBuilder, HeaderPhiBuilder, etc.)
- MIR builder refactoring
- Type propagation and optimization pass improvements
- Accumulated changes across roughly 300 other files

🎯 Technical results:
- Root cause of the SSA bug identified (loop-variable mutation inside conditional branches)
- Region + next_i pattern applied (UsingCollectorBox, etc.)
- LoopForm pattern documented and covered by tests
- Self-hosting foundation strengthened

Co-Authored-By: Claude <noreply@anthropic.com>
Co-Authored-By: ChatGPT <noreply@openai.com>
Co-Authored-By: Task Assistant <task@anthropic.com>
use crate::using::errors::UsingError;
use crate::using::policy::UsingPolicy;
use crate::using::spec::{PackageKind, UsingPackage};
use std::collections::HashMap;

/// Populate using context vectors from nyash.toml (if present).
/// Keeps behavior aligned with the existing runner pipeline:
/// - Adds [using.paths] entries to `using_paths`
/// - Flattens [modules] into (name, path) pairs appended to `pending_modules`
/// - Reads the optional [aliases] table (k -> v)
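///
/// An illustrative layout of the tables this function reads (key names follow
/// the parsing below; the concrete values and the `mypkg` name are examples only):
///
/// ```toml
/// [using]
/// paths = ["lib", "vendor"]
///
/// [using.aliases]
/// core = "app.core"
///
/// # Any other [using.*] subtable is a named package; `path` is required,
/// # `main`, `bid`, and `kind` (e.g. "dylib") are optional.
/// [using.mypkg]
/// path = "packages/mypkg"
/// main = "main.hako"
///
/// [modules]
/// "app.core" = "src/app/core.hako"
///
/// [modules.workspace]
/// members = ["modules/foo/module.toml"]
/// ```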
pub fn populate_from_toml(
    using_paths: &mut Vec<String>,
    pending_modules: &mut Vec<(String, String)>,
    aliases: &mut HashMap<String, String>,
    packages: &mut HashMap<String, UsingPackage>,
) -> Result<UsingPolicy, UsingError> {
    let mut policy = UsingPolicy::default();
    // Prefer a config in CWD; if missing, honor NYASH_ROOT for tools that run from subdirs
    let (text, toml_path) = {
        // Prefer hakorune.toml, fall back to nyash.toml; check CWD then NYASH_ROOT
        let candidates = ["hakorune.toml", "nyash.toml"];
        let mut found: Option<(String, std::path::PathBuf)> = None;

        // 1) Try current directory
        for name in candidates.iter() {
            let p = std::path::Path::new(name);
            if p.exists() {
                let txt =
                    std::fs::read_to_string(p).map_err(|e| UsingError::ReadToml(e.to_string()))?;
                found = Some((txt, p.to_path_buf()));
                break;
            }
        }
        // 2) Try NYASH_ROOT if not found yet
        if found.is_none() {
            if let Ok(root) = std::env::var("NYASH_ROOT") {
                for name in candidates.iter() {
                    let alt = std::path::Path::new(&root).join(name);
                    if alt.exists() {
                        let txt = std::fs::read_to_string(&alt)
                            .map_err(|e| UsingError::ReadToml(e.to_string()))?;
                        found = Some((txt, alt));
                        break;
                    }
                }
            }
        }
        // 3) Fallback: empty content and path
        Ok(found.unwrap_or((String::new(), std::path::PathBuf::from(""))))
    }?;
    let doc =
        toml::from_str::<toml::Value>(&text).map_err(|e| UsingError::ParseToml(e.to_string()))?;
    let toml_dir = toml_path
        .parent()
        .map(|p| p.to_path_buf())
        .unwrap_or_else(|| std::path::PathBuf::from("."));

    // Flatten the [modules] table; supports nested namespaces (a.b.c = "path")
    if let Some(mods) = doc.get("modules").and_then(|v| v.as_table()) {
        fn visit(prefix: &str, tbl: &toml::value::Table, out: &mut Vec<(String, String)>) {
            for (k, v) in tbl.iter() {
                let name = if prefix.is_empty() {
                    k.to_string()
                } else {
                    format!("{}.{}", prefix, k)
                };
                if let Some(s) = v.as_str() {
                    out.push((name, s.to_string()));
                } else if let Some(t) = v.as_table() {
                    visit(&name, t, out);
                }
            }
        }
        visit("", mods, pending_modules);
        if let Some(workspace_tbl) = mods.get("workspace").and_then(|v| v.as_table()) {
            load_workspace_modules(&toml_dir, workspace_tbl, pending_modules, aliases)?;
        }
    }

    // [using] table: paths array, aliases, and named packages
    if let Some(using_tbl) = doc.get("using").and_then(|v| v.as_table()) {
        // paths
        if let Some(paths_arr) = using_tbl.get("paths").and_then(|v| v.as_array()) {
            for p in paths_arr {
                if let Some(s) = p.as_str() {
                    let s = s.trim();
                    if !s.is_empty() {
                        using_paths.push(s.to_string());
                        policy.search_paths.push(s.to_string());
                    }
                }
            }
        }
        // aliases
        if let Some(alias_tbl) = using_tbl.get("aliases").and_then(|v| v.as_table()) {
            for (k, v) in alias_tbl.iter() {
                if let Some(target) = v.as_str() {
                    aliases.insert(k.to_string(), target.to_string());
                }
            }
        }
        // named packages: any subtable other than paths/aliases is a package
        for (k, v) in using_tbl.iter() {
            if k == "paths" || k == "aliases" {
                continue;
            }
            if let Some(tbl) = v.as_table() {
                let kind = tbl
                    .get("kind")
                    .and_then(|x| x.as_str())
                    .map(PackageKind::from_str)
                    .unwrap_or(PackageKind::Package);
                // path is required
                if let Some(path_s) = tbl.get("path").and_then(|x| x.as_str()) {
                    let path = path_s.to_string();
                    let main = tbl
                        .get("main")
                        .and_then(|x| x.as_str())
                        .map(|s| s.to_string());
                    let bid = tbl
                        .get("bid")
                        .and_then(|x| x.as_str())
                        .map(|s| s.to_string());
                    packages.insert(
                        k.to_string(),
                        UsingPackage {
                            kind,
                            path,
                            main,
                            bid,
                        },
                    );
                }
            }
        }
    }

    // legacy top-level [aliases] is also accepted (migration)
    if let Some(alias_tbl) = doc.get("aliases").and_then(|v| v.as_table()) {
        for (k, v) in alias_tbl.iter() {
            if let Some(target) = v.as_str() {
                aliases.insert(k.to_string(), target.to_string());
            }
        }
    }

    Ok(policy)
}
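
// A hypothetical caller sketch (names are illustrative): the runner pipeline is
// expected to own the four collections and thread them through this function.
//
//     let mut using_paths: Vec<String> = Vec::new();
//     let mut pending_modules: Vec<(String, String)> = Vec::new();
//     let mut aliases: HashMap<String, String> = HashMap::new();
//     let mut packages: HashMap<String, UsingPackage> = HashMap::new();
//     let policy = populate_from_toml(
//         &mut using_paths, &mut pending_modules, &mut aliases, &mut packages,
//     )?;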

/// Resolve a using target name into a concrete path token.
/// - Returns a plain file path for modules/package files
/// - Returns a marker token `dylib:<path>` for kind="dylib" packages
/// - Searches relative to `context_dir`, then `using_paths`, for bare names
/// - When `strict` and multiple candidates exist, returns Err
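///
/// Illustrative example (hypothetical files): with `using_paths = ["lib"]` and
/// no matching `modules`/`packages` entry, `ui.button` is looked up as
/// `<context_dir>/ui/button.hako`, `<context_dir>/ui/button.nyash`,
/// `lib/ui/button.hako`, then `lib/ui/button.nyash`; the first existing
/// candidate wins (or an error if `strict` is set and several exist).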
pub fn resolve_using_target_common(
    tgt: &str,
    modules: &[(String, String)],
    using_paths: &[String],
    packages: &HashMap<String, UsingPackage>,
    context_dir: Option<&std::path::Path>,
    strict: bool,
    verbose: bool,
) -> Result<String, String> {
    // 1) modules mapping
    if let Some((_, p)) = modules.iter().find(|(n, _)| n == tgt) {
        if verbose {
            eprintln!("[using/resolve] modules '{}' -> '{}'", tgt, p);
        }
        return Ok(p.clone());
    }
    // 2) named packages
    if let Some(pkg) = packages.get(tgt) {
        match pkg.kind {
            PackageKind::Dylib => {
                let out = format!("dylib:{}", pkg.path);
                if verbose {
                    eprintln!("[using/resolve] dylib '{}' -> '{}'", tgt, out);
                }
                return Ok(out);
            }
            PackageKind::Package => {
                let base = std::path::Path::new(&pkg.path);
                let out = if let Some(m) = &pkg.main {
                    if matches!(
                        base.extension().and_then(|s| s.to_str()),
                        Some("nyash") | Some("hako")
                    ) {
                        pkg.path.clone()
                    } else {
                        base.join(m).to_string_lossy().to_string()
                    }
                } else {
                    if matches!(
                        base.extension().and_then(|s| s.to_str()),
                        Some("nyash") | Some("hako")
                    ) {
                        pkg.path.clone()
                    } else {
                        let leaf = base.file_name().and_then(|s| s.to_str()).unwrap_or(tgt);
                        let hako = base.join(format!("{}.hako", leaf));
                        if hako.exists() {
                            hako.to_string_lossy().to_string()
                        } else {
                            base.join(format!("{}.hako", leaf))
                                .to_string_lossy()
                                .to_string()
                        }
                    }
                };
                if verbose {
                    eprintln!("[using/resolve] package '{}' -> '{}'", tgt, out);
                }
                return Ok(out);
            }
        }
    }
    // 3) relative lookup: prefer context_dir over using_paths; try .hako first, then .nyash
    let rel_hako = tgt.replace('.', "/") + ".hako";
    let rel_ny = tgt.replace('.', "/") + ".nyash";
    let mut cand: Vec<String> = Vec::new();
    if let Some(dir) = context_dir {
        let c1 = dir.join(&rel_hako);
        if c1.exists() {
            cand.push(c1.to_string_lossy().to_string());
        }
        let c2 = dir.join(&rel_ny);
        if c2.exists() {
            cand.push(c2.to_string_lossy().to_string());
        }
    }
    for base in using_paths {
        let p = std::path::Path::new(base);
        let c1 = p.join(&rel_hako);
        if c1.exists() {
            cand.push(c1.to_string_lossy().to_string());
        }
        let c2 = p.join(&rel_ny);
        if c2.exists() {
            cand.push(c2.to_string_lossy().to_string());
        }
    }
    if cand.is_empty() {
        if verbose {
            eprintln!("[using] unresolved '{}' (searched: rel+paths)", tgt);
        }
        return Err(format!(
            "using: unresolved '{}': searched relative and using.paths",
            tgt
        ));
    }
    if cand.len() > 1 && strict {
        return Err(format!("ambiguous using '{}': {}", tgt, cand.join(", ")));
    }
    let out = cand.remove(0);
    if verbose {
        eprintln!("[using/resolve] '{}' -> '{}'", tgt, out);
    }
    Ok(out)
}
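/// Load workspace member manifests referenced from `[modules.workspace].members`.
///
/// Each member is a path (absolute, or relative to the directory of the root
/// nyash.toml/hakorune.toml) to a TOML file that must provide `[module].name`;
/// its `[exports]` entries are registered as `<name>.<export>` module paths
/// (resolved relative to the member file; already-registered names are kept),
/// and its `[aliases]` are merged into the shared alias map.
///
/// A minimal member manifest as this loader expects it (file name and values
/// are illustrative):
///
/// ```toml
/// [module]
/// name = "foo"
///
/// [exports]
/// core = "src/core.hako"   # registered as module "foo.core"
///
/// [aliases]
/// fc = "foo.core"
/// ```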
fn load_workspace_modules(
    nyash_dir: &std::path::Path,
    workspace_tbl: &toml::value::Table,
    pending_modules: &mut Vec<(String, String)>,
    aliases: &mut HashMap<String, String>,
) -> Result<(), UsingError> {
    let members = workspace_tbl
        .get("members")
        .and_then(|v| v.as_array())
        .ok_or_else(|| {
            UsingError::ParseWorkspaceModule(
                "modules.workspace".into(),
                "expected members array".into(),
            )
        })?;

    for entry in members {
        let raw_path = entry.as_str().ok_or_else(|| {
            UsingError::ParseWorkspaceModule(
                "modules.workspace".into(),
                "members must be string paths".into(),
            )
        })?;
        let module_path = if std::path::Path::new(raw_path).is_absolute() {
            std::path::PathBuf::from(raw_path)
        } else {
            nyash_dir.join(raw_path)
        };
        let module_dir = module_path
            .parent()
            .map(|p| p.to_path_buf())
            .unwrap_or_else(|| nyash_dir.to_path_buf());
        let module_text = std::fs::read_to_string(&module_path).map_err(|e| {
            UsingError::ReadWorkspaceModule(
                module_path.to_string_lossy().to_string(),
                e.to_string(),
            )
        })?;
        let module_doc = toml::from_str::<toml::Value>(&module_text).map_err(|e| {
            UsingError::ParseWorkspaceModule(
                module_path.to_string_lossy().to_string(),
                e.to_string(),
            )
        })?;
        let module_name = module_doc
            .get("module")
            .and_then(|v| v.get("name"))
            .and_then(|v| v.as_str())
            .ok_or_else(|| {
                UsingError::WorkspaceModuleMissingName(module_path.to_string_lossy().to_string())
            })?;
        if let Some(exports_tbl) = module_doc.get("exports").and_then(|v| v.as_table()) {
            for (export_key, export_value) in exports_tbl {
                if let Some(rel_path) = export_value.as_str() {
                    let mut full_name = module_name.to_string();
                    if !export_key.is_empty() {
                        full_name.push('.');
                        full_name.push_str(export_key);
                    }
                    if pending_modules.iter().any(|(name, _)| name == &full_name) {
                        continue;
                    }
                    let resolved_path = module_dir.join(rel_path);
                    let resolved_str = resolved_path
                        .canonicalize()
                        .unwrap_or(resolved_path)
                        .to_string_lossy()
                        .to_string();
                    pending_modules.push((full_name, resolved_str));
                }
            }
        }
        if let Some(alias_tbl) = module_doc.get("aliases").and_then(|v| v.as_table()) {
            for (alias, target) in alias_tbl {
                if let Some(target_str) = target.as_str() {
                    aliases.insert(alias.to_string(), target_str.to_string());
                }
            }
        }
    }

    Ok(())
}
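
// A minimal test sketch (not part of the original module) exercising the direct
// [modules] mapping branch of `resolve_using_target_common`; the module and file
// names used here are illustrative only.
#[cfg(test)]
mod using_config_tests {
    use super::*;
    use std::collections::HashMap;

    #[test]
    fn modules_mapping_resolves_before_path_search() {
        let modules = vec![("app.core".to_string(), "src/app/core.hako".to_string())];
        let packages: HashMap<String, UsingPackage> = HashMap::new();
        // No using_paths and no context_dir: only the explicit mapping can match,
        // so no filesystem access is needed.
        let out = resolve_using_target_common(
            "app.core",
            &modules,
            &[],
            &packages,
            None,
            true,  // strict
            false, // verbose
        );
        assert_eq!(out, Ok("src/app/core.hako".to_string()));
    }
}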