feat(phase-21.8): extend collect_using_and_strip to return imports HashMap
**Step 6 Complete**: Extract using imports from .hako source in the pipeline.

Changes:
- Modified collect_using_and_strip() to return (cleaned, prelude_paths, imports)
- Build the imports HashMap from seen_aliases (alias -> alias mapping)
- Updated all 6 call sites to handle the new 3-tuple return type
- This gives MirBuilder information about which names are valid static box references

Next: Wire imports through extern_provider to MirBuilder (Step 7)

Related: #phase-21.8 MatI64/IntArrayCore integration
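For orientation, here is a minimal sketch of what a call site inside the crate looks like after this change. The wrapper function, source text, and file name are illustrative only (not taken from the commit); errors are simply propagated.

```rust
use std::collections::HashMap;

// Hypothetical call site (not part of this commit); the module path matches the
// one used by the existing call sites in the diff below.
fn strip_example(runner: &crate::runner::NyashRunner, code: &str) -> Result<(), String> {
    let (cleaned, prelude_paths, imports): (String, Vec<String>, HashMap<String, String>) =
        crate::runner::modes::common_util::resolve::strip::collect_using_and_strip(
            runner, code, "example.hako",
        )?;
    // For now each alias maps to itself (alias -> alias); Step 7 is expected to
    // replace the value with a resolved box type via extern_provider.
    let _ = (cleaned, prelude_paths, imports);
    Ok(())
}
```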
@@ -169,11 +169,12 @@ impl<'a> PreludeManagerBox<'a> {
     ) -> Result<(String, Vec<String>), String> {
         // Call the existing collect_using_and_strip function
         // TODO: eventually replace this with the UsingResolutionBox route
-        crate::runner::modes::common_util::resolve::strip::collect_using_and_strip(
+        let (cleaned, prelude_paths, _imports) = crate::runner::modes::common_util::resolve::strip::collect_using_and_strip(
             &self.runner,
             code,
             filename,
-        )
+        )?;
+        Ok((cleaned, prelude_paths))
     }
 
     /// 🔧 Normalize the text, nya!
@@ -1,8 +1,9 @@
 use crate::runner::NyashRunner;
 
 /// Collect using targets and strip using lines (no inlining).
-/// Returns (cleaned_source, prelude_paths) where `prelude_paths` are resolved
-/// file paths to be parsed separately and AST-merged (when `NYASH_USING_AST=1`).
+/// Returns (cleaned_source, prelude_paths, imports) where:
+/// - `prelude_paths` are resolved file paths to be parsed separately and AST-merged (when `NYASH_USING_AST=1`)
+/// - `imports` is a HashMap mapping alias names to box types (for MirBuilder resolution)
 ///
 /// Notes
 /// - This function enforces profile policies (prod: disallow file-using; only
@@ -13,9 +14,9 @@ pub fn collect_using_and_strip(
     runner: &NyashRunner,
     code: &str,
     filename: &str,
-) -> Result<(String, Vec<String>), String> {
+) -> Result<(String, Vec<String>, std::collections::HashMap<String, String>), String> {
     if !crate::config::env::enable_using() {
-        return Ok((code.to_string(), Vec::new()));
+        return Ok((code.to_string(), Vec::new(), std::collections::HashMap::new()));
     }
     let using_ctx = runner.init_using_context();
     let prod = crate::config::env::using_is_prod();
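The widened return type is fairly long to repeat at every signature. One readability option, not part of this commit and purely a hypothetical follow-up, is a type alias that documents the shape in one place:

```rust
use std::collections::HashMap;

/// Hypothetical alias (not introduced by this commit):
/// (cleaned_source, prelude_paths, imports: alias name -> box type).
type UsingStripResult = Result<(String, Vec<String>, HashMap<String, String>), String>;
```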
@@ -421,7 +422,13 @@ pub fn collect_using_and_strip(
         with_marker.push_str(&out);
         out = with_marker;
     }
-    Ok((out, prelude_paths))
+    // Phase 21.8: Build imports map from seen_aliases (alias -> alias for now)
+    // This provides the MirBuilder with information about which names are valid static box references
+    let mut imports = std::collections::HashMap::new();
+    for (alias, _) in seen_aliases.iter() {
+        imports.insert(alias.clone(), alias.clone());
+    }
+    Ok((out, prelude_paths, imports))
 }
 
 /// Profile-aware prelude resolution wrapper (single entrypoint).
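The intended consumer of this map is the MirBuilder, which is wired up in Step 7 and is not part of this commit. A rough sketch of how such a consumer might query the map, assuming a plain membership check is sufficient while the map is alias -> alias; the helper name and placement are hypothetical:

```rust
use std::collections::HashMap;

/// Hypothetical MirBuilder-side helper (Step 7 is future work; illustrative only).
fn is_static_box_reference(name: &str, imports: &HashMap<String, String>) -> bool {
    // While the map is alias -> alias, a key lookup identifies names that came in
    // through `using` and are therefore valid static box references.
    imports.contains_key(name)
}
```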
@@ -435,7 +442,7 @@ pub fn resolve_prelude_paths_profiled(
     filename: &str,
 ) -> Result<(String, Vec<String>), String> {
     // First pass: strip using from the main source and collect direct prelude paths
-    let (cleaned, direct) = collect_using_and_strip(runner, code, filename)?;
+    let (cleaned, direct, _imports) = collect_using_and_strip(runner, code, filename)?;
     // Recursively collect nested preludes (DFS) for both AST/text merges.
     // Rationale: even when we merge via text, nested `using` inside preludes
     // must be discovered so that their definitions are present at runtime
@@ -469,7 +476,7 @@ pub fn resolve_prelude_paths_profiled(
             }
             let src = std::fs::read_to_string(&real_path)
                 .map_err(|e| format!("using: failed to read '{}': {}", real_path, e))?;
-            let (_cleaned, nested) = collect_using_and_strip(runner, &src, &real_path)?;
+            let (_cleaned, nested, _nested_imports) = collect_using_and_strip(runner, &src, &real_path)?;
             for n in nested.iter() {
                 dfs(runner, n, out, seen)?;
             }
@@ -556,7 +563,7 @@ pub fn parse_preludes_to_asts(
         }
         let src = std::fs::read_to_string(prelude_path)
             .map_err(|e| format!("using: error reading {}: {}", prelude_path, e))?;
-        let (clean_src, _nested) = collect_using_and_strip(runner, &src, prelude_path)?;
+        let (clean_src, _nested, _nested_imports) = collect_using_and_strip(runner, &src, prelude_path)?;
 
         // IMPORTANT: Do not attempt to AST-parse .hako preludes here.
         // .hako is Hakorune surface, not Nyash AST. VM/VM-fallback paths
@@ -766,7 +773,7 @@ pub fn merge_prelude_text(
             }
             let src = std::fs::read_to_string(path)
                 .map_err(|e| format!("using: failed to read '{}': {}", path, e))?;
-            let (_cleaned, nested) = collect_using_and_strip(runner, &src, path)?;
+            let (_cleaned, nested, _nested_imports) = collect_using_and_strip(runner, &src, path)?;
             for n in nested.iter() {
                 dfs_text(runner, n, out, seen)?;
             }
@@ -802,7 +809,7 @@ pub fn merge_prelude_text(
             .map_err(|e| format!("using: failed to read '{}': {}", path, e))?;
 
         // Strip using lines from prelude and normalize
-        let (cleaned_raw, _nested) = collect_using_and_strip(runner, &content, path)?;
+        let (cleaned_raw, _nested, _nested_imports) = collect_using_and_strip(runner, &content, path)?;
         let mut cleaned = normalize_text_for_inline(&cleaned_raw);
         // Hako-friendly normalize for preludes: always strip leading `local ` at line head
         // when the prelude is a .hako (or looks like Hako code). This prevents top-level