diff --git a/Cargo.lock b/Cargo.lock index 0fff70a..cbe3f2a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -219,7 +219,16 @@ version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44c45a9d03d6676652bcb5e724c7e988de1acad23a711b5217ab9cbecbec2225" dependencies = [ - "dirs-sys", + "dirs-sys 0.4.1", +] + +[[package]] +name = "dirs" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" +dependencies = [ + "dirs-sys 0.5.0", ] [[package]] @@ -230,10 +239,22 @@ checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c" dependencies = [ "libc", "option-ext", - "redox_users", + "redox_users 0.4.6", "windows-sys 0.48.0", ] +[[package]] +name = "dirs-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" +dependencies = [ + "libc", + "option-ext", + "redox_users 0.5.2", + "windows-sys 0.60.2", +] + [[package]] name = "displaydoc" version = "0.2.5" @@ -299,12 +320,24 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "fixedbitset" +version = "0.5.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" + [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foldhash" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -366,9 +399,18 @@ checksum = 
"a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "hashbrown" -version = "0.15.4" +version = "0.15.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" +dependencies = [ + "foldhash", +] + +[[package]] +name = "hashbrown" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" +checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" [[package]] name = "heck" @@ -485,12 +527,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.10.0" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +checksum = "0ad4bb2b565bca0645f4d68c5c9af97fba094e9791da685bf83cb5f3ce74acf2" dependencies = [ "equivalent", - "hashbrown", + "hashbrown 0.16.1", ] [[package]] @@ -740,6 +782,18 @@ version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +[[package]] +name = "petgraph" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8701b58ea97060d5e5b155d383a69952a60943f0e6dfe30b04c287beb0b27455" +dependencies = [ + "fixedbitset", + "hashbrown 0.15.5", + "indexmap", + "serde", +] + [[package]] name = "pin-project-lite" version = "0.2.16" @@ -767,6 +821,16 @@ dependencies = [ "zerovec", ] +[[package]] +name = "prettyplease" +version = "0.2.36" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2" +dependencies = [ + "proc-macro2", + "syn", +] + [[package]] name = "proc-macro2" version = "1.0.95" @@ -828,7 +892,18 @@ checksum = 
"ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.16", "libredox", - "thiserror", + "thiserror 1.0.69", +] + +[[package]] +name = "redox_users" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" +dependencies = [ + "getrandom 0.2.16", + "libredox", + "thiserror 2.0.17", ] [[package]] @@ -879,12 +954,6 @@ dependencies = [ "windows-sys 0.60.2", ] -[[package]] -name = "rustversion" -version = "1.0.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" - [[package]] name = "ryu" version = "1.0.20" @@ -989,23 +1058,22 @@ checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "strum" -version = "0.26.3" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fec0f0aef304996cf250b31b5a10dee7980c85da9d759361292b8bca5a18f06" +checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.26.4" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c6bee85a5a24955dc440386795aa378cd9cf82acd5f764469152d2270e581be" +checksum = "7695ce3845ea4b33927c055a39dc438a45b059f7c1b3d91d38d10355fb8cbca7" dependencies = [ "heck", "proc-macro2", "quote", - "rustversion", "syn", ] @@ -1060,7 +1128,16 @@ version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" dependencies = [ - "thiserror-impl", + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +dependencies = [ + "thiserror-impl 2.0.17", ] [[package]] @@ -1074,6 +1151,17 @@ dependencies = [ "syn", ] +[[package]] +name = "thiserror-impl" +version = "2.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + [[package]] name = "tinystr" version = "0.8.1" @@ -1199,12 +1287,12 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "vhdl_lang" -version = "0.83.1" +version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fdc1d1c52bc06f3a4ee0de29a1df1ba44ab36a648d5fd9be7c0725d74d9650" +checksum = "27f3ee86c0a8ca087e0e4eece220e516ce31a00b23de10aa8102b7f98f928977" dependencies = [ "clap", - "dirs", + "dirs 6.0.0", "dunce", "enum-map", "fnv", @@ -1222,9 +1310,9 @@ dependencies = [ [[package]] name = "vhdl_lang_macros" -version = "0.83.1" +version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65b120c4d3b0b90fc7078ac0fd339d59ff9f5090f5df85aa20ce42ec070ad4a0" +checksum = "712baaaca92e0ca66b7a924165c5a17146c91b0b42a675517ac6c364e5969132" dependencies = [ "quote", "syn", @@ -1246,15 +1334,20 @@ name = "vw-lib" version = "0.1.0" dependencies = [ "camino", - "dirs", + "dirs 5.0.1", "git2", "glob", "netrc", + "petgraph", + "prettyplease", + "proc-macro2", + "quote", "regex", "serde", "serde_json", + "syn", "tempfile", - "thiserror", + "thiserror 1.0.69", "tokio", "toml", "url", diff --git a/Cargo.toml b/Cargo.toml index 22bdc64..128c96f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,3 +22,4 @@ camino = "1.1" netrc = "0.4" url = "2.5" glob = "0.3" +petgraph = "0.8.3" diff --git a/vw-cli/src/main.rs b/vw-cli/src/main.rs index 545c56b..447bf32 100644 --- a/vw-cli/src/main.rs +++ b/vw-cli/src/main.rs @@ -5,6 +5,7 @@ use 
camino::Utf8PathBuf; use clap::{Parser, Subcommand, ValueEnum}; use colored::*; +use std::collections::HashSet; use std::fmt; use std::process; @@ -98,6 +99,30 @@ enum Commands { std: CliVhdlStandard, #[arg(long, help = "List all available testbenches")] list: bool, + #[arg( + long, + help = "Enable recursive search when looking for testbenches" + )] + recurse: bool, + #[arg( + long, + value_delimiter = ',', + help = "Ignore directories matching these names (comma-separated or use multiple times)" + )] + ignore: Vec, + #[arg( + long, + value_delimiter = ',', + help = "Runtime flags to pass to NVC (comma-separated or use multiple times)", + requires = "testbench" + )] + runtime_flags: Vec, + #[arg( + long, + help = "Build Rust library for testbench before running", + requires = "testbench" + )] + build_rust: bool, }, } @@ -133,7 +158,6 @@ async fn get_access_credentials_for_workspace( #[tokio::main] async fn main() { let cli = Cli::parse(); - // Get current working directory let cwd = Utf8PathBuf::try_from(std::env::current_dir().unwrap_or_else(|e| { @@ -322,35 +346,58 @@ async fn main() { testbench, std, list, + recurse, + ignore, + runtime_flags, + build_rust, } => { if list { - match list_testbenches(&cwd) { - Ok(testbenches) => { - if testbenches.is_empty() { - println!("No testbenches found in bench directory"); - } else { - println!("Available testbenches:"); - for tb in testbenches { + let bench_dir = cwd.join("bench"); + if !bench_dir.exists() { + println!("No bench dir found in {:}", bench_dir.as_str()); + } else { + let mut ignore_set: HashSet = HashSet::new(); + for ignore_pattern in ignore { + ignore_set.insert(ignore_pattern); + } + + match list_testbenches(&bench_dir, &ignore_set, recurse) { + Ok(testbenches) => { + if testbenches.is_empty() { println!( - " {} - {}", - tb.name.cyan(), - tb.path - .display() - .to_string() - .bright_black() + "No testbenches found in bench directory" ); + } else { + println!("Available testbenches:"); + for tb in 
testbenches { + println!( + " {} - {}", + tb.name.cyan(), + tb.path + .display() + .to_string() + .bright_black() + ); + } } } - } - Err(e) => { - eprintln!("{} {e}", "error:".bright_red()); - process::exit(1); + Err(e) => { + eprintln!("{} {e}", "error:".bright_red()); + process::exit(1); + } } } } else if let Some(testbench_name) = testbench { println!("Running testbench: {}", testbench_name.cyan()); - match run_testbench(&cwd, testbench_name.clone(), std.into()) - .await + match run_testbench( + &cwd, + testbench_name.clone(), + std.into(), + recurse, + &runtime_flags, + build_rust, + ) + .await { Ok(()) => { println!( diff --git a/vw-lib/Cargo.toml b/vw-lib/Cargo.toml index aa898fa..b96e057 100644 --- a/vw-lib/Cargo.toml +++ b/vw-lib/Cargo.toml @@ -21,5 +21,10 @@ camino.workspace = true netrc.workspace = true url.workspace = true glob.workspace = true +petgraph.workspace = true git2 = "0.18" -vhdl_lang = "0.83" +vhdl_lang = "0.86" +quote = "1" +proc-macro2 = "1" +syn = "2" +prettyplease = "0.2" diff --git a/vw-lib/src/lib.rs b/vw-lib/src/lib.rs index 45fe17d..a5b5291 100644 --- a/vw-lib/src/lib.rs +++ b/vw-lib/src/lib.rs @@ -28,13 +28,28 @@ //! 
``` use std::cell::RefCell; -use std::collections::{HashMap, HashSet, VecDeque}; -use std::fmt; -use std::fs; +use std::collections::{hash_map::Entry, HashMap, HashSet, VecDeque}; use std::path::{Path, PathBuf}; +use std::{fmt, fs}; -use camino::Utf8Path; +use camino::{Utf8Path, Utf8PathBuf}; use serde::{Deserialize, Serialize}; +use vhdl_lang::{VHDLParser, VHDLStandard}; + +use petgraph::{ + algo::toposort, + graph::{DiGraph, NodeIndex}, +}; + +use crate::mapping::{FileData, VwSymbol, VwSymbolFinder}; +use crate::nvc_helpers::{run_nvc_analysis, run_nvc_elab, run_nvc_sim}; +use crate::visitor::walk_design_file; + +pub mod mapping; +pub mod nvc_helpers; +pub mod visitor; + +const BUILD_DIR: &str = "vw_build"; // ============================================================================ // Error Types @@ -48,7 +63,9 @@ pub enum VwError { FileSystem { message: String }, Testbench { message: String }, NvcSimulation { command: String }, + NvcElab { command: String }, NvcAnalysis { library: String, command: String }, + CodeGen { message: String }, Io(std::io::Error), Serialization(toml::ser::Error), Deserialization(toml::de::Error), @@ -56,7 +73,6 @@ pub enum VwError { } impl std::error::Error for VwError {} - impl From for VwError { fn from(err: std::io::Error) -> Self { VwError::Io(err) @@ -92,12 +108,21 @@ impl fmt::Display for VwError { writeln!(f, "{command}")?; Ok(()) } + VwError::NvcElab { command } => { + writeln!(f, "NVC elaboration failed")?; + writeln!(f, "command:")?; + writeln!(f, "{command}")?; + Ok(()) + } VwError::NvcAnalysis { library, command } => { writeln!(f, "NVC analysis failed for library '{library}'")?; writeln!(f, "command:")?; writeln!(f, "{command}")?; Ok(()) } + VwError::CodeGen { message } => { + write!(f, "Code generation failed: {message}") + } VwError::Config { message } => { write!(f, "Configuration error: {message}") } @@ -133,6 +158,15 @@ pub enum VhdlStandard { Vhdl2019, } +impl From for VHDLStandard { + fn from(val: VhdlStandard) -> 
Self { + match val { + VhdlStandard::Vhdl2008 => VHDLStandard::VHDL2008, + VhdlStandard::Vhdl2019 => VHDLStandard::VHDL2019, + } + } +} + impl fmt::Display for VhdlStandard { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { @@ -168,11 +202,16 @@ pub struct Dependency { pub branch: Option, #[serde(default)] pub commit: Option, - pub src: String, + #[serde(default)] + pub src: Vec, #[serde(default)] pub recursive: bool, #[serde(default)] pub sim_only: bool, + #[serde(default)] + pub submodules: bool, + #[serde(default)] + pub exclude: Vec, } #[derive(Debug, Serialize, Deserialize)] @@ -184,12 +223,17 @@ pub struct LockFile { pub struct LockedDependency { pub repo: String, pub commit: String, - pub src: String, + #[serde(default)] + pub src: Vec, pub path: PathBuf, #[serde(default)] pub recursive: bool, #[serde(default)] pub sim_only: bool, + #[serde(default)] + pub submodules: bool, + #[serde(default)] + pub exclude: Vec, } #[derive(Debug, Serialize, Deserialize)] @@ -201,6 +245,16 @@ pub struct VhdlLsConfig { pub lint: Option>, } +#[derive(Deserialize, Debug)] +struct CargoToml { + package: CargoPackage, +} + +#[derive(Deserialize, Debug)] +struct CargoPackage { + name: String, +} + #[derive(Debug, Serialize, Deserialize, Clone)] pub struct VhdlLsLibrary { pub files: Vec, @@ -416,6 +470,8 @@ pub async fn update_workspace_with_token( &dep.src, &dep_path, dep.recursive, + &dep.exclude, + dep.submodules, creds, ) .await @@ -439,11 +495,14 @@ pub async fn update_workspace_with_token( path: dep_path.clone(), recursive: dep.recursive, sim_only: dep.sim_only, + submodules: dep.submodules, + exclude: dep.exclude.clone(), }, ); // Find VHDL files in the cached dependency directory - let vhdl_files = find_vhdl_files(&dep_path, dep.recursive)?; + let vhdl_files = + find_vhdl_files(&dep_path, dep.recursive, &dep.exclude)?; if !vhdl_files.is_empty() { let portable_files = vhdl_files.into_iter().map(make_path_portable).collect(); @@ -535,15 +594,17 @@ pub 
async fn add_dependency_with_token( } let dep_name = name.unwrap_or_else(|| extract_repo_name(&repo)); - let src_path = src.unwrap_or_else(|| ".".to_string()); + let src_paths = vec![src.unwrap_or_else(|| ".".to_string())]; let dependency = Dependency { repo: repo.clone(), branch, commit, - src: src_path, + src: src_paths, recursive, sim_only, + submodules: false, + exclude: Vec::new(), }; config.dependencies.insert(dep_name.clone(), dependency); @@ -692,8 +753,11 @@ pub fn generate_deps_tcl(workspace_dir: &Utf8Path) -> Result<()> { continue; } - let vhdl_files = - find_vhdl_files(&locked_dep.path, locked_dep.recursive)?; + let vhdl_files = find_vhdl_files( + &locked_dep.path, + locked_dep.recursive, + &locked_dep.exclude, + )?; // Create array entry for this library tcl_content.push_str(&format!("set dep_files({dep_name}) [list")); @@ -730,13 +794,20 @@ pub fn generate_deps_tcl(workspace_dir: &Utf8Path) -> Result<()> { /// List all available testbenches in the workspace. pub fn list_testbenches( - workspace_dir: &Utf8Path, + bench_dir: &Utf8Path, + ignore_dirs: &HashSet, + recurse: bool, ) -> Result> { - let bench_dir = workspace_dir.join("bench"); - if !bench_dir.exists() { - return Ok(Vec::new()); - } + let mut entities_cache = HashMap::new(); + list_testbenches_impl(bench_dir, ignore_dirs, recurse, &mut entities_cache) +} +fn list_testbenches_impl( + bench_dir: &Utf8Path, + ignore_dirs: &HashSet, + recurse: bool, + entities_cache: &mut HashMap>, +) -> Result> { let mut testbenches = Vec::new(); for entry in fs::read_dir(bench_dir).map_err(|e| VwError::FileSystem { @@ -750,15 +821,31 @@ pub fn list_testbenches( if path.is_file() { if let Some(extension) = path.extension() { if extension == "vhd" || extension == "vhdl" { - let entities = find_entities_in_file(&path)?; + let entities = get_cached_entities(&path, entities_cache)?; for entity in entities { testbenches.push(TestbenchInfo { - name: entity, + name: entity.clone(), path: path.clone(), }); } } } + } else 
if recurse { + let dir_path: Utf8PathBuf = + path.try_into().map_err(|e| VwError::FileSystem { + message: format!("Failed to get dir path: {e}"), + })?; + if let Some(file_name) = dir_path.file_name() { + if !ignore_dirs.contains(file_name) { + let mut lower_testbenches = list_testbenches_impl( + &dir_path, + ignore_dirs, + recurse, + entities_cache, + )?; + testbenches.append(&mut lower_testbenches); + } + } } } @@ -771,17 +858,297 @@ pub struct TestbenchInfo { pub path: PathBuf, } -/// Run a testbench using NVC simulator. -pub async fn run_testbench( - workspace_dir: &Utf8Path, - testbench_name: String, +pub struct RecordProcessor { + pub vhdl_std: VhdlStandard, + pub symbols: HashMap, + pub symbol_to_file: HashMap, + pub tagged_names: HashSet, + pub file_info: HashMap, + pub target_attr: String, +} + +const RECORD_PARSE_ATTRIBUTE: &str = "serialize_rust"; +impl RecordProcessor { + pub fn new(std: VhdlStandard) -> Self { + Self { + vhdl_std: std, + symbols: HashMap::new(), + symbol_to_file: HashMap::new(), + tagged_names: HashSet::new(), + file_info: HashMap::new(), + target_attr: RECORD_PARSE_ATTRIBUTE.to_string(), + } + } +} + +// ============================================================================ +// File Cache - Reduces redundant file reads during build +// ============================================================================ + +/// Cache for parsed file data to avoid redundant parsing during builds. +/// Only caches parsed results, not raw file contents. +pub struct FileCache { + dependencies: HashMap>, + provided_symbols: HashMap>, + entities: HashMap>, +} + +impl FileCache { + pub fn new() -> Self { + Self { + dependencies: HashMap::new(), + provided_symbols: HashMap::new(), + entities: HashMap::new(), + } + } + + /// Get cached file dependencies, reading and parsing file if not cached. 
+ pub fn get_dependencies(&mut self, path: &Path) -> Result<&Vec> { + match self.dependencies.entry(path.to_path_buf()) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(e) => { + let content = fs::read_to_string(path).map_err(|e| { + VwError::FileSystem { + message: format!("Failed to read file {path:?}: {e}"), + } + })?; + let deps = parse_file_dependencies(&content)?; + Ok(e.insert(deps)) + } + } + } + + /// Get cached provided symbols (packages and entities), reading and parsing if not cached. + pub fn get_provided_symbols( + &mut self, + path: &Path, + ) -> Result<&Vec> { + match self.provided_symbols.entry(path.to_path_buf()) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(e) => { + let content = fs::read_to_string(path).map_err(|e| { + VwError::FileSystem { + message: format!("Failed to read file {path:?}: {e}"), + } + })?; + let symbols = parse_provided_symbols(&content)?; + Ok(e.insert(symbols)) + } + } + } + + /// Get cached entities in file, reading and parsing if not cached. + pub fn get_entities(&mut self, path: &Path) -> Result<&Vec> { + match self.entities.entry(path.to_path_buf()) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(e) => { + let content = fs::read_to_string(path).map_err(|e| { + VwError::FileSystem { + message: format!("Failed to read file {path:?}: {e}"), + } + })?; + let entities = parse_entities(&content)?; + Ok(e.insert(entities)) + } + } + } + + /// Get mutable access to the entities cache for functions that only need entity lookups. + pub fn entities_cache_mut(&mut self) -> &mut HashMap> { + &mut self.entities + } +} + +impl Default for FileCache { + fn default() -> Self { + Self::new() + } +} + +/// Parse dependencies from file content (extracted for use by FileCache). 
+fn parse_file_dependencies(content: &str) -> Result> { + let mut dependencies = Vec::new(); + let mut seen = HashSet::new(); + + // Package imports from "use work.package_name" + let imports = get_package_imports(content)?; + for pkg in imports { + let key = format!("pkg:{}", pkg.to_lowercase()); + if seen.insert(key) { + dependencies.push(VwSymbol::Package(pkg)); + } + } + + // Find direct entity instantiations (instance_name: entity work.entity_name) + let entity_inst_pattern = r"(?i)\w+\s*:\s*entity\s+work\.(\w+)"; + let entity_inst_re = regex::Regex::new(entity_inst_pattern)?; + + for captures in entity_inst_re.captures_iter(content) { + if let Some(entity_name) = captures.get(1) { + let name = entity_name.as_str().to_string(); + let key = format!("ent:{}", name.to_lowercase()); + if seen.insert(key) { + dependencies.push(VwSymbol::Entity(name)); + } + } + } + + // Find component declarations + let comp_decl_pattern = r"(?i)component\s+(\w+)"; + let comp_decl_re = regex::Regex::new(comp_decl_pattern)?; + + for captures in comp_decl_re.captures_iter(content) { + if let Some(comp_name) = captures.get(1) { + let name = comp_name.as_str().to_string(); + let key = format!("ent:{}", name.to_lowercase()); + if seen.insert(key) { + dependencies.push(VwSymbol::Entity(name)); + } + } + } + + Ok(dependencies) +} + +/// Parse provided symbols (packages and entities) from file content. 
+fn parse_provided_symbols(content: &str) -> Result> { + let mut symbols = Vec::new(); + + // Find package declarations + let package_pattern = r"(?i)\bpackage\s+(\w+)\s+is\b"; + let package_re = regex::Regex::new(package_pattern)?; + + for captures in package_re.captures_iter(content) { + if let Some(package_name) = captures.get(1) { + symbols.push(VwSymbol::Package(package_name.as_str().to_string())); + } + } + + // Find entity declarations + let entity_pattern = r"(?i)\bentity\s+(\w+)\s+is\b"; + let entity_re = regex::Regex::new(entity_pattern)?; + + for captures in entity_re.captures_iter(content) { + if let Some(entity_name) = captures.get(1) { + symbols.push(VwSymbol::Entity(entity_name.as_str().to_string())); + } + } + + Ok(symbols) +} + +/// Parse entity declarations from file content. +fn parse_entities(content: &str) -> Result> { + let mut entities = Vec::new(); + + let entity_pattern = r"(?i)\bentity\s+(\w+)\s+is\b"; + let re = regex::Regex::new(entity_pattern)?; + + for captures in re.captures_iter(content) { + if let Some(entity_name) = captures.get(1) { + entities.push(entity_name.as_str().to_string()); + } + } + + Ok(entities) +} + +pub async fn analyze_ext_libraries( + vhdl_ls_config: &VhdlLsConfig, + processor: &mut RecordProcessor, vhdl_std: VhdlStandard, + cache: &mut FileCache, ) -> Result<()> { - let vhdl_ls_config = load_existing_vhdl_ls_config(workspace_dir)?; + // Collect non-defaultlib library names + let ext_lib_names: Vec = vhdl_ls_config + .libraries + .keys() + .filter(|k| k.as_str() != "defaultlib") + .cloned() + .collect(); - // First, analyze all non-defaultlib libraries - for (lib_name, library) in &vhdl_ls_config.libraries { - if lib_name != "defaultlib" { + // Build inter-library dependency graph by scanning for `library ;` + let ext_lib_set: HashSet = ext_lib_names.iter().cloned().collect(); + let mut lib_deps: HashMap> = HashMap::new(); + for lib_name in &ext_lib_names { + let mut deps = Vec::new(); + if let Some(library) = 
vhdl_ls_config.libraries.get(lib_name) { + for file_path in &library.files { + let expanded = if file_path.starts_with("$HOME") { + if let Some(home) = dirs::home_dir() { + home.join( + file_path + .strip_prefix("$HOME/") + .unwrap_or(file_path), + ) + } else { + PathBuf::from(file_path) + } + } else { + PathBuf::from(file_path) + }; + if let Ok(contents) = fs::read_to_string(&expanded) { + for line in contents.lines() { + let trimmed = line.trim().to_lowercase(); + if let Some(rest) = trimmed.strip_prefix("library ") { + let dep_lib = rest.trim_end_matches(';').trim(); + if ext_lib_set.contains(dep_lib) + && dep_lib != lib_name.to_lowercase() + { + deps.push(dep_lib.to_string()); + } + } + } + } + } + } + lib_deps.insert(lib_name.clone(), deps); + } + + // Topological sort of library names (Kahn's algorithm) + let mut in_degree: HashMap = + ext_lib_names.iter().map(|n| (n.clone(), 0)).collect(); + let mut adj: HashMap> = ext_lib_names + .iter() + .map(|n| (n.clone(), Vec::new())) + .collect(); + for (lib, deps) in &lib_deps { + for dep in deps { + if let Some(neighbors) = adj.get_mut(dep) { + neighbors.push(lib.clone()); + } + if let Some(deg) = in_degree.get_mut(lib) { + *deg += 1; + } + } + } + let mut queue: VecDeque = in_degree + .iter() + .filter(|(_, &d)| d == 0) + .map(|(n, _)| n.clone()) + .collect(); + let mut sorted_libs = Vec::new(); + while let Some(current) = queue.pop_front() { + sorted_libs.push(current.clone()); + if let Some(neighbors) = adj.get(¤t) { + for neighbor in neighbors { + if let Some(deg) = in_degree.get_mut(neighbor) { + *deg -= 1; + if *deg == 0 { + queue.push_back(neighbor.clone()); + } + } + } + } + } + // Fall back to unsorted if cycle detected + if sorted_libs.len() != ext_lib_names.len() { + sorted_libs = ext_lib_names; + } + + // Analyze libraries in dependency order + for lib_name in &sorted_libs { + if let Some(library) = vhdl_ls_config.libraries.get(lib_name) { // Convert library name to be NVC-compatible (no hyphens) let 
nvc_lib_name = lib_name.replace('-', "_"); @@ -805,44 +1172,50 @@ pub async fn run_testbench( } // Sort files in dependency order (dependencies first) - sort_files_by_dependencies(&mut files)?; + sort_files_by_dependencies(processor, &mut files, cache)?; let file_strings: Vec = files .iter() .map(|p| p.to_string_lossy().to_string()) .collect(); - let mut nvc_cmd = tokio::process::Command::new("nvc"); - nvc_cmd - .arg(format!("--std={vhdl_std}")) - .arg(format!("--work={nvc_lib_name}")) - .arg("-M") - .arg("256m") - .arg("-a"); + run_nvc_analysis( + vhdl_std, + BUILD_DIR, + &nvc_lib_name, + &file_strings, + false, + ) + .await?; + } + } - for file in &file_strings { - nvc_cmd.arg(file); - } + Ok(()) +} - let status = - nvc_cmd.status().await.map_err(|e| VwError::Testbench { - message: format!("Failed to execute NVC analysis: {e}"), - })?; +/// Run a testbench using NVC simulator. +pub async fn run_testbench( + workspace_dir: &Utf8Path, + testbench_name: String, + vhdl_std: VhdlStandard, + recurse: bool, + runtime_flags: &[String], + build_rust: bool, +) -> Result<()> { + let vhdl_ls_config = load_existing_vhdl_ls_config(workspace_dir)?; + let mut processor = RecordProcessor::new(vhdl_std); + let mut cache = FileCache::new(); - if !status.success() { - let cmd_str = format!( - "nvc --std={} --work={} -M 256m -a {}", - vhdl_std, - nvc_lib_name, - file_strings.join(" ") - ); - return Err(VwError::NvcAnalysis { - library: lib_name.clone(), - command: cmd_str, - }); - } - } - } + fs::create_dir_all(BUILD_DIR)?; + + // First, analyze all non-defaultlib libraries + analyze_ext_libraries( + &vhdl_ls_config, + &mut processor, + vhdl_std, + &mut cache, + ) + .await?; // Get defaultlib files for later use let defaultlib_files = vhdl_ls_config @@ -859,10 +1232,31 @@ pub async fn run_testbench( }); } - let testbench_file = find_testbench_file(&testbench_name, &bench_dir)?; + let testbench_file = find_testbench_file( + &testbench_name, + &bench_dir, + recurse, + 
cache.entities_cache_mut(), + )?; // Filter defaultlib files to exclude OTHER testbenches but allow common bench code let bench_dir_abs = workspace_dir.as_std_path().join("bench"); + + // Pre-compute entities for bench files to avoid mutable borrow in closure + let mut bench_file_entities: HashMap> = HashMap::new(); + for file_path in &defaultlib_files { + let absolute_path = if file_path.is_relative() { + workspace_dir.as_std_path().join(file_path) + } else { + file_path.clone() + }; + if absolute_path.starts_with(&bench_dir_abs) { + if let Ok(entities) = cache.get_entities(&absolute_path) { + bench_file_entities.insert(absolute_path, entities.clone()); + } + } + } + let filtered_defaultlib_files: Vec = defaultlib_files .into_iter() .filter(|file_path| { @@ -879,11 +1273,11 @@ pub async fn run_testbench( } // If it's in the bench directory, check if it's a different testbench - if let Ok(entities) = find_entities_in_file(&absolute_path) { + if let Some(entities) = bench_file_entities.get(&absolute_path) { // Exclude files that contain testbench entities other than the one we're running for entity in entities { if entity.to_lowercase().ends_with("_tb") - && entity != testbench_name + && entity != &testbench_name { return false; // This is a different testbench, exclude it } @@ -896,82 +1290,61 @@ pub async fn run_testbench( .collect(); // Find only the defaultlib files that are actually referenced by this testbench - let mut referenced_files = - find_referenced_files(&testbench_file, &filtered_defaultlib_files)?; + let mut referenced_files = find_referenced_files( + &testbench_file, + &filtered_defaultlib_files, + &mut cache, + )?; // Sort files in dependency order (dependencies first) - sort_files_by_dependencies(&mut referenced_files)?; + sort_files_by_dependencies( + &mut processor, + &mut referenced_files, + &mut cache, + )?; + + let mut files: Vec = referenced_files + .iter() + .map(|s| s.to_string_lossy().to_string()) + .collect(); - // Run NVC simulation - 
let mut nvc_cmd = tokio::process::Command::new("nvc"); - nvc_cmd - .arg(format!("--std={vhdl_std}")) - .arg("-M") - .arg("256m") - .arg("-L") - .arg(".") - .arg("-a") - .arg("--check-synthesis"); - - // Add only the defaultlib files that are referenced by this testbench - for file_path in &referenced_files { - nvc_cmd.arg(file_path.to_string_lossy().as_ref()); - } - - // Add testbench file - nvc_cmd.arg(testbench_file.to_string_lossy().as_ref()); - - // Elaborate and run - nvc_cmd - .arg("-e") - .arg(&testbench_name) - .arg("-r") - .arg(&testbench_name) - .arg("--dump-arrays") - .arg("--format=fst") - .arg(format!("--wave={testbench_name}.fst")); - - let status = nvc_cmd.status().await.map_err(|e| VwError::Testbench { - message: format!("Failed to execute NVC simulation: {e}"), - })?; + files.push(testbench_file.to_string_lossy().to_string()); - if !status.success() { - // Build command string for display - let mut cmd_parts = vec!["nvc".to_string()]; - cmd_parts.push(format!("--std={vhdl_std}")); - cmd_parts.push("-M".to_string()); - cmd_parts.push("256m".to_string()); - cmd_parts.push("-L".to_string()); - cmd_parts.push(".".to_string()); - cmd_parts.push("-a".to_string()); - cmd_parts.push("--check-synthesis".to_string()); - - for file_path in &referenced_files { - cmd_parts.push(file_path.to_string_lossy().to_string()); - } - cmd_parts.push(testbench_file.to_string_lossy().to_string()); - cmd_parts.push("-e".to_string()); - cmd_parts.push(testbench_name.clone()); - cmd_parts.push("-r".to_string()); - cmd_parts.push(testbench_name.clone()); - cmd_parts.push("--dump-arrays".to_string()); - cmd_parts.push("--format=fst".to_string()); - cmd_parts.push(format!("--wave={testbench_name}.fst")); + run_nvc_analysis(vhdl_std, BUILD_DIR, "work", &files, false).await?; - let cmd_str = cmd_parts.join(" "); - return Err(VwError::NvcSimulation { command: cmd_str }); - } + run_nvc_elab(vhdl_std, BUILD_DIR, "work", &testbench_name, false).await?; + + // Build Rust library if 
requested + let rust_lib_path = if build_rust { + Some( + build_rust_library(&bench_dir, &testbench_file) + .await? + .to_string_lossy() + .to_string(), + ) + } else { + None + }; + + // Run NVC simulation + run_nvc_sim( + vhdl_std, + BUILD_DIR, + "work", + &testbench_name, + rust_lib_path, + &runtime_flags.to_vec(), + false, + ) + .await?; Ok(()) } -// ============================================================================ -// Internal Helper Functions -// ============================================================================ - -fn find_referenced_files( +pub fn find_referenced_files( testbench_file: &Path, available_files: &[PathBuf], + cache: &mut FileCache, ) -> Result> { let mut referenced_files = Vec::new(); let mut processed_files = HashSet::new(); @@ -989,13 +1362,12 @@ fn find_referenced_files( referenced_files.push(current_file.clone()); } - // Parse the file to find dependencies - let dependencies = find_file_dependencies(&current_file)?; + let dependencies = cache.get_dependencies(&current_file)?.clone(); // Find corresponding files for each dependency for dep in dependencies { for available_file in available_files { - if file_provides_symbol(available_file, &dep)? { + if file_provides_symbol(available_file, &dep, cache)?
{ if !processed_files.contains(available_file) { files_to_process.push(available_file.clone()); } @@ -1008,237 +1380,222 @@ fn find_referenced_files( Ok(referenced_files) } -fn find_file_dependencies(file_path: &Path) -> Result> { - let content = - fs::read_to_string(file_path).map_err(|e| VwError::FileSystem { - message: format!("Failed to read file {file_path:?}: {e}"), - })?; - - let mut dependencies = HashSet::new(); - - // Find 'use work.package_name' statements - let use_work_pattern = r"(?i)use\s+work\.(\w+)"; - let use_work_re = regex::Regex::new(use_work_pattern)?; - - for captures in use_work_re.captures_iter(&content) { - if let Some(package_name) = captures.get(1) { - dependencies.insert(package_name.as_str().to_string()); - } - } - - // Find direct entity instantiations (instance_name: entity work.entity_name) - let entity_inst_pattern = r"(?i)\w+\s*:\s*entity\s+work\.(\w+)"; - let entity_inst_re = regex::Regex::new(entity_inst_pattern)?; +pub fn sort_files_by_dependencies( + processor: &mut RecordProcessor, + files: &mut Vec, + cache: &mut FileCache, +) -> Result<()> { + // Build dependency graph + let mut dependencies: HashMap> = HashMap::new(); + let mut all_symbols: HashMap = HashMap::new(); - for captures in entity_inst_re.captures_iter(&content) { - if let Some(entity_name) = captures.get(1) { - dependencies.insert(entity_name.as_str().to_string()); + // First pass: collect all symbols provided by each file + for file in files.iter() { + let symbols = analyze_file(processor, file)?; + for symbol in symbols { + match symbol { + VwSymbol::Package(name) => { + all_symbols.insert(name.clone(), file.clone()); + let entry = processor + .file_info + .entry(file.to_string_lossy().to_string()) + .or_default(); + entry.add_defined_pkg(&name); + + // Use cache to get package imports only + let deps = cache.get_dependencies(file)?; + for dep in deps { + if let VwSymbol::Package(pkg_name) = dep { + entry.add_imported_pkg(pkg_name); + } + } + } + 
VwSymbol::Entity(name) => { + all_symbols.insert(name, file.clone()); + } + _ => {} + } } } - // Find component instantiations (component_name : entity_name) - let component_pattern = r"(?i)(\w+)\s*:\s*(\w+)"; - let component_re = regex::Regex::new(component_pattern)?; + // Second pass: find dependencies for each file + for file in files.iter() { + let deps = cache.get_dependencies(file)?.clone(); + let mut file_deps = Vec::new(); - for captures in component_re.captures_iter(&content) { - if let Some(entity_name) = captures.get(2) { - // Skip if this looks like an entity instantiation (already handled above) - if !entity_name.as_str().eq_ignore_ascii_case("entity") { - dependencies.insert(entity_name.as_str().to_string()); + for dep in deps { + let dep_name = match &dep { + VwSymbol::Package(name) | VwSymbol::Entity(name) => name, + _ => continue, + }; + if let Some(provider_file) = all_symbols.get(dep_name) { + if provider_file != file { + file_deps.push(provider_file.clone()); + } } } + + dependencies.insert(file.clone(), file_deps); } - // Find component declarations - let comp_decl_pattern = r"(?i)component\s+(\w+)"; - let comp_decl_re = regex::Regex::new(comp_decl_pattern)?; + // Topological sort using Kahn's algorithm + let sorted = topological_sort_files(files.clone(), dependencies)?; + *files = sorted; - for captures in comp_decl_re.captures_iter(&content) { - if let Some(comp_name) = captures.get(1) { - dependencies.insert(comp_name.as_str().to_string()); - } - } - - Ok(dependencies.into_iter().collect()) + Ok(()) } -fn file_provides_symbol(file_path: &Path, symbol: &str) -> Result { - let content = - fs::read_to_string(file_path).map_err(|e| VwError::FileSystem { - message: format!("Failed to read file {file_path:?}: {e}"), +pub fn load_existing_vhdl_ls_config( + workspace_dir: &Utf8Path, +) -> Result { + let config_path = workspace_dir.join("vhdl_ls.toml"); + if config_path.exists() { + let config_content = fs::read_to_string(&config_path).map_err(|e| { 
+ VwError::FileSystem { + message: format!("Failed to read existing vhdl_ls.toml: {e}"), + } })?; - // Check for package declaration - let package_pattern = - format!(r"(?i)\bpackage\s+{}\s+is\b", regex::escape(symbol)); - let package_re = regex::Regex::new(&package_pattern)?; - - if package_re.is_match(&content) { - return Ok(true); - } - - // Check for entity declaration - let entity_pattern = - format!(r"(?i)\bentity\s+{}\s+is\b", regex::escape(symbol)); - let entity_re = regex::Regex::new(&entity_pattern)?; + let config: VhdlLsConfig = toml::from_str(&config_content)?; - if entity_re.is_match(&content) { - return Ok(true); + Ok(config) + } else { + Ok(VhdlLsConfig { + standard: None, + libraries: HashMap::new(), + lint: None, + }) } - - Ok(false) } -fn sort_files_by_dependencies(files: &mut Vec) -> Result<()> { - // Build dependency graph - let mut dependencies: HashMap> = HashMap::new(); - let mut all_symbols: HashMap = HashMap::new(); - - // First pass: collect all symbols provided by each file - for file in files.iter() { - let symbols = get_file_symbols(file)?; - for symbol in symbols { - all_symbols.insert(symbol, file.clone()); - } - } +// ============================================================================ +// Internal Helper Functions +// ============================================================================ - // Second pass: find dependencies for each file - for file in files.iter() { - let deps = find_file_dependencies(file)?; - let mut file_deps = Vec::new(); +fn get_package_imports(content: &str) -> Result> { + // Find 'use work.package_name' statements + let use_work_pattern = r"(?i)use\s+work\.(\w+)"; + let use_work_re = regex::Regex::new(use_work_pattern)?; + let mut imports = Vec::new(); - for dep in deps { - if let Some(provider_file) = all_symbols.get(&dep) { - if provider_file != file { - file_deps.push(provider_file.clone()); - } - } + for captures in use_work_re.captures_iter(content) { + if let Some(package_name) = 
captures.get(1) { + imports.push(package_name.as_str().to_string()); } - - dependencies.insert(file.clone(), file_deps); } - - // Topological sort using Kahn's algorithm - let sorted = topological_sort(files.clone(), dependencies)?; - *files = sorted; - - Ok(()) + Ok(imports) } -fn get_file_symbols(file_path: &Path) -> Result> { - let content = - fs::read_to_string(file_path).map_err(|e| VwError::FileSystem { - message: format!("Failed to read file {file_path:?}: {e}"), - })?; - - let mut symbols = Vec::new(); +fn file_provides_symbol( + file_path: &Path, + needed: &VwSymbol, + cache: &mut FileCache, +) -> Result { + let provided = cache.get_provided_symbols(file_path)?; + Ok(provided.iter().any(|s| match (needed, s) { + // Package dependency matches package declaration + (VwSymbol::Package(need), VwSymbol::Package(have)) => { + need.eq_ignore_ascii_case(have) + } + // Entity dependency matches entity declaration + (VwSymbol::Entity(need), VwSymbol::Entity(have)) => { + need.eq_ignore_ascii_case(have) + } + _ => false, + })) +} - // Find package declarations - let package_pattern = r"(?i)\bpackage\s+(\w+)\s+is\b"; - let package_re = regex::Regex::new(package_pattern)?; +fn analyze_file( + processor: &mut RecordProcessor, + file: &Path, +) -> Result> { + let parser = VHDLParser::new(processor.vhdl_std.into()); + let mut diagnostics = Vec::new(); + let (_, design_file) = parser.parse_design_file(file, &mut diagnostics)?; + + let mut file_finder = VwSymbolFinder::new(&processor.target_attr); + walk_design_file(&mut file_finder, &design_file); + + let file_str = file.to_string_lossy().to_string(); + + // Add records to symbols map + for record in file_finder.get_records() { + let name = record.get_name().to_string(); + processor + .symbols + .insert(name.clone(), VwSymbol::Record(record.clone())); + processor.symbol_to_file.insert(name, file_str.clone()); + } - for captures in package_re.captures_iter(&content) { - if let Some(package_name) = captures.get(1) { - 
symbols.push(package_name.as_str().to_string()); + // Add enums from symbols (they're already VwSymbol::Enum) + for symbol in file_finder.get_symbols() { + if let VwSymbol::Enum(enum_data) = symbol { + let name = enum_data.get_name().to_string(); + processor.symbols.insert(name.clone(), symbol.clone()); + processor.symbol_to_file.insert(name, file_str.clone()); } } - // Find entity declarations - let entity_pattern = r"(?i)\bentity\s+(\w+)\s+is\b"; - let entity_re = regex::Regex::new(entity_pattern)?; - - for captures in entity_re.captures_iter(&content) { - if let Some(entity_name) = captures.get(1) { - symbols.push(entity_name.as_str().to_string()); - } + for tagged_type in file_finder.get_tagged_types() { + processor.tagged_names.insert(tagged_type.clone()); } - Ok(symbols) + Ok(file_finder.get_symbols().clone()) } -fn topological_sort( +fn topological_sort_files( files: Vec, dependencies: HashMap>, ) -> Result> { - let mut in_degree: HashMap = HashMap::new(); - let mut adj_list: HashMap> = HashMap::new(); + let mut dep_graph: DiGraph = DiGraph::default(); + let mut index_map: HashMap = HashMap::new(); - // Initialize in-degree and adjacency list + // initialize the nodes for file in &files { - in_degree.insert(file.clone(), 0); - adj_list.insert(file.clone(), Vec::new()); + let index = dep_graph.add_node(file.clone()); + index_map.insert(file.clone(), index); } - // Build the graph + // now add edges from files to their dependencies for (file, deps) in &dependencies { + let source_node = index_map.get(file).ok_or(VwError::Dependency { + message: format!( + "Index map somehow didn't contain file {:?}", + file + ), + })?; + // file depends on every dep in deps for dep in deps { - if files.contains(dep) { - adj_list.get_mut(dep).unwrap().push(file.clone()); - *in_degree.get_mut(file).unwrap() += 1; - } - } - } - - // Kahn's algorithm - let mut queue = VecDeque::new(); - let mut result = Vec::new(); - - // Add all nodes with in-degree 0 to queue - for (file, &degree)
in &in_degree { - if degree == 0 { - queue.push_back(file.clone()); - } - } - - while let Some(current) = queue.pop_front() { - result.push(current.clone()); - - // For each neighbor of current - if let Some(neighbors) = adj_list.get(&current) { - for neighbor in neighbors { - *in_degree.get_mut(neighbor).unwrap() -= 1; - if in_degree[neighbor] == 0 { - queue.push_back(neighbor.clone()); - } - } + let dst_node = index_map.get(dep).ok_or(VwError::Dependency { + message: format!( + "Index map somehow didn't contain dep {:?}", + dep + ), + })?; + dep_graph.add_edge(*source_node, *dst_node, ()); } } - // Check for cycles - if result.len() != files.len() { - return Err(VwError::Dependency { - message: "Circular dependency detected in VHDL files".to_string(), - }); - } - - Ok(result) -} - -fn find_entities_in_file(file_path: &Path) -> Result> { - let content = - fs::read_to_string(file_path).map_err(|e| VwError::FileSystem { - message: format!("Failed to read file {file_path:?}: {e}"), + // ok now topological sort + let ordered_files = + toposort(&dep_graph, None).map_err(|_| VwError::Dependency { + message: "Got circular dependency".to_string(), })?; - let mut entities = Vec::new(); - - // Regex to find entity declarations - let entity_pattern = r"(?i)\bentity\s+(\w+)\s+is\b"; - let re = regex::Regex::new(entity_pattern)?; - - for captures in re.captures_iter(&content) { - if let Some(entity_name) = captures.get(1) { - entities.push(entity_name.as_str().to_string()); - } - } - - Ok(entities) + let result: Vec = ordered_files + .iter() + .map(|&idx| dep_graph[idx].clone()) + .rev() + .collect(); + Ok(result) } -fn find_testbench_file( +fn find_testbench_file_recurse( testbench_name: &str, bench_dir: &Utf8Path, -) -> Result { + recurse: bool, + entities_cache: &mut HashMap>, +) -> Result> { let mut found_files = Vec::new(); for entry in fs::read_dir(bench_dir).map_err(|e| VwError::FileSystem { @@ -1253,13 +1610,44 @@ fn find_testbench_file( if let Some(extension) =
path.extension() { if extension == "vhd" || extension == "vhdl" { // Check if this file contains the entity we're looking for - if file_contains_entity(&path, testbench_name)? { + if file_contains_entity( + &path, + testbench_name, + entities_cache, + )? { found_files.push(path); } } } + } else if recurse { + let dir_path: Utf8PathBuf = + path.try_into().map_err(|e| VwError::FileSystem { + message: format!("Failed to get dir path: {e}"), + })?; + let mut lower_testbenches = find_testbench_file_recurse( + testbench_name, + &dir_path, + recurse, + entities_cache, + )?; + found_files.append(&mut lower_testbenches); } } + Ok(found_files) +} + +fn find_testbench_file( + testbench_name: &str, + bench_dir: &Utf8Path, + recurse: bool, + entities_cache: &mut HashMap>, +) -> Result { + let found_files = find_testbench_file_recurse( + testbench_name, + bench_dir, + recurse, + entities_cache, + )?; match found_files.len() { 0 => Err(VwError::Testbench { @@ -1272,19 +1660,31 @@ fn find_testbench_file( } } -fn file_contains_entity(file_path: &Path, entity_name: &str) -> Result { - let content = - fs::read_to_string(file_path).map_err(|e| VwError::FileSystem { - message: format!("Failed to read file {file_path:?}: {e}"), - })?; - - // Simple regex to find entity declarations - // This is a basic implementation that looks for "entity is" - let entity_pattern = - format!(r"(?i)\bentity\s+{}\s+is\b", regex::escape(entity_name)); - let re = regex::Regex::new(&entity_pattern)?; +fn file_contains_entity( + file_path: &Path, + entity_name: &str, + entities_cache: &mut HashMap>, +) -> Result { + let entities = get_cached_entities(file_path, entities_cache)?; + Ok(entities.iter().any(|e| e.eq_ignore_ascii_case(entity_name))) +} - Ok(re.is_match(&content)) +/// Get entities from cache, parsing and caching if not present. 
+fn get_cached_entities<'a>( + path: &Path, + entities_cache: &'a mut HashMap>, +) -> Result<&'a Vec> { + match entities_cache.entry(path.to_path_buf()) { + Entry::Occupied(e) => Ok(e.into_mut()), + Entry::Vacant(e) => { + let content = + fs::read_to_string(path).map_err(|e| VwError::FileSystem { + message: format!("Failed to read file {path:?}: {e}"), + })?; + let entities = parse_entities(&content)?; + Ok(e.insert(entities)) + } + } } fn make_path_portable(path: PathBuf) -> PathBuf { @@ -1408,115 +1808,133 @@ async fn get_branch_head_commit( let branch = branch.to_string(); let credentials = credentials.map(|(u, p)| (u.to_string(), p.to_string())); - tokio::task::spawn_blocking(move || { - // Create a temporary directory for the operation - let temp_dir = - tempfile::tempdir().map_err(|e| VwError::FileSystem { - message: format!("Failed to create temporary directory: {e}"), - })?; - - // Create an empty repository to work with remotes - let repo = - git2::Repository::init_bare(temp_dir.path()).map_err(|e| { - VwError::Git { + tokio::time::timeout( + std::time::Duration::from_secs(30), + tokio::task::spawn_blocking(move || { + // Create a temporary directory for the operation + let temp_dir = + tempfile::tempdir().map_err(|e| VwError::FileSystem { message: format!( - "Failed to initialize temporary repository: {e}" + "Failed to create temporary directory: {e}" ), - } - })?; + })?; - // Create a remote - let mut remote = - repo.remote_anonymous(&normalized_repo_url).map_err(|e| { - VwError::Git { - message: format!("Failed to create remote: {e}"), - } - })?; + // Create an empty repository to work with remotes + let repo = + git2::Repository::init_bare(temp_dir.path()).map_err(|e| { + VwError::Git { + message: format!( + "Failed to initialize temporary repository: {e}" + ), + } + })?; - // Connect and list references - // Always set a credentials callback so git2 doesn't fail with "no callback set". 
- // The callback will try explicit credentials first, then fall back to git's - // credential helper system (which includes .netrc support). - let mut callbacks = git2::RemoteCallbacks::new(); - let attempt_count = RefCell::new(0); + // Create a remote + let mut remote = repo + .remote_anonymous(&normalized_repo_url) + .map_err(|e| VwError::Git { + message: format!("Failed to create remote: {e}"), + })?; - callbacks.credentials(move |url, username_from_url, allowed_types| { - let mut attempts = attempt_count.borrow_mut(); - *attempts += 1; + // Connect and list references + // Always set a credentials callback so git2 doesn't fail with "no callback set". + // The callback will try explicit credentials first, then fall back to git's + // credential helper system (which includes .netrc support). + let mut callbacks = git2::RemoteCallbacks::new(); + let attempt_count = RefCell::new(0); + + callbacks.credentials( + move |url, username_from_url, allowed_types| { + let mut attempts = attempt_count.borrow_mut(); + *attempts += 1; + + // Limit attempts to prevent infinite loops + if *attempts > 1 { + return git2::Cred::default(); + } - // Limit attempts to prevent infinite loops - if *attempts > 1 { - return git2::Cred::default(); - } + // First, try explicit credentials from netrc if available + if allowed_types + .contains(git2::CredentialType::USER_PASS_PLAINTEXT) + { + if let Some((ref username, ref password)) = credentials + { + // Use both username and password from netrc + return git2::Cred::userpass_plaintext( + username, password, + ); + } + } - // First, try explicit credentials from netrc if available - if allowed_types.contains(git2::CredentialType::USER_PASS_PLAINTEXT) - { - if let Some((ref username, ref password)) = credentials { - // Use both username and password from netrc - return git2::Cred::userpass_plaintext(username, password); - } - } + // Try SSH key if available + if allowed_types.contains(git2::CredentialType::SSH_KEY) { + if let Some(username) 
= username_from_url { + if let Ok(cred) = + git2::Cred::ssh_key_from_agent(username) + { + return Ok(cred); + } + } + } - // Try SSH key if available - if allowed_types.contains(git2::CredentialType::SSH_KEY) { - if let Some(username) = username_from_url { - if let Ok(cred) = git2::Cred::ssh_key_from_agent(username) { - return Ok(cred); + // Fall back to git's credential helper system (includes .netrc) + if let Ok(config) = git2::Config::open_default() { + if let Ok(cred) = git2::Cred::credential_helper( + &config, + url, + username_from_url, + ) { + return Ok(cred); + } } - } - } - // Fall back to git's credential helper system (includes .netrc) - if let Ok(config) = git2::Config::open_default() { - if let Ok(cred) = git2::Cred::credential_helper( - &config, - url, - username_from_url, - ) { - return Ok(cred); - } - } + git2::Cred::default() + }, + ); - git2::Cred::default() - }); + remote + .connect_auth(git2::Direction::Fetch, Some(callbacks), None) + .map_err(|e| VwError::Git { + message: format!("Failed to connect to remote: {e}"), + })?; - remote - .connect_auth(git2::Direction::Fetch, Some(callbacks), None) - .map_err(|e| VwError::Git { - message: format!("Failed to connect to remote: {e}"), + let refs = remote.list().map_err(|e| VwError::Git { + message: format!("Failed to list remote references: {e}"), })?; - let refs = remote.list().map_err(|e| VwError::Git { - message: format!("Failed to list remote references: {e}"), - })?; - - // Look for the specific branch reference - let ref_name = format!("refs/heads/{branch}"); - for remote_head in refs { - if remote_head.name() == ref_name { - return Ok(remote_head.oid().to_string()); + // Look for the specific branch reference + let ref_name = format!("refs/heads/{branch}"); + for remote_head in refs { + if remote_head.name() == ref_name { + return Ok(remote_head.oid().to_string()); + } } - } - Err(VwError::Git { - message: format!( - "Branch '{branch}' not found in remote repository" - ), - }) - }) + 
Err(VwError::Git { + message: format!( + "Branch '{branch}' not found in remote repository" + ), + }) + }), + ) .await + .map_err(|_| VwError::Git { + message: "Git ls-remote timed out after 30 seconds".to_string(), + })? .map_err(|e| VwError::Git { message: format!("Failed to execute git ls-remote task: {e}"), })? } +#[allow(clippy::too_many_arguments)] async fn download_dependency( repo_url: &str, commit: &str, - src_path: &str, + src_paths: &[String], dest_path: &Path, recursive: bool, + exclude: &[String], + submodules: bool, credentials: Option<(&str, &str)>, // (username, password) ) -> Result<()> { let temp_dir = tempfile::tempdir().map_err(|e| VwError::FileSystem { @@ -1533,101 +1951,139 @@ async fn download_dependency( let commit = commit.to_string(); let temp_path = temp_dir.path().to_path_buf(); - let src_path = src_path.to_string(); + let src_paths = src_paths.to_vec(); let credentials = credentials.map(|(u, p)| (u.to_string(), p.to_string())); - tokio::task::spawn_blocking(move || { - // Set up clone options with authentication - let mut builder = git2::build::RepoBuilder::new(); - - // Always set a credentials callback so git2 doesn't fail with "no callback set". - // The callback will try explicit credentials first, then fall back to git's - // credential helper system (which includes .netrc support). - let mut callbacks = git2::RemoteCallbacks::new(); - let attempt_count = RefCell::new(0); - - callbacks.credentials(move |url, username_from_url, allowed_types| { - let mut attempts = attempt_count.borrow_mut(); - *attempts += 1; - - // Limit attempts to prevent infinite loops - if *attempts > 1 { - return git2::Cred::default(); - } + tokio::time::timeout( + std::time::Duration::from_secs(120), + tokio::task::spawn_blocking(move || { + // Set up clone options with authentication + let mut builder = git2::build::RepoBuilder::new(); + + // Always set a credentials callback so git2 doesn't fail with "no callback set". 
+ // The callback will try explicit credentials first, then fall back to git's + // credential helper system (which includes .netrc support). + let mut callbacks = git2::RemoteCallbacks::new(); + let attempt_count = RefCell::new(0); + + callbacks.credentials( + move |url, username_from_url, allowed_types| { + let mut attempts = attempt_count.borrow_mut(); + *attempts += 1; + + // Limit attempts to prevent infinite loops + if *attempts > 1 { + return git2::Cred::default(); + } - // First, try explicit credentials from netrc if available - if allowed_types.contains(git2::CredentialType::USER_PASS_PLAINTEXT) - { - if let Some((ref username, ref password)) = credentials { - // Use both username and password from netrc - return git2::Cred::userpass_plaintext(username, password); - } - } + // First, try explicit credentials from netrc if available + if allowed_types + .contains(git2::CredentialType::USER_PASS_PLAINTEXT) + { + if let Some((ref username, ref password)) = credentials + { + // Use both username and password from netrc + return git2::Cred::userpass_plaintext( + username, password, + ); + } + } - // Try SSH key if available - if allowed_types.contains(git2::CredentialType::SSH_KEY) { - if let Some(username) = username_from_url { - if let Ok(cred) = git2::Cred::ssh_key_from_agent(username) { - return Ok(cred); + // Try SSH key if available + if allowed_types.contains(git2::CredentialType::SSH_KEY) { + if let Some(username) = username_from_url { + if let Ok(cred) = + git2::Cred::ssh_key_from_agent(username) + { + return Ok(cred); + } + } } - } - } - // Fall back to git's credential helper system (includes .netrc) - if let Ok(config) = git2::Config::open_default() { - if let Ok(cred) = git2::Cred::credential_helper( - &config, - url, - username_from_url, - ) { - return Ok(cred); - } - } + // Fall back to git's credential helper system (includes .netrc) + if let Ok(config) = git2::Config::open_default() { + if let Ok(cred) = git2::Cred::credential_helper( + 
&config, + url, + username_from_url, + ) { + return Ok(cred); + } + } - git2::Cred::default() - }); + git2::Cred::default() + }, + ); - let mut fetch_options = git2::FetchOptions::new(); - fetch_options.remote_callbacks(callbacks); - builder.fetch_options(fetch_options); + let mut fetch_options = git2::FetchOptions::new(); + fetch_options.depth(1); // shallow clone — only need one commit + fetch_options.remote_callbacks(callbacks); + builder.fetch_options(fetch_options); - // Clone the repository - let repo = - builder + // Clone the repository + let repo = builder .clone(&normalized_repo_url, &temp_path) .map_err(|e| VwError::Git { message: format!("Failed to clone repository: {e}"), })?; - // Parse the commit SHA - let commit_oid = - git2::Oid::from_str(&commit).map_err(|e| VwError::Git { - message: format!("Invalid commit SHA '{commit}': {e}"), - })?; + // Parse the commit SHA + let commit_oid = + git2::Oid::from_str(&commit).map_err(|e| VwError::Git { + message: format!("Invalid commit SHA '{commit}': {e}"), + })?; - // Find the commit object - let commit_obj = - repo.find_commit(commit_oid).map_err(|e| VwError::Git { - message: format!("Commit '{commit}' not found: {e}"), - })?; + // Find the commit object + let commit_obj = + repo.find_commit(commit_oid).map_err(|e| VwError::Git { + message: format!("Commit '{commit}' not found: {e}"), + })?; - // Checkout the specific commit - repo.checkout_tree(commit_obj.as_object(), None) - .map_err(|e| VwError::Git { - message: format!("Failed to checkout commit '{commit}': {e}"), - })?; + // Checkout the specific commit + repo.checkout_tree(commit_obj.as_object(), None) + .map_err(|e| VwError::Git { + message: format!( + "Failed to checkout commit '{commit}': {e}" + ), + })?; - // Set HEAD to the commit - repo.set_head_detached(commit_oid) - .map_err(|e| VwError::Git { - message: format!( - "Failed to set HEAD to commit '{commit}': {e}" - ), - })?; + // Set HEAD to the commit + repo.set_head_detached(commit_oid) + 
.map_err(|e| VwError::Git { + message: format!( + "Failed to set HEAD to commit '{commit}': {e}" + ), + })?; - Ok::<(), VwError>(()) - }) + // Initialize and update submodules if requested + if submodules { + for mut submodule in + repo.submodules().map_err(|e| VwError::Git { + message: format!("Failed to list submodules: {e}"), + })? + { + submodule.init(false).map_err(|e| VwError::Git { + message: format!( + "Failed to init submodule '{}': {e}", + submodule.name().unwrap_or("unknown") + ), + })?; + submodule.update(true, None).map_err(|e| VwError::Git { + message: format!( + "Failed to update submodule '{}': {e}", + submodule.name().unwrap_or("unknown") + ), + })?; + } + } + + Ok::<(), VwError>(()) + }), + ) .await + .map_err(|_| VwError::Git { + message: "Git clone timed out after 120 seconds".to_string(), + })? .map_err(|e| VwError::Git { message: format!("Failed to execute git operations: {e}"), })??; @@ -1637,7 +2093,15 @@ async fn download_dependency( })?; // Treat all src values as globs (handles files, directories, and patterns) - copy_vhdl_files_glob(temp_dir.path(), &src_path, dest_path, recursive)?; + for src_path in &src_paths { + copy_vhdl_files_glob( + temp_dir.path(), + src_path, + dest_path, + recursive, + exclude, + )?; + } Ok(()) } @@ -1647,12 +2111,19 @@ fn copy_vhdl_files_glob( src_pattern: &str, dest: &Path, recursive: bool, + exclude: &[String], ) -> Result<()> { // Build patterns to match let src_path = repo_root.join(src_pattern); let mut patterns = Vec::new(); let strip_prefix: PathBuf; + // Compile exclude patterns + let exclude_patterns: Vec = exclude + .iter() + .filter_map(|p| glob::Pattern::new(p).ok()) + .collect(); + // Check if src_pattern points to a directory if src_path.is_dir() { // It's a directory - create appropriate glob patterns @@ -1730,6 +2201,13 @@ fn copy_vhdl_files_glob( } })?; + // Check if file matches any exclude pattern + let path_str = relative_path.to_string_lossy(); + if exclude_patterns.iter().any(|p| 
p.matches(&path_str)) + { + continue; // Skip excluded files + } + let dest_file = dest.join(relative_path); // Create parent directories if needed @@ -1766,9 +2244,31 @@ fn copy_vhdl_files_glob( Ok(()) } -fn find_vhdl_files(dir: &Path, recursive: bool) -> Result> { +fn find_vhdl_files( + dir: &Path, + recursive: bool, + exclude: &[String], +) -> Result> { let mut vhdl_files = Vec::new(); find_vhdl_files_impl(dir, &mut vhdl_files, recursive)?; + + // Filter out excluded files + if !exclude.is_empty() { + let exclude_patterns: Vec = exclude + .iter() + .filter_map(|p| glob::Pattern::new(p).ok()) + .collect(); + + vhdl_files.retain(|file| { + // Match against path relative to the base directory + let relative = file.strip_prefix(dir).unwrap_or(file); + let path_str = relative.to_string_lossy(); + !exclude_patterns + .iter() + .any(|pattern| pattern.matches(&path_str)) + }); + } + Ok(vhdl_files) } @@ -1837,25 +2337,83 @@ fn write_vhdl_ls_config( Ok(()) } -fn load_existing_vhdl_ls_config( - workspace_dir: &Utf8Path, -) -> Result { - let config_path = workspace_dir.join("vhdl_ls.toml"); - if config_path.exists() { - let config_content = fs::read_to_string(&config_path).map_err(|e| { +/// Build a Rust library for a testbench. +/// Looks for Cargo.toml in the testbench directory, builds it, and returns the path to the .so file. 
+async fn build_rust_library( + bench_dir: &Utf8Path, + testbench_file: &Path, +) -> Result { + // Get the testbench directory + let testbench_dir = + testbench_file.parent().ok_or_else(|| VwError::Testbench { + message: format!( + "Testbench file {:?} has no parent directory???", + testbench_file + ), + })?; + + // Look for Cargo.toml in the testbench directory + let cargo_toml_path = testbench_dir.join("Cargo.toml"); + if !cargo_toml_path.exists() { + return Err(VwError::Testbench { + message: format!( + "Cargo.toml not found in testbench directory: {:?}", + testbench_dir + ), + }); + } + + // Parse Cargo.toml to get the package name + let cargo_toml_content = + fs::read_to_string(&cargo_toml_path).map_err(|e| { VwError::FileSystem { - message: format!("Failed to read existing vhdl_ls.toml: {e}"), + message: format!("Failed to read Cargo.toml: {e}"), } })?; - let config: VhdlLsConfig = toml::from_str(&config_content)?; + let cargo_toml: CargoToml = toml::from_str(&cargo_toml_content)?; + let package_name = cargo_toml.package.name; - Ok(config) - } else { - Ok(VhdlLsConfig { - standard: None, - libraries: HashMap::new(), - lint: None, - }) + // Run cargo build in the testbench directory + let testbench_dir_owned = testbench_dir.to_path_buf(); + tokio::task::spawn_blocking(move || { + let output = std::process::Command::new("cargo") + .arg("build") + .current_dir(&testbench_dir_owned) + .output() + .map_err(|e| VwError::Testbench { + message: format!("Failed to execute cargo build: {e}"), + })?; + + if !output.status.success() { + let stderr = String::from_utf8_lossy(&output.stderr); + return Err(VwError::Testbench { + message: format!("cargo build failed:\n{stderr}"), + }); + } + + Ok::<(), VwError>(()) + }) + .await + .map_err(|e| VwError::Testbench { + message: format!("Failed to execute cargo build task: {e}"), + })??; + + // Find the .so file in the workspace target directory (parent of testbench dir) + let ext = if cfg!(target_os = "macos") { "dylib" } else { 
"so" }; + let lib_name = format!("lib{}.{ext}", package_name.replace('-', "_")); + let workspace_target = bench_dir.join("target").join("debug"); + + let lib_path = workspace_target.join(&lib_name); + + if !lib_path.exists() { + return Err(VwError::Testbench { + message: format!( + "Built Rust library not found at expected path: {:?}", + lib_path + ), + }); } + + Ok(lib_path.into()) } diff --git a/vw-lib/src/mapping.rs b/vw-lib/src/mapping.rs new file mode 100644 index 0000000..63a3644 --- /dev/null +++ b/vw-lib/src/mapping.rs @@ -0,0 +1,296 @@ +use vhdl_lang::ast::{ + AnyDesignUnit, AnyPrimaryUnit, AttributeSpecification, Designator, + DiscreteRange, ElementDeclaration, EntityClass, EntityDeclaration, + EntityName, Name, PackageDeclaration, PackageInstantiation, Range, + RangeConstraint, SubtypeConstraint, TypeDeclaration, TypeDefinition, +}; + +use crate::visitor::{Visitor, VisitorResult}; + +#[derive(Debug, Clone)] +pub enum VwSymbol { + Package(String), + Entity(String), + Constant(String), + Record(RecordData), + Enum(EnumData), +} + +#[derive(Debug, Clone)] +pub struct EnumData { + pub containing_pkg: Option, + pub name: String, + pub has_custom_encoding: bool, +} + +impl EnumData { + pub fn new(containing_pkg: Option, name: &str) -> Self { + Self { + containing_pkg, + name: String::from(name), + has_custom_encoding: false, + } + } + + pub fn get_pkg_name(&self) -> Option<&String> { + self.containing_pkg.as_ref() + } + + pub fn get_name(&self) -> &str { + &self.name + } +} + +#[derive(Debug, Clone)] +pub struct RecordData { + containing_pkg: Option, + name: String, + fields: Vec, +} + +#[derive(Debug, Default)] +pub struct FileData { + defined_pkgs: Vec, + imported_pkgs: Vec, +} + +impl FileData { + pub fn new() -> Self { + Self { + defined_pkgs: Vec::new(), + imported_pkgs: Vec::new(), + } + } + + pub fn add_defined_pkg(&mut self, pkg_name: &str) { + self.defined_pkgs.push(pkg_name.to_string()); + } + + pub fn add_imported_pkg(&mut self, pkg_name: &str) { + 
self.imported_pkgs.push(pkg_name.to_string()); + } + + pub fn get_imported_pkgs(&self) -> &Vec { + &self.imported_pkgs + } +} + +impl RecordData { + pub fn new(containing_pkg: Option, name: &str) -> Self { + Self { + containing_pkg, + name: String::from(name), + fields: Vec::new(), + } + } + + pub fn get_pkg_name(&self) -> Option<&String> { + self.containing_pkg.as_ref() + } + + pub fn get_fields(&self) -> &Vec { + &self.fields + } + + pub fn get_name(&self) -> &str { + &self.name + } +} + +#[derive(Debug, Clone)] +pub struct FieldData { + pub name: String, + pub subtype_name: String, + pub constraint: Option, +} + +#[derive(Debug)] +pub struct VwSymbolFinder { + symbols: Vec, + records: Vec, + tagged_types: Vec, + target_attr: String, +} + +impl VwSymbolFinder { + pub fn new(target_attr: &str) -> Self { + Self { + symbols: Vec::new(), + records: Vec::new(), + tagged_types: Vec::new(), + target_attr: target_attr.to_string(), + } + } + + pub fn get_symbols(&self) -> &Vec { + &self.symbols + } + + pub fn get_records(&self) -> &Vec { + &self.records + } + + pub fn get_tagged_types(&self) -> &Vec { + &self.tagged_types + } +} + +impl Visitor for VwSymbolFinder { + fn visit_attribute_specification( + &mut self, + spec: &AttributeSpecification, + _unit: &AnyDesignUnit, + ) -> VisitorResult { + let attr_name = spec.ident.item.item.name_utf8(); + + // Check for custom enum encoding + if attr_name == "enum_encoding" { + if let EntityClass::Type = spec.entity_class { + if let EntityName::Name(tag) = &spec.entity_name { + if let Designator::Identifier(id) = + &tag.designator.item.item + { + let type_name = id.name_utf8(); + // Find the enum and set its flag + for symbol in &mut self.symbols { + if let VwSymbol::Enum(enum_data) = symbol { + if enum_data.name == type_name { + enum_data.has_custom_encoding = true; + break; + } + } + } + } + } + } + } + + // if we found the attribute with the right name + if attr_name == self.target_attr { + // if we tagged a type (like a record) 
+ if let EntityClass::Type = spec.entity_class { + // get the entity name + if let EntityName::Name(tag) = &spec.entity_name { + // get the identifier + if let Designator::Identifier(id) = + &tag.designator.item.item + { + let type_name = id.name_utf8(); + self.tagged_types.push(type_name); + } + } + } + } + VisitorResult::Continue + } + + #[allow(clippy::collapsible_match)] + fn visit_type_declaration( + &mut self, + decl: &TypeDeclaration, + unit: &AnyDesignUnit, + ) -> VisitorResult { + let name = decl.ident.tree.item.name_utf8(); + + // Figure out where this type was defined (containing package) + let defining_pkg_name = + if let AnyDesignUnit::Primary(primary_unit) = unit { + if let AnyPrimaryUnit::Package(package) = primary_unit { + Some(package.ident.tree.item.name_utf8()) + } else { + None + } + } else { + None + }; + + match &decl.def { + TypeDefinition::Record(elements) => { + let mut record_struct = + RecordData::new(defining_pkg_name, &name); + let fields = get_fields(elements); + record_struct.fields = fields; + self.records.push(record_struct); + } + TypeDefinition::Enumeration(_) => { + let enum_data = EnumData::new(defining_pkg_name, &name); + self.symbols.push(VwSymbol::Enum(enum_data)); + } + _ => {} + } + VisitorResult::Continue + } + + fn visit_entity(&mut self, entity: &EntityDeclaration) -> VisitorResult { + let name = entity.ident.tree.item.name_utf8(); + self.symbols.push(VwSymbol::Entity(name)); + VisitorResult::Continue + } + + fn visit_package(&mut self, package: &PackageDeclaration) -> VisitorResult { + let name = package.ident.tree.item.name_utf8(); + self.symbols.push(VwSymbol::Package(name)); + VisitorResult::Continue + } + + fn visit_package_instance( + &mut self, + instance: &PackageInstantiation, + ) -> VisitorResult { + let name = instance.ident.tree.item.name_utf8(); + self.symbols.push(VwSymbol::Package(name)); + VisitorResult::Continue + } +} + +fn get_fields(elements: &Vec) -> Vec { + let mut fields = Vec::new(); + + for 
element in elements { + let element_name = element.idents[0].tree.item.name_utf8(); + let element_subtype = if let Name::Designator(designator) = + &element.subtype.type_mark.item + { + if let Designator::Identifier(symbol) = &designator.item { + Some(symbol.name_utf8()) + } else { + None + } + } else { + None + // panic here for now, because i want to see what struct differences there + // might be + } + .unwrap(); + + let element_constraint = element + .subtype + .constraint + .as_ref() + .map(|constraint| get_range_constraint(&constraint.item)); + + fields.push(FieldData { + name: element_name, + subtype_name: element_subtype, + constraint: element_constraint, + }); + } + + fields +} + +fn get_range_constraint(constraint: &SubtypeConstraint) -> RangeConstraint { + if let SubtypeConstraint::Array(array_range, _) = constraint { + if let DiscreteRange::Range(discrete_range) = &array_range[0].item { + if let Range::Range(constraint) = discrete_range { + constraint.clone() + } else { + panic!("We don't handle other range types") + } + } else { + panic!("We don't handle other DiscreteRange types"); + } + } else { + panic!("We don't handle other constraint types"); + } +} diff --git a/vw-lib/src/nvc_helpers.rs b/vw-lib/src/nvc_helpers.rs new file mode 100644 index 0000000..97a3f63 --- /dev/null +++ b/vw-lib/src/nvc_helpers.rs @@ -0,0 +1,191 @@ +use crate::{VhdlStandard, VwError}; + +use tokio::process::Command; + +use std::{ + io::Write, + process::{ExitStatus, Output}, +}; + +fn get_base_nvc_cmd_args( + std: VhdlStandard, + build_dir: &str, + lib_name: &str, +) -> Vec { + let lib_dir = build_dir.to_owned() + "/" + lib_name; + let args = vec![ + format!("--std={std}"), + format!("--work={lib_dir}"), + "-M".to_string(), + "256m".to_string(), + "-L".to_string(), + build_dir.to_owned(), + ]; + args +} + +async fn run_cmd_w_output( + args: &Vec, + envs: Option<&Vec<(String, String)>>, +) -> Result { + let mut nvc_cmd = Command::new("nvc"); + for arg in args { + 
nvc_cmd.arg(arg); + } + + if let Some(vars) = envs { + for (env_var, value) in vars { + nvc_cmd.env(env_var, value); + } + } + + nvc_cmd.output().await.map_err(|e| VwError::Testbench { + message: format!("nvc command failed : {e}"), + }) +} + +async fn run_cmd( + args: &Vec, + envs: Option<&Vec<(String, String)>>, +) -> Result { + let mut nvc_cmd = Command::new("nvc"); + for arg in args { + nvc_cmd.arg(arg); + } + + if let Some(vars) = envs { + for (env_var, value) in vars { + nvc_cmd.env(env_var, value); + } + } + + nvc_cmd.status().await.map_err(|e| VwError::Testbench { + message: format!("nvc command failed : {e}"), + }) +} + +pub async fn run_nvc_analysis( + std: VhdlStandard, + build_dir: &str, + lib_name: &str, + referenced_files: &Vec, + capture_output: bool, +) -> Result, Vec)>, VwError> { + let mut args = get_base_nvc_cmd_args(std, build_dir, lib_name); + args.push("-a".to_string()); + + for file in referenced_files { + args.push(file.clone()); + } + + if capture_output { + let output = run_cmd_w_output(&args, None).await?; + + if !output.status.success() { + let cmd_str = format!("nvc {}", args.join(" ")); + std::io::stdout().write_all(&output.stdout)?; + std::io::stderr().write_all(&output.stderr)?; + return Err(VwError::NvcAnalysis { + library: lib_name.to_owned(), + command: cmd_str, + }); + } + Ok(Some((output.stdout, output.stderr))) + } else { + let status = run_cmd(&args, None).await?; + + if !status.success() { + let cmd_str = format!("nvc {}", args.join(" ")); + return Err(VwError::NvcAnalysis { + library: lib_name.to_owned(), + command: cmd_str, + }); + } + Ok(None) + } +} + +pub async fn run_nvc_elab( + std: VhdlStandard, + build_dir: &str, + lib_name: &str, + testbench_name: &str, + capture_output: bool, +) -> Result, Vec)>, VwError> { + let mut args = get_base_nvc_cmd_args(std, build_dir, lib_name); + args.push("-e".to_string()); + args.push(testbench_name.to_owned()); + + if capture_output { + let output = run_cmd_w_output(&args, 
None).await?; + if !output.status.success() { + let cmd_str = format!("nvc {}", args.join(" ")); + std::io::stdout().write_all(&output.stdout)?; + std::io::stdout().write_all(&output.stderr)?; + + return Err(VwError::NvcElab { command: cmd_str }); + } + Ok(Some((output.stdout, output.stderr))) + } else { + let status = run_cmd(&args, None).await?; + + if !status.success() { + let cmd_str = format!("nvc {}", args.join(" ")); + return Err(VwError::NvcElab { command: cmd_str }); + } + + Ok(None) + } +} + +pub async fn run_nvc_sim( + std: VhdlStandard, + build_dir: &str, + lib_name: &str, + testbench_name: &String, + rust_lib_path: Option, + runtime_flags: &Vec, + capture_output: bool, +) -> Result, Vec)>, VwError> { + let mut args = get_base_nvc_cmd_args(std, build_dir, lib_name); + args.push("-r".to_string()); + args.push(testbench_name.clone()); + + for flag in runtime_flags { + args.push(flag.clone()); + } + + args.push("--dump-arrays".to_string()); + args.push("--format=fst".to_string()); + args.push(format!("--wave={testbench_name}.fst")); + + let envs = match rust_lib_path { + Some(path) => { + args.push(format!("--load={path}")); + let envs_vec = vec![("GPI_USERS".to_string(), path.clone())]; + Some(envs_vec) + } + None => None, + }; + + if capture_output { + let output = run_cmd_w_output(&args, envs.as_ref()).await?; + + if !output.status.success() { + let cmd_str = format!("nvc {}", args.join(" ")); + std::io::stdout().write_all(&output.stdout)?; + std::io::stdout().write_all(&output.stderr)?; + + return Err(VwError::NvcSimulation { command: cmd_str }); + } + Ok(Some((output.stdout, output.stderr))) + } else { + let status = run_cmd(&args, envs.as_ref()).await?; + + if !status.success() { + let cmd_str = format!("nvc {}", args.join(" ")); + return Err(VwError::NvcSimulation { command: cmd_str }); + } + Ok(None) + } +} diff --git a/vw-lib/src/visitor.rs b/vw-lib/src/visitor.rs new file mode 100644 index 0000000..149193d --- /dev/null +++ 
b/vw-lib/src/visitor.rs @@ -0,0 +1,399 @@ +//! Generic AST visitor for vhdl_lang. +//! +//! This module provides a `Visitor` trait that allows arbitrary AST traversal, +//! unlike the built-in `Searcher` trait which only exposes limited node types. +//! +//! # Example +//! +//! ```ignore +//! use vw_lib::visitor::{Visitor, VisitorResult, walk_design_file}; +//! +//! struct MyVisitor { +//! record_count: usize, +//! } +//! +//! impl Visitor for MyVisitor { +//! fn visit_type_declaration(&mut self, decl: &TypeDeclaration, unit: &AnyDesignUnit) -> VisitorResult { +//! if matches!(&decl.def, TypeDefinition::Record(_)) { +//! self.record_count += 1; +//! println!("Found record type in unit: {:?}", unit); +//! } +//! VisitorResult::Continue +//! } +//! } +//! ``` + +use vhdl_lang::ast::{ + AnyDesignUnit, AnyPrimaryUnit, AnySecondaryUnit, ArchitectureBody, + Attribute, AttributeDeclaration, AttributeSpecification, + ComponentDeclaration, ConfigurationDeclaration, ContextDeclaration, + Declaration, DesignFile, EntityDeclaration, PackageBody, + PackageDeclaration, PackageInstantiation, SubprogramBody, + SubprogramDeclaration, SubprogramInstantiation, TypeDeclaration, +}; + +/// Controls whether AST traversal should continue or stop. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum VisitorResult { + /// Continue traversing the AST + Continue, + /// Stop traversal immediately + Stop, +} + +impl VisitorResult { + /// Returns true if traversal should continue + pub fn should_continue(&self) -> bool { + matches!(self, VisitorResult::Continue) + } +} + +/// A trait for visiting nodes in a vhdl_lang AST. +/// +/// All methods have default implementations that return `Continue`, +/// so you only need to override the methods for nodes you care about. +/// +/// Methods are called in a depth-first traversal order. 
+#[allow(unused_variables)] +pub trait Visitor { + // ======================================================================== + // Design Units + // ======================================================================== + + /// Called for each design file before visiting its contents + fn visit_design_file(&mut self, file: &DesignFile) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for each design unit + fn visit_design_unit(&mut self, unit: &AnyDesignUnit) -> VisitorResult { + VisitorResult::Continue + } + + // ------------------------------------------------------------------------ + // Primary Units + // ------------------------------------------------------------------------ + + /// Called for entity declarations + fn visit_entity(&mut self, entity: &EntityDeclaration) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for package declarations + fn visit_package(&mut self, package: &PackageDeclaration) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for package instantiations + fn visit_package_instance( + &mut self, + instance: &PackageInstantiation, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for context declarations + fn visit_context(&mut self, context: &ContextDeclaration) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for configuration declarations + fn visit_configuration( + &mut self, + config: &ConfigurationDeclaration, + ) -> VisitorResult { + VisitorResult::Continue + } + + // ------------------------------------------------------------------------ + // Secondary Units + // ------------------------------------------------------------------------ + + /// Called for architecture bodies + fn visit_architecture(&mut self, arch: &ArchitectureBody) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for package bodies + fn visit_package_body(&mut self, body: &PackageBody) -> VisitorResult { + VisitorResult::Continue + } + + // 
======================================================================== + // Declarations + // ======================================================================== + + /// Called for each declaration (before dispatching to specific type) + fn visit_declaration( + &mut self, + decl: &Declaration, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for type declarations + fn visit_type_declaration( + &mut self, + decl: &TypeDeclaration, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for component declarations + fn visit_component( + &mut self, + comp: &ComponentDeclaration, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for subprogram declarations (function/procedure specs) + fn visit_subprogram_declaration( + &mut self, + decl: &SubprogramDeclaration, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for subprogram bodies (function/procedure implementations) + fn visit_subprogram_body( + &mut self, + body: &SubprogramBody, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for subprogram instantiations + fn visit_subprogram_instantiation( + &mut self, + inst: &SubprogramInstantiation, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + // ------------------------------------------------------------------------ + // Attributes + // ------------------------------------------------------------------------ + + /// Called for attribute declarations (attribute X : type) + fn visit_attribute_declaration( + &mut self, + decl: &AttributeDeclaration, + unit: &AnyDesignUnit, + ) -> VisitorResult { + VisitorResult::Continue + } + + /// Called for attribute specifications (attribute X of Y : class is value) + fn visit_attribute_specification( + &mut self, + spec: &AttributeSpecification, + unit: &AnyDesignUnit, + ) -> VisitorResult { + 
VisitorResult::Continue + } +} + +/// Walk a design file, calling visitor methods for each node. +pub fn walk_design_file( + visitor: &mut V, + file: &DesignFile, +) -> VisitorResult { + if !visitor.visit_design_file(file).should_continue() { + return VisitorResult::Stop; + } + + for (_tokens, unit) in &file.design_units { + if !walk_design_unit(visitor, unit).should_continue() { + return VisitorResult::Stop; + } + } + + VisitorResult::Continue +} + +/// Walk a design unit, calling visitor methods for each node. +pub fn walk_design_unit( + visitor: &mut V, + unit: &AnyDesignUnit, +) -> VisitorResult { + if !visitor.visit_design_unit(unit).should_continue() { + return VisitorResult::Stop; + } + + match unit { + AnyDesignUnit::Primary(primary) => { + walk_primary_unit(visitor, primary, unit) + } + AnyDesignUnit::Secondary(secondary) => { + walk_secondary_unit(visitor, secondary, unit) + } + } +} + +/// Walk a primary unit. +fn walk_primary_unit( + visitor: &mut V, + unit: &AnyPrimaryUnit, + design_unit: &AnyDesignUnit, +) -> VisitorResult { + match unit { + AnyPrimaryUnit::Entity(entity) => { + if !visitor.visit_entity(entity).should_continue() { + return VisitorResult::Stop; + } + walk_declarations(visitor, &entity.decl, design_unit) + } + AnyPrimaryUnit::Package(package) => { + if !visitor.visit_package(package).should_continue() { + return VisitorResult::Stop; + } + walk_declarations(visitor, &package.decl, design_unit) + } + AnyPrimaryUnit::PackageInstance(instance) => { + visitor.visit_package_instance(instance) + } + AnyPrimaryUnit::Context(context) => visitor.visit_context(context), + AnyPrimaryUnit::Configuration(config) => { + visitor.visit_configuration(config) + } + } +} + +/// Walk a secondary unit. 
+fn walk_secondary_unit( + visitor: &mut V, + unit: &AnySecondaryUnit, + design_unit: &AnyDesignUnit, +) -> VisitorResult { + match unit { + AnySecondaryUnit::Architecture(arch) => { + if !visitor.visit_architecture(arch).should_continue() { + return VisitorResult::Stop; + } + walk_declarations(visitor, &arch.decl, design_unit) + } + AnySecondaryUnit::PackageBody(body) => { + if !visitor.visit_package_body(body).should_continue() { + return VisitorResult::Stop; + } + walk_declarations(visitor, &body.decl, design_unit) + } + } +} + +/// Walk a list of declarations. +fn walk_declarations( + visitor: &mut V, + decls: &[vhdl_lang::ast::token_range::WithTokenSpan], + unit: &AnyDesignUnit, +) -> VisitorResult { + for decl in decls { + if !walk_declaration(visitor, &decl.item, unit).should_continue() { + return VisitorResult::Stop; + } + } + VisitorResult::Continue +} + +/// Walk a single declaration. +fn walk_declaration( + visitor: &mut V, + decl: &Declaration, + unit: &AnyDesignUnit, +) -> VisitorResult { + // First call the generic declaration visitor + if !visitor.visit_declaration(decl, unit).should_continue() { + return VisitorResult::Stop; + } + + // Then dispatch to specific visitors + match decl { + Declaration::Type(type_decl) => { + visitor.visit_type_declaration(type_decl, unit) + } + Declaration::Component(comp) => visitor.visit_component(comp, unit), + Declaration::Attribute(attr) => match attr { + Attribute::Declaration(decl) => { + visitor.visit_attribute_declaration(decl, unit) + } + Attribute::Specification(spec) => { + visitor.visit_attribute_specification(spec, unit) + } + }, + Declaration::SubprogramDeclaration(decl) => { + visitor.visit_subprogram_declaration(decl, unit) + } + Declaration::SubprogramBody(body) => { + if !visitor.visit_subprogram_body(body, unit).should_continue() { + return VisitorResult::Stop; + } + // Recurse into subprogram body declarations + walk_declarations(visitor, &body.declarations, unit) + } + 
Declaration::SubprogramInstantiation(inst) => { + visitor.visit_subprogram_instantiation(inst, unit) + } + // For other declaration types, just continue + _ => VisitorResult::Continue, + } +} + +#[cfg(test)] +mod tests { + //use super::*; + + //struct CountingVisitor { + // entities: usize, + // packages: usize, + // types: usize, + // attr_specs: usize, + //} + + //impl CountingVisitor { + // fn new() -> Self { + // Self { + // entities: 0, + // packages: 0, + // types: 0, + // attr_specs: 0, + // } + // } + //} + + //impl Visitor for CountingVisitor { + // fn visit_entity(&mut self, _: &EntityDeclaration) -> VisitorResult { + // self.entities += 1; + // VisitorResult::Continue + // } + + // fn visit_package(&mut self, _: &PackageDeclaration) -> VisitorResult { + // self.packages += 1; + // VisitorResult::Continue + // } + + // fn visit_type_declaration( + // &mut self, + // _: &TypeDeclaration, + // _: &AnyDesignUnit, + // ) -> VisitorResult { + // self.types += 1; + // VisitorResult::Continue + // } + + // fn visit_attribute_specification( + // &mut self, + // _: &AttributeSpecification, + // _: &AnyDesignUnit, + // ) -> VisitorResult { + // self.attr_specs += 1; + // VisitorResult::Continue + // } + //} +}