diff --git a/Cargo.lock b/Cargo.lock index 058bc503..5c013393 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -435,12 +435,6 @@ dependencies = [ "syn", ] -[[package]] -name = "lazy_static" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" - [[package]] name = "libc" version = "0.2.180" @@ -731,10 +725,8 @@ dependencies = [ "camino", "difference", "globset", - "lazy_static", "log", "maud", - "once_cell", "pathdiff", "predicates", "pulldown-cmark", diff --git a/Cargo.toml b/Cargo.toml index c2823a4f..735fbbf8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,7 +25,6 @@ maud = "0.27" anyhow = "1.0" thiserror = "2.0" walkdir = "2.4" -lazy_static = "1.4" rayon = "1.8" log = "0.4" env_logger = "0.11" @@ -37,7 +36,6 @@ globset = "0.4.16" rustc-hash = "2.1" difference = "2.0" reqvire = { path = "core" } -once_cell = "1.19" tiny_http = "0.12" diff --git a/cli/src/cli.rs b/cli/src/cli.rs index 101359c4..f1ec80fa 100644 --- a/cli/src/cli.rs +++ b/cli/src/cli.rs @@ -31,7 +31,7 @@ use std::path::Path; #[clap( author, version, - about = "Reqvire requirements & treacibility management tool", + about = "Reqvire requirements & traceability management tool", long_about = None, name = "reqvire" )] @@ -149,8 +149,8 @@ pub enum Commands { filter_attachment: Option, }, - /// Analise change impact and provides report - #[clap(override_help = "Analise change impact and provides report\n\nCHANGE IMPACT OPTIONS:\n --git-commit Git commit hash to use when comparing models [default: HEAD]\n --json Output results in JSON format\n --output Save JSON output to file (requires --json)")] + /// Analyze change impact and provide report + #[clap(override_help = "Analyze change impact and provide report\n\nCHANGE IMPACT OPTIONS:\n --git-commit Git commit hash to use when comparing models [default: HEAD]\n --json Output results in JSON format\n --output Save JSON output to file (requires 
--json)")] ChangeImpact { /// Git commit hash to use when comparing models #[clap(long, default_value = "HEAD", help_heading = "CHANGE IMPACT OPTIONS")] @@ -581,7 +581,7 @@ fn print_custom_help(cmd: &clap::Command) { let short = arg.get_short().map(|s| format!("-{}, ", s)).unwrap_or_default(); let value_name = if arg.get_action().takes_values() { let value = arg.get_value_names() - .and_then(|v| v.get(0)) + .and_then(|v| v.first()) .map(|s| s.to_string()) .unwrap_or_else(|| "VALUE".to_string()); format!(" <{}>", value) @@ -617,7 +617,7 @@ fn print_custom_help(cmd: &clap::Command) { let long = arg.get_long().map(|l| format!("--{}", l)).unwrap_or_default(); let value_name = if arg.get_action().takes_values() { let value = arg.get_value_names() - .and_then(|v| v.get(0)) + .and_then(|v| v.first()) .map(|s| s.to_string()) .unwrap_or_else(|| "VALUE".to_string()); format!(" <{}>", value) @@ -651,7 +651,7 @@ fn print_custom_help(cmd: &clap::Command) { let long = arg.get_long().map(|l| format!("--{}", l)).unwrap_or_default(); let value_name = if arg.get_action().takes_values() { let value = arg.get_value_names() - .and_then(|v| v.get(0)) + .and_then(|v| v.first()) .map(|s| s.to_string()) .unwrap_or_else(|| "VALUE".to_string()); format!(" <{}>", value) @@ -694,10 +694,10 @@ fn print_validation_results(errors: &[ReqvireError], json_output: bool) { println!("{}", serde_json::to_string_pretty(&json_result).unwrap()); } else { println!("\n❌ {} validation failed with error(s):", errors.len()); - println!(""); + println!(); for (i, error) in errors.iter().enumerate() { println!(" {}. 
{}", i + 1, error); - println!(""); + println!(); } println!(); } @@ -826,7 +826,7 @@ pub fn handle_command( } else { println!("✅ No validation issues found"); } - return Ok(0); + Ok(0) }, Some(Commands::Search { json, @@ -869,7 +869,7 @@ pub fn handle_command( } else { println!("{}", report_output); } - return Ok(0); + Ok(0) }, Some(Commands::ChangeImpact { json, git_commit, output }) => { validate_output_requires_json(&output, json)?; @@ -898,7 +898,7 @@ pub fn handle_command( println!("{}", report.to_text(&base_url, ¤t_commit, &git_commit)); } - return Ok(0); + Ok(0) }, Some(Commands::Format { fix, json, output, with_full_relations }) => { validate_output_requires_json(&output, json)?; @@ -912,7 +912,7 @@ pub fn handle_command( } else { render_diff(&format_result); } - return Ok(0); + Ok(0) }, Some(Commands::Traces { json, @@ -953,7 +953,7 @@ pub fn handle_command( println!("{}", markdown_output); } - return Ok(0); + Ok(0) }, Some(Commands::Coverage { json, output }) => { validate_output_requires_json(&output, json)?; @@ -963,7 +963,7 @@ pub fn handle_command( } else { coverage_report.print(false); } - return Ok(0); + Ok(0) }, Some(Commands::Model { from, reverse, filter_type, json, output }) => { validate_output_requires_json(&output, json)?; @@ -986,7 +986,7 @@ pub fn handle_command( } else { println!("{}", report_output); } - return Ok(0); + Ok(0) }, Some(Commands::Lint { fixable, auditable, fix, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1039,7 +1039,7 @@ pub fn handle_command( } } - return Ok(0); + Ok(0) }, Some(Commands::Export { output }) => { let git_root = git_commands::get_git_root_dir()?; @@ -1073,7 +1073,7 @@ pub fn handle_command( )?; println!("✅ Export completed successfully to: {}", temp_dir.display()); } - return Ok(0); + Ok(0) }, Some(Commands::Serve { host, port }) => { // Enable quiet mode for serve command (suppress verbose export output) @@ -1095,7 +1095,7 @@ pub fn handle_command( // Cleanup temporary directory 
after server stops std::fs::remove_dir_all(&temp_dir)?; - return Ok(0); + Ok(0) }, Some(Commands::Add { file, content, override_existing, dry_run, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1135,7 +1135,7 @@ pub fn handle_command( render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::Rm { element_name, dry_run, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1158,7 +1158,7 @@ pub fn handle_command( render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::Mv { element_name, file, dry_run, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1184,7 +1184,7 @@ pub fn handle_command( render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::Rename { element_name, new_name, dry_run, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1208,7 +1208,7 @@ pub fn handle_command( render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::Merge { target, sources, dry_run, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1229,7 +1229,7 @@ pub fn handle_command( render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::MvFile { source_file, target_file, squash, dry_run, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1252,7 +1252,7 @@ pub fn handle_command( render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::Link { source, relation_type, target, dry_run }) => { let git_root = git_commands::get_git_root_dir()?; @@ -1305,7 +1305,7 @@ pub fn handle_command( )?; render_crud_result(&result); } - return Ok(0); + Ok(0) }, Some(Commands::Unlink { source, target, dry_run }) => { let git_root = git_commands::get_git_root_dir()?; @@ -1317,7 +1317,7 @@ pub fn handle_command( dry_run, )?; render_crud_result(&result); - return Ok(0); + Ok(0) }, Some(Commands::MvAsset { old_path, new_path, dry_run }) => { let git_root = git_commands::get_git_root_dir()?; @@ 
-1330,7 +1330,7 @@ pub fn handle_command( )?; render_crud_result(&result); - return Ok(0); + Ok(0) }, Some(Commands::RmAsset { file_path, dry_run }) => { let git_root = git_commands::get_git_root_dir()?; @@ -1342,7 +1342,7 @@ pub fn handle_command( )?; render_crud_result(&result); - return Ok(0); + Ok(0) }, Some(Commands::Containment { json, output, short }) => { validate_output_requires_json(&output, json)?; @@ -1357,7 +1357,7 @@ pub fn handle_command( let diagram_output = diagrams::generate_containment_diagram(&model_manager.graph_registry, short)?; println!("{}", diagram_output); } - return Ok(0); + Ok(0) }, Some(Commands::Resources { json, output }) => { validate_output_requires_json(&output, json)?; @@ -1367,7 +1367,7 @@ pub fn handle_command( let report = report_resources::generate_resources_report(&model_manager.graph_registry); report.print(false); } - return Ok(0); + Ok(0) }, Some(Commands::Collect { element_name, direction, json, output }) => { validate_output_requires_json(&output, json)?; @@ -1392,15 +1392,15 @@ pub fn handle_command( } else { println!("{}", report_output); } - return Ok(0); + Ok(0) }, Some(Commands::Shell) => { run_shell(&mut model_manager)?; - return Ok(0); + Ok(0) }, Some(Commands::Sout) => { run_sout(&model_manager.graph_registry)?; - return Ok(0); + Ok(0) }, None => { // This case is handled at the beginning of handle_command diff --git a/core/Cargo.toml b/core/Cargo.toml index 9a8967bb..7f6a2e13 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -12,7 +12,6 @@ documentation.workspace = true # Parsing and content manipulation pulldown-cmark = { workspace = true } regex = { workspace = true } -lazy_static = { workspace = true } difference = { workspace = true } # Path handling @@ -40,8 +39,6 @@ rustc-hash = { workspace = true } # Terminal output termcolor = { workspace = true } -once_cell = { workspace = true } - [dev-dependencies] assert_fs = { workspace = true } predicates = { workspace = true } diff --git 
a/core/src/change_impact.rs b/core/src/change_impact.rs index 85f036df..4200a7e0 100644 --- a/core/src/change_impact.rs +++ b/core/src/change_impact.rs @@ -113,6 +113,12 @@ pub struct ChangeImpactReport { pub all_changed_element_ids: HashSet, } +impl Default for ChangeImpactReport { + fn default() -> Self { + Self::new() + } +} + impl ChangeImpactReport { pub fn new() -> Self { Self { @@ -308,7 +314,7 @@ impl ChangeImpactReport { elem.name, element_url )); let empty_changed: HashSet = HashSet::new(); - let rendered_tree = render_change_impact_tree(&elem.change_impact_tree, 1, base_url, git_commit, &new_element_ids, &empty_changed); + let rendered_tree = render_change_impact_tree(&elem.change_impact_tree, 1, base_url, git_commit, new_element_ids, &empty_changed); if !rendered_tree.trim().is_empty() { output.push_str(&rendered_tree); output.push_str("\n\n"); @@ -342,11 +348,11 @@ impl ChangeImpactReport { } // Render relations tree - let rendered_tree = render_change_impact_tree(&elem.change_impact_tree, 1, base_url, git_commit, &new_element_ids, &elem.changed_attachments); + let rendered_tree = render_change_impact_tree(&elem.change_impact_tree, 1, base_url, git_commit, new_element_ids, &elem.changed_attachments); if !rendered_tree.trim().is_empty() { output.push_str(&rendered_tree); } - output.push_str("\n"); + output.push('\n'); } if !self.changed.is_empty() { output.push_str("\n---\n\n"); @@ -369,7 +375,7 @@ impl ChangeImpactReport { let target_url = format!("{}/blob/{}/{}", base_url, git_commit, invalidated_ver.element_id); output.push_str(&format!("- [ ] [{}]({})\n", invalidated_ver.name, target_url)); } - output.push_str("\n"); + output.push('\n'); } if self.removed.is_empty() && self.added.is_empty() && self.changed.is_empty() && self.relocated.is_empty() { @@ -399,21 +405,21 @@ fn _generate_markdown_diff(old: &str, new: &str) -> String { match diff { Difference::Same(ref x) => { for line in x.lines() { - diff_output.push_str(" "); + diff_output.push(' '); 
diff_output.push_str(line); diff_output.push('\n'); } }, Difference::Rem(ref x) => { for line in x.lines() { - diff_output.push_str("-"); + diff_output.push('-'); diff_output.push_str(line); diff_output.push('\n'); } }, Difference::Add(ref x) => { for line in x.lines() { - diff_output.push_str("+"); + diff_output.push('+'); diff_output.push_str(line); diff_output.push('\n'); } @@ -443,7 +449,7 @@ fn format_attachment_name(target: &element::AttachmentTarget) -> String { } element::AttachmentTarget::ElementIdentifier(id) => { // Extract element name from identifier (after #) - id.split('#').last().unwrap_or(id) + id.split('#').next_back().unwrap_or(id) .split('-') .map(|word| { let mut chars = word.chars(); @@ -1159,8 +1165,7 @@ pub fn compute_change_impact( .relations .iter() .filter(|r| relation::IMPACT_PROPAGATION_RELATIONS.contains(&r.relation_type.name)) - .cloned() - .map(|rel: Relation| convert_relation_to_summary(&rel)) + .map(convert_relation_to_summary) .collect(); let mut visited = BTreeSet::new(); visited.insert(id.clone()); @@ -1183,8 +1188,7 @@ pub fn compute_change_impact( let removed_relations: Vec<_> = ref_elem .relations .iter() - .cloned() - .map(|rel: Relation| convert_relation_to_summary(&rel)) + .map(convert_relation_to_summary) .collect(); report.removed.push(RemovedElement { element_id: id.clone(), diff --git a/core/src/containment.rs b/core/src/containment.rs index 3b36aacc..d93fe643 100644 --- a/core/src/containment.rs +++ b/core/src/containment.rs @@ -111,7 +111,7 @@ impl ContainmentHierarchy { let mut files_map: BTreeMap> = BTreeMap::new(); for element in registry.get_all_elements() { files_map.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(element); } @@ -280,7 +280,7 @@ fn build_folder_structure( }; folder_files.entry(folder_path) - .or_insert_with(Vec::new) + .or_default() .push(file); } @@ -302,8 +302,7 @@ fn build_folder_recursive( all_folder_paths: &std::collections::HashSet>, design_docs: 
&BTreeMap, Vec> ) -> ContainmentFolder { - let folder_name = current_path.last() - .map(|s| s.clone()) + let folder_name = current_path.last().cloned() .unwrap_or_else(|| "Reqvire root".to_string()); // Get files directly in this folder diff --git a/core/src/crud.rs b/core/src/crud.rs index 9d4c7382..50c3eec3 100644 --- a/core/src/crud.rs +++ b/core/src/crud.rs @@ -19,6 +19,7 @@ use std::path::Path; /// * `git_root` - Git root directory /// * `dry_run` - If true, don't write changes to disk /// * `override_existing` - If true, replace existing element with same name +#[allow(clippy::too_many_arguments)] pub fn add_element( model_manager: &mut ModelManager, element_markdown: &str, @@ -374,7 +375,7 @@ pub fn move_file( let source_path = git_root.join(&source_file_normalized); if source_path.exists() { std::fs::remove_file(&source_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } } @@ -426,7 +427,7 @@ pub fn attach( // Read current file content let absolute_file_path = git_root.join(&file_path); let content = fs::read_to_string(&absolute_file_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Check if attachment already exists - return error if element.attachments.iter().any(|a| a.target.as_str() == attachment_path) { @@ -462,7 +463,7 @@ pub fn attach( // Write to file if not dry run if !dry_run { fs::write(&absolute_file_path, &new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Mark file as modified for re-parsing model_manager.graph_registry.modified_files.insert(file_path.clone()); @@ -500,7 +501,7 @@ pub fn detach( // Read current file content let absolute_file_path = git_root.join(&file_path); let content = fs::read_to_string(&absolute_file_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Calculate file-relative path for finding the attachment link in markdown let file_dir = 
crate::utils::get_parent_dir(&file_path); @@ -518,7 +519,7 @@ pub fn detach( // Write to file if not dry run if !dry_run { fs::write(&absolute_file_path, &new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Mark file as modified for re-parsing model_manager.graph_registry.modified_files.insert(file_path.clone()); @@ -613,7 +614,7 @@ pub fn attach_element( // Read current file content let absolute_file_path = git_root.join(&file_path); let content = fs::read_to_string(&absolute_file_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Calculate relative identifier from target element's file to attachment element // If both elements are in the same file, use just #fragment format @@ -646,7 +647,7 @@ pub fn attach_element( // Write to file if not dry run if !dry_run { fs::write(&absolute_file_path, &new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Mark file as modified for re-parsing model_manager.graph_registry.modified_files.insert(file_path.clone()); @@ -699,7 +700,7 @@ pub fn detach_element( // Read current file content let absolute_file_path = git_root.join(&file_path); let content = fs::read_to_string(&absolute_file_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Calculate relative identifier from target element's file to attachment element let target_file_path = std::path::PathBuf::from(&file_path); @@ -722,7 +723,7 @@ pub fn detach_element( // Write to file if not dry run if !dry_run { fs::write(&absolute_file_path, &new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Mark file as modified for re-parsing model_manager.graph_registry.modified_files.insert(file_path.clone()); @@ -790,7 +791,7 @@ pub fn mv_asset( for file_path in &affected_files { let absolute_file_path = git_root.join(file_path); let content = fs::read_to_string(&absolute_file_path) - 
.map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; let mut new_content = content.clone(); @@ -824,7 +825,7 @@ pub fn mv_asset( if !dry_run { fs::write(&absolute_file_path, &new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; model_manager.graph_registry.modified_files.insert(file_path.clone()); } @@ -838,10 +839,10 @@ pub fn mv_asset( // Create parent directory if needed if let Some(parent) = new_abs.parent() { - fs::create_dir_all(parent).map_err(|e| ReqvireError::IoError(e))?; + fs::create_dir_all(parent).map_err(ReqvireError::IoError)?; } - fs::rename(&old_abs, &new_abs).map_err(|e| ReqvireError::IoError(e))?; + fs::rename(&old_abs, &new_abs).map_err(ReqvireError::IoError)?; } Ok(CrudResult { @@ -899,7 +900,7 @@ pub fn rm_asset( for spec_file_path in &affected_files { let absolute_file_path = git_root.join(spec_file_path); let content = fs::read_to_string(&absolute_file_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Paths in markdown are file-relative, calculate the relative path from this file let file_dir = crate::utils::get_parent_dir(spec_file_path); @@ -919,7 +920,7 @@ pub fn rm_asset( if !dry_run { fs::write(&absolute_file_path, &new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; model_manager.graph_registry.modified_files.insert(spec_file_path.clone()); } @@ -930,7 +931,7 @@ pub fn rm_asset( if !dry_run { let abs_path = git_root.join(file_path_arg); if abs_path.exists() { - fs::remove_file(&abs_path).map_err(|e| ReqvireError::IoError(e))?; + fs::remove_file(&abs_path).map_err(ReqvireError::IoError)?; } } @@ -1140,10 +1141,7 @@ fn add_attachment_to_element(content: &str, element_name: &str, attachment_path: // Add the new attachment after existing ones while let Some(next_line) = lines_iter.peek() { let next_trimmed = next_line.trim(); - if next_trimmed.starts_with("* ") || next_trimmed.starts_with("- ") { - 
result.push_str(lines_iter.next().unwrap()); - result.push('\n'); - } else if next_trimmed.is_empty() { + if next_trimmed.starts_with("* ") || next_trimmed.starts_with("- ") || next_trimmed.is_empty() { result.push_str(lines_iter.next().unwrap()); result.push('\n'); } else { @@ -1266,10 +1264,7 @@ fn add_element_attachment_to_element(content: &str, element_name: &str, display_ // Add the new attachment after existing ones while let Some(next_line) = lines_iter.peek() { let next_trimmed = next_line.trim(); - if next_trimmed.starts_with("* ") || next_trimmed.starts_with("- ") { - result.push_str(lines_iter.next().unwrap()); - result.push('\n'); - } else if next_trimmed.is_empty() { + if next_trimmed.starts_with("* ") || next_trimmed.starts_with("- ") || next_trimmed.is_empty() { result.push_str(lines_iter.next().unwrap()); result.push('\n'); } else { @@ -1602,13 +1597,13 @@ pub fn unlink( model_manager.graph_registry.flush_modified_files(git_root)?; } - return Ok(CrudResult { + Ok(CrudResult { operation: CrudOperation::Update, element_id: source_id, element_name: format!("Unlinked {} {} {}", source_name, relation_type, target_display), diffs, dry_run, - }); + }) } None => { // No relation found - check if it's an attachment instead diff --git a/core/src/diagrams.rs b/core/src/diagrams.rs index 9a26ebb7..a95adb98 100644 --- a/core/src/diagrams.rs +++ b/core/src/diagrams.rs @@ -224,7 +224,7 @@ impl<'a> ModelDiagramGenerator<'a> { /// Generate Mermaid diagram from model report pub fn generate_mermaid(&self, report: &ModelDiagramReport) -> String { - let mut diagram = String::from(format!("```mermaid\ngraph {};\n", DEFAULT_DIAGRAM_DIRECTION)); + let mut diagram = format!("```mermaid\ngraph {};\n", DEFAULT_DIAGRAM_DIRECTION); // Add auto-generation marker diagram.push_str(&format!(" %% {}\n", AUTOGEN_DIAGRAM_MARKER)); @@ -329,7 +329,7 @@ impl<'a> ModelDiagramGenerator<'a> { markdown.push_str(&format!("- **{}** ({}): `{}`\n", element.name, element.element_type, 
element.identifier)); } - markdown.push_str("\n"); + markdown.push('\n'); } } @@ -367,7 +367,7 @@ pub fn generate_diagrams_by_file( for element in elements { grouped_elements .entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(element); } @@ -397,10 +397,7 @@ fn generate_file_diagram( Err(_) => PathBuf::from(""), }; - let base_url = match git_commands::get_repository_base_url() { - Ok(url) => url, - Err(_) => String::from(""), - }; + let base_url = git_commands::get_repository_base_url().unwrap_or_default(); let commit_hash = match git_commands::get_commit_hash() { Ok(hash) => hash, @@ -458,14 +455,14 @@ fn generate_file_diagram( .to_string(); folders.entry(folder) - .or_insert_with(HashMap::new) + .or_default() .entry(file_name) - .or_insert_with(Vec::new) + .or_default() .push(*elem); } // Use default diagram direction constant - let mut diagram = String::from(format!("```mermaid\ngraph {};\n", DEFAULT_DIAGRAM_DIRECTION)); + let mut diagram = format!("```mermaid\ngraph {};\n", DEFAULT_DIAGRAM_DIRECTION); // Add auto-generation marker for identification diagram.push_str(&format!(" %% {}\n", AUTOGEN_DIAGRAM_MARKER)); @@ -594,14 +591,14 @@ fn generate_file_diagram( relation::LinkType::ExternalUrl(url) => url.clone(), relation::LinkType::InternalPath(path) => { if diagrams_with_blobs && has_git_info { - let relative_id = match utils::get_relative_path(&path) { + let relative_id = match utils::get_relative_path(path) { Ok(rel_path) => rel_path.to_string_lossy().to_string(), Err(_) => path.to_string_lossy().to_string() }; format!("{}/blob/{}/{}", base_url, commit_hash, relative_id) } else { utils::to_relative_identifier( - &path.to_string_lossy().into_owned(), + &path.to_string_lossy(), &base_dir, false )? 
@@ -666,7 +663,7 @@ pub fn process_diagrams( ) -> Result<(), ReqvireError> { // Generate diagrams by file - let diagrams = generate_diagrams_by_file(®istry, diagrams_with_blobs)?; + let diagrams = generate_diagrams_by_file(registry, diagrams_with_blobs)?; // Get git root for resolving relative paths let git_root = match git_commands::get_git_root_dir() { @@ -743,7 +740,7 @@ fn replace_file_diagram(content: &str, new_diagram: &str) -> String { mermaid_lines.push(lines.next().unwrap().to_string()); // mermaid start let mut has_autogen_marker = false; - while let Some(l) = lines.next() { + for l in lines.by_ref() { if l.contains(AUTOGEN_DIAGRAM_MARKER) { has_autogen_marker = true; } @@ -794,7 +791,7 @@ fn remove_file_diagrams(content: &str) -> String { mermaid_lines.push(line.to_string()); let mut has_autogen_marker = false; - while let Some(l) = lines.next() { + for l in lines.by_ref() { if l.contains(AUTOGEN_DIAGRAM_MARKER) { has_autogen_marker = true; } @@ -871,7 +868,7 @@ pub fn remove_diagrams(registry: &GraphRegistry) -> Result<(), ReqvireError> { /// # Arguments /// * `registry` - The graph registry containing all elements /// * `root_element_id` - Optional element ID to generate diagram from. If None, generates full model. -/// If Some, generates diagram starting from that element and its related elements. +/// If Some, generates diagram starting from that element and its related elements. 
pub fn generate_model_diagram(registry: &GraphRegistry, root_element_id: Option<&str>) -> Result { let generator = ModelDiagramGenerator::new(registry, root_element_id); let report = generator.generate()?; @@ -987,7 +984,7 @@ fn group_elements_by_file_filtered<'a>(registry: &'a GraphRegistry, filter: &Has for element in registry.get_all_elements() { if filter.contains(&element.identifier) { result.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(element); } } @@ -1004,7 +1001,7 @@ fn group_files_by_folder(elements_by_file: &HashMap>) -> H .unwrap_or("") .to_string(); result.entry(folder) - .or_insert_with(Vec::new) + .or_default() .push(file_path.clone()); } result @@ -1053,7 +1050,7 @@ pub fn generate_containment_diagram(registry: &GraphRegistry, short: bool) -> Re // Generate tree structure from hierarchy generate_folder_tree(&hierarchy.root_folder, "root", &mut output)?; - output.push_str("\n"); + output.push('\n'); // Collect all elements for styling and links let all_elements = collect_all_elements(&hierarchy.root_folder); diff --git a/core/src/diff.rs b/core/src/diff.rs index 45c64015..0283f823 100644 --- a/core/src/diff.rs +++ b/core/src/diff.rs @@ -82,7 +82,7 @@ pub fn generate_file_diff(file_path: &str, current: &str, new: &str) -> FileDiff let line_count = if lines.last() == Some(&"") { lines.len() - 1 } else { lines.len() }; // Determine if we should show context lines - let next_has_change = changeset.diffs.get(i + 1).map_or(false, |d| !matches!(d, Difference::Same(_))); + let next_has_change = changeset.diffs.get(i + 1).is_some_and(|d| !matches!(d, Difference::Same(_))); let show_context = previous_was_change || next_has_change; // Special case: handle empty Same sections (blank lines) @@ -138,7 +138,7 @@ pub fn generate_file_diff(file_path: &str, current: &str, new: &str) -> FileDiff // Show trailing context (before a change) let next_is_removal = changeset.diffs.get(i + 1) - .map_or(false, |d| matches!(d, 
Difference::Rem(_))); + .is_some_and(|d| matches!(d, Difference::Rem(_))); let start_end_lines = line_count.saturating_sub(end_lines); for line_idx in start_end_lines..line_count { if line_idx < lines.len() { @@ -242,7 +242,7 @@ pub fn render_file_diffs(diffs: &[FileDiff]) { println!(" \x1b[37m{} {}\x1b[0m", line.prefix, line.content) } }, - "separator" => println!(""), + "separator" => println!(), _ => { if line.content.is_empty() { println!(" {}", line.prefix) diff --git a/core/src/element.rs b/core/src/element.rs index 97392746..1974db33 100644 --- a/core/src/element.rs +++ b/core/src/element.rs @@ -120,12 +120,12 @@ impl SubSection { } } - pub fn from_str(s: &str) -> Self { + pub fn parse(s: &str) -> Self { match s { "Requirement" => SubSection::Requirement, "Relations" => SubSection::Relations, "Metadata" => SubSection::Metadata, - "Details" => SubSection::Details, + "Details" => SubSection::Details, "Properties" => SubSection::Properties, "Attachments" => SubSection::Attachments, other => SubSection::Other(other.to_string()), @@ -295,10 +295,8 @@ impl Element { } } - pub fn add_relation(&mut self, relation: Relation) -> () { - - - self.relations.push(relation); + pub fn add_relation(&mut self, relation: Relation) { + self.relations.push(relation); } pub fn add_content(&mut self, content: &str) { @@ -307,13 +305,13 @@ impl Element { pub fn freeze_content(&mut self) { // Trim newlines and tabs from the beginning and end. - let trimmed = self.content.trim_matches(&['\n', '\t'][..]); - + let trimmed = self.content.trim_matches(&['\n', '\t'][..]); + // Normalize content by removing all whitespace (spaces, tabs, newlines, etc.) 
let normalized: String = trimmed.chars().filter(|c| !c.is_whitespace()).collect(); - self.content=trimmed.to_string(); - self.hash_impact_content=utils::hash_content(&normalized); + self.content = trimmed.to_string(); + self.hash_impact_content = utils::hash_content(&normalized); } pub fn set_type_from_metadata(&mut self) { diff --git a/core/src/export.rs b/core/src/export.rs index a006a46a..e105b2f2 100644 --- a/core/src/export.rs +++ b/core/src/export.rs @@ -68,12 +68,12 @@ The resources view shows all files referenced by the model through relations and fn copy_assets_folder(output_dir: &Path) -> Result<(), ReqvireError> { let assets_dir = output_dir.join("assets"); fs::create_dir_all(&assets_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; for (filename, content) in ASSETS { let dest_path = assets_dir.join(filename); fs::write(&dest_path, content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; debug!("Copied asset: {}", filename); } @@ -92,8 +92,8 @@ pub fn flush_model_to_temp( ) -> Result<(), ReqvireError> { // Determine if we're in a subdirectory and get the relative path prefix to strip - let subdir_prefix = if current_dir.starts_with(&git_root) && current_dir != git_root { - current_dir.strip_prefix(&git_root).ok() + let subdir_prefix = if current_dir.starts_with(git_root) && current_dir != git_root { + current_dir.strip_prefix(git_root).ok() } else { None }; @@ -124,12 +124,12 @@ pub fn flush_model_to_temp( // Create parent directories if needed if let Some(parent_dir) = dest_path.parent() { fs::create_dir_all(parent_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } // Write the generated markdown file fs::write(&dest_path, markdown_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; copied_files.insert(file_path.clone()); markdown_files_written += 1; @@ -213,12 +213,13 @@ pub fn copy_html_output( } /// Helper function 
to recursively copy files, skipping .md files that have .html equivalents +#[allow(clippy::only_used_in_recursion)] fn copy_html_and_assets(src: &Path, dst: &Path, temp_root: &Path) -> Result<(), ReqvireError> { fs::create_dir_all(dst) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; - for entry in fs::read_dir(src).map_err(|e| ReqvireError::IoError(e))? { - let entry = entry.map_err(|e| ReqvireError::IoError(e))?; + for entry in fs::read_dir(src).map_err(ReqvireError::IoError)? { + let entry = entry.map_err(ReqvireError::IoError)?; let src_path = entry.path(); let dst_path = dst.join(entry.file_name()); @@ -272,7 +273,7 @@ fn post_process_html_files(temp_dir: &Path) -> Result<(), ReqvireError> { } let content = fs::read_to_string(&file_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Convert .md references to .html ONLY in specific contexts: // 1. ID attributes: id="file:-path/file.md" → id="file:-path/file.html" @@ -291,7 +292,7 @@ fn post_process_html_files(temp_dir: &Path) -> Result<(), ReqvireError> { .replace(".md", ".html"); fs::write(&file_path, processed) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; debug!("Post-processed HTML: {}", file_name); } @@ -341,10 +342,10 @@ pub fn generate_artifacts_in_temp( // Step 4: Change to temp directory and create new model manager let original_dir = env::current_dir() - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; env::set_current_dir(&temp_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Clear git cache so paths resolve to temp directory instead of original repo git_commands::clear_git_cache(); @@ -639,7 +640,7 @@ function showView(view) {{ let index_html = temp_dir.join("index.html"); if containment_html.exists() { fs::rename(&containment_html, &index_html) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; info!("✅ 
Renamed containment.html to index.html"); } @@ -653,7 +654,7 @@ function showView(view) {{ // Step 7: Restore original directory env::set_current_dir(&original_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Clear git cache again so it refreshes for original directory git_commands::clear_git_cache(); @@ -715,7 +716,7 @@ pub fn export_model( }; // prepare output folder - prepare_output_folder(&output_folder)?; + prepare_output_folder(output_folder)?; let count = html_export::export_markdown_to_html(&base_dir, output_folder)?; diff --git a/core/src/filesystem.rs b/core/src/filesystem.rs index 1d2d31dc..0a3a81e1 100644 --- a/core/src/filesystem.rs +++ b/core/src/filesystem.rs @@ -13,7 +13,7 @@ impl<'a> FileReaderIterator<'a> { pub fn new(git_commit_hash: Option<&'a str>, files: Vec) -> Self { Self { files: files.into_iter(), - git_commit_hash: git_commit_hash, + git_commit_hash, } } } @@ -58,7 +58,7 @@ impl Iterator for FileReaderIterator<'_>{ } /// Reads a file's content pub fn read_file(path: &Path) -> Result { - fs::read_to_string(path).map_err(|e| ReqvireError::IoError(e)) + fs::read_to_string(path).map_err(ReqvireError::IoError) } @@ -100,7 +100,7 @@ pub fn copy_file_with_structure(src: &Path, dst: &Path) -> Result<(), ReqvireErr } fs::copy(src, dst) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; Ok(()) } @@ -108,11 +108,11 @@ pub fn copy_file_with_structure(src: &Path, dst: &Path) -> Result<(), ReqvireErr /// Recursively copies all files and directories from source to destination pub fn copy_dir_all(src: &Path, dst: &Path) -> Result<(), ReqvireError> { fs::create_dir_all(dst) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; - for entry in fs::read_dir(src).map_err(|e| ReqvireError::IoError(e))? 
{ - let entry = entry.map_err(|e| ReqvireError::IoError(e))?; - let ty = entry.file_type().map_err(|e| ReqvireError::IoError(e))?; + for entry in fs::read_dir(src).map_err(ReqvireError::IoError)? { + let entry = entry.map_err(ReqvireError::IoError)?; + let ty = entry.file_type().map_err(ReqvireError::IoError)?; let src_path = entry.path(); let dst_path = dst.join(entry.file_name()); @@ -120,7 +120,7 @@ pub fn copy_dir_all(src: &Path, dst: &Path) -> Result<(), ReqvireError> { copy_dir_all(&src_path, &dst_path)?; } else { fs::copy(&src_path, &dst_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } } Ok(()) @@ -130,7 +130,7 @@ pub fn copy_dir_all(src: &Path, dst: &Path) -> Result<(), ReqvireError> { pub fn remove_dir_all>(path: P) -> Result<(), ReqvireError> { if path.as_ref().exists() { fs::remove_dir_all(path.as_ref()) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } Ok(()) } diff --git a/core/src/filters.rs b/core/src/filters.rs index e975451d..7be230d2 100644 --- a/core/src/filters.rs +++ b/core/src/filters.rs @@ -18,6 +18,7 @@ pub struct Filters { impl Filters { /// Builds a Filters struct, or returns a ReqvireError::InvalidGlob / InvalidRegex + #[allow(clippy::too_many_arguments)] pub fn new( file: Option<&str>, name_regex: Option<&str>, @@ -36,7 +37,7 @@ impl Filters { } - let file_glob = file.map(|p| compile_glob(p)).transpose()?; + let file_glob = file.map(compile_glob).transpose()?; let name_re = match name_regex { Some(r) => Some(Regex::new(r).map_err(|e| ReqvireError::InvalidRegex(e.to_string()))?), None => None, @@ -59,7 +60,7 @@ impl Filters { Some(r) => Some(Regex::new(r).map_err(|e| ReqvireError::InvalidRegex(e.to_string()))?), None => None, }; - let attachment_glob = attachment.map(|p| compile_glob(p)).transpose()?; + let attachment_glob = attachment.map(compile_glob).transpose()?; Ok(Filters { file_glob, @@ -90,9 +91,8 @@ impl Filters { // 3) type filter if let Some(tp) = 
&self.type_pat { // Handle "other-TYPENAME" pattern for custom types - if tp.starts_with("other-") { + if let Some(custom_type_name) = tp.strip_prefix("other-") { // Extract the custom type name after "other-" - let custom_type_name = &tp[6..]; match &e.element_type { element::ElementType::Other(actual_name) => { if actual_name.to_lowercase() != custom_type_name { @@ -103,15 +103,14 @@ impl Filters { } } else { let filter_type = element::ElementType::from_metadata(tp); - if &e.element_type != &filter_type { + if e.element_type != filter_type { return false; } } } // 5) content regex if let Some(re) = &self.content_re { - let text = e.content.clone(); - if !re.is_match(&text) { + if !re.is_match(&e.content) { return false; } } diff --git a/core/src/format.rs b/core/src/format.rs index a0192593..2b2d6cb4 100644 --- a/core/src/format.rs +++ b/core/src/format.rs @@ -46,7 +46,7 @@ pub fn format_files(registry: &GraphRegistry, dry_run: bool, with_full_relations // Read current content if file exists let current_content = if full_file_path.exists() { fs::read_to_string(&full_file_path) - .map_err(|e| ReqvireError::IoError(e))? + .map_err(ReqvireError::IoError)? 
} else { String::new() // File doesn't exist, treat as empty }; @@ -66,12 +66,12 @@ pub fn format_files(registry: &GraphRegistry, dry_run: bool, with_full_relations // Create parent directories if needed if let Some(parent_dir) = full_file_path.parent() { fs::create_dir_all(parent_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } // Write the new content fs::write(&full_file_path, new_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; debug!("Formatted {} with {} elements", file_path, elements.len()); } diff --git a/core/src/git_commands.rs b/core/src/git_commands.rs index 74b6070b..77140dd5 100644 --- a/core/src/git_commands.rs +++ b/core/src/git_commands.rs @@ -2,16 +2,15 @@ use std::process::Command; use anyhow::Result; use crate::error::ReqvireError; use std::path::PathBuf; -use once_cell::sync::Lazy; -use std::sync::Mutex; +use std::sync::{LazyLock, Mutex}; use std::collections::HashMap; use std::sync::atomic::{AtomicBool, Ordering}; -static REPO_URL: Lazy>> = Lazy::new(|| Mutex::new(None)); -static COMMIT_HASH: Lazy>> = Lazy::new(|| Mutex::new(None)); -static GIT_ROOT_DIR: Lazy>> = Lazy::new(|| Mutex::new(None)); -static GIT_ROOT_CACHE: Lazy>> = Lazy::new(|| Mutex::new(HashMap::new())); +static REPO_URL: LazyLock>> = LazyLock::new(|| Mutex::new(None)); +static COMMIT_HASH: LazyLock>> = LazyLock::new(|| Mutex::new(None)); +static GIT_ROOT_DIR: LazyLock>> = LazyLock::new(|| Mutex::new(None)); +static GIT_ROOT_CACHE: LazyLock>> = LazyLock::new(|| Mutex::new(HashMap::new())); // Disable caching in tests to prevent interference between parallel tests static DISABLE_CACHE_FOR_TESTS: AtomicBool = AtomicBool::new(false); @@ -43,7 +42,7 @@ pub fn get_repository_base_url() -> Result { // Fetch the repository URL from git configuration let output = Command::new("git") - .args(&["config", "--get", "remote.origin.url"]) + .args(["config", "--get", "remote.origin.url"]) .output()?; if 
!output.status.success() { @@ -89,7 +88,7 @@ pub fn get_commit_hash() -> Result { // Run the git command to get the current commit hash let output = Command::new("git") - .args(&["rev-parse", "HEAD"]) + .args(["rev-parse", "HEAD"]) .output()?; if !output.status.success() { let err = String::from_utf8_lossy(&output.stderr); @@ -117,7 +116,7 @@ pub fn get_file_at_commit(file_path: &str,folder:&PathBuf, commit: &str) -> Resu match file_path.strip_prefix(&git_root) { Some(relative_path) => { let output = Command::new("git") - .args(&["show", &format!("{}:{}", commit, relative_path.trim_start_matches('/'))]) + .args(["show", &format!("{}:{}", commit, relative_path.trim_start_matches('/'))]) .current_dir(&git_root) .output()?; if !output.status.success() { @@ -178,7 +177,7 @@ pub fn get_git_root_dir() -> Result { } let output = Command::new("git") - .args(&["rev-parse", "--show-toplevel"]) + .args(["rev-parse", "--show-toplevel"]) .output()?; if !output.status.success() { @@ -204,7 +203,7 @@ pub fn ls_tree_commit(commit: &str) -> Result, ReqvireError> { let git_root = get_git_root_dir()?; let output = Command::new("git") - .args(&["ls-tree", "--name-only", "-r", commit]) + .args(["ls-tree", "--name-only", "-r", commit]) .current_dir(&git_root) .output()?; @@ -230,7 +229,7 @@ pub fn ls_tree_commit(commit: &str) -> Result, ReqvireError> { #[allow(dead_code)] fn get_changed_files_from_git() -> Result, ReqvireError> { let output = Command::new("git") - .args(&["diff", "--name-only"]) + .args(["diff", "--name-only"]) .output()?; if !output.status.success() { let err = String::from_utf8_lossy(&output.stderr); @@ -251,7 +250,7 @@ fn get_changed_files_from_git() -> Result, ReqvireError> { /// with `folder` as the current directory. Returns a list of file paths. 
pub fn ls_tree_commit_in_folder(commit: &str, folder: &PathBuf) -> Result,ReqvireError> { let output = Command::new("git") - .args(&["ls-tree", "--name-only", "-r", commit]) + .args(["ls-tree", "--name-only", "-r", commit]) .current_dir(folder) .output()?; diff --git a/core/src/graph_registry.rs b/core/src/graph_registry.rs index d8739a66..9845d8e5 100644 --- a/core/src/graph_registry.rs +++ b/core/src/graph_registry.rs @@ -1,6 +1,7 @@ use std::collections::{HashMap, BTreeSet, HashSet, BTreeMap}; use std::fs; use std::path::{Path, PathBuf}; +use std::sync::LazyLock; use log::{debug, warn}; use serde::Serialize; @@ -12,6 +13,11 @@ use crate::git_commands; use globset::GlobSet; use regex::Regex; +/// Cached regex for matching .md file references in relation targets +static MD_FILE_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"\.md(?:#|$)").unwrap() +}); + #[derive(Debug, Clone, Serialize)] pub struct Page { @@ -46,6 +52,12 @@ pub struct GraphRegistry { pub modified_files: HashSet, // Track files modified during CRUD operations } +impl Default for GraphRegistry { + fn default() -> Self { + Self::new() + } +} + impl GraphRegistry { /// Creates a new empty GraphRegistry pub fn new() -> Self { @@ -121,7 +133,7 @@ impl GraphRegistry { log::debug!("Running cross-section duplicate validation..."); let mut errors = Vec::new(); - for (_identifier, node) in &self.nodes { + for node in self.nodes.values() { let element = &node.element; // Collect all relation targets (normalized identifiers) @@ -180,13 +192,11 @@ impl GraphRegistry { log::debug!("Propagating missing opposite relations..."); let mut to_add: Vec<(String, crate::relation::Relation)> = Vec::new(); let element_ids: Vec = self.nodes.keys().cloned().collect(); - let md_regex = Regex::new(r"\.md(?:#|$)").unwrap(); - for source_id in &element_ids { if let Some(source_node) = self.nodes.get(source_id) { for relation in &source_node.element.relations { if let crate::relation::LinkType::Identifier(ref target_id) = 
relation.target.link { - if !md_regex.is_match(target_id) || excluded_filename_patterns.is_match(target_id) { + if !MD_FILE_RE.is_match(target_id) || excluded_filename_patterns.is_match(target_id) { continue; } @@ -1466,7 +1476,7 @@ impl GraphRegistry { /// Leaf elements are those that: /// 1. Have backward relations (derivedFrom, satisfy, verify) - they trace upward to something /// 2. Have no outgoing forward relations to other elements - nothing derives from them - /// Optionally filter by element types + /// Optionally filter by element types pub fn find_leaf_elements(&self, type_filter: Option<&[&str]>) -> Vec { let mut leaves: Vec = self.nodes.values() .map(|node| &node.element) @@ -1474,7 +1484,7 @@ impl GraphRegistry { // Apply type filter if provided if let Some(types) = type_filter { let element_type_str = element.element_type.as_str(); - if !types.iter().any(|t| *t == element_type_str) { + if !types.contains(&element_type_str) { return false; } } @@ -1513,7 +1523,7 @@ impl GraphRegistry { .map(|node| &node.element) .filter(|element| { let element_type_str = element.element_type.as_str(); - type_filter.iter().any(|t| *t == element_type_str) + type_filter.contains(&element_type_str) }) .map(|e| e.identifier.clone()) .collect(); @@ -1619,7 +1629,7 @@ impl GraphRegistry { relations_by_target .entry(target_id) - .or_insert_with(Vec::new) + .or_default() .push(relation.clone()); } @@ -1671,7 +1681,7 @@ impl GraphRegistry { // Add the element content if !element.content.trim().is_empty() { markdown.push_str(element.content.trim_end()); - markdown.push_str("\n"); + markdown.push('\n'); } // Add metadata subsection @@ -1690,7 +1700,7 @@ impl GraphRegistry { for (key, value) in custom_metadata { markdown.push_str(&format!(" * {}: {}\n", key, value)); } - markdown.push_str("\n"); + markdown.push('\n'); // Add attachments subsection if there are attachments // Deduplicate attachments by target, keeping first occurrence @@ -1749,13 +1759,13 @@ impl GraphRegistry { 
.map(|e| e.name.clone()) .unwrap_or_else(|| { // Fallback to identifier fragment if element not found - identifier.split('#').last().unwrap_or(identifier).to_string() + identifier.split('#').next_back().unwrap_or(identifier).to_string() }); markdown.push_str(&format!(" * [{}]({})\n", display_name, relative_id)); } } } - markdown.push_str("\n"); + markdown.push('\n'); } // Add relations subsection if there are relations to include @@ -1873,7 +1883,7 @@ impl GraphRegistry { target_text )); } - markdown.push_str("\n"); + markdown.push('\n'); } // Apply generic formatting to ensure exactly one blank line before all #### headers @@ -1909,7 +1919,7 @@ impl GraphRegistry { if !in_details && line.trim().is_empty() { // Check if the previous line was a #### header let prev_line_is_header = result.lines().last() - .map_or(false, |l| l.trim_start().starts_with("####")); + .is_some_and(|l| l.trim_start().starts_with("####")); if prev_line_is_header { continue; } @@ -1947,7 +1957,7 @@ impl GraphRegistry { file_elements .entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(element); } @@ -1974,7 +1984,7 @@ impl GraphRegistry { .iter() .enumerate() .map(|(i, e)| { - let fragment = e.identifier.split('#').last().unwrap_or(&e.identifier).to_string(); + let fragment = e.identifier.split('#').next_back().unwrap_or(&e.identifier).to_string(); (fragment, i) }) .collect(); @@ -1996,7 +2006,7 @@ impl GraphRegistry { // This element has a file-local parent children_map .entry(parent_idx) - .or_insert_with(Vec::new) + .or_default() .push(idx); has_parent.insert(idx); } @@ -2046,7 +2056,7 @@ impl GraphRegistry { } // Reorder elements based on ordered indices - let original: Vec<&Element> = elements.drain(..).collect(); + let original: Vec<&Element> = std::mem::take(elements); for idx in ordered_indices { elements.push(original[idx]); } @@ -2127,7 +2137,7 @@ impl GraphRegistry { // Create parent directories if needed if let Some(parent_dir) = dst_path.parent() { 
fs::create_dir_all(parent_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } // Copy the file @@ -2377,7 +2387,7 @@ impl GraphRegistry { // Create output directory if it doesn't exist if !output_dir.exists() { fs::create_dir_all(output_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } // Generate and write markdown files @@ -2394,12 +2404,12 @@ impl GraphRegistry { // Create parent directories if needed if let Some(parent_dir) = output_file_path.parent() { fs::create_dir_all(parent_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } // Write the markdown file fs::write(&output_file_path, markdown_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; debug!("Flushed {} elements to {}", elements.len(), @@ -2425,7 +2435,7 @@ impl GraphRegistry { // Create output directory if it doesn't exist if !output_dir.exists() { fs::create_dir_all(output_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } let grouped_elements = self.group_elements_by_location(); @@ -2443,12 +2453,12 @@ impl GraphRegistry { // Create parent directories if needed if let Some(parent_dir) = output_file_path.parent() { fs::create_dir_all(parent_dir) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; } // Write the markdown file fs::write(&output_file_path, markdown_content) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; // Collect InternalPath relations from elements in this file for element in elements { @@ -2483,7 +2493,7 @@ impl GraphRegistry { /// Updates relation identifiers when elements move between files fn update_relation_identifiers(&mut self, moved_element_id: &str, _old_file_path: &str, new_file_path: &str) { // Extract just the fragment (element name) from the moved element's identifier - let moved_fragment = 
moved_element_id.split('#').last().unwrap_or(moved_element_id); + let moved_fragment = moved_element_id.split('#').next_back().unwrap_or(moved_element_id); // 1. Update relations FROM other elements TO the moved element for (_id, node) in self.nodes.iter_mut() { @@ -2496,45 +2506,55 @@ impl GraphRegistry { for relation in &mut node.element.relations { if let crate::relation::LinkType::Identifier(ref mut target_id) = relation.target.link { if target_id == moved_element_id { - // The target element moved to a different file if node.element.file_path != new_file_path { - // Cross-file reference needed - use just the fragment + // Cross-file reference needed *target_id = format!("{}#{}", new_file_path, moved_fragment); relation.target.text = format!("{}#{}", new_file_path, moved_fragment); + } else { + // Same file now — update to fragment-only reference + *target_id = moved_fragment.to_string(); + relation.target.text = moved_fragment.to_string(); } - // If same file, keep as-is } } } } // 2. 
Update relations FROM the moved element TO other elements - // First collect target file paths to avoid borrowing issues - let target_file_paths: std::collections::HashMap = self.nodes.values() - .map(|node| (node.element.identifier.clone(), node.element.file_path.clone())) - .collect(); + // Build lookup maps: full identifier -> file_path, and fragment -> (full_id, file_path) + // This allows resolving both full identifiers and bare fragments + let mut id_to_file: std::collections::HashMap = std::collections::HashMap::new(); + let mut fragment_to_id_file: std::collections::HashMap = std::collections::HashMap::new(); + for node in self.nodes.values() { + let id = node.element.identifier.clone(); + let file = node.element.file_path.clone(); + id_to_file.insert(id.clone(), file.clone()); + // Also index by bare fragment for fallback lookup + let fragment = id.split('#').next_back().unwrap_or(&id).to_string(); + fragment_to_id_file.insert(fragment, (id, file)); + } if let Some(moved_node) = self.nodes.get_mut(moved_element_id) { for relation in &mut moved_node.element.relations { if let crate::relation::LinkType::Identifier(ref mut target_id) = relation.target.link { - // Extract the original target identifier (remove any file path prefix) - let original_target_id = if target_id.contains('#') { - target_id.split('#').last().unwrap_or("").to_string() - } else { - target_id.clone() - }; + // Extract the bare fragment (element name) from the target + let fragment = target_id.split('#').next_back().unwrap_or(target_id).to_string(); + + // Resolve target's file path: try full target_id first, then bare fragment + let resolved = id_to_file.get(target_id.as_str()) + .map(|file| (fragment.clone(), file.clone())) + .or_else(|| fragment_to_id_file.get(&fragment) + .map(|(_full_id, file)| (fragment.clone(), file.clone()))); - // Find the target element to check its file location - if let Some(target_file_path) = target_file_paths.get(&original_target_id) { - // If moved element 
is now in different file than target + if let Some((target_fragment, target_file_path)) = resolved { if new_file_path != target_file_path { - // Update to cross-file reference - *target_id = format!("{}#{}", target_file_path, original_target_id); - relation.target.text = format!("{}#{}", target_file_path, original_target_id); + // Cross-file reference needed + *target_id = format!("{}#{}", target_file_path, target_fragment); + relation.target.text = format!("{}#{}", target_file_path, target_fragment); } else { - // Same file, use simple reference - *target_id = original_target_id.clone(); - relation.target.text = original_target_id; + // Same file, use simple fragment reference + *target_id = target_fragment.clone(); + relation.target.text = target_fragment; } } } @@ -3138,7 +3158,7 @@ impl GraphRegistry { let mut orphaned_children: Vec = Vec::new(); let hierarchical_types = crate::relation::get_hierarchical_relation_types(); - for (_child_id, child_node) in &self.nodes { + for child_node in self.nodes.values() { // Count how many hierarchical parent relations this child has to the element being deleted let mut parents_to_target = 0; let mut total_parents = 0; @@ -3304,12 +3324,13 @@ impl GraphRegistry { self.update_relation_identifiers(&old_identifier, &source_file, target_file); // Construct the new identifier (file path changed, fragment stays the same) - let fragment = old_identifier.split('#').last().unwrap_or(""); + let fragment = old_identifier.split('#').next_back().unwrap_or(""); let new_identifier = format!("{}#{}", target_file, fragment); - // Update the element's identifier field in the node - if let Some(node) = self.nodes.get_mut(&old_identifier) { + // Re-key the node in the HashMap: remove with old key, update identifier, insert with new key + if let Some(mut node) = self.nodes.remove(&old_identifier) { node.element.identifier = new_identifier.clone(); + self.nodes.insert(new_identifier.clone(), node); } // Update all attachment identifiers pointing 
to this element @@ -3357,6 +3378,7 @@ impl GraphRegistry { let target_type = target_node.element.element_type.clone(); // Validate all sources exist and collect their data + #[allow(clippy::type_complexity)] let mut source_data: Vec<(String, String, String, Vec, Vec)> = Vec::new(); for source_id in source_ids { let source_node = self.nodes.get(source_id) @@ -3674,7 +3696,7 @@ impl GraphRegistry { let file_full_path = directory.join(file_path); if file_full_path.exists() { fs::remove_file(&file_full_path) - .map_err(|e| ReqvireError::IoError(e))?; + .map_err(ReqvireError::IoError)?; log::info!("Deleted empty file: {}", file_path); } } @@ -3703,7 +3725,6 @@ fn extract_content_parts(content: &str) -> (String, String) { // Find end of details (next #### or end) let details_end = rest.find("\n#### ") - .map(|p| p) .unwrap_or(rest.len()); (main, rest[..details_end].to_string()) diff --git a/core/src/html/markdown.rs b/core/src/html/markdown.rs index a4c3870e..c1dd4c90 100644 --- a/core/src/html/markdown.rs +++ b/core/src/html/markdown.rs @@ -1,9 +1,58 @@ use std::path::{Path, PathBuf}; use pulldown_cmark::{html, Options, Parser}; -use lazy_static::lazy_static; +use std::sync::LazyLock; use regex::{Captures, Regex}; use crate::error::ReqvireError; +// --- Module-level lazy statics (moved from function bodies) --- + +static HEADER_REGEX: LazyLock = LazyLock::new(|| Regex::new(r"<(h[1-3])>([^<]+)").unwrap()); + +/// Find each mermaid code-block in HTML output +static MERMAID_HTML_BLOCK: LazyLock = LazyLock::new(|| Regex::new( + r#"
([\s\S]*?)
"# +).unwrap()); + +/// Find all .md links in mermaid diagrams +static MERMAID_MD_LINK: LazyLock = LazyLock::new(|| Regex::new( + r#"(click\s+\S+\s+")([^"]*?)\.md(#[^"]*)?(")"# +).unwrap()); + +static MERMAID_BLOCK: LazyLock = LazyLock::new(|| Regex::new( + r"(?s)(?P```mermaid\s+(?P.*?)```)" +).unwrap()); + +static D3_TREE_BLOCK: LazyLock = LazyLock::new(|| Regex::new( + r"(?s)```d3-tree\s*\n(?P.*?)```" +).unwrap()); + +static D3_SANKEY_BLOCK: LazyLock = LazyLock::new(|| Regex::new( + r"(?s)```d3-sankey\s*\n(?P.*?)```" +).unwrap()); + +static D3_SUNBURST_BLOCK: LazyLock = LazyLock::new(|| Regex::new( + r"(?s)```d3-sunburst\s*\n(?P.*?)```" +).unwrap()); + +static D3_ICICLE_BLOCK: LazyLock = LazyLock::new(|| Regex::new( + r"(?s)```d3-icicle\s*\n(?P.*?)```" +).unwrap()); + +// [text](../path/to/file.md#fragment) +static MD_LINK_WITH_HASH_REGEX: LazyLock = LazyLock::new(|| { + Regex::new(r"(\]\()((?:\.\./)*)([^#)]+)\.md(#[^)]+)(\))").unwrap() +}); + +// [text](../path/to/file.md) +static MD_LINK_REGEX: LazyLock = LazyLock::new(|| { + Regex::new(r"(\]\()((?:\.\./)*)([^#)]+)\.md(\))").unwrap() +}); + +// bare link text [foo.md] +static MD_LINK_TEXT_REGEX: LazyLock = LazyLock::new(|| { + Regex::new(r"\[([^]]+)\.md\]").unwrap() +}); + pub fn markdown_to_html_content( file_path: &PathBuf, markdown_content: &str, @@ -48,12 +97,6 @@ pub fn markdown_to_html_content( /// Add id attributes to headers for anchor links fn add_anchor_ids(html_content: &str) -> String { - use regex::Regex; - - lazy_static::lazy_static! { - static ref HEADER_REGEX: Regex = Regex::new(r"<(h[1-3])>([^<]+)").unwrap(); - } - HEADER_REGEX .replace_all(html_content, |caps: ®ex::Captures| { let tag = &caps[1]; @@ -71,21 +114,8 @@ pub fn process_mermaid_diagrams( _file_path: &Path, // Used to determine if we're in a specifications folder html_content: &str, // the rendered HTML ) -> String { - lazy_static! { - /// 1) Find each mermaid code‐block - static ref MERMAID_BLOCK: Regex = Regex::new( - r#"
([\s\S]*?)
"# - ).unwrap(); - - /// 2) Find all .md links, we'll filter GitHub links in the replacement code - /// Note: pulldown-cmark 0.13+ does NOT HTML-encode quotes inside code blocks - static ref MD_LINK: Regex = Regex::new( - r#"(click\s+\S+\s+")([^"]*?)\.md(#[^"]*)?(")"# - ).unwrap(); - } - // Process mermaid blocks - let mermaid_processed = MERMAID_BLOCK + let mermaid_processed = MERMAID_HTML_BLOCK .replace_all(html_content, |caps: ®ex::Captures| { let inner = &caps[1]; @@ -96,7 +126,7 @@ pub fn process_mermaid_diagrams( .replace("&", "&"); // Handle .md links, but preserve GitHub blob links - let fixed = MD_LINK.replace_all(&decoded, |c: ®ex::Captures| { + let fixed = MERMAID_MD_LINK.replace_all(&decoded, |c: ®ex::Captures| { let prefix = &c[1]; // click X " let path = &c[2]; // path/to/file let anchor = c.get(3).map_or("", |m| m.as_str()); @@ -125,12 +155,6 @@ use std::collections::HashMap; /// Extracts Mermaid blocks and replaces them with placeholders fn extract_mermaid_blocks(markdown: &str) -> (String, HashMap) { - lazy_static! { - static ref MERMAID_BLOCK: Regex = Regex::new( - r"(?s)(?P```mermaid\s+(?P.*?)```)" - ).unwrap(); - } - let mut map = HashMap::new(); let mut counter = 0; let result = MERMAID_BLOCK.replace_all(markdown, |caps: &Captures| { @@ -146,12 +170,6 @@ fn extract_mermaid_blocks(markdown: &str) -> (String, HashMap) { /// Extracts D3 tree blocks and replaces them with placeholders fn extract_d3_tree_blocks(markdown: &str) -> (String, HashMap) { - lazy_static! { - static ref D3_TREE_BLOCK: Regex = Regex::new( - r"(?s)```d3-tree\s*\n(?P.*?)```" - ).unwrap(); - } - let mut map = HashMap::new(); let mut counter = 0; let result = D3_TREE_BLOCK.replace_all(markdown, |caps: &Captures| { @@ -479,12 +497,6 @@ fn generate_d3_tree_html(json_data: &str) -> String { /// Extracts D3 Sankey blocks and replaces them with placeholders fn extract_d3_sankey_blocks(markdown: &str) -> (String, HashMap) { - lazy_static! 
{ - static ref D3_SANKEY_BLOCK: Regex = Regex::new( - r"(?s)```d3-sankey\s*\n(?P.*?)```" - ).unwrap(); - } - let mut map = HashMap::new(); let mut counter = 0; let result = D3_SANKEY_BLOCK.replace_all(markdown, |caps: &Captures| { @@ -648,12 +660,6 @@ fn generate_d3_sankey_html(json_data: &str) -> String { /// Extracts D3 Sunburst blocks and replaces them with placeholders fn extract_d3_sunburst_blocks(markdown: &str) -> (String, HashMap) { - lazy_static! { - static ref D3_SUNBURST_BLOCK: Regex = Regex::new( - r"(?s)```d3-sunburst\s*\n(?P.*?)```" - ).unwrap(); - } - let mut map = HashMap::new(); let mut counter = 0; let result = D3_SUNBURST_BLOCK.replace_all(markdown, |caps: &Captures| { @@ -990,12 +996,6 @@ fn generate_d3_sunburst_html(json_data: &str) -> String { /// Extracts D3 Icicle blocks and replaces them with placeholders fn extract_d3_icicle_blocks(markdown: &str) -> (String, HashMap) { - lazy_static! { - static ref D3_ICICLE_BLOCK: Regex = Regex::new( - r"(?s)```d3-icicle\s*\n(?P.*?)```" - ).unwrap(); - } - let mut map = HashMap::new(); let mut counter = 0; let result = D3_ICICLE_BLOCK.replace_all(markdown, |caps: &Captures| { @@ -1297,20 +1297,6 @@ fn convert_markdown_links_to_html( markdown_content: &str, _base_folder: &PathBuf ) -> String { - lazy_static! 
{ - // 1) [text](../path/to/file.md#fragment) - static ref MD_LINK_WITH_HASH_REGEX: Regex = - Regex::new(r"(\]\()((?:\.\./)*)([^#)]+)\.md(#[^)]+)(\))").unwrap(); - - // 2) [text](../path/to/file.md) - static ref MD_LINK_REGEX: Regex = - Regex::new(r"(\]\()((?:\.\./)*)([^#)]+)\.md(\))").unwrap(); - - // 3) bare link text [foo.md] - static ref MD_LINK_TEXT_REGEX: Regex = - Regex::new(r"\[([^]]+)\.md\]").unwrap(); - } - // 1) Links with a fragment let content = MD_LINK_WITH_HASH_REGEX.replace_all(markdown_content, |caps: &Captures| { let before = &caps[1]; // "](" diff --git a/core/src/html/mod.rs b/core/src/html/mod.rs index 72892602..5f08d083 100644 --- a/core/src/html/mod.rs +++ b/core/src/html/mod.rs @@ -14,7 +14,6 @@ /// - `scripts`: JavaScript utilities (mobile menu toggle) /// - `pages`: Page-specific generators (to be implemented in Phase 2/3) /// - `visualizations`: Visualization components (to be implemented in Phase 2/3) - mod components; mod layouts; mod scripts; @@ -32,7 +31,7 @@ pub use maud::Markup; pub use pages::{coverage, index, model, resources, traces, traceflow}; use crate::error::ReqvireError; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; /// Convert markdown file to HTML using component-based architecture /// @@ -127,7 +126,7 @@ pub fn convert_to_html( /// Calculate the relative path prefix needed for navigation links /// based on the depth of the current file relative to base_folder -fn calculate_nav_prefix(file_path: &PathBuf, base_folder: &PathBuf) -> String { +fn calculate_nav_prefix(file_path: &Path, base_folder: &Path) -> String { // Get relative path from base_folder let relative_path = match file_path.strip_prefix(base_folder) { Ok(rel) => rel, @@ -187,7 +186,7 @@ pub fn generate_diagram_page(title: &str, diagram: Markup, nav_prefix: &str) -> /// # Arguments /// * `html_content` - Pre-converted HTML content from markdown /// * `nav_prefix` - Relative path prefix for navigation links -/// Generate coverage page with new 
component system +/// Generate coverage page with new component system pub fn generate_coverage_page(html_content: &str, nav_prefix: &str) -> String { pages::coverage::render(html_content, nav_prefix).into_string() } diff --git a/core/src/html_export.rs b/core/src/html_export.rs index d3f94ce3..77ca4db0 100644 --- a/core/src/html_export.rs +++ b/core/src/html_export.rs @@ -67,7 +67,7 @@ fn process_markdown_files( for entry in WalkDir::new(scan_folder) .into_iter() .filter_map(Result::ok) - .filter(|e| e.path().is_file() && e.path().extension().map_or(false, |ext| ext == "md")) + .filter(|e| e.path().is_file() && e.path().extension().is_some_and(|ext| ext == "md")) { let file_path = entry.path().to_path_buf(); diff --git a/core/src/index_generator.rs b/core/src/index_generator.rs index f71cab47..aeaf5f45 100644 --- a/core/src/index_generator.rs +++ b/core/src/index_generator.rs @@ -19,7 +19,7 @@ pub fn generate_readme_index( for element in registry.get_all_elements() { grouped_elements .entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(element); } @@ -68,7 +68,7 @@ pub fn generate_readme_index( } } - index_content.push_str("\n"); // Add spacing between files + index_content.push('\n'); // Add spacing between files } let total_files = grouped_elements.len(); diff --git a/core/src/lint.rs b/core/src/lint.rs index dda23f39..b5d27ba1 100644 --- a/core/src/lint.rs +++ b/core/src/lint.rs @@ -3,7 +3,6 @@ /// This module provides linting functionality to detect issues in requirements relations: /// - Redundant verify relations (auto-fixable) /// - Redundant hierarchical relations (auto-fixable) - use crate::element::ElementType; use crate::error::ReqvireError; use crate::graph_registry::GraphRegistry; @@ -658,7 +657,7 @@ fn detect_multi_branch_convergence(registry: &GraphRegistry) -> Vec Self { + Self::new() + } +} + impl ModelManager { /// Creates a new ModelManager pub fn new() -> Self { - // Initialize empty graph registry - let 
graph_registry = GraphRegistry::new(); - Self { - graph_registry + graph_registry: GraphRegistry::new(), } } @@ -139,7 +142,7 @@ impl ModelManager { for (name, file_path, line_number) in all_element_locations { name_locations .entry(name) - .or_insert_with(Vec::new) + .or_default() .push((file_path, line_number)); } diff --git a/core/src/parser.rs b/core/src/parser.rs index 5f5c578f..80ed881a 100644 --- a/core/src/parser.rs +++ b/core/src/parser.rs @@ -19,7 +19,7 @@ fn remove_generated_diagrams(content: &str) -> String { let mut is_auto_generated = false; // Read until the closing ``` - while let Some(block_line) = lines.next() { + for block_line in lines.by_ref() { block_lines.push(block_line); if block_line.contains("REQVIRE-AUTOGENERATED-DIAGRAM") { @@ -117,7 +117,7 @@ pub fn parse_single_element( found_header = true; current_subsection = SubSection::Requirement; - let element_name = trimmed[4..].trim().to_string(); + let element_name = trimmed.strip_prefix("### ").unwrap_or(trimmed).trim().to_string(); // file_path is already relative to git root, use it directly // Only normalize the fragment part (element name) @@ -139,7 +139,7 @@ pub fn parse_single_element( // Parse #### subsections } else if trimmed.starts_with("#### ") && current_element.is_some() { - let subsection = SubSection::from_str(&trimmed[5..].trim()); + let subsection = SubSection::parse(trimmed[5..].trim()); if seen_subsections.contains(&subsection) { return Err(ReqvireError::DuplicateSubsection( @@ -361,7 +361,7 @@ fn is_specification_file(content: &str) -> bool { pub fn parse_elements( file: &str, content: &str, - file_path: &PathBuf, + file_path: &Path, git_commit: Option<&str>, ) -> (Vec, Vec, String) { // Check if this is a specification file (first H1 must be "# Elements") @@ -419,7 +419,7 @@ pub fn parse_elements( } else if trimmed.starts_with("## ") { // Section headers (## ) are not allowed - report syntax error - let section_name = trimmed[3..].trim(); + let section_name = 
trimmed.strip_prefix("## ").unwrap_or(trimmed).trim(); errors.push(ReqvireError::InvalidMarkdownStructure(format!( "Section headers (## ) are not allowed in specification files. Found '## {}' at line {} in file '{}'. Use ### for element headers instead.", section_name, @@ -441,7 +441,7 @@ pub fn parse_elements( skip_current_element = false; seen_subsections.clear(); - let element_name = trimmed[4..].trim().to_string(); + let element_name = trimmed.strip_prefix("### ").unwrap_or(trimmed).trim().to_string(); match file_path.parent() { Some(file_folder) => { @@ -449,11 +449,11 @@ pub fn parse_elements( match utils::normalize_identifier( &identifier, - &file_folder.to_path_buf() + file_folder ) { Ok(identifier) => { - let relative_file = match utils::get_relative_path(&file_path) { + let relative_file = match utils::get_relative_path(file_path) { Ok(path) => path, Err(err) => { debug!("Error: {}", &err); @@ -523,7 +523,7 @@ pub fn parse_elements( debug!("Error: {}", msg); } else if trimmed.starts_with("#### ") && current_element.is_some() { - let subsection = SubSection::from_str(&trimmed[5..].trim()); + let subsection = SubSection::parse(trimmed[5..].trim()); if !skip_current_element { if seen_subsections.contains(&subsection) { @@ -610,7 +610,7 @@ pub fn parse_elements( Some(file_folder) => { match utils::normalize_identifier( &final_link, - &file_folder.to_path_buf() + file_folder ) { Ok(normalized_target) => { // element_id will be populated later by GraphRegistry after all elements are registered diff --git a/core/src/relation.rs b/core/src/relation.rs index 4c982b0a..902eed2e 100644 --- a/core/src/relation.rs +++ b/core/src/relation.rs @@ -1,5 +1,5 @@ -use lazy_static::lazy_static; use std::collections::HashMap; +use std::sync::LazyLock; use crate::error::ReqvireError; use serde::Serialize; use std::cmp::Ordering; @@ -17,86 +17,84 @@ pub struct RelationTypeInfo { pub label: &'static str, } -lazy_static! 
{ - pub static ref RELATION_TYPES: HashMap<&'static str, RelationTypeInfo> = { - let mut m = HashMap::new(); - - // Derive relations - m.insert("derivedFrom", RelationTypeInfo { - name: "derivedFrom", - opposite: Some("derive"), - description: "Element is derived from another element", - arrow: "-.->", - label: "derivedFrom", - }); - m.insert("derive", RelationTypeInfo { - name: "derive", - opposite: Some("derivedFrom"), - description: "Element is source for a derived element", - arrow: "-.->", - label: "deriveReqT", - }); - - // Satisfy relations (implementations only) - m.insert("satisfiedBy", RelationTypeInfo { - name: "satisfiedBy", - opposite: Some("satisfy"), - description: "A requirement being satisfied by an implementation.", - arrow: "-->", - label: "satisfiedBy", - }); - m.insert("satisfy", RelationTypeInfo { - name: "satisfy", - opposite: Some("satisfiedBy"), - description: "Implementation satisfies a requirement", - arrow: "-->", - label: "satisfies", - }); - - // Refine relations (refinement ownership) - m.insert("refinedBy", RelationTypeInfo { - name: "refinedBy", - opposite: Some("refine"), - description: "A requirement being refined by a refinement element or specification file.", - arrow: "-->", - label: "refinedBy", - }); - m.insert("refine", RelationTypeInfo { - name: "refine", - opposite: Some("refinedBy"), - description: "Element refines a requirement", - arrow: "-->", - label: "refines", - }); - - // Verify relations - m.insert("verifiedBy", RelationTypeInfo { - name: "verifiedBy", - opposite: Some("verify"), - description: "A souce element being verified by other element.", - arrow: "-.->", - label: "verifiedBy", - }); - m.insert("verify", RelationTypeInfo { - name: "verify", - opposite: Some("verifiedBy"), - description: "Element verifies another element", - arrow: "-.->", - label: "verifies", - }); - - // Trace relations - m.insert("trace", RelationTypeInfo { - name: "trace", - opposite: None, - description: "Element is related to another 
element in a non-directional way", - arrow: "-.->", - label: "trace", - }); - - m - }; -} +pub static RELATION_TYPES: LazyLock<HashMap<&'static str, RelationTypeInfo>> = LazyLock::new(|| { + let mut m = HashMap::new(); + + // Derive relations + m.insert("derivedFrom", RelationTypeInfo { + name: "derivedFrom", + opposite: Some("derive"), + description: "Element is derived from another element", + arrow: "-.->", + label: "derivedFrom", + }); + m.insert("derive", RelationTypeInfo { + name: "derive", + opposite: Some("derivedFrom"), + description: "Element is source for a derived element", + arrow: "-.->", + label: "deriveReqT", + }); + + // Satisfy relations (implementations only) + m.insert("satisfiedBy", RelationTypeInfo { + name: "satisfiedBy", + opposite: Some("satisfy"), + description: "A requirement being satisfied by an implementation.", + arrow: "-->", + label: "satisfiedBy", + }); + m.insert("satisfy", RelationTypeInfo { + name: "satisfy", + opposite: Some("satisfiedBy"), + description: "Implementation satisfies a requirement", + arrow: "-->", + label: "satisfies", + }); + + // Refine relations (refinement ownership) + m.insert("refinedBy", RelationTypeInfo { + name: "refinedBy", + opposite: Some("refine"), + description: "A requirement being refined by a refinement element or specification file.", + arrow: "-->", + label: "refinedBy", + }); + m.insert("refine", RelationTypeInfo { + name: "refine", + opposite: Some("refinedBy"), + description: "Element refines a requirement", + arrow: "-->", + label: "refines", + }); + + // Verify relations + m.insert("verifiedBy", RelationTypeInfo { + name: "verifiedBy", + opposite: Some("verify"), + description: "A source element being verified by other element.", + arrow: "-.->", + label: "verifiedBy", + }); + m.insert("verify", RelationTypeInfo { + name: "verify", + opposite: Some("verifiedBy"), + description: "Element verifies another element", + arrow: "-.->", + label: "verifies", + }); + + // Trace relations + m.insert("trace", RelationTypeInfo { + name: 
"trace", + opposite: None, + description: "Element is related to another element in a non-directional way", + arrow: "-.->", + label: "trace", + }); + + m +}); /// Relations to show in diagrams (one from each pair to avoid duplicates) /// These are typically the "forward" relations from the old direction system @@ -168,7 +166,7 @@ impl Eq for RelationTarget {} impl Ord for RelationTarget { fn cmp(&self, other: &Self) -> Ordering { - self.link.as_str().cmp(&other.link.as_str()) + self.link.as_str().cmp(other.link.as_str()) } } @@ -193,12 +191,13 @@ pub enum LinkType { } impl LinkType { /// Converts `LinkType` into a string representation. + /// For `InternalPath`, returns an empty string when the path is not valid UTF-8, + /// rather than panicking; handle the raw `Path` at the call site if non-UTF-8 paths must be preserved. pub fn as_str(&self) -> &str { match self { LinkType::Identifier(id) => id, LinkType::ExternalUrl(url) => url, - LinkType::InternalPath(path) => path.to_str() - .expect(&format!("InternalPath is not valid UTF-8: {:?}", path)) + LinkType::InternalPath(path) => path.to_str().unwrap_or_default(), } } } @@ -224,7 +223,7 @@ impl Eq for Relation {} impl Ord for Relation { fn cmp(&self, other: &Self) -> Ordering { // Compare relation types by name first - let relation_cmp = self.relation_type.name.cmp(&other.relation_type.name); + let relation_cmp = self.relation_type.name.cmp(other.relation_type.name); // If relation types are equal, compare targets if relation_cmp == Ordering::Equal { @@ -257,7 +256,7 @@ impl Relation { .ok_or_else(|| ReqvireError::UnsupportedRelationType(relation_type.to_string()))?; Ok(Self { relation_type: relation_info, - target: RelationTarget{text: text, link: link, element_id}, + target: RelationTarget{text, link, element_id}, user_created: true, // Relations created via parsing are user-created }) } @@ -274,19 +273,14 @@ impl Relation { } pub fn update_target_identifier_link_url(&mut self, url: &str) { - match self.target.link { - 
LinkType::Identifier(_) => self.target.link=LinkType::Identifier(url.to_string()), - _ =>{} - }; + if let LinkType::Identifier(_) = self.target.link { self.target.link=LinkType::Identifier(url.to_string()) }; } /// Creates an opposite relation if possible for given target pub fn to_opposite(&self, name: &str, identifier: &str, element_id: &str) -> Option { if let Some(opposite_name) = self.relation_type.opposite { - match RELATION_TYPES.get(opposite_name) { - Some(opposite_info) => { - Some(Relation { + RELATION_TYPES.get(opposite_name).map(|opposite_info| Relation { relation_type: opposite_info, target: RelationTarget { text: name.to_string(), @@ -295,11 +289,6 @@ impl Relation { }, user_created: false, // Auto-generated opposite relations are not user-created }) - } - None => { - None - } - } } else { None } diff --git a/core/src/report_collect.rs b/core/src/report_collect.rs index 2602413e..df959ad0 100644 --- a/core/src/report_collect.rs +++ b/core/src/report_collect.rs @@ -343,8 +343,7 @@ fn collect_attachment_content( } AttachmentTarget::ElementIdentifier(elem_id) => { // Look up element content from registry - if let Some(elem) = registry.get_element(elem_id) { - Some(CollectedItem { + registry.get_element(elem_id).map(|elem| CollectedItem { name: elem.name.clone(), identifier: elem.identifier.clone(), file_path: elem.file_path.clone(), @@ -354,9 +353,6 @@ fn collect_attachment_content( source_type: SourceType::AttachmentElement, attached_to: Some(parent_identifier.to_string()), }) - } else { - None - } } } } @@ -372,8 +368,7 @@ fn collect_refinement_content( match &target.link { relation::LinkType::Identifier(elem_id) => { // Element identifier - look up refinement element content - if let Some(elem) = registry.get_element(elem_id) { - Some(CollectedItem { + registry.get_element(elem_id).map(|elem| CollectedItem { name: elem.name.clone(), identifier: elem.identifier.clone(), file_path: elem.file_path.clone(), @@ -383,9 +378,6 @@ fn 
collect_refinement_content( source_type: SourceType::RefinedByElement, attached_to: Some(parent_identifier.to_string()), }) - } else { - None - } } relation::LinkType::InternalPath(path) => { // File path - read file content (same logic as attachment file handling) diff --git a/core/src/report_coverage.rs b/core/src/report_coverage.rs index ed70d5a6..184057e3 100644 --- a/core/src/report_coverage.rs +++ b/core/src/report_coverage.rs @@ -151,7 +151,7 @@ impl CoverageReport { } } } - output.push_str("\n"); + output.push('\n'); } } @@ -169,7 +169,7 @@ impl CoverageReport { for requirement in sorted_requirements { output.push_str(&format!("- ❌ **[{}]({})**\n", requirement.name, requirement.identifier)); } - output.push_str("\n"); + output.push('\n'); } } @@ -193,7 +193,7 @@ impl CoverageReport { } } } - output.push_str("\n"); + output.push('\n'); } } @@ -211,7 +211,7 @@ impl CoverageReport { for verification in sorted_verifications { output.push_str(&format!("- ❌ **[{}]({})** ({})\n", verification.name, verification.identifier, verification.verification_type)); } - output.push_str("\n"); + output.push('\n'); } } @@ -229,7 +229,7 @@ impl CoverageReport { for verification in sorted_verifications { output.push_str(&format!("- ⚠️ **[{}]({})** ({})\n", verification.name, verification.identifier, verification.verification_type)); } - output.push_str("\n"); + output.push('\n'); } } @@ -299,13 +299,13 @@ pub fn generate_coverage_report(registry: &GraphRegistry) -> CoverageReport { if satisfied_by.is_empty() { // Unsatisfied test verification unsatisfied_test_files.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(verification_details); } else { // Satisfied test verification satisfied_test_verifications += 1; satisfied_test_files.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(verification_details); } } @@ -330,7 +330,7 @@ pub fn generate_coverage_report(registry: &GraphRegistry) -> CoverageReport { 
satisfied_by: vec![], // Orphaned verifications don't need satisfied_by info here }; orphaned_verifications_files.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(orphaned_details); } } @@ -381,13 +381,13 @@ pub fn generate_coverage_report(registry: &GraphRegistry) -> CoverageReport { if verified_by.is_empty() { // Unverified leaf requirement unverified_leaf_files.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(requirement_details); } else { // Verified leaf requirement verified_leaf_requirements += 1; verified_leaf_files.entry(element.file_path.clone()) - .or_insert_with(Vec::new) + .or_default() .push(requirement_details); } } diff --git a/core/src/report_model.rs b/core/src/report_model.rs index f31fb3e2..4f37da36 100644 --- a/core/src/report_model.rs +++ b/core/src/report_model.rs @@ -207,7 +207,7 @@ fn build_element_recursive( let mut sorted_relations = element.relations.clone(); sorted_relations.sort_by(|a, b| { // First sort by relation type - let type_cmp = a.relation_type.name.cmp(&b.relation_type.name); + let type_cmp = a.relation_type.name.cmp(b.relation_type.name); if type_cmp != std::cmp::Ordering::Equal { return type_cmp; } @@ -316,7 +316,7 @@ fn count_relations_recursive(registry: &GraphRegistry, element_id: &str, visited // Recurse for identifier targets if let relation::LinkType::Identifier(target_id) = &relation.target.link { - count_relations_recursive(registry, &target_id, visited, count, direction); + count_relations_recursive(registry, target_id, visited, count, direction); } } } @@ -337,7 +337,7 @@ fn generate_model_text(report: &ModelCentricReport, diagram_direction: &str) -> if let Some(ref type_filter) = report.metadata.type_filter { output.push_str(&format!("**Type Filter**: {}\n", type_filter.join(", "))); } - output.push_str("\n"); + output.push('\n'); // Elements with mermaid diagrams for element in &report.elements { @@ -407,9 +407,9 @@ fn 
generate_mermaid_for_element(element: &ModelCentricElement, indent: &str) -> .to_string(); folders.entry(folder) - .or_insert_with(HashMap::new) + .or_default() .entry(file_name) - .or_insert_with(Vec::new) + .or_default() .push(*elem); } diff --git a/core/src/search.rs b/core/src/search.rs index 1f71a2e5..456ffd7e 100644 --- a/core/src/search.rs +++ b/core/src/search.rs @@ -25,6 +25,7 @@ pub struct SearchFilters { impl SearchFilters { /// Creates new search filters with validation + #[allow(clippy::too_many_arguments)] pub fn new( file: Option<&str>, name_regex: Option<&str>, @@ -47,8 +48,8 @@ impl SearchFilters { Regex::new(pattern).map_err(|e| ReqvireError::InvalidRegex(e.to_string())) } - let file_glob = file.map(|p| compile_glob(p)).transpose()?; - let name_re = name_regex.map(|r| compile_regex(r)).transpose()?; + let file_glob = file.map(compile_glob).transpose()?; + let name_re = name_regex.map(compile_regex).transpose()?; // Parse and validate comma-separated element types let type_patterns = if let Some(t) = typ { @@ -72,9 +73,9 @@ impl SearchFilters { None }; - let content_re = content.map(|r| compile_regex(r)).transpose()?; - let page_content_re = page_content.map(|r| compile_regex(r)).transpose()?; - let attachment_glob = attachment.map(|p| compile_glob(p)).transpose()?; + let content_re = content.map(compile_regex).transpose()?; + let page_content_re = page_content.map(compile_regex).transpose()?; + let attachment_glob = attachment.map(compile_glob).transpose()?; // Parse and validate comma-separated relation lists let have_relations = if let Some(s) = have_relations { @@ -147,10 +148,9 @@ impl SearchFilters { let mut matches_any = false; for tp in types { - let matches = if tp.starts_with("other-") { + let matches = if let Some(custom_type_name) = tp.strip_prefix("other-") { // Handle "other-TYPENAME" pattern for custom types // Strip "other-" prefix and compare with stored custom type name - let custom_type_name = &tp[6..]; match &elem.element_type { 
element::ElementType::Other(actual_name) => { actual_name.to_lowercase() == custom_type_name @@ -159,7 +159,7 @@ impl SearchFilters { } } else { let filter_type = element::ElementType::from_metadata(tp); - &elem.element_type == &filter_type + elem.element_type == filter_type }; if matches { diff --git a/core/src/trace_tree_builder.rs b/core/src/trace_tree_builder.rs index 9ff83112..04c1a48b 100644 --- a/core/src/trace_tree_builder.rs +++ b/core/src/trace_tree_builder.rs @@ -4,7 +4,6 @@ /// from requirements and detect redundant relations. Used by both: /// - verification_trace: for redundant verify relations /// - lint: for both redundant verify and maybe-redundant hierarchical relations - use crate::element::Element; use crate::graph_registry::GraphRegistry; use crate::relation::VERIFICATION_TRACES_RELATIONS; diff --git a/core/src/utils.rs b/core/src/utils.rs index 45441f57..3aadedfa 100644 --- a/core/src/utils.rs +++ b/core/src/utils.rs @@ -1,5 +1,6 @@ use anyhow::Result; use std::path::{Path, PathBuf}; +use std::sync::LazyLock; use pathdiff::diff_paths; use log::debug; use walkdir::WalkDir; @@ -12,7 +13,7 @@ use crate::git_commands; use std::cell::RefCell; thread_local! 
{ - static QUIET_MODE: RefCell = RefCell::new(false); + static QUIET_MODE: RefCell = const { RefCell::new(false) }; } /// Enable quiet mode (suppress verbose output) @@ -56,7 +57,7 @@ pub fn is_excluded_by_patterns(path: &Path, excluded_filename_patterns: &GlobSet // Convert absolute path to relative path from git root for pattern matching // This ensures patterns like "external/**/*.md" work correctly regardless of working directory - let relative_path = match get_relative_path(&path.to_path_buf()) { + let relative_path = match get_relative_path(path) { Ok(rel_path) => rel_path, Err(_) => { // If we can't get relative path, fall back to original behavior @@ -222,7 +223,7 @@ pub fn scan_markdown_files( for entry in WalkDir::new(&scan_dir) .into_iter() .filter_map(Result::ok) - .filter(|e| e.path().is_file() && e.path().extension().map_or(false, |ext| ext == "md")) + .filter(|e| e.path().is_file() && e.path().extension().is_some_and(|ext| ext == "md")) .filter(|e| !is_to_be_ignored(e.path(), excluded_filename_patterns)) { files.push(entry.path().to_path_buf()); @@ -255,7 +256,7 @@ pub fn scan_markdown_files_from_commit( }; // Run git ls-tree command to get all files in the commit - let result = git_commands::ls_tree_commit(&commit); + let result = git_commands::ls_tree_commit(commit); let documents_vec = match result { Err(e) => { eprintln!("Error listing files in commit: {}", e); @@ -267,7 +268,7 @@ pub fn scan_markdown_files_from_commit( let matching_paths = documents_vec .into_iter() .map(|p| git_root.join(p)) - .filter(|p| p.extension().map_or(false, |ext| ext == "md")) + .filter(|p| p.extension().is_some_and(|ext| ext == "md")) .filter(|p| !is_to_be_ignored(p, excluded_filename_patterns)) .collect::>(); @@ -278,7 +279,7 @@ pub fn scan_markdown_files_from_commit( /// Gets the relative path of a file from the git repository root -pub fn get_relative_path(path: &PathBuf) -> Result { +pub fn get_relative_path(path: &Path) -> Result { let git_root = match 
git_commands::get_git_root_dir() { Ok(dir) => dir, Err(_) => { @@ -310,8 +311,7 @@ pub fn extract_path_and_fragment(identifier: &str) -> (&str, Option<&str>) { if identifier.is_empty(){ return ("",None); } - if identifier.starts_with('#') { - let frag = &identifier[1..]; + if let Some(frag) = identifier.strip_prefix('#') { return ("", Some(frag)); } // If identifier contains a '/' or a '.', assume it's a file reference. @@ -381,7 +381,7 @@ pub fn is_external_url(s: &str) -> bool { /// - Paths starting with '/' are treated as relative to git repository root /// - Other paths are treated as relative to base_path /// - External URLs are passed through unchanged -fn resolve_path_to_absolute(path_part: &str, base_path: &PathBuf) -> Result { +fn resolve_path_to_absolute(path_part: &str, base_path: &Path) -> Result { // Check for external URLs first if EXTERNAL_SCHEMES.iter().any(|scheme| path_part.starts_with(scheme)) { return Err(ReqvireError::PathError("External URLs should not be resolved as paths".to_string())); @@ -404,7 +404,7 @@ fn resolve_path_to_absolute(path_part: &str, base_path: &PathBuf) -> Result Ok(canonical), Err(_) => { // Logical path resolution for non-existent files - let mut resolved_path = base_path.clone(); + let mut resolved_path = base_path.to_path_buf(); for component in p.components() { match component { std::path::Component::Normal(_) => { @@ -429,7 +429,7 @@ fn resolve_path_to_absolute(path_part: &str, base_path: &PathBuf) -> Result Result { // 0) Extract the path and any trailing fragment let (path_part, fragment_opt) = extract_path_and_fragment(identifier); @@ -465,7 +465,7 @@ pub fn normalize_identifier( // 5) Re-attach the fragment, if present let final_result = match fragment_opt { Some(frag) => { - let fragment = normalize_fragment(&frag); + let fragment = normalize_fragment(frag); format!("{}#{}", rel, fragment) } None => rel, @@ -526,7 +526,7 @@ pub fn to_relative_identifier( let full = match fragment_opt { Some(frag) => { let 
fragment = if should_normalize_fragment { - normalize_fragment(&frag) + normalize_fragment(frag) } else { frag.to_string() }; @@ -602,7 +602,7 @@ fn normalize_nonlink_identifier(input: &str) -> (String, String) { // Normalize the fragment if present let normalized_link = if let Some(frag) = fragment_opt { - let norm_frag = normalize_fragment(&frag); + let norm_frag = normalize_fragment(frag); if file_part.is_empty() { // For fragment-only references, always include a leading '#' in the target format!("#{}", norm_frag) @@ -627,11 +627,15 @@ fn normalize_nonlink_identifier(input: &str) -> (String, String) { (display_text, normalized_link) } +/// Cached regex for markdown link extraction +static MARKDOWN_LINK_RE: LazyLock = LazyLock::new(|| { + Regex::new(r"^\[(.+?)\]\((.+?)\)$").unwrap() +}); + /// Extracts text and link from a Markdown-style link if present. fn extract_markdown_link(input: &str) -> Option<(String, String)> { let input = input.trim(); - let markdown_regex = Regex::new(r"^\[(.+?)\]\((.+?)\)$").unwrap(); - if let Some(captures) = markdown_regex.captures(input) { + if let Some(captures) = MARKDOWN_LINK_RE.captures(input) { let text = captures.get(1)?.as_str().to_string(); let link = captures.get(2)?.as_str().to_string(); Some((text, link)) @@ -1145,7 +1149,6 @@ mod tests { } /// Diagram utility functions for consistent filtering across the codebase - /// Constant marker used to identify auto-generated diagrams pub const REQVIRE_AUTOGENERATED_DIAGRAM_MARKER: &str = "REQVIRE-AUTOGENERATED-DIAGRAM"; @@ -1163,7 +1166,7 @@ pub fn remove_autogenerated_diagrams(content: &str) -> String { let mut is_auto_generated = false; // Read until the closing ``` - while let Some(block_line) = lines.next() { + for block_line in lines.by_ref() { block_lines.push(block_line); if block_line.contains(REQVIRE_AUTOGENERATED_DIAGRAM_MARKER) { diff --git a/core/src/verification_trace.rs b/core/src/verification_trace.rs index 8565553f..9b19c570 100644 --- 
a/core/src/verification_trace.rs +++ b/core/src/verification_trace.rs @@ -5,7 +5,7 @@ use crate::relation::{VERIFY_RELATION, VERIFICATION_TRACES_RELATIONS}; use crate::utils; use serde::Serialize; use std::collections::{BTreeMap, HashSet}; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; #[derive(Debug, Serialize)] pub struct VerificationTracesReport { @@ -245,15 +245,9 @@ impl<'a> VerificationTraceGenerator<'a> { Err(_) => PathBuf::from(""), }; - let base_url = match git_commands::get_repository_base_url() { - Ok(url) => url, - Err(_) => String::new(), - }; + let base_url = git_commands::get_repository_base_url().unwrap_or_default(); - let commit_hash = match git_commands::get_commit_hash() { - Ok(hash) => hash, - Err(_) => String::new(), - }; + let commit_hash = git_commands::get_commit_hash().unwrap_or_default(); let has_git_info = !repo_root.as_os_str().is_empty() && !base_url.is_empty() @@ -284,6 +278,7 @@ impl<'a> VerificationTraceGenerator<'a> { self.collect_elements_from_tree(&tree_with_relations, &mut all_elements, &mut collected_ids); // Group elements by folder -> file for containment structure + #[allow(clippy::type_complexity)] let mut folders: HashMap)>>> = HashMap::new(); for (elem_id, elem_name, elem_type, attachments) in all_elements { @@ -304,9 +299,9 @@ impl<'a> VerificationTraceGenerator<'a> { .to_string(); folders.entry(folder) - .or_insert_with(HashMap::new) + .or_default() .entry(file_name) - .or_insert_with(Vec::new) + .or_default() .push((elem_id, elem_name, elem_type, attachments)); } @@ -320,7 +315,7 @@ impl<'a> VerificationTraceGenerator<'a> { diagram.push_str(" classDef folder fill:#FAFAFA,stroke:#9E9E9E,stroke-width:3px;\n"); diagram.push_str(" classDef file fill:#FFF8E1,stroke:#FFCA28,stroke-width:2px;\n"); diagram.push_str(" classDef default fill:#F5F5F5,stroke:#424242,stroke-width:1.5px;\n"); - diagram.push_str("\n"); + diagram.push('\n'); // Sort folders for deterministic output let mut folder_names: Vec<&String> = 
folders.keys().collect(); @@ -452,7 +447,7 @@ impl<'a> VerificationTraceGenerator<'a> { } // Recursively add relations for children - self.add_relations_from_tree(&[child.clone()], diagram, visited_edges); + self.add_relations_from_tree(std::slice::from_ref(child), diagram, visited_edges); } } } @@ -461,7 +456,7 @@ impl<'a> VerificationTraceGenerator<'a> { fn get_click_target( &self, elem_id: &str, - repo_root: &PathBuf, + repo_root: &Path, base_url: &str, commit_hash: &str, has_git_info: bool, diff --git a/docs/cli/src/cli.rs b/docs/cli/src/cli.rs index 03365afd..08978f4c 100644 --- a/docs/cli/src/cli.rs +++ b/docs/cli/src/cli.rs @@ -137,8 +137,8 @@ pub enum Commands { filter_attachment: Option, }, - /// Analise change impact and provides report - #[clap(override_help = "Analise change impact and provides report\n\nCHANGE IMPACT OPTIONS:\n --git-commit Git commit hash to use when comparing models [default: HEAD]\n --json Output results in JSON format")] + /// Analyze change impact and provide report + #[clap(override_help = "Analyze change impact and provide report\n\nCHANGE IMPACT OPTIONS:\n --git-commit Git commit hash to use when comparing models [default: HEAD]\n --json Output results in JSON format")] ChangeImpact { /// Git commit hash to use when comparing models #[clap(long, default_value = "HEAD", help_heading = "CHANGE IMPACT OPTIONS")] diff --git a/docs/core/src/parser.rs b/docs/core/src/parser.rs index 5f5c578f..902197ec 100644 --- a/docs/core/src/parser.rs +++ b/docs/core/src/parser.rs @@ -139,7 +139,7 @@ pub fn parse_single_element( // Parse #### subsections } else if trimmed.starts_with("#### ") && current_element.is_some() { - let subsection = SubSection::from_str(&trimmed[5..].trim()); + let subsection = SubSection::parse(&trimmed[5..].trim()); if seen_subsections.contains(&subsection) { return Err(ReqvireError::DuplicateSubsection( @@ -523,7 +523,7 @@ pub fn parse_elements( debug!("Error: {}", msg); } else if trimmed.starts_with("#### ") && 
current_element.is_some() { - let subsection = SubSection::from_str(&trimmed[5..].trim()); + let subsection = SubSection::parse(&trimmed[5..].trim()); if !skip_current_element { if seen_subsections.contains(&subsection) { diff --git a/tests/test-cli-help-structure/expected/expected.txt b/tests/test-cli-help-structure/expected/expected.txt index 4e3a9f12..0ac6acfd 100644 --- a/tests/test-cli-help-structure/expected/expected.txt +++ b/tests/test-cli-help-structure/expected/expected.txt @@ -1,4 +1,4 @@ -Reqvire requirements & treacibility management tool +Reqvire requirements & traceability management tool Usage: reqvire [OPTIONS] [COMMAND OPTIONS] @@ -8,7 +8,7 @@ Commands: format Format and normalize requirements files. By default, shows preview without applying changes validate Validate model search Search and filter model elements with comprehensive filtering options - change-impact Analise change impact and provides report + change-impact Analyze change impact and provide report traces Generate verification traces showing upward paths from verifications to root requirements coverage Generate verification coverage report for leaf requirements model Generate model-centric structure with nested relations diff --git a/tests/test-mv-cross-file-relation-integrity/.reqvireignore b/tests/test-mv-cross-file-relation-integrity/.reqvireignore new file mode 100644 index 00000000..30d7a54c --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/.reqvireignore @@ -0,0 +1 @@ +expected/ diff --git a/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-requirements.md b/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-requirements.md new file mode 100644 index 00000000..b389a5a9 --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-requirements.md @@ -0,0 +1,22 @@ +# Elements + +### Root Requirement + +The system SHALL provide core functionality. 
+ +#### Metadata + * type: user-requirement +--- + +### Feature B + +The system SHALL implement Feature B. + +#### Metadata + * type: requirement + +#### Relations + * derivedFrom: [Root Requirement](#root-requirement) + * verifiedBy: [Test Feature B](Verifications/Tests.md#test-feature-b) +--- + diff --git a/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-subrequirements.md b/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-subrequirements.md new file mode 100644 index 00000000..3d31139f --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-subrequirements.md @@ -0,0 +1,25 @@ +# Elements + +### Feature A + +The system SHALL implement Feature A. + +#### Metadata + * type: requirement + +#### Relations + * derivedFrom: [Root Requirement](../Requirements.md#root-requirement) + * verifiedBy: [Test Feature A](../Verifications/Tests.md#test-feature-a) +--- + +### Sub Feature + +The system SHALL implement a sub-feature of Feature A. + +#### Metadata + * type: requirement + +#### Relations + * derivedFrom: [Feature A](#feature-a) +--- + diff --git a/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-tests.md b/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-tests.md new file mode 100644 index 00000000..eb973d57 --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/expected/01-after-move-tests.md @@ -0,0 +1,24 @@ +# Elements + +### Test Feature A + +Verify Feature A implementation. + +#### Metadata + * type: test-verification + +#### Relations + * verify: [Feature A](../SubDir/SubRequirements.md#feature-a) +--- + +### Test Feature B + +Verify Feature B implementation. 
+ +#### Metadata + * type: test-verification + +#### Relations + * verify: [Feature B](../Requirements.md#feature-b) +--- + diff --git a/tests/test-mv-cross-file-relation-integrity/specifications/Requirements.md b/tests/test-mv-cross-file-relation-integrity/specifications/Requirements.md new file mode 100644 index 00000000..71b0734f --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/specifications/Requirements.md @@ -0,0 +1,31 @@ +# Elements + + +### Root Requirement + +The system SHALL provide core functionality. + +#### Metadata + * type: user-requirement + +### Feature A + +The system SHALL implement Feature A. + +#### Metadata + * type: requirement + +#### Relations + * derivedFrom: #root-requirement + * verifiedBy: Verifications/Tests.md#test-feature-a + +### Feature B + +The system SHALL implement Feature B. + +#### Metadata + * type: requirement + +#### Relations + * derivedFrom: #root-requirement + * verifiedBy: Verifications/Tests.md#test-feature-b diff --git a/tests/test-mv-cross-file-relation-integrity/specifications/SubDir/SubRequirements.md b/tests/test-mv-cross-file-relation-integrity/specifications/SubDir/SubRequirements.md new file mode 100644 index 00000000..9bd11262 --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/specifications/SubDir/SubRequirements.md @@ -0,0 +1,12 @@ +# Elements + + +### Sub Feature + +The system SHALL implement a sub-feature of Feature A. + +#### Metadata + * type: requirement + +#### Relations + * derivedFrom: ../Requirements.md#feature-a diff --git a/tests/test-mv-cross-file-relation-integrity/specifications/Verifications/Tests.md b/tests/test-mv-cross-file-relation-integrity/specifications/Verifications/Tests.md new file mode 100644 index 00000000..95dc0598 --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/specifications/Verifications/Tests.md @@ -0,0 +1,22 @@ +# Elements + + +### Test Feature A + +Verify Feature A implementation. 
+ +#### Metadata + * type: test-verification + +#### Relations + * verify: ../Requirements.md#feature-a + +### Test Feature B + +Verify Feature B implementation. + +#### Metadata + * type: test-verification + +#### Relations + * verify: ../Requirements.md#feature-b diff --git a/tests/test-mv-cross-file-relation-integrity/test.sh b/tests/test-mv-cross-file-relation-integrity/test.sh new file mode 100755 index 00000000..07e8a5f4 --- /dev/null +++ b/tests/test-mv-cross-file-relation-integrity/test.sh @@ -0,0 +1,97 @@ +#!/bin/bash +set -uo pipefail + +# Test: Cross-File Relation Integrity After mv +# ----------------------------------------------- +# Regression test for update_relation_identifiers bug (graph_registry.rs line 2538): +# +# BUG: When element A moves to the same file as element B (which has a +# derivedFrom relation to A), the relation identifier is not updated. +# Part 1 of update_relation_identifiers detects that both elements are +# now in the same file and does "keep as-is" - but "as-is" means the +# OLD cross-file path (e.g. ../Requirements.md#feature-a) which is now +# stale. The relation should become a same-file fragment (#feature-a). +# +# Additionally, Part 2 of update_relation_identifiers fails to update +# the moved element's own outgoing relations because it looks up bare +# fragments against full-identifier keys in target_file_paths. 
+# +# Acceptance Criteria: +# - After mv: cross-file relations pointing to moved element are updated +# - After mv: same-file relations are converted to fragment-only references +# - After mv: moved element's own outgoing cross-file relations are correct +# - Model validates after each mv operation (no missing relation targets) +# +# Test Criteria: +# - mv command exits with success (0) +# - File contents match expected output (diff comparison) +# - Model validation passes after the move + +TEST_SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +assert_file_matches() { + local expected="$1" + local actual="$2" + local description="$3" + + if ! diff -u "$expected" "$actual"; then + echo "FAILED: $description" + echo "" + echo "If changes are intentional, update $expected" + exit 1 + fi +} + +# ================================== +# Test 1: Move element into same file as referring element +# ================================== +# Setup: +# - Feature A is in specifications/Requirements.md +# - Sub Feature is in specifications/SubDir/SubRequirements.md +# - Sub Feature has derivedFrom: ../Requirements.md#feature-a (cross-file) +# +# Action: Move Feature A to specifications/SubDir/SubRequirements.md +# +# Expected: Sub Feature's derivedFrom should become #feature-a (same-file) +# because both elements are now in the same file. +# +# BUG: The relation stays as ../Requirements.md#feature-a (stale cross-file ref) +# causing "Missing relation target" validation error. + +set +e +OUTPUT=$(cd "$TEST_DIR" && "$REQVIRE_BIN" mv "Feature A" "specifications/SubDir/SubRequirements.md" 2>&1) +EXIT_CODE=$? 
+set -e + +if [ $EXIT_CODE -ne 0 ]; then + echo "FAILED: mv command failed with exit code $EXIT_CODE" + echo "$OUTPUT" + exit 1 +fi + +# Check file contents match expected +assert_file_matches "${TEST_SCRIPT_DIR}/expected/01-after-move-requirements.md" \ + "$TEST_DIR/specifications/Requirements.md" \ + "Requirements.md after moving Feature A out" + +assert_file_matches "${TEST_SCRIPT_DIR}/expected/01-after-move-subrequirements.md" \ + "$TEST_DIR/specifications/SubDir/SubRequirements.md" \ + "SubRequirements.md after Feature A moved in (cross-file ref should become same-file)" + +assert_file_matches "${TEST_SCRIPT_DIR}/expected/01-after-move-tests.md" \ + "$TEST_DIR/specifications/Verifications/Tests.md" \ + "Tests.md verify relation should point to new location" + +# CRITICAL: Model must validate - no missing relation targets +set +e +VALIDATE_OUTPUT=$(cd "$TEST_DIR" && "$REQVIRE_BIN" validate 2>&1) +VALIDATE_EXIT=$? +set -e + +if [ $VALIDATE_EXIT -ne 0 ]; then + echo "FAILED: Model validation failed after mv" + echo "$VALIDATE_OUTPUT" + exit 1 +fi + +exit 0