From 78c76cf83818881bdd7b1752be7929498c786994 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 22 Dec 2025 14:24:03 +0000 Subject: [PATCH 01/11] Add `--all-targets` flag to `cargo clipfix` alias --- .cargo/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.cargo/config.toml b/.cargo/config.toml index e72c13a9e..49bf715d6 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,3 +1,3 @@ [alias] # Automatically fix clippy warnings (where possible) -clipfix = "clippy --fix --allow-dirty --allow-staged" +clipfix = "clippy --all-targets --fix --allow-dirty --allow-staged" From 3cf8f8802c66f06cb7228f8bba4fe56e5f9cc72a Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 22 Dec 2025 14:29:52 +0000 Subject: [PATCH 02/11] Run `clippy` for test code with `pre-commit` hook and fix all warnings --- .pre-commit-config.yaml | 1 + src/asset.rs | 29 ++++++++-------- src/commodity.rs | 4 +-- src/finance.rs | 1 + src/fixture.rs | 20 +++++------ src/input.rs | 6 ++-- src/input/agent.rs | 10 +++--- src/input/asset.rs | 2 +- src/input/commodity.rs | 8 ++--- src/input/commodity/demand.rs | 2 +- src/input/commodity/demand_slicing.rs | 37 +++++++++++---------- src/input/commodity/levy.rs | 2 +- src/input/process/flow.rs | 2 +- src/input/process/investment_constraints.rs | 9 +++-- src/input/region.rs | 4 +-- src/input/time_slice.rs | 2 +- src/output.rs | 8 ++--- src/patch.rs | 4 +-- src/process.rs | 14 ++++---- src/simulation/prices.rs | 1 + src/time_slice.rs | 2 +- tests/citation_cff.rs | 5 ++- tests/regression.rs | 11 +++--- tests/regression_missing_commodity.rs | 4 +-- tests/regression_muse1_default.rs | 4 +-- tests/regression_simple.rs | 2 +- tests/regression_two_outputs.rs | 4 +-- tests/regression_two_regions.rs | 4 +-- 28 files changed, 103 insertions(+), 99 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b47c980b9..8b40ea805 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -51,6 +51,7 @@ repos: rev: v1.0 hooks: - id: clippy + args: ["--all-targets", "--", "-D", "warnings"] - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.11.6 hooks: diff --git a/src/asset.rs b/src/asset.rs index dcc7b6ce1..fe5a55976 100644 --- a/src/asset.rs +++ b/src/asset.rs @@ -1305,12 +1305,22 @@ mod tests { ActivityPerCapacity, Capacity, Dimensionless, FlowPerActivity, MoneyPerActivity, MoneyPerCapacity, MoneyPerCapacityPerYear, MoneyPerFlow, }; + use float_cmp::assert_approx_eq; use indexmap::indexmap; use itertools::{Itertools, assert_equal}; use rstest::{fixture, rstest}; use std::iter; use std::rc::Rc; + /// Number of expected children for divisible asset + #[allow(clippy::cast_possible_truncation)] + #[allow(clippy::cast_sign_loss)] + fn expected_children_for_divisible(asset: &Asset) -> usize { + (asset.capacity / asset.process.unit_size.expect("Asset is not divisible")) + .value() + .ceil() as usize + } + #[rstest] fn test_get_input_cost_from_prices( region_id: RegionID, @@ -1341,7 +1351,7 @@ mod tests { // Call function let cost = asset.get_input_cost_from_prices(&input_prices, &time_slice); // Should be -coeff * price = -(-2.0) * 3.0 = 6.0 - assert_eq!(cost.0, 6.0); + assert_approx_eq!(MoneyPerActivity, cost, MoneyPerActivity(6.0)); } #[rstest] @@ -1486,10 +1496,7 @@ mod tests { // Check number of children let children = asset_divisible.divide_asset(); - let expected_children = (asset_divisible.capacity - / asset_divisible.process.unit_size.unwrap()) - .value() - .ceil() as usize; + let expected_children = 
expected_children_for_divisible(&asset_divisible); assert_eq!( children.len(), expected_children, @@ -1502,7 +1509,7 @@ mod tests { assert!( child.capacity <= max_child_capacity, "Child capacity is too large!" - ) + ); } let children_capacity: Capacity = children.iter().map(|a| a.capacity).sum(); assert_eq!(asset_divisible.capacity, children_capacity); @@ -1541,10 +1548,7 @@ mod tests { #[rstest] fn test_asset_pool_commission_new_divisible(asset_divisible: Asset) { let commision_year = asset_divisible.commission_year; - let expected_children = (asset_divisible.capacity - / asset_divisible.process.unit_size.unwrap()) - .value() - .ceil() as usize; + let expected_children = expected_children_for_divisible(&asset_divisible); let mut asset_pool = AssetPool::new(vec![asset_divisible.clone()]); assert!(asset_pool.active.is_empty()); asset_pool.commission_new(commision_year); @@ -1694,10 +1698,7 @@ mod tests { .unwrap() .into(), ]; - let expected_children = (new_assets[0].capacity / new_assets[0].process.unit_size.unwrap()) - .value() - .ceil() as usize; - + let expected_children = expected_children_for_divisible(&new_assets[0]); asset_pool.extend(new_assets); assert_eq!(asset_pool.active.len(), original_count + expected_children); } diff --git a/src/commodity.rs b/src/commodity.rs index 008929ed1..dbafd8494 100644 --- a/src/commodity.rs +++ b/src/commodity.rs @@ -122,7 +122,7 @@ mod tests { let mut map = DemandMap::new(); map.insert(("North".into(), 2020, ts_selection.clone()), value); - assert_eq!(map[&("North".into(), 2020, ts_selection)], value) + assert_eq!(map[&("North".into(), 2020, ts_selection)], value); } #[test] @@ -134,7 +134,7 @@ mod tests { let value = MoneyPerFlow(0.5); let mut map = CommodityLevyMap::new(); assert!( - map.insert(("GBR".into(), 2010, ts.clone()), value.clone()) + map.insert(("GBR".into(), 2010, ts.clone()), value) .is_none() ); assert_eq!(map[&("GBR".into(), 2010, ts)], value); diff --git a/src/finance.rs b/src/finance.rs index 9950b6f18..7076afb4a 100644 --- a/src/finance.rs +++ b/src/finance.rs @@ -71,6 +71,7 @@ pub fn lcox( } #[cfg(test)] +#[allow(clippy::unreadable_literal)] mod tests { use super::*; use crate::time_slice::TimeSliceID; diff --git a/src/fixture.rs b/src/fixture.rs index ad9452c63..c8bbc7b79 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -111,7 +111,7 @@ pub fn commodity_id() -> CommodityID { pub fn svd_commodity() -> Commodity { Commodity { id: "commodity1".into(), - description: "".into(), + description: String::new(), kind: CommodityType::ServiceDemand, time_slice_level: TimeSliceLevel::DayNight, pricing_strategy: PricingStrategy::Shadow, @@ -188,7 +188,7 @@ pub fn process_parameter() -> ProcessParameter { } #[fixture] -/// Create a ProcessParameterMap with the specified parameters for each region and year +/// Create a `ProcessParameterMap` with the specified parameters for each region and year pub fn process_parameter_map( region_ids: IndexSet, process_parameter: ProcessParameter, @@ -202,13 +202,13 @@ pub fn process_parameter_map( } #[fixture] -/// Create a ProcessAvailabilities with full availability for all time slices +/// Create a `ProcessAvailabilities` with full availability for all time slices pub fn process_activity_limits(time_slice_info: TimeSliceInfo) -> ActivityLimits { ActivityLimits::new_with_full_availability(&time_slice_info) } #[fixture] -/// Create a ProcessActivityLimitsMap with full availability for each region and year +/// Create a `ProcessActivityLimitsMap` with full availability for each region and year pub fn 
process_activity_limits_map( region_ids: IndexSet, process_activity_limits: ActivityLimits, @@ -221,20 +221,20 @@ pub fn process_activity_limits_map( } #[fixture] -/// Create an empty set of ProcessInvestmentConstraints for a given region/year -/// Returns a HashMap keyed by (RegionID, year) with empty Rc +/// Create an empty set of `ProcessInvestmentConstraints` for a given region/year +/// Returns a `HashMap` keyed by (`RegionID`, year) with empty Rc pub fn process_investment_constraints() -> ProcessInvestmentConstraintsMap { HashMap::new() } #[fixture] -/// Create an empty set of ProcessFlows for a given region/year +/// Create an empty set of `ProcessFlows` for a given region/year pub fn process_flows() -> Rc> { Rc::new(IndexMap::new()) } #[fixture] -/// Create a ProcessFlowsMap with the provided flows for each region/year +/// Create a `ProcessFlowsMap` with the provided flows for each region/year pub fn process_flows_map( region_ids: IndexSet, process_flows: Rc>, @@ -281,7 +281,7 @@ pub fn agents() -> AgentMap { "agent1".into(), Agent { id: "agent1".into(), - description: "".into(), + description: String::new(), commodity_portions: AgentCommodityPortionsMap::new(), search_space: AgentSearchSpaceMap::new(), decision_rule: DecisionRule::Single, @@ -353,7 +353,7 @@ pub fn appraisal_output(asset: Asset, time_slice: TimeSliceID) -> AppraisalOutpu asset: AssetRef::from(asset), capacity: Capacity(42.0), coefficients: ObjectiveCoefficients { - capacity_coefficient: MoneyPerCapacity(3.14), + capacity_coefficient: MoneyPerCapacity(2.14), activity_coefficients, unmet_demand_coefficient: MoneyPerFlow(10000.0), }, diff --git a/src/input.rs b/src/input.rs index 08358facc..117f07222 100644 --- a/src/input.rs +++ b/src/input.rs @@ -337,7 +337,7 @@ mod tests { } } - /// Create an example CSV file in dir_path + /// Create an example CSV file in `dir_path` fn create_csv_file(dir_path: &Path, contents: &str) -> PathBuf { let file_path = dir_path.join("test.csv"); let mut file = File::create(&file_path).unwrap(); @@ -432,7 +432,7 @@ mod tests { assert!(read_toml::(&file_path).is_err()); } - /// Deserialise value with deserialise_proportion_nonzero() + /// Deserialise value with `deserialise_proportion_nonzero()` fn deserialise_f64(value: f64) -> Result { let deserialiser: F64Deserializer = value.into_deserializer(); deserialise_proportion_nonzero(deserialiser) @@ -489,7 +489,7 @@ mod tests { #[case(&[1,1],false)] #[case(&[1,3,2,4], false)] fn test_is_sorted_and_unique(#[case] values: &[u32], #[case] expected: bool) { - assert_eq!(is_sorted_and_unique(values), expected) + assert_eq!(is_sorted_and_unique(values), expected); } #[test] diff --git a/src/input/agent.rs b/src/input/agent.rs index d1d4c86c0..24ddb5857 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -169,14 +169,14 @@ mod tests { let region_ids = IndexSet::from(["GBR".into()]); let agent = AgentRaw { id: "agent".into(), - description: "".into(), + description: String::new(), decision_rule: "single".into(), decision_lexico_tolerance: None, regions: "GBR".into(), }; let agent_out = Agent { id: "agent".into(), - description: "".into(), + description: String::new(), commodity_portions: AgentCommodityPortionsMap::new(), search_space: AgentSearchSpaceMap::new(), decision_rule: DecisionRule::Single, @@ -191,14 +191,14 @@ mod tests { let agents = [ AgentRaw { id: "agent".into(), - description: "".into(), + description: String::new(), decision_rule: "single".into(), decision_lexico_tolerance: None, regions: "GBR".into(), }, AgentRaw { id: 
"agent".into(), - description: "".into(), + description: String::new(), decision_rule: "single".into(), decision_lexico_tolerance: None, regions: "GBR".into(), @@ -209,7 +209,7 @@ mod tests { // Lexico tolerance missing for lexico decision rule let agent = AgentRaw { id: "agent".into(), - description: "".into(), + description: String::new(), decision_rule: "lexico".into(), decision_lexico_tolerance: None, regions: "GBR".into(), diff --git a/src/input/asset.rs b/src/input/asset.rs index f4a5f279c..6fbc5f493 100644 --- a/src/input/asset.rs +++ b/src/input/asset.rs @@ -144,7 +144,7 @@ mod tests { region_id: "GBR".into(), capacity: Capacity(1.0), commission_year: 2010, - max_decommission_year: max_decommission_year, + max_decommission_year, }; let asset_out = Asset::new_future_with_max_decommission( "agent1".into(), diff --git a/src/input/commodity.rs b/src/input/commodity.rs index 3adf647fe..83f08f480 100644 --- a/src/input/commodity.rs +++ b/src/input/commodity.rs @@ -211,19 +211,19 @@ mod tests { #[test] fn test_validate_commodity_other_priced() { - let mut commodity = make_commodity(CommodityType::Other, PricingStrategy::MarginalCost); + let commodity = make_commodity(CommodityType::Other, PricingStrategy::MarginalCost); assert_error!( - validate_commodity(&mut commodity), + validate_commodity(&commodity), "Commodity ELC of type Other must be unpriced. Update its pricing strategy to 'unpriced' or 'default'." ); } #[test] fn test_validate_commodity_sed_unpriced() { - let mut commodity = + let commodity = make_commodity(CommodityType::SupplyEqualsDemand, PricingStrategy::Unpriced); assert_error!( - validate_commodity(&mut commodity), + validate_commodity(&commodity), "Commodity ELC of type SupplyEqualsDemand cannot be unpriced. Update its pricing strategy to a valid option." 
); } diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs index 03146abbb..4374bdaea 100644 --- a/src/input/commodity/demand.rs +++ b/src/input/commodity/demand.rs @@ -419,7 +419,7 @@ mod tests { ); } - /// Create an example demand file in dir_path + /// Create an example demand file in `dir_path` fn create_demand_file(dir_path: &Path) { let file_path = dir_path.join(DEMAND_FILE_NAME); let mut file = File::create(file_path).unwrap(); diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs index 563d832bf..9a988e3dc 100644 --- a/src/input/commodity/demand_slicing.rs +++ b/src/input/commodity/demand_slicing.rs @@ -201,6 +201,25 @@ mod tests { ); } + fn demand_slice_entry( + season: &str, + time_of_day: &str, + fraction: Dimensionless, + ) -> ((CommodityID, RegionID, TimeSliceSelection), Dimensionless) { + ( + ( + "commodity1".into(), + "GBR".into(), + TimeSliceID { + season: season.into(), + time_of_day: time_of_day.into(), + } + .into(), + ), + fraction, + ) + } + #[rstest] fn test_read_demand_slices_from_iter_valid_multiple_time_slices( svd_commodity: Commodity, @@ -261,24 +280,6 @@ mod tests { }, ]; - fn demand_slice_entry( - season: &str, - time_of_day: &str, - fraction: Dimensionless, - ) -> ((CommodityID, RegionID, TimeSliceSelection), Dimensionless) { - ( - ( - "commodity1".into(), - "GBR".into(), - TimeSliceID { - season: season.into(), - time_of_day: time_of_day.into(), - } - .into(), - ), - fraction, - ) - } let expected = DemandSliceMap::from_iter([ demand_slice_entry("summer", "day", Dimensionless(3.0 / 16.0)), demand_slice_entry("summer", "night", Dimensionless(5.0 / 16.0)), diff --git a/src/input/commodity/levy.rs b/src/input/commodity/levy.rs index 9592906dd..14f4eb4d0 100644 --- a/src/input/commodity/levy.rs +++ b/src/input/commodity/levy.rs @@ -246,7 +246,7 @@ mod tests { let cost = MoneyPerFlow(1.0); let mut map = CommodityLevyMap::new(); - map.insert(("GBR".into(), 2020, time_slice.clone()), cost.clone()); + map.insert(("GBR".into(), 2020, time_slice.clone()), cost); map } diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs index 04e31825c..8ac2a18f8 100644 --- a/src/input/process/flow.rs +++ b/src/input/process/flow.rs @@ -344,7 +344,7 @@ mod tests { I: Clone + Iterator, { let years = years.unwrap_or(process.years.clone().collect()); - let map: Rc> = Rc::new(flows.clone().collect()); + let map: Rc> = Rc::new(flows.collect()); let flows_inner = iproduct!(&process.regions, years) .map(|(region_id, year)| ((region_id.clone(), year), map.clone())) .collect(); diff --git a/src/input/process/investment_constraints.rs b/src/input/process/investment_constraints.rs index 84dc490cf..dfd73dff7 100644 --- a/src/input/process/investment_constraints.rs +++ b/src/input/process/investment_constraints.rs @@ -139,7 +139,7 @@ mod tests { process_id: "test_process".into(), regions: "ALL".into(), commission_years: "2030".into(), - addition_limit: addition_limit, + addition_limit, }; constraint.validate() } @@ -182,8 +182,7 @@ mod tests { if ![2012, 2016].contains(&year) { assert!( !process_constraints.contains_key(&(gbr_region.clone(), year)), - "Should not contain constraint for year {}", - year + "Should not contain constraint for year {year}" ); } } @@ -295,12 +294,12 @@ mod tests { for &year in &milestone_years { let gbr_constraint = process_constraints .get(&(gbr_region.clone(), year)) - .expect(&format!("GBR {} constraint should exist", year)); + .unwrap_or_else(|| panic!("GBR {year} constraint should exist")); 
assert_eq!(gbr_constraint.addition_limit, Some(75.0)); let usa_constraint = process_constraints .get(&(usa_region.clone(), year)) - .expect(&format!("USA {} constraint should exist", year)); + .unwrap_or_else(|| panic!("USA {year} constraint should exist")); assert_eq!(usa_constraint.addition_limit, Some(75.0)); } diff --git a/src/input/region.rs b/src/input/region.rs index 0a5ba46c2..e46c4f44d 100644 --- a/src/input/region.rs +++ b/src/input/region.rs @@ -28,7 +28,7 @@ mod tests { use std::path::Path; use tempfile::tempdir; - /// Create an example regions file in dir_path + /// Create an example regions file in `dir_path` fn create_regions_file(dir_path: &Path) { let file_path = dir_path.join(REGIONS_FILE_NAME); let mut file = File::create(file_path).unwrap(); @@ -72,6 +72,6 @@ AP,Asia Pacific" } ), ]) - ) + ); } } diff --git a/src/input/time_slice.rs b/src/input/time_slice.rs index 4635430b4..517e989f1 100644 --- a/src/input/time_slice.rs +++ b/src/input/time_slice.rs @@ -107,7 +107,7 @@ mod tests { use std::path::Path; use tempfile::tempdir; - /// Create an example time slices file in dir_path + /// Create an example time slices file in `dir_path` fn create_time_slices_file(dir_path: &Path) { let file_path = dir_path.join(TIME_SLICES_FILE_NAME); let mut file = File::create(file_path).unwrap(); diff --git a/src/output.rs b/src/output.rs index 7c1f67b1e..d29838a2f 100644 --- a/src/output.rs +++ b/src/output.rs @@ -831,8 +831,8 @@ mod tests { let expected = UnmetDemandRow { milestone_year, run_description, - commodity_id: commodity_id, - region_id: region_id, + commodity_id, + region_id, time_slice, value, }; @@ -989,7 +989,7 @@ mod tests { process_id: asset.process_id().clone(), region_id: asset.region_id().clone(), capacity: Capacity(42.0), - capacity_coefficient: MoneyPerCapacity(3.14), + capacity_coefficient: MoneyPerCapacity(2.14), metric: 4.14, }; let records: Vec = @@ -1129,7 +1129,7 @@ mod tests { let output_dir = temp_dir.path().join("output_with_subdirs"); // Create directory structure with files - fs::create_dir_all(&output_dir.join("subdir")).unwrap(); + fs::create_dir_all(output_dir.join("subdir")).unwrap(); fs::write(output_dir.join("file1.txt"), "content1").unwrap(); fs::write(output_dir.join("subdir").join("file2.txt"), "content2").unwrap(); diff --git a/src/patch.rs b/src/patch.rs index dc2ff023e..1dc4390b0 100644 --- a/src/patch.rs +++ b/src/patch.rs @@ -378,9 +378,7 @@ mod tests { #[test] fn test_toml_patch() { // Patch to add an extra milestone year (2050) - let toml_patch = r#" - milestone_years = [2020, 2030, 2040, 2050] - "#; + let toml_patch = "milestone_years = [2020, 2030, 2040, 2050]\n"; // Build patched model into a temporary directory let model_dir = ModelPatch::from_example("simple") diff --git a/src/process.rs b/src/process.rs index fa2a73787..d8127dea2 100644 --- a/src/process.rs +++ b/src/process.rs @@ -573,8 +573,8 @@ mod tests { kind: CommodityType::ServiceDemand, time_slice_level: TimeSliceLevel::Annual, pricing_strategy: PricingStrategy::Shadow, - levies_prod: levies_prod, - levies_cons: levies_cons, + levies_prod, + levies_cons, demand: DemandMap::new(), }) } @@ -635,8 +635,8 @@ mod tests { kind: CommodityType::ServiceDemand, time_slice_level: TimeSliceLevel::Annual, pricing_strategy: PricingStrategy::Shadow, - levies_prod: levies_prod, - levies_cons: levies_cons, + levies_prod, + levies_cons, demand: DemandMap::new(), }) } @@ -1010,7 +1010,7 @@ mod tests { // Each timeslice from the info should be present in the limits for (ts_id, ts_len) in 
time_slice_info2.iter() { - let l = limits.get_limit_for_time_slice(&ts_id); + let l = limits.get_limit_for_time_slice(ts_id); // Lower bound should be zero and upper bound equal to timeslice length assert_eq!(*l.start(), Dimensionless(0.0)); assert_eq!(*l.end(), Dimensionless(ts_len.value())); @@ -1036,7 +1036,7 @@ mod tests { // Each timeslice upper bound should be capped by the seasonal upper bound (0.01) for (ts_id, _ts_len) in time_slice_info2.iter() { - let ts_limit = result.get_limit_for_time_slice(&ts_id); + let ts_limit = result.get_limit_for_time_slice(ts_id); assert_eq!(*ts_limit.end(), Dimensionless(0.01)); } @@ -1059,7 +1059,7 @@ mod tests { // Each timeslice upper bound should be capped by the annual upper bound (0.01) for (ts_id, _ts_len) in time_slice_info2.iter() { - let ts_limit = result.get_limit_for_time_slice(&ts_id); + let ts_limit = result.get_limit_for_time_slice(ts_id); assert_eq!(*ts_limit.end(), Dimensionless(0.01)); } diff --git a/src/simulation/prices.rs b/src/simulation/prices.rs index afc703511..afc1348a3 100644 --- a/src/simulation/prices.rs +++ b/src/simulation/prices.rs @@ -664,6 +664,7 @@ mod tests { } } + #[allow(clippy::too_many_arguments)] fn build_process( flows: IndexMap, region_id: &RegionID, diff --git a/src/time_slice.rs b/src/time_slice.rs index 577593a6c..ccab77dc6 100644 --- a/src/time_slice.rs +++ b/src/time_slice.rs @@ -30,7 +30,7 @@ pub struct TimeSliceID { impl From<&str> for TimeSliceID { fn from(value: &str) -> Self { let (season, time_of_day) = value - .split(".") + .split('.') .collect_tuple() .expect("Time slice not in form season.time_of_day"); TimeSliceID { diff --git a/tests/citation_cff.rs b/tests/citation_cff.rs index 49a206ae4..4f18c8acf 100644 --- a/tests/citation_cff.rs +++ b/tests/citation_cff.rs @@ -7,8 +7,7 @@ fn get_version_from_citation_cff() -> Result { let citation = fs::read_to_string("CITATION.cff")?; let yaml = YamlLoader::load_from_str(&citation)?; let yaml = yaml - .iter() - .next() + .first() .context("Empty YAML file")? .as_hash() .context("Not YAML object")?; @@ -28,5 +27,5 @@ fn test_citation_cff_version() { get_version_from_citation_cff().unwrap(), "Software version in Cargo.toml and CITATION.cff must match. If you are making a new \ release, please also update the CITATION.cff file." 
- ) + ); } diff --git a/tests/regression.rs b/tests/regression.rs index 3bb2a9453..45b47a241 100644 --- a/tests/regression.rs +++ b/tests/regression.rs @@ -97,7 +97,7 @@ fn compare_lines( if !compare_line(num, &line1, &line2, file_name, errors) { errors.push(format!( "{file_name}: line {num}:\n + \"{line1}\"\n - \"{line2}\"" - )) + )); } } } @@ -109,8 +109,8 @@ fn compare_line( file_name: &str, errors: &mut Vec, ) -> bool { - let fields1 = line1.split(",").collect_vec(); - let fields2 = line2.split(",").collect_vec(); + let fields1 = line1.split(',').collect_vec(); + let fields2 = line2.split(',').collect_vec(); if fields1.len() != fields2.len() { errors.push(format!( "{}: line {}: Different number of fields: {} vs {}", @@ -152,7 +152,10 @@ fn get_csv_file_names(dir_path: &Path) -> Vec { for entry in entries { let file_name = entry.unwrap().file_name(); let file_name = file_name.to_str().unwrap(); - if file_name.ends_with(".csv") { + if Path::new(file_name) + .extension() + .is_some_and(|ext| ext.eq_ignore_ascii_case("csv")) + { file_names.push(file_name.to_string()); } } diff --git a/tests/regression_missing_commodity.rs b/tests/regression_missing_commodity.rs index 7c48bfe63..1d34e7b2b 100644 --- a/tests/regression_missing_commodity.rs +++ b/tests/regression_missing_commodity.rs @@ -1,8 +1,8 @@ -//! A regression test for the "missing_commodity" example +//! A regression test for the `missing_commodity` example mod regression; use regression::run_regression_test; #[test] fn test_regression_missing_commodity() { - run_regression_test("missing_commodity") + run_regression_test("missing_commodity"); } diff --git a/tests/regression_muse1_default.rs b/tests/regression_muse1_default.rs index 4f96b5435..f24c62f28 100644 --- a/tests/regression_muse1_default.rs +++ b/tests/regression_muse1_default.rs @@ -1,8 +1,8 @@ -//! A regression test for the "muse1_default" example +//! A regression test for the `muse1_default` example mod regression; use regression::run_regression_test; #[test] fn test_regression_muse1_default() { - run_regression_test("muse1_default") + run_regression_test("muse1_default"); } diff --git a/tests/regression_simple.rs b/tests/regression_simple.rs index 6e36c6e9e..8ffe9e9fb 100644 --- a/tests/regression_simple.rs +++ b/tests/regression_simple.rs @@ -4,5 +4,5 @@ use regression::run_regression_test_with_debug_files; #[test] fn test_regression_simple() { - run_regression_test_with_debug_files("simple") + run_regression_test_with_debug_files("simple"); } diff --git a/tests/regression_two_outputs.rs b/tests/regression_two_outputs.rs index fa1e24aba..6d424b62f 100644 --- a/tests/regression_two_outputs.rs +++ b/tests/regression_two_outputs.rs @@ -1,8 +1,8 @@ -//! A regression test for the "two_outputs" example +//! A regression test for the `two_outputs` example mod regression; use regression::run_regression_test; #[test] fn test_regression_two_outputs() { - run_regression_test("two_outputs") + run_regression_test("two_outputs"); } diff --git a/tests/regression_two_regions.rs b/tests/regression_two_regions.rs index ea5b668c1..da3aa7168 100644 --- a/tests/regression_two_regions.rs +++ b/tests/regression_two_regions.rs @@ -1,8 +1,8 @@ -//! A regression test for the "two_regions" example +//! 
A regression test for the `two_regions` example mod regression; use regression::run_regression_test; #[test] fn test_regression_two_regions() { - run_regression_test("two_regions") + run_regression_test("two_regions"); } From 33e6ae0d045662297cafc2dc69d00d6835eff338 Mon Sep 17 00:00:00 2001 From: Alex Dewar Date: Mon, 22 Dec 2025 14:45:47 +0000 Subject: [PATCH 03/11] Remove `test_` prefix from test function names --- src/asset.rs | 67 ++++++++++----------- src/commodity.rs | 4 +- src/finance.rs | 10 +-- src/fixture.rs | 4 +- src/graph/investment.rs | 19 ++---- src/graph/validate.rs | 10 +-- src/id.rs | 4 +- src/input.rs | 12 ++-- src/input/agent.rs | 2 +- src/input/agent/commodity_portion.rs | 2 +- src/input/agent/objective.rs | 17 +++--- src/input/agent/search_space.rs | 6 +- src/input/asset.rs | 4 +- src/input/commodity.rs | 6 +- src/input/commodity/demand.rs | 19 +++--- src/input/commodity/demand_slicing.rs | 20 +++--- src/input/commodity/levy.rs | 10 +-- src/input/process/availability.rs | 6 +- src/input/process/investment_constraints.rs | 12 ++-- src/input/process/parameter.rs | 6 +- src/input/region.rs | 2 +- src/input/time_slice.rs | 4 +- src/model/parameters.rs | 12 ++-- src/output.rs | 36 +++++------ src/patch.rs | 10 +-- src/process.rs | 42 ++++++------- src/region.rs | 2 +- src/settings.rs | 6 +- src/simulation/investment.rs | 6 +- src/simulation/prices.rs | 8 +-- src/time_slice.rs | 19 ++---- src/year.rs | 4 +- tests/citation_cff.rs | 2 +- tests/graph.rs | 2 +- tests/regression_missing_commodity.rs | 2 +- tests/regression_muse1_default.rs | 2 +- tests/regression_simple.rs | 2 +- tests/regression_two_outputs.rs | 2 +- tests/regression_two_regions.rs | 2 +- tests/run.rs | 2 +- tests/validate.rs | 2 +- 41 files changed, 189 insertions(+), 220 deletions(-) diff --git a/src/asset.rs b/src/asset.rs index fe5a55976..e8f7e075e 100644 --- a/src/asset.rs +++ b/src/asset.rs @@ -1322,7 +1322,7 @@ mod tests { } #[rstest] - fn test_get_input_cost_from_prices( + fn get_input_cost_from_prices_works( region_id: RegionID, svd_commodity: Commodity, mut process: Process, @@ -1359,7 +1359,7 @@ mod tests { #[case(Capacity(0.5))] #[case(Capacity(1.0))] #[case(Capacity(100.0))] - fn test_asset_new_valid(process: Process, #[case] capacity: Capacity) { + fn asset_new_valid(process: Process, #[case] capacity: Capacity) { let agent_id = AgentID("agent1".into()); let region_id = RegionID("GBR".into()); let asset = Asset::new_future(agent_id, process.into(), region_id, capacity, 2015).unwrap(); @@ -1373,7 +1373,7 @@ mod tests { #[case(Capacity(f64::NAN))] #[case(Capacity(f64::INFINITY))] #[case(Capacity(f64::NEG_INFINITY))] - fn test_asset_new_invalid_capacity(process: Process, #[case] capacity: Capacity) { + fn asset_new_invalid_capacity(process: Process, #[case] capacity: Capacity) { let agent_id = AgentID("agent1".into()); let region_id = RegionID("GBR".into()); assert_error!( @@ -1383,7 +1383,7 @@ mod tests { } #[rstest] - fn test_asset_new_invalid_commission_year(process: Process) { + fn asset_new_invalid_commission_year(process: Process) { let agent_id = AgentID("agent1".into()); let region_id = RegionID("GBR".into()); assert_error!( @@ -1393,7 +1393,7 @@ mod tests { } #[rstest] - fn test_asset_new_invalid_region(process: Process) { + fn asset_new_invalid_region(process: Process) { let agent_id = AgentID("agent1".into()); let region_id = RegionID("FRA".into()); assert_error!( @@ -1476,7 +1476,7 @@ mod tests { } #[rstest] - fn test_asset_get_activity_per_capacity_limits( + fn 
asset_get_activity_per_capacity_limits( asset_with_activity_limits: Asset, time_slice: TimeSliceID, ) { @@ -1488,7 +1488,7 @@ mod tests { } #[rstest] - fn test_divide_asset(asset_divisible: Asset) { + fn divide_asset_works(asset_divisible: Asset) { assert!( asset_divisible.is_divisible(), "Divisbile asset cannot be divided!" @@ -1516,7 +1516,7 @@ mod tests { } #[rstest] - fn test_asset_pool_new(asset_pool: AssetPool) { + fn asset_pool_new(asset_pool: AssetPool) { // Should be in order of commission year assert!(asset_pool.active.is_empty()); assert!(asset_pool.future.len() == 2); @@ -1525,28 +1525,28 @@ mod tests { } #[rstest] - fn test_asset_pool_commission_new1(mut asset_pool: AssetPool) { + fn asset_pool_commission_new1(mut asset_pool: AssetPool) { // Asset to be commissioned in this year asset_pool.commission_new(2010); assert_equal(asset_pool.iter_active(), iter::once(&asset_pool.active[0])); } #[rstest] - fn test_asset_pool_commission_new2(mut asset_pool: AssetPool) { + fn asset_pool_commission_new2(mut asset_pool: AssetPool) { // Commission year has passed asset_pool.commission_new(2011); assert_equal(asset_pool.iter_active(), iter::once(&asset_pool.active[0])); } #[rstest] - fn test_asset_pool_commission_new3(mut asset_pool: AssetPool) { + fn asset_pool_commission_new3(mut asset_pool: AssetPool) { // Nothing to commission for this year asset_pool.commission_new(2000); assert!(asset_pool.iter_active().next().is_none()); // no active assets } #[rstest] - fn test_asset_pool_commission_new_divisible(asset_divisible: Asset) { + fn asset_pool_commission_new_divisible(asset_divisible: Asset) { let commision_year = asset_divisible.commission_year; let expected_children = expected_children_for_divisible(&asset_divisible); let mut asset_pool = AssetPool::new(vec![asset_divisible.clone()]); @@ -1562,7 +1562,7 @@ mod tests { } #[rstest] - fn test_asset_pool_commission_already_decommissioned(asset: Asset) { + fn asset_pool_commission_already_decommissioned(asset: Asset) { let year = asset.max_decommission_year(); let mut asset_pool = AssetPool::new(vec![asset]); assert!(asset_pool.active.is_empty()); @@ -1571,7 +1571,7 @@ mod tests { } #[rstest] - fn test_asset_pool_decommission_old(mut asset_pool: AssetPool) { + fn asset_pool_decommission_old(mut asset_pool: AssetPool) { asset_pool.commission_new(2020); assert!(asset_pool.future.is_empty()); assert_eq!(asset_pool.active.len(), 2); @@ -1597,14 +1597,14 @@ mod tests { } #[rstest] - fn test_asset_pool_get(mut asset_pool: AssetPool) { + fn asset_pool_get(mut asset_pool: AssetPool) { asset_pool.commission_new(2020); assert_eq!(asset_pool.get(AssetID(0)), Some(&asset_pool.active[0])); assert_eq!(asset_pool.get(AssetID(1)), Some(&asset_pool.active[1])); } #[rstest] - fn test_asset_pool_extend_empty(mut asset_pool: AssetPool) { + fn asset_pool_extend_empty(mut asset_pool: AssetPool) { // Start with commissioned assets asset_pool.commission_new(2020); let original_count = asset_pool.active.len(); @@ -1616,7 +1616,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_existing_assets(mut asset_pool: AssetPool) { + fn asset_pool_extend_existing_assets(mut asset_pool: AssetPool) { // Start with some commissioned assets asset_pool.commission_new(2020); assert_eq!(asset_pool.active.len(), 2); @@ -1631,7 +1631,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_new_assets(mut asset_pool: AssetPool, process: Process) { + fn asset_pool_extend_new_assets(mut asset_pool: AssetPool, process: Process) { // Start with some commissioned assets 
asset_pool.commission_new(2020); let original_count = asset_pool.active.len(); @@ -1676,10 +1676,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_new_divisible_assets( - mut asset_pool: AssetPool, - mut process: Process, - ) { + fn asset_pool_extend_new_divisible_assets(mut asset_pool: AssetPool, mut process: Process) { // Start with some commissioned assets asset_pool.commission_new(2020); let original_count = asset_pool.active.len(); @@ -1704,7 +1701,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_mixed_assets(mut asset_pool: AssetPool, process: Process) { + fn asset_pool_extend_mixed_assets(mut asset_pool: AssetPool, process: Process) { // Start with some commissioned assets asset_pool.commission_new(2020); @@ -1737,7 +1734,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_maintains_sort_order(mut asset_pool: AssetPool, process: Process) { + fn asset_pool_extend_maintains_sort_order(mut asset_pool: AssetPool, process: Process) { // Start with some commissioned assets asset_pool.commission_new(2020); @@ -1775,7 +1772,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_no_duplicates_expected(mut asset_pool: AssetPool) { + fn asset_pool_extend_no_duplicates_expected(mut asset_pool: AssetPool) { // Start with some commissioned assets asset_pool.commission_new(2020); let original_count = asset_pool.active.len(); @@ -1793,7 +1790,7 @@ mod tests { } #[rstest] - fn test_asset_pool_extend_increments_next_id(mut asset_pool: AssetPool, process: Process) { + fn asset_pool_extend_increments_next_id(mut asset_pool: AssetPool, process: Process) { // Start with some commissioned assets asset_pool.commission_new(2020); assert_eq!(asset_pool.next_id, 2); // Should be 2 after commissioning 2 assets @@ -1830,7 +1827,7 @@ mod tests { } #[rstest] - fn test_asset_pool_mothball_unretained(mut asset_pool: AssetPool) { + fn asset_pool_mothball_unretained(mut asset_pool: AssetPool) { // Commission some assets asset_pool.commission_new(2020); assert_eq!(asset_pool.active.len(), 2); @@ -1849,7 +1846,7 @@ mod tests { } #[rstest] - fn test_asset_pool_decommission_unused(mut asset_pool: AssetPool) { + fn asset_pool_decommission_unused(mut asset_pool: AssetPool) { // Commission some assets asset_pool.commission_new(2020); assert_eq!(asset_pool.active.len(), 2); @@ -1876,7 +1873,7 @@ mod tests { } #[rstest] - fn test_asset_pool_decommission_if_not_active_none_active(mut asset_pool: AssetPool) { + fn asset_pool_decommission_if_not_active_none_active(mut asset_pool: AssetPool) { // Commission some assets asset_pool.commission_new(2020); let all_assets = asset_pool.active.clone(); @@ -1897,7 +1894,7 @@ mod tests { #[rstest] #[should_panic(expected = "Cannot mothball asset that has not been commissioned")] - fn test_asset_pool_decommission_if_not_active_non_commissioned_asset( + fn asset_pool_decommission_if_not_active_non_commissioned_asset( mut asset_pool: AssetPool, process: Process, ) { @@ -1917,7 +1914,7 @@ mod tests { } #[rstest] - fn test_asset_commission(process: Process) { + fn asset_commission(process: Process) { // Test successful commissioning of Future asset let process_rc = Rc::new(process); let mut asset1 = Asset::new_future( @@ -1949,7 +1946,7 @@ mod tests { #[rstest] #[case::commission_during_process_lifetime(2024, 2024)] #[case::decommission_after_process_lifetime_ends(2026, 2025)] - fn test_asset_decommission( + fn asset_decommission( #[case] requested_decommission_year: u32, #[case] expected_decommission_year: u32, process: Process, @@ -1979,7 +1976,7 @@ mod tests { 
#[case::decommission_before_predefined_max_year(2024, 2024, Some(2025))] #[case::decommission_during_process_lifetime_end_no_max_year(2024, 2024, None)] #[case::decommission_after_process_lifetime_end_no_max_year(2026, 2025, None)] - fn test_asset_decommission_with_max_decommission_year_predefined( + fn asset_decommission_with_max_decommission_year_predefined( #[case] requested_decommission_year: u32, #[case] expected_decommission_year: u32, #[case] max_decommission_year: Option, @@ -2008,7 +2005,7 @@ mod tests { #[rstest] #[should_panic(expected = "Assets with state Candidate cannot be commissioned")] - fn test_commission_wrong_states(process: Process) { + fn commission_wrong_states(process: Process) { let mut asset = Asset::new_candidate(process.into(), "GBR".into(), Capacity(1.0), 2020).unwrap(); asset.commission(AssetID(1), None, ""); @@ -2016,7 +2013,7 @@ mod tests { #[rstest] #[should_panic(expected = "Cannot decommission an asset that hasn't been commissioned")] - fn test_decommission_wrong_state(process: Process) { + fn decommission_wrong_state(process: Process) { let mut asset = Asset::new_candidate(process.into(), "GBR".into(), Capacity(1.0), 2020).unwrap(); asset.decommission(2025, ""); diff --git a/src/commodity.rs b/src/commodity.rs index dbafd8494..ab41779d6 100644 --- a/src/commodity.rs +++ b/src/commodity.rs @@ -113,7 +113,7 @@ mod tests { use crate::time_slice::TimeSliceSelection; #[test] - fn test_demand_map() { + fn demand_map_works() { let ts_selection = TimeSliceSelection::Single(TimeSliceID { season: "all-year".into(), time_of_day: "all-day".into(), @@ -126,7 +126,7 @@ mod tests { } #[test] - fn test_commodity_levy_map() { + fn commodity_levy_map_works() { let ts = TimeSliceID { season: "winter".into(), time_of_day: "day".into(), diff --git a/src/finance.rs b/src/finance.rs index 7076afb4a..9cfc850df 100644 --- a/src/finance.rs +++ b/src/finance.rs @@ -84,7 +84,7 @@ mod tests { #[case(10, 0.0, 0.1)] // Other edge case: discount_rate==0 #[case(10, 0.05, 0.1295045749654567)] #[case(5, 0.03, 0.2183545714005762)] - fn test_capital_recovery_factor( + fn capital_recovery_factor_works( #[case] lifetime: u32, #[case] discount_rate: f64, #[case] expected: f64, @@ -98,7 +98,7 @@ mod tests { #[case(500.0, 5, 0.03, 109.17728570028798)] #[case(1000.0, 0, 0.05, 0.0)] // Zero lifetime #[case(2000.0, 20, 0.0, 100.0)] // Zero discount rate - fn test_annual_capital_cost( + fn annual_capital_cost_works( #[case] capital_cost: f64, #[case] lifetime: u32, #[case] discount_rate: f64, @@ -132,7 +132,7 @@ mod tests { vec![("winter", "day", 50.0)], f64::INFINITY // Zero capacity case )] - fn test_profitability_index( + fn profitability_index_works( #[case] capacity: f64, #[case] annual_fixed_cost: f64, #[case] activity_data: Vec<(&str, &str, f64)>, @@ -176,7 +176,7 @@ mod tests { } #[test] - fn test_profitability_index_zero_activity() { + fn profitability_index_zero_activity() { let capacity = Capacity(100.0); let annual_fixed_cost = MoneyPerCapacity(50.0); let activity = indexmap! 
{}; @@ -200,7 +200,7 @@ mod tests { vec![("winter", "day", 0.0)], 200.0 // (50*100 + 25*0) / 25 = 5000/25 )] - fn test_lcox( + fn lcox_works( #[case] capacity: f64, #[case] annual_fixed_cost: f64, #[case] activity_data: Vec<(&str, &str, f64)>, diff --git a/src/fixture.rs b/src/fixture.rs index c8bbc7b79..40704082f 100644 --- a/src/fixture.rs +++ b/src/fixture.rs @@ -381,14 +381,14 @@ mod tests { } #[test] - fn test_patch_and_validate_simple_fail() { + fn patch_and_validate_simple_fail() { let patch = FilePatch::new("commodities.csv") .with_deletion("RSHEAT,Residential heating,svd,daynight"); assert!(patch_and_validate_simple!(vec![patch]).is_err()); } #[test] - fn test_patch_and_run_simple_fail() { + fn patch_and_run_simple_fail() { let patch = FilePatch::new("commodities.csv") .with_deletion("RSHEAT,Residential heating,svd,daynight"); assert!(patch_and_run_simple!(vec![patch]).is_err()); diff --git a/src/graph/investment.rs b/src/graph/investment.rs index 0d99079e1..7d894f4eb 100644 --- a/src/graph/investment.rs +++ b/src/graph/investment.rs @@ -509,7 +509,7 @@ mod tests { use std::rc::Rc; #[test] - fn test_order_sccs_simple_cycle() { + fn order_sccs_simple_cycle() { let markets = ["A", "B", "C"].map(|id| InvestmentSet::Single((id.into(), "GBR".into()))); // Create graph with cycle edges plus an extra dependency B ← D (see doc comment) @@ -549,10 +549,7 @@ mod tests { } #[rstest] - fn test_solve_investment_order_linear_graph( - sed_commodity: Commodity, - svd_commodity: Commodity, - ) { + fn solve_investment_order_linear_graph(sed_commodity: Commodity, svd_commodity: Commodity) { // Create a simple linear graph: A -> B -> C let mut graph = Graph::new(); @@ -582,7 +579,7 @@ mod tests { } #[rstest] - fn test_solve_investment_order_cyclic_graph(sed_commodity: Commodity) { + fn solve_investment_order_cyclic_graph(sed_commodity: Commodity) { // Create a simple cyclic graph: A -> B -> A let mut graph = Graph::new(); @@ -610,10 +607,7 @@ mod tests { } #[rstest] - fn test_solve_investment_order_layered_graph( - sed_commodity: Commodity, - svd_commodity: Commodity, - ) { + fn solve_investment_order_layered_graph(sed_commodity: Commodity, svd_commodity: Commodity) { // Create a graph with layers: // A // / \ @@ -657,10 +651,7 @@ mod tests { } #[rstest] - fn test_solve_investment_order_multiple_regions( - sed_commodity: Commodity, - svd_commodity: Commodity, - ) { + fn solve_investment_order_multiple_regions(sed_commodity: Commodity, svd_commodity: Commodity) { // Create a simple linear graph: A -> B -> C let mut graph = Graph::new(); diff --git a/src/graph/validate.rs b/src/graph/validate.rs index 10f92c562..1ae20b552 100644 --- a/src/graph/validate.rs +++ b/src/graph/validate.rs @@ -240,7 +240,7 @@ mod tests { use std::rc::Rc; #[rstest] - fn test_validate_commodities_graph( + fn validate_commodities_graph_works( other_commodity: Commodity, sed_commodity: Commodity, svd_commodity: Commodity, @@ -267,7 +267,7 @@ mod tests { } #[rstest] - fn test_validate_commodities_graph_invalid_svd_consumed( + fn validate_commodities_graph_invalid_svd_consumed( svd_commodity: Commodity, sed_commodity: Commodity, other_commodity: Commodity, @@ -295,7 +295,7 @@ mod tests { } #[rstest] - fn test_validate_commodities_graph_invalid_svd_not_produced(svd_commodity: Commodity) { + fn validate_commodities_graph_invalid_svd_not_produced(svd_commodity: Commodity) { let mut graph = Graph::new(); let mut commodities = CommodityMap::new(); @@ -315,7 +315,7 @@ mod tests { } #[rstest] - fn 
test_validate_commodities_graph_invalid_sed(sed_commodity: Commodity) { + fn validate_commodities_graph_invalid_sed(sed_commodity: Commodity) { let mut graph = Graph::new(); let mut commodities = CommodityMap::new(); @@ -336,7 +336,7 @@ mod tests { } #[rstest] - fn test_validate_commodities_graph_invalid_oth( + fn validate_commodities_graph_invalid_oth( other_commodity: Commodity, sed_commodity: Commodity, ) { diff --git a/src/id.rs b/src/id.rs index 2ccc8fca8..9da2714b1 100644 --- a/src/id.rs +++ b/src/id.rs @@ -164,7 +164,7 @@ mod tests { #[case("some commodity")] #[case("PROCESS")] #[case("café")] // unicode supported - fn test_deserialise_id_valid(#[case] id: &str) { + fn deserialise_id_valid(#[case] id: &str) { assert_eq!(deserialise_id(id).unwrap().id.to_string(), id); } @@ -174,7 +174,7 @@ mod tests { #[case("annual")] #[case("ALL")] #[case(" ALL ")] - fn test_deserialise_id_invalid(#[case] id: &str) { + fn deserialise_id_invalid(#[case] id: &str) { assert!(deserialise_id(id).is_err()); } } diff --git a/src/input.rs b/src/input.rs index 117f07222..61f0ce9b1 100644 --- a/src/input.rs +++ b/src/input.rs @@ -347,7 +347,7 @@ mod tests { /// Test a normal read #[test] - fn test_read_csv() { + fn read_csv_works() { let dir = tempdir().unwrap(); let file_path = create_csv_file(dir.path(), "id,value\nhello,1\nworld,2\n"); let records: Vec = read_csv(&file_path).unwrap().collect(); @@ -408,7 +408,7 @@ mod tests { } #[test] - fn test_read_toml() { + fn read_toml_works() { let dir = tempdir().unwrap(); let file_path = dir.path().join("test.toml"); { @@ -439,7 +439,7 @@ mod tests { } #[test] - fn test_deserialise_proportion_nonzero() { + fn deserialise_proportion_nonzero_works() { // Valid inputs assert_eq!(deserialise_f64(0.01), Ok(Dimensionless(0.01))); assert_eq!(deserialise_f64(0.5), Ok(Dimensionless(0.5))); @@ -454,7 +454,7 @@ mod tests { } #[test] - fn test_check_values_sum_to_one_approx() { + fn check_values_sum_to_one_approx_works() { // Single input, valid assert!(check_values_sum_to_one_approx([Dimensionless(1.0)].into_iter()).is_ok()); @@ -488,12 +488,12 @@ mod tests { #[case(&[2,1],false)] #[case(&[1,1],false)] #[case(&[1,3,2,4], false)] - fn test_is_sorted_and_unique(#[case] values: &[u32], #[case] expected: bool) { + fn is_sorted_and_unique_works(#[case] values: &[u32], #[case] expected: bool) { assert_eq!(is_sorted_and_unique(values), expected); } #[test] - fn test_format_items_with_cap() { + fn format_items_with_cap_works() { let items = vec!["a", "b", "c"]; assert_eq!(format_items_with_cap(&items), r#"["a", "b", "c"]"#); diff --git a/src/input/agent.rs b/src/input/agent.rs index 24ddb5857..fbe00e923 100644 --- a/src/input/agent.rs +++ b/src/input/agent.rs @@ -164,7 +164,7 @@ mod tests { use std::iter; #[test] - fn test_read_agents_file_from_iter() { + fn read_agents_file_from_iter_works() { // Valid case let region_ids = IndexSet::from(["GBR".into()]); let agent = AgentRaw { diff --git a/src/input/agent/commodity_portion.rs b/src/input/agent/commodity_portion.rs index 10dbba414..97dd0136d 100644 --- a/src/input/agent/commodity_portion.rs +++ b/src/input/agent/commodity_portion.rs @@ -196,7 +196,7 @@ mod tests { use std::rc::Rc; #[test] - fn test_validate_agent_commodity_portions() { + fn validate_agent_commodity_portions_works() { let region_ids = IndexSet::from([RegionID::new("region1"), RegionID::new("region2")]); let milestone_years = [2020]; let agents = IndexMap::from([( diff --git a/src/input/agent/objective.rs b/src/input/agent/objective.rs index d0aa8217f..b6f2775e7 
100644 --- a/src/input/agent/objective.rs +++ b/src/input/agent/objective.rs @@ -182,7 +182,7 @@ mod tests { } #[test] - fn test_check_objective_parameter_single() { + fn check_objective_parameter_single() { // DecisionRule::Single let decision_rule = DecisionRule::Single; let objective = objective!(None, None); @@ -194,7 +194,7 @@ mod tests { } #[test] - fn test_check_objective_parameter_weighted() { + fn check_objective_parameter_weighted() { // DecisionRule::Weighted let decision_rule = DecisionRule::Weighted; let objective = objective!(Some(Dimensionless(1.0)), None); @@ -206,7 +206,7 @@ mod tests { } #[test] - fn test_check_objective_parameter_lexico() { + fn check_objective_parameter_lexico() { // DecisionRule::Lexicographical let decision_rule = DecisionRule::Lexicographical { tolerance: 1.0 }; let objective = objective!(None, Some(1)); @@ -229,10 +229,7 @@ mod tests { } #[rstest] - fn test_read_agent_objectives_from_iter_valid( - agents: AgentMap, - objective_raw: AgentObjectiveRaw, - ) { + fn read_agent_objectives_from_iter_valid(agents: AgentMap, objective_raw: AgentObjectiveRaw) { let milestone_years = [2020]; let expected = iter::once(( "agent1".into(), @@ -249,7 +246,7 @@ mod tests { } #[rstest] - fn test_read_agent_objectives_from_iter_invalid_no_objective_for_agent(agents: AgentMap) { + fn read_agent_objectives_from_iter_invalid_no_objective_for_agent(agents: AgentMap) { // Missing objective for agent assert_error!( read_agent_objectives_from_iter(iter::empty(), &agents, &[2020]), @@ -258,7 +255,7 @@ mod tests { } #[rstest] - fn test_read_agent_objectives_from_iter_invalid_no_objective_for_year( + fn read_agent_objectives_from_iter_invalid_no_objective_for_year( agents: AgentMap, objective_raw: AgentObjectiveRaw, ) { @@ -270,7 +267,7 @@ mod tests { } #[rstest] - fn test_read_agent_objectives_from_iter_invalid_bad_param(agents: AgentMap) { + fn read_agent_objectives_from_iter_invalid_bad_param(agents: AgentMap) { // Bad parameter let bad_objective = AgentObjectiveRaw { agent_id: "agent1".into(), diff --git a/src/input/agent/search_space.rs b/src/input/agent/search_space.rs index 35b332bd8..e4426acad 100644 --- a/src/input/agent/search_space.rs +++ b/src/input/agent/search_space.rs @@ -242,7 +242,7 @@ mod tests { } #[rstest] - fn test_search_space_raw_into_search_space_valid( + fn search_space_raw_into_search_space_valid( agents: AgentMap, processes: ProcessMap, commodity_ids: HashSet, @@ -261,7 +261,7 @@ mod tests { } #[rstest] - fn test_search_space_raw_into_search_space_invalid_commodity_id( + fn search_space_raw_into_search_space_invalid_commodity_id( agents: AgentMap, processes: ProcessMap, commodity_ids: HashSet, @@ -280,7 +280,7 @@ mod tests { } #[rstest] - fn test_search_space_raw_into_search_space_invalid_process_id( + fn search_space_raw_into_search_space_invalid_process_id( agents: AgentMap, processes: ProcessMap, commodity_ids: HashSet, diff --git a/src/input/asset.rs b/src/input/asset.rs index 6fbc5f493..bd34748e1 100644 --- a/src/input/asset.rs +++ b/src/input/asset.rs @@ -132,7 +132,7 @@ mod tests { #[rstest] #[case::max_decommission_year_provided(Some(2015))] #[case::max_decommission_year_not_provided(None)] - fn test_read_assets_from_iter_valid( + fn read_assets_from_iter_valid( #[case] max_decommission_year: Option, agent_ids: IndexSet, processes: ProcessMap, @@ -195,7 +195,7 @@ mod tests { commission_year: 2010, max_decommission_year: Some(2005), })] - fn test_read_assets_from_iter_invalid( + fn read_assets_from_iter_invalid( #[case] asset: AssetRaw, 
agent_ids: IndexSet, processes: ProcessMap, diff --git a/src/input/commodity.rs b/src/input/commodity.rs index 83f08f480..4cb72115e 100644 --- a/src/input/commodity.rs +++ b/src/input/commodity.rs @@ -204,13 +204,13 @@ mod tests { } #[test] - fn test_validate_commodity() { + fn validate_commodity_works() { let commodity = make_commodity(CommodityType::SupplyEqualsDemand, PricingStrategy::Shadow); assert!(validate_commodity(&commodity).is_ok()); } #[test] - fn test_validate_commodity_other_priced() { + fn validate_commodity_other_priced() { let commodity = make_commodity(CommodityType::Other, PricingStrategy::MarginalCost); assert_error!( validate_commodity(&commodity), @@ -219,7 +219,7 @@ mod tests { } #[test] - fn test_validate_commodity_sed_unpriced() { + fn validate_commodity_sed_unpriced() { let commodity = make_commodity(CommodityType::SupplyEqualsDemand, PricingStrategy::Unpriced); assert_error!( diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs index 4374bdaea..87fa36f14 100644 --- a/src/input/commodity/demand.rs +++ b/src/input/commodity/demand.rs @@ -226,7 +226,7 @@ mod tests { use tempfile::tempdir; #[rstest] - fn test_read_demand_from_iter(svd_commodity: Commodity, region_ids: IndexSet) { + fn read_demand_from_iter_works(svd_commodity: Commodity, region_ids: IndexSet) { let svd_commodities = get_svd_map(&svd_commodity); let demand = [ Demand { @@ -251,7 +251,7 @@ mod tests { } #[rstest] - fn test_read_demand_from_iter_bad_commodity_id( + fn read_demand_from_iter_bad_commodity_id( svd_commodity: Commodity, region_ids: IndexSet, ) { @@ -284,7 +284,7 @@ mod tests { } #[rstest] - fn test_read_demand_from_iter_bad_region_id( + fn read_demand_from_iter_bad_region_id( svd_commodity: Commodity, region_ids: IndexSet, ) { @@ -311,10 +311,7 @@ mod tests { } #[rstest] - fn test_read_demand_from_iter_bad_year( - svd_commodity: Commodity, - region_ids: IndexSet, - ) { + fn read_demand_from_iter_bad_year(svd_commodity: Commodity, region_ids: IndexSet) { // Bad year let svd_commodities = get_svd_map(&svd_commodity); let demand = [ @@ -343,7 +340,7 @@ mod tests { #[case(f64::NAN)] #[case(f64::NEG_INFINITY)] #[case(f64::INFINITY)] - fn test_read_demand_from_iter_bad_demand( + fn read_demand_from_iter_bad_demand( svd_commodity: Commodity, region_ids: IndexSet, #[case] quantity: f64, @@ -363,7 +360,7 @@ mod tests { } #[rstest] - fn test_read_demand_from_iter_multiple_entries( + fn read_demand_from_iter_multiple_entries( svd_commodity: Commodity, region_ids: IndexSet, ) { @@ -396,7 +393,7 @@ mod tests { } #[rstest] - fn test_read_demand_from_iter_missing_year( + fn read_demand_from_iter_missing_year( svd_commodity: Commodity, region_ids: IndexSet, ) { @@ -433,7 +430,7 @@ mod tests { } #[rstest] - fn test_read_demand_file(svd_commodity: Commodity, region_ids: IndexSet) { + fn read_demand_file_works(svd_commodity: Commodity, region_ids: IndexSet) { let svd_commodities = get_svd_map(&svd_commodity); let dir = tempdir().unwrap(); create_demand_file(dir.path()); diff --git a/src/input/commodity/demand_slicing.rs b/src/input/commodity/demand_slicing.rs index 9a988e3dc..995fa39c8 100644 --- a/src/input/commodity/demand_slicing.rs +++ b/src/input/commodity/demand_slicing.rs @@ -171,7 +171,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_valid( + fn read_demand_slices_from_iter_valid( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -221,7 +221,7 @@ mod tests { } #[rstest] - fn 
test_read_demand_slices_from_iter_valid_multiple_time_slices( + fn read_demand_slices_from_iter_valid_multiple_time_slices( svd_commodity: Commodity, region_ids: IndexSet, ) { @@ -300,7 +300,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_empty_file( + fn read_demand_slices_from_iter_invalid_empty_file( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -319,7 +319,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_bad_commodity( + fn read_demand_slices_from_iter_invalid_bad_commodity( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -344,7 +344,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_bad_region( + fn read_demand_slices_from_iter_invalid_bad_region( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -369,7 +369,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_bad_time_slice( + fn read_demand_slices_from_iter_invalid_bad_time_slice( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -394,7 +394,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_missing_time_slices( + fn read_demand_slices_from_iter_invalid_missing_time_slices( svd_commodity: Commodity, region_ids: IndexSet, ) { @@ -442,7 +442,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_duplicate_time_slice( + fn read_demand_slices_from_iter_invalid_duplicate_time_slice( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -468,7 +468,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_season_time_slice_conflict( + fn read_demand_slices_from_iter_invalid_season_time_slice_conflict( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, @@ -500,7 +500,7 @@ mod tests { } #[rstest] - fn test_read_demand_slices_from_iter_invalid_bad_fractions( + fn read_demand_slices_from_iter_invalid_bad_fractions( svd_commodity: Commodity, region_ids: IndexSet, time_slice_info: TimeSliceInfo, diff --git a/src/input/commodity/levy.rs b/src/input/commodity/levy.rs index 14f4eb4d0..afb083a6b 100644 --- a/src/input/commodity/levy.rs +++ b/src/input/commodity/levy.rs @@ -251,7 +251,7 @@ mod tests { } #[rstest] - fn test_validate_commodity_levies_map_valid( + fn validate_commodity_levies_map_valid( cost_map: CommodityLevyMap, time_slice_info: TimeSliceInfo, region_ids: IndexSet, @@ -263,7 +263,7 @@ mod tests { } #[rstest] - fn test_validate_commodity_levies_map_invalid_missing_region( + fn validate_commodity_levies_map_invalid_missing_region( cost_map: CommodityLevyMap, time_slice_info: TimeSliceInfo, ) { @@ -276,7 +276,7 @@ mod tests { } #[rstest] - fn test_validate_commodity_levies_map_invalid_missing_year( + fn validate_commodity_levies_map_invalid_missing_year( cost_map: CommodityLevyMap, time_slice_info: TimeSliceInfo, region_ids: IndexSet, @@ -289,7 +289,7 @@ mod tests { } #[rstest] - fn test_validate_commodity_levies_map_invalid( + fn validate_commodity_levies_map_invalid( cost_map: CommodityLevyMap, region_ids: IndexSet, ) { @@ -314,7 +314,7 @@ mod tests { } #[rstest] - fn test_add_missing_region_to_commodity_levy_map( + fn add_missing_region_to_commodity_levy_map_works( cost_map: CommodityLevyMap, time_slice_info: TimeSliceInfo, region_id: RegionID, diff --git a/src/input/process/availability.rs b/src/input/process/availability.rs index 20648f03a..0be9dad21 100644 --- 
a/src/input/process/availability.rs
+++ b/src/input/process/availability.rs
@@ -235,7 +235,7 @@ mod tests {
     #[case("..0.9", Dimensionless(0.0)..=Dimensionless(0.9))] // Empty lower
     #[case("0.1..", Dimensionless(0.1)..=Dimensionless(1.0))] // Empty upper
     #[case("0.5..0.5", Dimensionless(0.5)..=Dimensionless(0.5))] // Equality
-    fn test_parse_availabilities_string_valid(
+    fn parse_availabilities_string_valid(
         #[case] input: &str,
         #[case] expected: RangeInclusive<Dimensionless>,
     ) {
@@ -263,7 +263,7 @@ mod tests {
         "0.5",
         "Availability range must be of the form 'lower..upper', 'lower..' or '..upper'. Invalid: 0.5"
     )]
-    fn test_parse_availabilities_string_invalid(#[case] input: &str, #[case] error_msg: &str) {
+    fn parse_availabilities_string_invalid(#[case] input: &str, #[case] error_msg: &str) {
         assert_error!(parse_availabilities_string(input), error_msg);
     }
 
@@ -271,7 +271,7 @@ mod tests {
     #[case("0.1..", Year(0.1), Dimensionless(0.01)..=Dimensionless(0.1))] // Lower bound
     #[case("..0.5", Year(0.1), Dimensionless(0.0)..=Dimensionless(0.05))] // Upper bound
     #[case("0.5..0.5", Year(0.1), Dimensionless(0.05)..=Dimensionless(0.05))] // Equality
-    fn test_to_bounds(
+    fn to_bounds(
         #[case] limits: &str,
         #[case] ts_length: Year,
         #[case] expected: RangeInclusive<Dimensionless>,
diff --git a/src/input/process/investment_constraints.rs b/src/input/process/investment_constraints.rs
index dfd73dff7..065220422 100644
--- a/src/input/process/investment_constraints.rs
+++ b/src/input/process/investment_constraints.rs
@@ -145,7 +145,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_read_constraints_only_uses_milestone_years_within_process_range(processes: ProcessMap) {
+    fn read_constraints_only_uses_milestone_years_within_process_range(processes: ProcessMap) {
         // Process years are 2010..=2020 from the fixture (excludes 2008)
         let milestone_years = vec![2008, 2012, 2016];
 
@@ -189,7 +189,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_read_process_investment_constraints_from_iter(processes: ProcessMap) {
+    fn read_process_investment_constraints_from_iter_works(processes: ProcessMap) {
         // Create milestone years matching the process years
         let milestone_years: Vec<u32> = vec![2010, 2015, 2020];
 
@@ -261,7 +261,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_read_constraints_all_regions_all_years(processes: ProcessMap) {
+    fn read_constraints_all_regions_all_years(processes: ProcessMap) {
         // Create milestone years matching the process years
         let milestone_years: Vec<u32> = vec![2010, 2015, 2020];
 
@@ -308,7 +308,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_read_constraints_year_outside_milestone_years(processes: ProcessMap) {
+    fn read_constraints_year_outside_milestone_years(processes: ProcessMap) {
         // Create constraint with year outside milestone years
         // Process years are 2010..=2020 from the fixture
         let milestone_years = vec![2010, 2015, 2020];
@@ -333,7 +333,7 @@ mod tests {
     }
 
     #[test]
-    fn test_validate_addition_with_finite_value() {
+    fn validate_addition_with_finite_value() {
         // Valid: addition constraint with positive value
         let valid = validate_raw_constraint(10.0);
         assert!(valid.is_ok());
@@ -351,7 +351,7 @@ mod tests {
     }
 
     #[test]
-    fn test_validate_addition_rejects_infinite() {
+    fn validate_addition_rejects_infinite() {
         // Invalid: infinite value
         let invalid = validate_raw_constraint(f64::INFINITY);
         assert_error!(
diff --git a/src/input/process/parameter.rs b/src/input/process/parameter.rs
index f964445ae..c43780b2a 100644
--- a/src/input/process/parameter.rs
+++ b/src/input/process/parameter.rs
@@ -212,7 +212,7 @@ mod tests {
     }
 
     #[test]
-    fn test_param_raw_into_param_ok() {
+    fn param_raw_into_param_ok() {
         // No missing values
         let raw = create_param_raw(1, Some(Dimensionless(1.0)));
         assert_eq!(
@@ -283,7 +283,7 @@ mod tests {
     }
 
     #[test]
-    fn test_param_raw_validate_bad_lifetime() {
+    fn param_raw_validate_bad_lifetime() {
         // lifetime = 0
         assert!(
             create_param_raw(0, Some(Dimensionless(1.0)))
@@ -293,7 +293,7 @@ mod tests {
     }
 
     #[test]
-    fn test_param_raw_validate_bad_discount_rate() {
+    fn param_raw_validate_bad_discount_rate() {
         // discount rate = -1
         assert!(
             create_param_raw(1, Some(Dimensionless(-1.0)))
diff --git a/src/input/region.rs b/src/input/region.rs
index e46c4f44d..50da592ff 100644
--- a/src/input/region.rs
+++ b/src/input/region.rs
@@ -43,7 +43,7 @@ AP,Asia Pacific"
     }
 
     #[test]
-    fn test_read_regions() {
+    fn read_regions_works() {
         let dir = tempdir().unwrap();
         create_regions_file(dir.path());
         let regions = read_regions(dir.path()).unwrap();
diff --git a/src/input/time_slice.rs b/src/input/time_slice.rs
index 517e989f1..a271ae23b 100644
--- a/src/input/time_slice.rs
+++ b/src/input/time_slice.rs
@@ -123,7 +123,7 @@ autumn,evening,0.25"
     }
 
     #[test]
-    fn test_read_time_slice_info() {
+    fn read_time_slice_info_works() {
         let dir = tempdir().unwrap();
         create_time_slices_file(dir.path());
 
@@ -184,7 +184,7 @@ autumn,evening,0.25"
     }
 
     #[test]
-    fn test_read_time_slice_info_non_existent() {
+    fn read_time_slice_info_non_existent() {
         let actual = read_time_slice_info(tempdir().unwrap().path());
         assert_eq!(actual.unwrap(), TimeSliceInfo::default());
     }
diff --git a/src/model/parameters.rs b/src/model/parameters.rs
index 95c89eb46..c7004e27b 100644
--- a/src/model/parameters.rs
+++ b/src/model/parameters.rs
@@ -255,7 +255,7 @@ mod tests {
     }
 
     #[test]
-    fn test_check_milestone_years() {
+    fn check_milestone_years_works() {
         // Valid
         assert!(check_milestone_years(&[1]).is_ok());
         assert!(check_milestone_years(&[1, 2]).is_ok());
@@ -267,7 +267,7 @@ mod tests {
     }
 
    #[test]
-    fn test_model_params_from_path() {
+    fn model_params_from_path() {
         let dir = tempdir().unwrap();
         {
             let mut file = File::create(dir.path().join(MODEL_PARAMETERS_FILE_NAME)).unwrap();
@@ -289,7 +289,7 @@ mod tests {
     #[case(f64::INFINITY, false)] // Invalid: infinite value
     #[case(f64::NEG_INFINITY, false)] // Invalid: negative infinite value
     #[case(f64::NAN, false)] // Invalid: NaN value
-    fn test_check_value_of_lost_load(#[case] value: f64, #[case] expected_valid: bool) {
+    fn check_value_of_lost_load_works(#[case] value: f64, #[case] expected_valid: bool) {
         let money_per_flow = MoneyPerFlow::new(value);
         let result = check_value_of_lost_load(money_per_flow);
 
@@ -307,7 +307,7 @@ mod tests {
     #[case(100, true)] // Valid large value
     #[case(u32::MAX, true)] // Valid maximum value
     #[case(0, false)] // Invalid: zero
-    fn test_check_max_ironing_out_iterations(#[case] value: u32, #[case] expected_valid: bool) {
+    fn check_max_ironing_out_iterations_works(#[case] value: u32, #[case] expected_valid: bool) {
         let result = check_max_ironing_out_iterations(value);
 
         assert_validation_result(
@@ -329,7 +329,7 @@ mod tests {
     #[case(f64::INFINITY, false)] // Invalid: infinite value
     #[case(f64::NEG_INFINITY, false)] // Invalid: negative infinite value
     #[case(f64::NAN, false)] // Invalid: NaN value
-    fn test_check_price_tolerance(#[case] value: f64, #[case] expected_valid: bool) {
+    fn check_price_tolerance_works(#[case] value: f64, #[case] expected_valid: bool) {
         let dimensionless = Dimensionless::new(value);
         let result = check_price_tolerance(dimensionless);
 
@@ -349,7 +349,7 @@ mod tests {
     #[case(f64::INFINITY, false)] // Invalid: infinite value
     #[case(f64::NEG_INFINITY, false)] // Invalid: negative infinite value
     #[case(f64::NAN, false)] // Invalid: NaN value
-    fn test_check_capacity_margin(#[case] value: f64, #[case] expected_valid: bool) {
+    fn check_capacity_margin_works(#[case] value: f64, #[case] expected_valid: bool) {
         let result = check_capacity_margin(value);
 
         assert_validation_result(
diff --git a/src/output.rs b/src/output.rs
index d29838a2f..48edd6e47 100644
--- a/src/output.rs
+++ b/src/output.rs
@@ -674,7 +674,7 @@ mod tests {
     use tempfile::tempdir;
 
     #[rstest]
-    fn test_write_assets(assets: AssetPool) {
+    fn write_assets(assets: AssetPool) {
         let dir = tempdir().unwrap();
 
         // Write an asset
@@ -696,7 +696,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
+    fn write_flows(assets: AssetPool, commodity_id: CommodityID, time_slice: TimeSliceID) {
         let milestone_year = 2020;
         let asset = assets.iter_active().next().unwrap();
         let flow_map = indexmap! {
@@ -729,7 +729,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
+    fn write_prices(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
         let milestone_year = 2020;
         let price = MoneyPerFlow(42.0);
         let mut prices = CommodityPrices::default();
@@ -762,7 +762,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_commodity_balance_duals(
+    fn write_commodity_balance_duals(
         commodity_id: CommodityID,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -804,11 +804,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_unmet_demand(
-        commodity_id: CommodityID,
-        region_id: RegionID,
-        time_slice: TimeSliceID,
-    ) {
+    fn write_unmet_demand(commodity_id: CommodityID, region_id: RegionID, time_slice: TimeSliceID) {
         let milestone_year = 2020;
         let run_description = "test_run".to_string();
         let value = Flow(0.5);
@@ -846,7 +842,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_activity(assets: AssetPool, time_slice: TimeSliceID) {
+    fn write_activity(assets: AssetPool, time_slice: TimeSliceID) {
         let milestone_year = 2020;
         let run_description = "test_run".to_string();
         let activity = Activity(100.5);
@@ -892,7 +888,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_activity_with_missing_keys(assets: AssetPool, time_slice: TimeSliceID) {
+    fn write_activity_with_missing_keys(assets: AssetPool, time_slice: TimeSliceID) {
         let milestone_year = 2020;
         let run_description = "test_run".to_string();
         let activity = Activity(100.5);
@@ -936,7 +932,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_solver_values() {
+    fn write_solver_values() {
         let milestone_year = 2020;
         let run_description = "test_run".to_string();
         let objective_value = Money(1234.56);
@@ -967,7 +963,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_appraisal_results(asset: Asset, appraisal_output: AppraisalOutput) {
+    fn write_appraisal_results(asset: Asset, appraisal_output: AppraisalOutput) {
         let milestone_year = 2020;
         let run_description = "test_run".to_string();
         let dir = tempdir().unwrap();
@@ -1002,7 +998,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_write_appraisal_time_slice_results(
+    fn write_appraisal_time_slice_results(
         asset: Asset,
         appraisal_output: AppraisalOutput,
         time_slice: TimeSliceID,
@@ -1047,7 +1043,7 @@ mod tests {
     }
 
     #[test]
-    fn test_create_output_directory_new_directory() {
+    fn create_output_directory_new_directory() {
         let temp_dir = tempdir().unwrap();
         let output_dir = temp_dir.path().join("new_output");
 
@@ -1059,7 +1055,7 @@ mod tests {
     }
 
     #[test]
-    fn test_create_output_directory_existing_empty_directory() {
+    fn create_output_directory_existing_empty_directory() {
         let temp_dir = tempdir().unwrap();
         let output_dir = temp_dir.path().join("empty_output");
 
@@ -1074,7 +1070,7 @@ mod tests {
     }
 
     #[test]
-    fn test_create_output_directory_existing_with_files_no_overwrite() {
+    fn create_output_directory_existing_with_files_no_overwrite() {
         let temp_dir = tempdir().unwrap();
         let output_dir = temp_dir.path().join("output_with_files");
 
@@ -1094,7 +1090,7 @@ mod tests {
     }
 
     #[test]
-    fn test_create_output_directory_existing_with_files_allow_overwrite() {
+    fn create_output_directory_existing_with_files_allow_overwrite() {
         let temp_dir = tempdir().unwrap();
         let output_dir = temp_dir.path().join("output_with_files");
 
@@ -1112,7 +1108,7 @@ mod tests {
     }
 
     #[test]
-    fn test_create_output_directory_nested_path() {
+    fn create_output_directory_nested_path() {
         let temp_dir = tempdir().unwrap();
         let output_dir = temp_dir.path().join("nested").join("path").join("output");
 
@@ -1124,7 +1120,7 @@ mod tests {
     }
 
    #[test]
-    fn test_create_output_directory_existing_subdirs_with_files_allow_overwrite() {
+    fn create_output_directory_existing_subdirs_with_files_allow_overwrite() {
         let temp_dir = tempdir().unwrap();
         let output_dir = temp_dir.path().join("output_with_subdirs");
diff --git a/src/patch.rs b/src/patch.rs
index 1dc4390b0..4c2b8555c 100644
--- a/src/patch.rs
+++ b/src/patch.rs
@@ -296,7 +296,7 @@ mod tests {
     use crate::patch::{FilePatch, ModelPatch};
 
     #[test]
-    fn test_modify_base_with_patch() {
+    fn modify_base_with_patch_works() {
         let base = "col1,col2\nvalue1,value2\nvalue3,value4\nvalue5,value6\n";
 
         // Create a patch to delete row3,row4 and add row7,row8
@@ -316,7 +316,7 @@ mod tests {
     }
 
     #[test]
-    fn test_modify_base_with_patch_mismatched_header() {
+    fn modify_base_with_patch_mismatched_header() {
         let base = "col1,col2\nvalue1,value2\n";
 
         // Create a patch with a mismatched header
@@ -329,7 +329,7 @@ mod tests {
     }
 
     #[test]
-    fn test_merge_model_toml_basic() {
+    fn merge_model_toml_basic() {
         let base = r#"
 field = "data"
 [section]
@@ -356,7 +356,7 @@ mod tests {
     }
 
     #[test]
-    fn test_file_patch() {
+    fn file_patch() {
         // Patch with a small change to an asset capacity
         let assets_patch = FilePatch::new("assets.csv")
             .with_deletion("GASDRV,GBR,A0_GEX,4002.26,2020")
@@ -376,7 +376,7 @@ mod tests {
     }
 
     #[test]
-    fn test_toml_patch() {
+    fn toml_patch() {
         // Patch to add an extra milestone year (2050)
         let toml_patch = "milestone_years = [2020, 2030, 2040, 2050]\n";
 
diff --git a/src/process.rs b/src/process.rs
index d8127dea2..f12986e16 100644
--- a/src/process.rs
+++ b/src/process.rs
@@ -719,7 +719,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_no_levies(
+    fn get_levy_no_levies(
         commodity_no_levies: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -738,7 +738,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_with_levy(
+    fn get_levy_with_levy(
         commodity_with_levy: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -757,7 +757,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_with_incentive(
+    fn get_levy_with_incentive(
         commodity_with_incentive: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -776,7 +776,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_different_region(commodity_with_levy: Rc<Commodity>, time_slice: TimeSliceID) {
+    fn get_levy_different_region(commodity_with_levy: Rc<Commodity>, time_slice: TimeSliceID) {
         let flow = ProcessFlow {
             commodity: commodity_with_levy,
             coeff: FlowPerActivity(1.0),
@@ -791,7 +791,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_different_year(
+    fn get_levy_different_year(
         commodity_with_levy: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -810,7 +810,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_different_time_slice(commodity_with_levy: Rc<Commodity>, region_id: RegionID) {
+    fn get_levy_different_time_slice(commodity_with_levy: Rc<Commodity>, region_id: RegionID) {
         let flow = ProcessFlow {
             commodity: commodity_with_levy,
             coeff: FlowPerActivity(1.0),
@@ -830,7 +830,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_consumption_positive_coeff(
+    fn get_levy_consumption_positive_coeff(
         commodity_with_consumption_levy: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -849,7 +849,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_consumption_negative_coeff(
+    fn get_levy_consumption_negative_coeff(
         commodity_with_consumption_levy: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -868,7 +868,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_production_positive_coeff(
+    fn get_levy_production_positive_coeff(
         commodity_with_production_levy: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -887,7 +887,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_levy_production_negative_coeff(
+    fn get_levy_production_negative_coeff(
         commodity_with_production_levy: Rc<Commodity>,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -906,7 +906,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_total_cost_base_cost(
+    fn get_total_cost_base_cost(
         flow_with_cost: ProcessFlow,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -918,7 +918,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_total_cost_with_levy(
+    fn get_total_cost_with_levy(
         flow_with_cost_and_levy: ProcessFlow,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -930,7 +930,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_total_cost_with_incentive(
+    fn get_total_cost_with_incentive(
         flow_with_cost_and_incentive: ProcessFlow,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -942,7 +942,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_total_cost_negative_coeff(
+    fn get_total_cost_negative_coeff(
         mut flow_with_cost: ProcessFlow,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -955,7 +955,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_total_cost_zero_coeff(
+    fn get_total_cost_zero_coeff(
         mut flow_with_cost: ProcessFlow,
         region_id: RegionID,
         time_slice: TimeSliceID,
@@ -968,7 +968,7 @@ mod tests {
     }
 
     #[test]
-    fn test_is_input_and_is_output() {
+    fn is_input_and_is_output() {
         let commodity = Rc::new(Commodity {
             id: "test_commodity".into(),
             description: "Test commodity".into(),
@@ -1005,7 +1005,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_new_with_full_availability(time_slice_info2: TimeSliceInfo) {
+    fn new_with_full_availability(time_slice_info2: TimeSliceInfo) {
         let limits = ActivityLimits::new_with_full_availability(&time_slice_info2);
 
         // Each timeslice from the info should be present in the limits
@@ -1023,7 +1023,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_new_from_limits_with_seasonal_limit_applied(time_slice_info2: TimeSliceInfo) {
+    fn new_from_limits_with_seasonal_limit_applied(time_slice_info2: TimeSliceInfo) {
         let mut limits = HashMap::new();
 
         // Set a seasonal upper limit that is stricter than the sum of timeslices
@@ -1046,7 +1046,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_new_from_limits_with_annual_limit_applied(time_slice_info2: TimeSliceInfo) {
+    fn new_from_limits_with_annual_limit_applied(time_slice_info2: TimeSliceInfo) {
         let mut limits = HashMap::new();
 
         // Set an annual upper limit that is stricter than the sum of timeslices
@@ -1073,7 +1073,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_new_from_limits_missing_timeslices_error(time_slice_info2: TimeSliceInfo) {
+    fn new_from_limits_missing_timeslices_error(time_slice_info2: TimeSliceInfo) {
         let mut limits = HashMap::new();
 
         // Add a single timeslice limit but do not provide limits for all timeslices
@@ -1090,7 +1090,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_new_from_limits_incompatible_limits(time_slice_info2: TimeSliceInfo) {
+    fn new_from_limits_incompatible_limits(time_slice_info2: TimeSliceInfo) {
         let mut limits = HashMap::new();
 
         // Time slice limits capping activity to 0.1 in each ts
diff --git a/src/region.rs b/src/region.rs
index 5c58535d9..58d078946 100644
--- a/src/region.rs
+++ b/src/region.rs
@@ -41,7 +41,7 @@ mod tests {
     use super::*;
 
     #[test]
-    fn test_parse_region_str() {
+    fn parse_region_str_works() {
         let region_ids: IndexSet<RegionID> = ["GBR".into(), "USA".into()].into_iter().collect();
 
         // List of regions
diff --git a/src/settings.rs b/src/settings.rs
index 5363257a0..516d8b481 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -123,7 +123,7 @@ mod tests {
     use tempfile::tempdir;
 
     #[test]
-    fn test_settings_load_from_path_no_file() {
+    fn settings_load_from_path_no_file() {
         let dir = tempdir().unwrap();
         let file_path = dir.path().join(SETTINGS_FILE_NAME); // NB: doesn't exist
         assert_eq!(
@@ -133,7 +133,7 @@ mod tests {
     }
 
     #[test]
-    fn test_settings_load_from_path() {
+    fn settings_load_from_path() {
         let dir = tempdir().unwrap();
         let file_path = dir.path().join(SETTINGS_FILE_NAME);
 
@@ -152,7 +152,7 @@ mod tests {
     }
 
     #[test]
-    fn test_default_file_contents() {
+    fn default_file_contents() {
         assert!(!Settings::default_file_contents().is_empty());
     }
 }
diff --git a/src/simulation/investment.rs b/src/simulation/investment.rs
index 2900ac899..37712f6f6 100644
--- a/src/simulation/investment.rs
+++ b/src/simulation/investment.rs
@@ -871,7 +871,7 @@ mod tests {
     use std::rc::Rc;
 
     #[rstest]
-    fn test_get_demand_limiting_capacity(
+    fn get_demand_limiting_capacity_works(
         time_slice: TimeSliceID,
         time_slice_info: TimeSliceInfo,
         svd_commodity: Commodity,
@@ -905,7 +905,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_get_demand_limiting_capacity_multiple_time_slices(
+    fn get_demand_limiting_capacity_multiple_time_slices(
         time_slice_info2: TimeSliceInfo,
         svd_commodity: Commodity,
         mut process: Process,
@@ -953,7 +953,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_compare_assets_fallback(process: Process, region_id: RegionID, agent_id: AgentID) {
+    fn compare_assets_fallback(process: Process, region_id: RegionID, agent_id: AgentID) {
         let process = Rc::new(process);
         let capacity = Capacity(2.0);
         let asset1 = Asset::new_commissioned(
diff --git a/src/simulation/prices.rs b/src/simulation/prices.rs
index afc1348a3..cdfdd172f 100644
--- a/src/simulation/prices.rs
+++ b/src/simulation/prices.rs
@@ -733,7 +733,7 @@ mod tests {
     #[case(MoneyPerFlow(0.0), MoneyPerFlow(-10.0), Dimensionless(0.1), false)] // comparing zero and negative
     #[case(MoneyPerFlow(10.0), MoneyPerFlow(0.0), Dimensionless(0.1), false)] // comparing positive and zero
     #[case(MoneyPerFlow(-10.0), MoneyPerFlow(0.0), Dimensionless(0.1), false)] // comparing negative and zero
-    fn test_within_tolerance_scenarios(
+    fn within_tolerance_scenarios(
         #[case] price1: MoneyPerFlow,
         #[case] price2: MoneyPerFlow,
         #[case] tolerance: Dimensionless,
@@ -757,7 +757,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_time_slice_weighted_averages(
+    fn time_slice_weighted_averages(
         commodity_id: CommodityID,
         region_id: RegionID,
         time_slice_info: TimeSliceInfo,
@@ -775,7 +775,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_marginal_cost_example(
+    fn marginal_cost_example(
         sed_commodity: Commodity,
         other_commodity: Commodity,
         region_id: RegionID,
@@ -850,7 +850,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_full_cost_example(
+    fn full_cost_example(
         sed_commodity: Commodity,
         other_commodity: Commodity,
         region_id: RegionID,
diff --git a/src/time_slice.rs b/src/time_slice.rs
index ccab77dc6..c98eee223 100644
--- a/src/time_slice.rs
+++ b/src/time_slice.rs
@@ -440,10 +440,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_ts_selection_iter_annual(
-        time_slice_info1: TimeSliceInfo,
-        time_slices1: [TimeSliceID; 2],
-    ) {
+    fn ts_selection_iter_annual(time_slice_info1: TimeSliceInfo, time_slices1: [TimeSliceID; 2]) {
         assert_equal(
             TimeSliceSelection::Annual.iter(&time_slice_info1),
             time_slices1.iter().map(|ts| (ts, Year(0.5))),
@@ -451,10 +448,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_ts_selection_iter_season(
-        time_slice_info1: TimeSliceInfo,
-        time_slices1: [TimeSliceID; 2],
-    ) {
+    fn ts_selection_iter_season(time_slice_info1: TimeSliceInfo, time_slices1: [TimeSliceID; 2]) {
         assert_equal(
             TimeSliceSelection::Season("winter".into()).iter(&time_slice_info1),
             iter::once((&time_slices1[0], Year(0.5))),
@@ -462,10 +456,7 @@ mod tests {
     }
 
     #[rstest]
-    fn test_ts_selection_iter_single(
-        time_slice_info1: TimeSliceInfo,
-        time_slices1: [TimeSliceID; 2],
-    ) {
+    fn ts_selection_iter_single(time_slice_info1: TimeSliceInfo, time_slices1: [TimeSliceID; 2]) {
         let ts = time_slice_info1
             .get_time_slice_id_from_str("summer.night")
             .unwrap();
@@ -505,7 +496,7 @@ mod tests {
     #[case(TimeSliceSelection::Single("winter.day".into()), TimeSliceLevel::Annual, None)]
     #[case(TimeSliceSelection::Single("winter.day".into()), TimeSliceLevel::Season, None)]
     #[case(TimeSliceSelection::Single("winter.day".into()), TimeSliceLevel::DayNight, Some(vec![("winter.day", Year(0.25))]))]
-    fn test_ts_selection_iter_at_level(
+    fn ts_selection_iter_at_level(
         time_slice_info2: TimeSliceInfo,
         #[case] selection: TimeSliceSelection,
         #[case] level: TimeSliceLevel,
@@ -527,7 +518,7 @@ mod tests {
     #[case(TimeSliceSelection::Single("winter.day".into()), TimeSliceLevel::Annual, None)]
     #[case(TimeSliceSelection::Single("winter.day".into()), TimeSliceLevel::Season, None)]
     #[case(TimeSliceSelection::Single("winter.day".into()), TimeSliceLevel::DayNight, Some(vec![("winter.day", Dimensionless(8.0))]))]
-    fn test_calculate_share(
+    fn calculate_share(
         time_slice_info2: TimeSliceInfo,
         #[case] selection: TimeSliceSelection,
         #[case] level: TimeSliceLevel,
diff --git a/src/year.rs b/src/year.rs
index aeeb5f3ba..2f87f52a6 100644
--- a/src/year.rs
+++ b/src/year.rs
@@ -132,7 +132,7 @@ mod tests {
     #[case("..2023", &[2020,2025], &[2020])] // Empty start
     #[case("2021..", &[2020,2025], &[2025])] // Empty end
     #[case("..", &[2020,2025], &[2020,2025])]
-    fn test_parse_year_str_valid(
+    fn parse_year_str_valid(
         #[case] input: &str,
         #[case] milestone_years: &[u32],
         #[case] expected: &[u32],
@@ -151,7 +151,7 @@ mod tests {
     #[case("2021..2024", &[2020,2025], "No valid years found in year range string 2021..2024")]
     #[case("..2020..2025", &[2020,2025], "Year range must be of the form 'start..end', 'start..' or '..end'. Invalid: ..2020..2025")]
     #[case("2020...2025", &[2020,2025], "Invalid end year in range: .2025")]
-    fn test_parse_year_str_invalid(
+    fn parse_year_str_invalid(
         #[case] input: &str,
         #[case] milestone_years: &[u32],
         #[case] error_msg: &str,
diff --git a/tests/citation_cff.rs b/tests/citation_cff.rs
index 4f18c8acf..35796f932 100644
--- a/tests/citation_cff.rs
+++ b/tests/citation_cff.rs
@@ -21,7 +21,7 @@ fn get_version_from_citation_cff() -> Result<String> {
 }
 
 #[test]
-fn test_citation_cff_version() {
+fn citation_cff_version() {
     assert_eq!(
         env!("CARGO_PKG_VERSION"),
         get_version_from_citation_cff().unwrap(),
diff --git a/tests/graph.rs b/tests/graph.rs
index 8719e3227..5b34c6339 100644
--- a/tests/graph.rs
+++ b/tests/graph.rs
@@ -14,7 +14,7 @@ fn get_model_dir() -> PathBuf {
 ///
 /// We also check that the logger is initialised after it is run.
 #[test]
-fn test_handle_graph_command() {
+fn handle_graph_command() {
     unsafe { std::env::set_var("MUSE2_LOG_LEVEL", "off") };
     assert!(!is_logger_initialised());
 
diff --git a/tests/regression_missing_commodity.rs b/tests/regression_missing_commodity.rs
index 1d34e7b2b..4c32a654c 100644
--- a/tests/regression_missing_commodity.rs
+++ b/tests/regression_missing_commodity.rs
@@ -3,6 +3,6 @@ mod regression;
 use regression::run_regression_test;
 
 #[test]
-fn test_regression_missing_commodity() {
+fn regression_missing_commodity() {
     run_regression_test("missing_commodity");
 }
diff --git a/tests/regression_muse1_default.rs b/tests/regression_muse1_default.rs
index f24c62f28..d2e1f876a 100644
--- a/tests/regression_muse1_default.rs
+++ b/tests/regression_muse1_default.rs
@@ -3,6 +3,6 @@ mod regression;
 use regression::run_regression_test;
 
 #[test]
-fn test_regression_muse1_default() {
+fn regression_muse1_default() {
     run_regression_test("muse1_default");
 }
diff --git a/tests/regression_simple.rs b/tests/regression_simple.rs
index 8ffe9e9fb..929011161 100644
--- a/tests/regression_simple.rs
+++ b/tests/regression_simple.rs
@@ -3,6 +3,6 @@ mod regression;
 use regression::run_regression_test_with_debug_files;
 
 #[test]
-fn test_regression_simple() {
+fn regression_simple() {
     run_regression_test_with_debug_files("simple");
 }
diff --git a/tests/regression_two_outputs.rs b/tests/regression_two_outputs.rs
index 6d424b62f..b2a9d7d03 100644
--- a/tests/regression_two_outputs.rs
+++ b/tests/regression_two_outputs.rs
@@ -3,6 +3,6 @@ mod regression;
 use regression::run_regression_test;
 
 #[test]
-fn test_regression_two_outputs() {
+fn regression_two_outputs() {
     run_regression_test("two_outputs");
 }
diff --git a/tests/regression_two_regions.rs b/tests/regression_two_regions.rs
index da3aa7168..a4be8c000 100644
--- a/tests/regression_two_regions.rs
+++ b/tests/regression_two_regions.rs
@@ -3,6 +3,6 @@ mod regression;
 use regression::run_regression_test;
 
 #[test]
-fn test_regression_two_regions() {
+fn regression_two_regions() {
     run_regression_test("two_regions");
 }
diff --git a/tests/run.rs b/tests/run.rs
index 59923efe4..1a19e7fc7 100644
--- a/tests/run.rs
+++ b/tests/run.rs
@@ -14,7 +14,7 @@ fn get_model_dir() -> PathBuf {
 ///
 /// We also check that the logger is initialised after it is run.
 #[test]
-fn test_handle_run_command() {
+fn handle_run_command_works() {
     unsafe { std::env::set_var("MUSE2_LOG_LEVEL", "off") };
     assert!(!is_logger_initialised());
 
diff --git a/tests/validate.rs b/tests/validate.rs
index d886ac7d6..04c09dfca 100644
--- a/tests/validate.rs
+++ b/tests/validate.rs
@@ -13,7 +13,7 @@ fn get_model_dir() -> PathBuf {
 ///
 /// We also check that the logger is initialised after it is run.
 #[test]
-fn test_handle_validate_command() {
+fn handle_validate_command_works() {
     unsafe { std::env::set_var("MUSE2_LOG_LEVEL", "off") };
     assert!(!is_logger_initialised());

From 11c6b4143e9d4f14b756a729a50e654e3bcb38b4 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 14:47:20 +0000
Subject: [PATCH 04/11] clippy: Enable `redundant_test_prefix` lint

---
 Cargo.toml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/Cargo.toml b/Cargo.toml
index 55165f433..cc704451a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -50,6 +50,8 @@ built = {version = "0.8.0", features = ["chrono", "git2"]}
 # Disallow lints from "all" and "pedantic" groups by default
 all = {level = "deny", priority = -1}
 pedantic = {level = "deny", priority = -1}
+# Extra lints to disallow
+redundant_test_prefix = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"

From ff16a7bab22920519b630248725ca29357be4d40 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 15:18:43 +0000
Subject: [PATCH 05/11] clippy: Enable `assertions_on_result_states` lint

---
 Cargo.toml                                  |  1 +
 src/fixture.rs                              |  4 ++--
 src/graph/validate.rs                       |  2 +-
 src/id.rs                                   |  2 +-
 src/input.rs                                | 20 ++++++++---------
 src/input/agent.rs                          |  4 ++--
 src/input/agent/commodity_portion.rs        | 18 +++++++---------
 src/input/agent/objective.rs                |  6 +++---
 src/input/agent/search_space.rs             |  6 ++----
 src/input/asset.rs                          |  4 +---
 src/input/commodity.rs                      |  2 +-
 src/input/commodity/demand.rs               | 21 +++++++-----------
 src/input/commodity/levy.rs                 |  4 +---
 src/input/process/flow.rs                   | 24 +++++++--------------
 src/input/process/investment_constraints.rs |  4 ++--
 src/input/process/parameter.rs              |  4 ++--
 src/model/parameters.rs                     |  4 ++--
 src/region.rs                               |  4 ++--
 18 files changed, 56 insertions(+), 78 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index cc704451a..b22d3ccb4 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -52,6 +52,7 @@ all = {level = "deny", priority = -1}
 pedantic = {level = "deny", priority = -1}
 # Extra lints to disallow
 redundant_test_prefix = "deny"
+assertions_on_result_states = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"
diff --git a/src/fixture.rs b/src/fixture.rs
index 40704082f..50ad4eecd 100644
--- a/src/fixture.rs
+++ b/src/fixture.rs
@@ -371,13 +371,13 @@ mod tests {
     #[test]
     fn patch_and_validate_simple_ok() {
         let patches = Vec::new();
-        assert!(patch_and_validate_simple!(patches).is_ok());
+        patch_and_validate_simple!(patches).unwrap();
     }
 
     #[test]
     fn patch_and_run_simple_ok() {
         let patches = Vec::new();
-        assert!(patch_and_run_simple!(patches).is_ok());
+        patch_and_run_simple!(patches).unwrap();
     }
 
     #[test]
diff --git a/src/graph/validate.rs b/src/graph/validate.rs
index 1ae20b552..614c46f08 100644
--- a/src/graph/validate.rs
+++ b/src/graph/validate.rs
@@ -263,7 +263,7 @@ mod tests {
         graph.add_edge(node_c, node_d, GraphEdge::Demand);
 
         // Validate the graph at DayNight level
-        assert!(validate_commodities_graph(&graph, &commodities, TimeSliceLevel::Annual).is_ok());
+        validate_commodities_graph(&graph, &commodities, TimeSliceLevel::Annual).unwrap();
     }
 
     #[rstest]
diff --git a/src/id.rs b/src/id.rs
index 9da2714b1..db64906a4 100644
--- a/src/id.rs
+++ b/src/id.rs
@@ -175,6 +175,6 @@ mod tests {
     #[case("ALL")]
     #[case(" ALL ")]
     fn deserialise_id_invalid(#[case] id: &str) {
-        assert!(deserialise_id(id).is_err());
+        deserialise_id(id).unwrap_err();
     }
 }
diff --git a/src/input.rs b/src/input.rs
index 61f0ce9b1..3e71dac29 100644
--- a/src/input.rs
+++ b/src/input.rs
@@ -429,7 +429,7 @@ mod tests {
             writeln!(file, "bad toml syntax").unwrap();
         }
 
-        assert!(read_toml::(&file_path).is_err());
+        read_toml::(&file_path).unwrap_err();
     }
 
     /// Deserialise value with `deserialise_proportion_nonzero()`
@@ -446,23 +446,21 @@ mod tests {
         assert_eq!(deserialise_f64(1.0), Ok(Dimensionless(1.0)));
 
         // Invalid inputs
-        assert!(deserialise_f64(0.0).is_err());
-        assert!(deserialise_f64(-1.0).is_err());
-        assert!(deserialise_f64(2.0).is_err());
-        assert!(deserialise_f64(f64::NAN).is_err());
-        assert!(deserialise_f64(f64::INFINITY).is_err());
+        deserialise_f64(0.0).unwrap_err();
+        deserialise_f64(-1.0).unwrap_err();
+        deserialise_f64(2.0).unwrap_err();
+        deserialise_f64(f64::NAN).unwrap_err();
+        deserialise_f64(f64::INFINITY).unwrap_err();
     }
 
     #[test]
     fn check_values_sum_to_one_approx_works() {
         // Single input, valid
-        assert!(check_values_sum_to_one_approx([Dimensionless(1.0)].into_iter()).is_ok());
+        check_values_sum_to_one_approx([Dimensionless(1.0)].into_iter()).unwrap();
 
         // Multiple inputs, valid
-        assert!(
-            check_values_sum_to_one_approx([Dimensionless(0.4), Dimensionless(0.6)].into_iter())
-                .is_ok()
-        );
+        check_values_sum_to_one_approx([Dimensionless(0.4), Dimensionless(0.6)].into_iter())
+            .unwrap();
 
         // Single input, invalid
         assert!(check_values_sum_to_one_approx([Dimensionless(0.5)].into_iter()).is_err());
diff --git a/src/input/agent.rs b/src/input/agent.rs
index fbe00e923..ad22328b5 100644
--- a/src/input/agent.rs
+++ b/src/input/agent.rs
@@ -204,7 +204,7 @@ mod tests {
                 regions: "GBR".into(),
             },
         ];
-        assert!(read_agents_file_from_iter(agents.into_iter(), &region_ids).is_err());
+        read_agents_file_from_iter(agents.into_iter(), &region_ids).unwrap_err();
 
         // Lexico tolerance missing for lexico decision rule
         let agent = AgentRaw {
@@ -214,6 +214,6 @@ mod tests {
             decision_lexico_tolerance: None,
             regions: "GBR".into(),
         };
-        assert!(read_agents_file_from_iter(iter::once(agent), &region_ids).is_err());
+        read_agents_file_from_iter(iter::once(agent), &region_ids).unwrap_err();
     }
 }
diff --git a/src/input/agent/commodity_portion.rs b/src/input/agent/commodity_portion.rs
index 97dd0136d..2cef76b96 100644
--- a/src/input/agent/commodity_portion.rs
+++ b/src/input/agent/commodity_portion.rs
@@ -229,16 +229,14 @@ mod tests {
         let mut map = AgentCommodityPortionsMap::new();
         map.insert(("commodity1".into(), 2020), Dimensionless(1.0));
         let agent_commodity_portions = HashMap::from([("agent1".into(), map)]);
-        assert!(
-            validate_agent_commodity_portions(
-                &agent_commodity_portions,
-                &agents,
-                &commodities,
-                &region_ids,
-                &milestone_years
-            )
-            .is_ok()
-        );
+        validate_agent_commodity_portions(
+            &agent_commodity_portions,
+            &agents,
+            &commodities,
+            &region_ids,
+            &milestone_years,
+        )
+        .unwrap();
 
         // Invalid case: portions do not sum to 1
         let mut map_v2 = AgentCommodityPortionsMap::new();
diff --git a/src/input/agent/objective.rs b/src/input/agent/objective.rs
index b6f2775e7..eb2811df2 100644
--- a/src/input/agent/objective.rs
+++ b/src/input/agent/objective.rs
@@ -186,7 +186,7 @@ mod tests {
         // DecisionRule::Single
         let decision_rule = DecisionRule::Single;
         let objective = objective!(None, None);
-        assert!(check_objective_parameter(&objective, &decision_rule).is_ok());
+        check_objective_parameter(&objective, &decision_rule).unwrap();
         let objective = objective!(Some(Dimensionless(1.0)), None);
         assert!(check_objective_parameter(&objective, &decision_rule).is_err());
         let objective = objective!(None, Some(1));
@@ -198,7 +198,7 @@ mod tests {
         // DecisionRule::Weighted
         let decision_rule = DecisionRule::Weighted;
         let objective = objective!(Some(Dimensionless(1.0)), None);
-        assert!(check_objective_parameter(&objective, &decision_rule).is_ok());
+        check_objective_parameter(&objective, &decision_rule).unwrap();
         let objective = objective!(None, None);
         assert!(check_objective_parameter(&objective, &decision_rule).is_err());
         let objective = objective!(None, Some(1));
@@ -210,7 +210,7 @@ mod tests {
         // DecisionRule::Lexicographical
         let decision_rule = DecisionRule::Lexicographical { tolerance: 1.0 };
         let objective = objective!(None, Some(1));
-        assert!(check_objective_parameter(&objective, &decision_rule).is_ok());
+        check_objective_parameter(&objective, &decision_rule).unwrap();
         let objective = objective!(None, None);
         assert!(check_objective_parameter(&objective, &decision_rule).is_err());
         let objective = objective!(Some(Dimensionless(1.0)), None);
diff --git a/src/input/agent/search_space.rs b/src/input/agent/search_space.rs
index e4426acad..be3d6f2ac 100644
--- a/src/input/agent/search_space.rs
+++ b/src/input/agent/search_space.rs
@@ -254,10 +254,8 @@ mod tests {
             years: "2020".into(),
             search_space: "A;B".into(),
         };
-        assert!(
-            raw.into_agent_search_space(&agents, &processes, &commodity_ids, &[2020])
-                .is_ok()
-        );
+        raw.into_agent_search_space(&agents, &processes, &commodity_ids, &[2020])
+            .unwrap();
     }
 
     #[rstest]
diff --git a/src/input/asset.rs b/src/input/asset.rs
index bd34748e1..d75eca617 100644
--- a/src/input/asset.rs
+++ b/src/input/asset.rs
@@ -201,8 +201,6 @@ mod tests {
         processes: ProcessMap,
         region_ids: IndexSet<RegionID>,
     ) {
-        assert!(
-            read_assets_from_iter(iter::once(asset), &agent_ids, &processes, &region_ids).is_err()
-        );
+        read_assets_from_iter(iter::once(asset), &agent_ids, &processes, &region_ids).unwrap_err();
     }
 }
diff --git a/src/input/commodity.rs b/src/input/commodity.rs
index 4cb72115e..4f4c7ab63 100644
--- a/src/input/commodity.rs
+++ b/src/input/commodity.rs
@@ -206,7 +206,7 @@ mod tests {
     #[test]
     fn validate_commodity_works() {
         let commodity = make_commodity(CommodityType::SupplyEqualsDemand, PricingStrategy::Shadow);
-        assert!(validate_commodity(&commodity).is_ok());
+        validate_commodity(&commodity).unwrap();
     }
 
     #[test]
diff --git a/src/input/commodity/demand.rs b/src/input/commodity/demand.rs
index 87fa36f14..4af12f3db 100644
--- a/src/input/commodity/demand.rs
+++ b/src/input/commodity/demand.rs
@@ -244,10 +244,7 @@ mod tests {
         ];
 
         // Valid
-        assert!(
-            read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020])
-                .is_ok()
-        );
+        read_demand_from_iter(demand.into_iter(), &svd_commodities, &region_ids, &[2020]).unwrap();
     }
 
     #[rstest]
@@ -405,15 +402,13 @@ mod tests {
             commodity_id: "commodity1".to_string(),
             demand: Flow(10.0),
         };
-        assert!(
-            read_demand_from_iter(
-                std::iter::once(demand),
-                &svd_commodities,
-                &region_ids,
-                &[2020, 2030]
-            )
-            .is_err()
-        );
+        read_demand_from_iter(
+            std::iter::once(demand),
+            &svd_commodities,
+            &region_ids,
+            &[2020, 2030],
+        )
+        .unwrap_err();
     }
 
     /// Create an example demand file in `dir_path`
diff --git a/src/input/commodity/levy.rs b/src/input/commodity/levy.rs
index afb083a6b..f31c54bb8 100644
--- a/src/input/commodity/levy.rs
+++ b/src/input/commodity/levy.rs
@@ -257,9 +257,7 @@ mod tests {
         region_ids: IndexSet<RegionID>,
     ) {
         // Valid map
-        assert!(
-            validate_commodity_levy_map(&cost_map, &region_ids, &[2020], &time_slice_info).is_ok()
-        );
+        validate_commodity_levy_map(&cost_map, &region_ids, &[2020], &time_slice_info).unwrap();
     }
 
     #[rstest]
diff --git a/src/input/process/flow.rs b/src/input/process/flow.rs
index 8ac2a18f8..24389070b 100644
--- a/src/input/process/flow.rs
+++ b/src/input/process/flow.rs
@@ -363,10 +363,8 @@ mod tests {
             std::iter::once((commodity.id.clone(), flow(commodity.clone(), 1.0))),
             None,
         );
-        assert!(
-            validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
-                .is_ok()
-        );
+        validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
+            .unwrap();
         assert_eq!(
             processes.values().exactly_one().unwrap().primary_output,
             Some(commodity.id.clone())
@@ -416,10 +414,8 @@ mod tests {
             .into_iter(),
             None,
         );
-        assert!(
-            validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
-                .is_ok()
-        );
+        validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
+            .unwrap();
         assert_eq!(
             processes.values().exactly_one().unwrap().primary_output,
             Some(commodity2.id.clone())
@@ -444,10 +440,8 @@ mod tests {
             .into_iter(),
             None,
         );
-        assert!(
-            validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
-                .is_ok()
-        );
+        validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
+            .unwrap();
         assert_eq!(
             processes.values().exactly_one().unwrap().primary_output,
             None
@@ -499,9 +493,7 @@ mod tests {
             .into_iter(),
             Some(milestone_years.clone()),
         );
-        assert!(
-            validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
-                .is_ok()
-        );
+        validate_flows_and_update_primary_output(&mut processes, &flows_map, &milestone_years)
+            .unwrap();
     }
 }
diff --git a/src/input/process/investment_constraints.rs b/src/input/process/investment_constraints.rs
index 065220422..859805f82 100644
--- a/src/input/process/investment_constraints.rs
+++ b/src/input/process/investment_constraints.rs
@@ -336,11 +336,11 @@ mod tests {
     fn validate_addition_with_finite_value() {
         // Valid: addition constraint with positive value
         let valid = validate_raw_constraint(10.0);
-        assert!(valid.is_ok());
+        valid.unwrap();
 
         // Valid: addition constraint with zero value
         let valid = validate_raw_constraint(0.0);
-        assert!(valid.is_ok());
+        valid.unwrap();
 
         // Not valid: addition constraint with negative value
         let invalid = validate_raw_constraint(-10.0);
diff --git a/src/input/process/parameter.rs b/src/input/process/parameter.rs
index c43780b2a..453eafb74 100644
--- a/src/input/process/parameter.rs
+++ b/src/input/process/parameter.rs
@@ -239,7 +239,7 @@ mod tests {
         param_map.insert(process_id, process_parameter_map.clone());
 
         let result = check_process_parameters(&processes, &param_map, &milestone_years);
-        assert!(result.is_ok());
+        result.unwrap();
     }
 
    #[rstest]
@@ -257,7 +257,7 @@ mod tests {
         param_map.insert(process_id, process_parameter_map);
 
         let result = check_process_parameters(&processes, &param_map, &milestone_years);
-        assert!(result.is_ok());
+        result.unwrap();
     }
 
     #[rstest]
diff --git a/src/model/parameters.rs b/src/model/parameters.rs
index c7004e27b..05606f21c 100644
--- a/src/model/parameters.rs
+++ b/src/model/parameters.rs
@@ -257,8 +257,8 @@ mod tests {
     #[test]
     fn check_milestone_years_works() {
         // Valid
-        assert!(check_milestone_years(&[1]).is_ok());
-        assert!(check_milestone_years(&[1, 2]).is_ok());
+        check_milestone_years(&[1]).unwrap();
+        check_milestone_years(&[1, 2]).unwrap();
 
         // Invalid
         assert!(check_milestone_years(&[]).is_err());
diff --git a/src/region.rs b/src/region.rs
index 58d078946..bc45af53f 100644
--- a/src/region.rs
+++ b/src/region.rs
@@ -63,10 +63,10 @@ mod tests {
 
         // Empty string
         let result = parse_region_str("", &region_ids);
-        assert!(result.is_err());
+        result.unwrap_err();
 
         // Invalid region
         let result = parse_region_str("GBR;INVALID", &region_ids);
-        assert!(result.is_err());
+        result.unwrap_err();
     }
 }

From defe6a2ee8971cd581906f0ccf3af7db2533f324 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 15:22:02 +0000
Subject: [PATCH 06/11] clippy: Enable `get_unwrap` lint

---
 Cargo.toml                                 | 1 +
 src/graph.rs                               | 9 +--------
 src/graph/investment.rs                    | 2 +-
 src/simulation/optimisation/constraints.rs | 6 +-----
 4 files changed, 4 insertions(+), 14 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index b22d3ccb4..0451d8a37 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -53,6 +53,7 @@ pedantic = {level = "deny", priority = -1}
 # Extra lints to disallow
 redundant_test_prefix = "deny"
 assertions_on_result_states = "deny"
+get_unwrap = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"
diff --git a/src/graph.rs b/src/graph.rs
index 66f7c65e1..1dc1ad533 100644
--- a/src/graph.rs
+++ b/src/graph.rs
@@ -91,14 +91,7 @@ fn get_flow_for_year(
         if *region != target_region {
             continue;
         }
-        if year
-            + process
-                .parameters
-                .get(&(region.clone(), *year))
-                .unwrap()
-                .lifetime
-            >= target_year
-        {
+        if year + process.parameters[&(region.clone(), *year)].lifetime >= target_year {
             return Some(value.clone());
         }
     }
diff --git a/src/graph/investment.rs b/src/graph/investment.rs
index 7d894f4eb..8d014b78a 100644
--- a/src/graph/investment.rs
+++ b/src/graph/investment.rs
@@ -449,7 +449,7 @@ fn compute_layers(graph: &InvestmentGraph, order: &[NodeIndex]) -> Vec> {
     let mut groups: Vec> = vec![Vec::new(); max_rank + 1];
     for node_idx in order {
-        let rank = *ranks.get(node_idx).unwrap();
+        let rank = ranks[node_idx];
         let w = graph.node_weight(*node_idx).unwrap().clone();
         groups[rank].push(w);
     }
diff --git a/src/simulation/optimisation/constraints.rs b/src/simulation/optimisation/constraints.rs
index 8dc5f612e..8fb093dec 100644
--- a/src/simulation/optimisation/constraints.rs
+++ b/src/simulation/optimisation/constraints.rs
@@ -160,11 +160,7 @@ where
             // For SED commodities, the LHS must be >=0 and for SVD commodities, it must be >=
             // the exogenous demand supplied by the user
             let min = if commodity.kind == CommodityType::ServiceDemand {
-                commodity
-                    .demand
-                    .get(&(region_id.clone(), year, ts_selection.clone()))
-                    .unwrap()
-                    .value()
+                commodity.demand[&(region_id.clone(), year, ts_selection.clone())].value()
             } else {
                 0.0
             };

From 6b10b0c66ff8a6afcc86bc64124c7d88e091b539 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 15:25:51 +0000
Subject: [PATCH 07/11] clippy: Enable `if_then_some_else_none` lint

---
 Cargo.toml                     |  1 +
 src/simulation/optimisation.rs | 15 +++++++--------
 src/year.rs                    |  6 +-----
 3 files changed, 9 insertions(+), 13 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 0451d8a37..dec0c16ac 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -54,6 +54,7 @@ pedantic = {level = "deny", priority = -1}
 redundant_test_prefix = "deny"
 assertions_on_result_states = "deny"
 get_unwrap = "deny"
+if_then_some_else_none = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"
diff --git a/src/simulation/optimisation.rs b/src/simulation/optimisation.rs
index 8e5d13baa..db5cf543a 100644
--- a/src/simulation/optimisation.rs
+++ b/src/simulation/optimisation.rs
@@ -294,14 +294,13 @@ impl Solution<'_> {
         self.constraint_keys
             .activity_keys
             .zip_duals(self.solution.dual_rows())
-            .filter_map(move |((asset, ts_selection), dual)| {
-                if matches!(ts_selection, TimeSliceSelection::Single(_)) {
-                    // `iter(...).next()` is safe here because we just matched Single(_)
-                    let (time_slice, _) = ts_selection.iter(self.time_slice_info).next().unwrap();
-                    Some((asset, time_slice, dual))
-                } else {
-                    None
-                }
+            .filter(|&((_asset, ts_selection), _dual)| {
+                matches!(ts_selection, TimeSliceSelection::Single(_))
+            })
+            .map(|((asset, ts_selection), dual)| {
+                // `unwrap` is safe here because we just matched Single(_)
+                let (time_slice, _) = ts_selection.iter(self.time_slice_info).next().unwrap();
+                (asset, time_slice, dual)
             })
     }
 
diff --git a/src/year.rs b/src/year.rs
index 2f87f52a6..c223c2aa6 100644
--- a/src/year.rs
+++ b/src/year.rs
@@ -6,11 +6,7 @@ use itertools::Itertools;
 /// Parse a single year from a string and check it is in `valid_years`
 fn parse_and_validate_year(s: &str, valid_years: &[u32]) -> Option<u32> {
     let year = s.trim().parse::<u32>().ok()?;
-    if valid_years.binary_search(&year).is_ok() {
-        Some(year)
-    } else {
-        None
-    }
+    valid_years.binary_search(&year).is_ok().then_some(year)
 }
 
 /// Parse a string of years separated by semicolons into a vector of u32 years.

From 29f142aaf5ef96bf335696ebc2554e21e8d5b6a6 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 15:33:16 +0000
Subject: [PATCH 08/11] clippy: Enable `renamed_function_params` lint

---
 Cargo.toml        | 1 +
 src/time_slice.rs | 8 ++++----
 2 files changed, 5 insertions(+), 4 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index dec0c16ac..3844136a3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -55,6 +55,7 @@ redundant_test_prefix = "deny"
 assertions_on_result_states = "deny"
 get_unwrap = "deny"
 if_then_some_else_none = "deny"
+renamed_function_params = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"
diff --git a/src/time_slice.rs b/src/time_slice.rs
index c98eee223..23e0dece9 100644
--- a/src/time_slice.rs
+++ b/src/time_slice.rs
@@ -47,11 +47,11 @@ impl Display for TimeSliceID {
 }
 
 impl<'de> Deserialize<'de> for TimeSliceID {
-    fn deserialize<D>(deserialiser: D) -> std::result::Result<Self, D::Error>
+    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
     where
         D: serde::Deserializer<'de>,
     {
-        let s: &str = Deserialize::deserialize(deserialiser)?;
+        let s: &str = Deserialize::deserialize(deserializer)?;
         let (season, time_of_day) = s.split('.').collect_tuple().ok_or_else(|| {
             D::Error::custom(format!(
                 "Invalid input '{s}': Should be in form season.time_of_day"
@@ -65,11 +65,11 @@ impl<'de> Deserialize<'de> for TimeSliceID {
 }
 
 impl Serialize for TimeSliceID {
-    fn serialize<S>(&self, serialiser: S) -> std::result::Result<S::Ok, S::Error>
+    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
     where
         S: serde::Serializer,
     {
-        serialiser.collect_str(self)
+        serializer.collect_str(self)
     }
 }
 

From 4179ed22c1896125cb4bf447ef0a15c0f2261286 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 16:14:01 +0000
Subject: [PATCH 09/11] clippy: Enable `string_slice` lint

---
 Cargo.toml      | 1 +
 src/settings.rs | 4 ++--
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 3844136a3..647fe4764 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -56,6 +56,7 @@ assertions_on_result_states = "deny"
 get_unwrap = "deny"
 if_then_some_else_none = "deny"
 renamed_function_params = "deny"
+string_slice = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"
diff --git a/src/settings.rs b/src/settings.rs
index 516d8b481..3b4f55d25 100644
--- a/src/settings.rs
+++ b/src/settings.rs
@@ -97,9 +97,9 @@ impl Settings {
         // Iterate through the generated TOML, commenting out lines and adding docs
         let mut out = DEFAULT_SETTINGS_FILE_HEADER.to_string();
         for line in settings_raw.split('\n') {
-            if let Some(last) = line.find('=') {
+            if let Some((field, _)) = line.split_once('=') {
                 // Add documentation from doc comments
-                let field = line[..last].trim();
+                let field = field.trim();
 
                 // Use doc comment to document parameter. All fields should have doc comments.
                 let docs = Settings::get_field_docs(field).expect("Missing doc comment for field");

From 4b0599b8089f49ad8fc491ebaf2c20166f676af6 Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 22 Dec 2025 16:19:43 +0000
Subject: [PATCH 10/11] clippy: Enable some more lints from `restriction` group

---
 Cargo.toml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/Cargo.toml b/Cargo.toml
index 647fe4764..3c7903d69 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -57,6 +57,13 @@ get_unwrap = "deny"
 if_then_some_else_none = "deny"
 renamed_function_params = "deny"
 string_slice = "deny"
+dbg_macro = "deny"
+infinite_loop = "deny"
+integer_division = "deny"
+needless_raw_strings = "deny"
+redundant_type_annotations = "deny"
+return_and_then = "deny"
+suspicious_xor_used_as_pow = "deny"
 # Whitelist some lints from "pedantic" group
 similar_names = "allow"
 must_use_candidate = "allow"

From 2d5d080201c52d68a980096c17c83f7d94eac68e Mon Sep 17 00:00:00 2001
From: Alex Dewar
Date: Mon, 5 Jan 2026 10:26:50 +0000
Subject: [PATCH 11/11] Suppress clippy warning for `built_info` module

---
 src/output/metadata.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/output/metadata.rs b/src/output/metadata.rs
index 0f33f084a..9d09dee8a 100644
--- a/src/output/metadata.rs
+++ b/src/output/metadata.rs
@@ -11,6 +11,7 @@ const METADATA_FILE_NAME: &str = "metadata.toml";
 
 /// Information about the program build via `built` crate
 #[allow(clippy::doc_markdown)]
+#[allow(clippy::needless_raw_strings)]
 mod built_info {
     // The file has been placed there by the build script.
     include!(concat!(env!("OUT_DIR"), "/built.rs"));