diff --git a/.vscode/settings.json b/.vscode/settings.json index d5d5c186d..7d5280386 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -3,8 +3,6 @@ "cargo", "clippy", "--package=clarinet-cli", - "--message-format=json", - "--", - "--no-deps" + "--message-format=json" ] } diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 000000000..69e7d68a0 --- /dev/null +++ b/clippy.toml @@ -0,0 +1,2 @@ +type-complexity-threshold = 1000 +too-many-arguments-threshold = 12 diff --git a/components/clarinet-cli/src/deployments/mod.rs b/components/clarinet-cli/src/deployments/mod.rs index b1a72c2d3..efd1fa89b 100644 --- a/components/clarinet-cli/src/deployments/mod.rs +++ b/components/clarinet-cli/src/deployments/mod.rs @@ -1,21 +1,13 @@ pub mod types; mod ui; +use std::fs::{self}; +use std::path::PathBuf; pub use ui::start_ui; -use hiro_system_kit; - use clarinet_deployments::types::{DeploymentGenerationArtifacts, DeploymentSpecification}; - -use clarinet_files::{FileLocation, ProjectManifest}; - use clarinet_files::chainhook_types::StacksNetwork; - -use serde_yaml; - -use std::fs::{self}; - -use std::path::PathBuf; +use clarinet_files::{FileLocation, ProjectManifest}; #[derive(Deserialize, Debug)] pub struct Balance { @@ -79,7 +71,7 @@ fn get_deployments_files( let is_extension_valid = file .extension() .and_then(|ext| ext.to_str()) - .and_then(|ext| Some(ext == "yml" || ext == "yaml")); + .map(|ext| ext == "yml" || ext == "yaml"); if let Some(true) = is_extension_valid { let relative_path = file.clone(); @@ -99,13 +91,13 @@ pub fn write_deployment( if target_location.exists() && prompt_override { println!( "Deployment {} already exists.\n{}?", - target_location.to_string(), + target_location, yellow!("Overwrite [Y/n]") ); let mut buffer = String::new(); std::io::stdin().read_line(&mut buffer).unwrap(); - if buffer.starts_with("n") { - return Err(format!("deployment update aborted")); + if buffer.starts_with('n') { + return Err("deployment update aborted".to_string()); } } diff --git a/components/clarinet-cli/src/deployments/types.rs b/components/clarinet-cli/src/deployments/types.rs index 304a82e9b..af4a9d1a7 100644 --- a/components/clarinet-cli/src/deployments/types.rs +++ b/components/clarinet-cli/src/deployments/types.rs @@ -34,11 +34,11 @@ impl DeploymentSynthesis { Ok(res) => res, Err(err) => panic!("unable to serialize deployment {}", err), }; - return DeploymentSynthesis { + DeploymentSynthesis { total_cost, blocks_count, content, - }; + } } } diff --git a/components/clarinet-cli/src/deployments/ui/mod.rs b/components/clarinet-cli/src/deployments/ui/mod.rs index 9be60d282..bb61eb4c2 100644 --- a/components/clarinet-cli/src/deployments/ui/mod.rs +++ b/components/clarinet-cli/src/deployments/ui/mod.rs @@ -1,6 +1,7 @@ #[allow(dead_code)] mod app; -#[allow(dead_code)] + +#[allow(clippy::module_inception)] mod ui; use app::App; diff --git a/components/clarinet-cli/src/frontend/cli.rs b/components/clarinet-cli/src/frontend/cli.rs index 621cf6c11..4997ee214 100644 --- a/components/clarinet-cli/src/frontend/cli.rs +++ b/components/clarinet-cli/src/frontend/cli.rs @@ -55,12 +55,13 @@ use super::telemetry::{telemetry_report_event, DeveloperUsageDigest, DeveloperUs /// For Clarinet documentation, refer to https://docs.hiro.so/clarinet/introduction. /// Report any issues here https://github.com/hirosystems/clarinet/issues/new. 
#[derive(Parser, PartialEq, Clone, Debug)] -#[clap(version = option_env!("CARGO_PKG_VERSION").expect("Unable to detect version"), name = "clarinet", bin_name = "clarinet")] +#[clap(version = env!("CARGO_PKG_VERSION"), name = "clarinet", bin_name = "clarinet")] struct Opts { #[clap(subcommand)] command: Command, } +#[allow(clippy::upper_case_acronyms)] #[derive(Subcommand, PartialEq, Clone, Debug)] enum Command { /// Create and scaffold a new project @@ -123,6 +124,7 @@ enum Requirements { AddRequirement(AddRequirement), } +#[allow(clippy::enum_variant_names)] #[derive(Subcommand, PartialEq, Clone, Debug)] #[clap(bin_name = "deployment", aliases = &["deployment"])] enum Deployments { @@ -137,6 +139,7 @@ enum Deployments { ApplyDeployment(ApplyDeployment), } +#[allow(clippy::enum_variant_names)] #[derive(Subcommand, PartialEq, Clone, Debug)] #[clap(bin_name = "chainhook", aliases = &["chainhook"])] enum Chainhooks { @@ -663,7 +666,7 @@ pub fn main() { #[cfg(feature = "telemetry")] telemetry_report_event(DeveloperUsageEvent::NewProject(DeveloperUsageDigest::new( &project_opts.name, - &vec![], + &[], ))); } } @@ -834,9 +837,9 @@ pub fn main() { println!("{}", yellow!("Continue [Y/n]?")); let mut buffer = String::new(); std::io::stdin().read_line(&mut buffer).unwrap(); - if !buffer.starts_with("Y") - && !buffer.starts_with("y") - && !buffer.starts_with("\n") + if !buffer.starts_with('Y') + && !buffer.starts_with('y') + && !buffer.starts_with('\n') { println!("Deployment aborted"); std::process::exit(1); @@ -1154,8 +1157,7 @@ pub fn main() { } if success { - println!("{} Syntax of contract successfully checked", green!("✔")); - return; + println!("{} Syntax of contract successfully checked", green!("✔")) } else { std::process::exit(1); } @@ -1394,9 +1396,8 @@ fn load_manifest_or_exit(path: Option) -> ProjectManifest { } fn load_manifest_or_warn(path: Option) -> Option { - let manifest_location = get_manifest_location_or_warn(path); - if manifest_location.is_some() { - let manifest = match ProjectManifest::from_location(&manifest_location.unwrap()) { + if let Some(manifest_location) = get_manifest_location_or_warn(path) { + let manifest = match ProjectManifest::from_location(&manifest_location) { Ok(manifest) => manifest, Err(message) => { println!( @@ -1584,7 +1585,7 @@ pub fn load_deployment_if_exists( println!("{}", yellow!("Overwrite? 
[Y/n]")); let mut buffer = String::new(); std::io::stdin().read_line(&mut buffer).unwrap(); - if buffer.starts_with("n") { + if buffer.starts_with('n') { Some(load_deployment(manifest, &default_deployment_location)) } else { default_deployment_location diff --git a/components/clarinet-cli/src/frontend/telemetry.rs b/components/clarinet-cli/src/frontend/telemetry.rs index 76b983509..14bc8ef3f 100644 --- a/components/clarinet-cli/src/frontend/telemetry.rs +++ b/components/clarinet-cli/src/frontend/telemetry.rs @@ -24,12 +24,12 @@ pub struct DeveloperUsageDigest { } impl DeveloperUsageDigest { - pub fn new(project_id: &str, team_id: &Vec) -> Self { + pub fn new(project_id: &str, team_id: &[String]) -> Self { let hashed_project_id = Hash160::from_data(project_id.as_bytes()); let hashed_team_id = Hash160::from_data(team_id.join(",").as_bytes()); Self { - project_id: format!("0x{}", bytes_to_hex(&hashed_project_id.to_bytes().to_vec())), - team_id: format!("0x{}", bytes_to_hex(&hashed_team_id.to_bytes().to_vec())), + project_id: format!("0x{}", bytes_to_hex(hashed_project_id.to_bytes().as_ref())), + team_id: format!("0x{}", bytes_to_hex(hashed_team_id.to_bytes().as_ref())), } } } @@ -48,9 +48,7 @@ pub fn telemetry_report_event(event: DeveloperUsageEvent) { async fn send_event(event: DeveloperUsageEvent) { let segment_api_key = "Q3xpmFRvy0psXnwBEXErtMBIeabOVjbC"; - let clarinet_version = option_env!("CARGO_PKG_VERSION") - .expect("Unable to detect version") - .to_string(); + let clarinet_version = env!("CARGO_PKG_VERSION").to_string(); let ci_mode = option_env!("CLARINET_MODE_CI").unwrap_or("0").to_string(); let os = std::env::consts::OS; @@ -153,10 +151,10 @@ async fn send_event(event: DeveloperUsageEvent) { segment_api_key.to_string(), Message::from(Track { user: User::UserId { - user_id: format!("0x{}", bytes_to_hex(&user_id.to_bytes().to_vec())), + user_id: format!("0x{}", bytes_to_hex(user_id.to_bytes().as_ref())), }, event: event_name.into(), - properties: properties, + properties, ..Default::default() }), ) diff --git a/components/clarinet-cli/src/generate/chainhook.rs b/components/clarinet-cli/src/generate/chainhook.rs index 18a29ca5e..e5cb176eb 100644 --- a/components/clarinet-cli/src/generate/chainhook.rs +++ b/components/clarinet-cli/src/generate/chainhook.rs @@ -65,7 +65,7 @@ networks: .expect("unable to retrieve project root"); new_file.append_path(&format!("chainhooks/{}", name))?; if new_file.exists() { - return Err(format!("{} already exists", new_file.to_string())); + return Err(format!("{} already exists", new_file)); } let change = FileCreation { comment: format!("{} chainhooks/{}", green!("Created file"), name), diff --git a/components/clarinet-cli/src/generate/contract.rs b/components/clarinet-cli/src/generate/contract.rs index aba4eb032..a4213d8c0 100644 --- a/components/clarinet-cli/src/generate/contract.rs +++ b/components/clarinet-cli/src/generate/contract.rs @@ -79,7 +79,7 @@ impl GetChangesForNewContract { new_file.append_path("contracts")?; new_file.append_path(&name)?; if new_file.exists() { - return Err(format!("{} already exists", new_file.to_string())); + return Err(format!("{} already exists", new_file)); } let change = FileCreation { comment: format!("{} contracts/{}", green!("Created file"), name), @@ -121,7 +121,7 @@ describe("example tests", () => { new_file.append_path("tests")?; new_file.append_path(&name)?; if new_file.exists() { - return Err(format!("{} already exists", new_file.to_string())); + return Err(format!("{} already exists", new_file)); } let 
change = FileCreation { comment: format!("{} tests/{}", green!("Created file"), name), diff --git a/components/clarinet-cli/src/integrate/mod.rs b/components/clarinet-cli/src/integrate/mod.rs index f1cc0ceb4..745f9e0ae 100644 --- a/components/clarinet-cli/src/integrate/mod.rs +++ b/components/clarinet-cli/src/integrate/mod.rs @@ -44,12 +44,12 @@ pub fn run_devnet( .network_config .as_ref() .and_then(|c| c.devnet.as_ref()) - .and_then(|d| Some(d.working_dir.to_string())) + .map(|d| d.working_dir.to_string()) .ok_or("unable to read settings/Devnet.toml")?; fs::create_dir_all(&working_dir) .map_err(|_| format!("unable to create dir {}", working_dir))?; let mut log_path = PathBuf::from_str(&working_dir) - .map_err(|e| format!("unable to working_dir {}\n{}", working_dir, e.to_string()))?; + .map_err(|e| format!("unable to working_dir {}\n{}", working_dir, e))?; log_path.push("devnet.log"); let file = OpenOptions::new() @@ -57,7 +57,7 @@ pub fn run_devnet( .write(true) .truncate(true) .open(log_path) - .map_err(|e| format!("unable to create log file {}", e.to_string()))?; + .map_err(|e| format!("unable to create log file {}", e))?; let decorator = slog_term::PlainDecorator::new(file); let drain = slog_term::FullFormat::new(decorator).build().fuse(); diff --git a/components/clarinet-cli/src/lsp/mod.rs b/components/clarinet-cli/src/lsp/mod.rs index 09cc7edb2..48148b91e 100644 --- a/components/clarinet-cli/src/lsp/mod.rs +++ b/components/clarinet-cli/src/lsp/mod.rs @@ -7,14 +7,12 @@ use clarity_repl::clarity::vm::diagnostic::{ }; use crossbeam_channel::unbounded; use std::sync::mpsc; -use tokio; use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range}; use tower_lsp::{LspService, Server}; pub fn run_lsp() { - match block_on(do_run_lsp()) { - Err(_e) => std::process::exit(1), - _ => {} + if let Err(_e) = block_on(do_run_lsp()) { + std::process::exit(1) }; } @@ -49,7 +47,7 @@ async fn do_run_lsp() -> Result<(), String> { } pub fn clarity_diagnostics_to_tower_lsp_type( - diagnostics: &mut Vec, + diagnostics: &mut [ClarityDiagnostic], ) -> Vec { let mut dst = vec![]; for d in diagnostics.iter_mut() { diff --git a/components/clarinet-cli/src/lsp/native_bridge.rs b/components/clarinet-cli/src/lsp/native_bridge.rs index 30f1c2ec1..923ca09a3 100644 --- a/components/clarinet-cli/src/lsp/native_bridge.rs +++ b/components/clarinet-cli/src/lsp/native_bridge.rs @@ -106,7 +106,7 @@ impl LanguageServer for LspNativeBridge { }; let response_rx = self.response_rx.lock().expect("failed to lock response_rx"); - let ref response = response_rx.recv().expect("failed to get value from recv"); + let response = &response_rx.recv().expect("failed to get value from recv"); if let LspResponse::Request(LspRequestResponse::Initialize(initialize)) = response { return Ok(initialize.to_owned()); } @@ -130,7 +130,7 @@ impl LanguageServer for LspNativeBridge { }; let response_rx = self.response_rx.lock().expect("failed to lock response_rx"); - let ref response = response_rx.recv().expect("failed to get value from recv"); + let response = &response_rx.recv().expect("failed to get value from recv"); if let LspResponse::Request(LspRequestResponse::CompletionItems(items)) = response { return Ok(Some(CompletionResponse::from(items.to_vec()))); } @@ -148,7 +148,7 @@ impl LanguageServer for LspNativeBridge { }; let response_rx = self.response_rx.lock().expect("failed to lock response_rx"); - let ref response = response_rx.recv().expect("failed to get value from recv"); + let response = &response_rx.recv().expect("failed 
to get value from recv"); if let LspResponse::Request(LspRequestResponse::Definition(Some(data))) = response { return Ok(Some(GotoDefinitionResponse::Scalar(data.to_owned()))); } @@ -166,7 +166,7 @@ impl LanguageServer for LspNativeBridge { }; let response_rx = self.response_rx.lock().expect("failed to lock response_rx"); - let ref response = response_rx.recv().expect("failed to get value from recv"); + let response = &response_rx.recv().expect("failed to get value from recv"); if let LspResponse::Request(LspRequestResponse::DocumentSymbol(symbols)) = response { return Ok(Some(DocumentSymbolResponse::Nested(symbols.to_vec()))); } @@ -181,7 +181,7 @@ impl LanguageServer for LspNativeBridge { }; let response_rx = self.response_rx.lock().expect("failed to lock response_rx"); - let ref response = response_rx.recv().expect("failed to get value from recv"); + let response = &response_rx.recv().expect("failed to get value from recv"); if let LspResponse::Request(LspRequestResponse::Hover(data)) = response { return Ok(data.to_owned()); } @@ -196,7 +196,7 @@ impl LanguageServer for LspNativeBridge { }; let response_rx = self.response_rx.lock().expect("failed to lock response_rx"); - let ref response = response_rx.recv().expect("failed to get value from recv"); + let response = &response_rx.recv().expect("failed to get value from recv"); if let LspResponse::Request(LspRequestResponse::SignatureHelp(data)) = response { return Ok(data.to_owned()); } @@ -233,12 +233,10 @@ impl LanguageServer for LspNativeBridge { let mut aggregated_diagnostics = vec![]; let mut notification = None; if let Ok(response_rx) = self.response_rx.lock() { - if let Ok(ref mut response) = response_rx.recv() { - if let LspResponse::Notification(notification_response) = response { - aggregated_diagnostics - .append(&mut notification_response.aggregated_diagnostics); - notification = notification_response.notification.take(); - } + if let Ok(LspResponse::Notification(ref mut notification_response)) = response_rx.recv() + { + aggregated_diagnostics.append(&mut notification_response.aggregated_diagnostics); + notification = notification_response.notification.take(); } } for (location, mut diags) in aggregated_diagnostics.drain(..) 
{ @@ -253,9 +251,7 @@ impl LanguageServer for LspNativeBridge { } } if let Some((level, message)) = notification { - self.client - .show_message(message_level_type_to_tower_lsp_type(&level), message) - .await; + self.client.show_message(level, message).await; } } @@ -279,12 +275,10 @@ impl LanguageServer for LspNativeBridge { let mut aggregated_diagnostics = vec![]; let mut notification = None; if let Ok(response_rx) = self.response_rx.lock() { - if let Ok(ref mut response) = response_rx.recv() { - if let LspResponse::Notification(notification_response) = response { - aggregated_diagnostics - .append(&mut notification_response.aggregated_diagnostics); - notification = notification_response.notification.take(); - } + if let Ok(LspResponse::Notification(ref mut notification_response)) = response_rx.recv() + { + aggregated_diagnostics.append(&mut notification_response.aggregated_diagnostics); + notification = notification_response.notification.take(); } } @@ -300,9 +294,7 @@ impl LanguageServer for LspNativeBridge { } } if let Some((level, message)) = notification { - self.client - .show_message(message_level_type_to_tower_lsp_type(&level), message) - .await; + self.client.show_message(level, message).await; } } @@ -325,14 +317,3 @@ impl LanguageServer for LspNativeBridge { } } } - -pub fn message_level_type_to_tower_lsp_type( - level: &clarity_lsp::lsp_types::MessageType, -) -> tower_lsp::lsp_types::MessageType { - match level { - &clarity_lsp::lsp_types::MessageType::ERROR => tower_lsp::lsp_types::MessageType::ERROR, - &clarity_lsp::lsp_types::MessageType::WARNING => tower_lsp::lsp_types::MessageType::WARNING, - &clarity_lsp::lsp_types::MessageType::INFO => tower_lsp::lsp_types::MessageType::INFO, - _ => tower_lsp::lsp_types::MessageType::LOG, - } -} diff --git a/components/clarinet-deployments/src/diagnostic_digest.rs b/components/clarinet-deployments/src/diagnostic_digest.rs index d71a5330f..0a5931559 100644 --- a/components/clarinet-deployments/src/diagnostic_digest.rs +++ b/components/clarinet-deployments/src/diagnostic_digest.rs @@ -33,8 +33,8 @@ impl DiagnosticsDigest { let mut outputs = vec![]; let total = deployment.contracts.len(); - for (contract_id, diags) in contracts_diags.into_iter() { - let (source, contract_location) = match deployment.contracts.get(&contract_id) { + for (contract_id, diags) in contracts_diags.iter() { + let (source, contract_location) = match deployment.contracts.get(contract_id) { Some(entry) => { contracts_checked += 1; entry @@ -68,7 +68,7 @@ impl DiagnosticsDigest { } Level::Note => { outputs.push(format!("{}: {}", "note:".blue().bold(), diagnostic.message)); - outputs.append(&mut output_code(&diagnostic, &formatted_lines)); + outputs.append(&mut output_code(diagnostic, &formatted_lines)); continue; } } @@ -86,10 +86,10 @@ impl DiagnosticsDigest { span.start_column )); } - outputs.append(&mut output_code(&diagnostic, &formatted_lines)); + outputs.append(&mut output_code(diagnostic, &formatted_lines)); if let Some(ref suggestion) = diagnostic.suggestion { - outputs.push(format!("{}", suggestion)); + outputs.push(suggestion.to_string()); } } } diff --git a/components/clarinet-deployments/src/lib.rs b/components/clarinet-deployments/src/lib.rs index 6b2044040..19436b2f7 100644 --- a/components/clarinet-deployments/src/lib.rs +++ b/components/clarinet-deployments/src/lib.rs @@ -42,7 +42,7 @@ pub fn setup_session_with_deployment( deployment: &DeploymentSpecification, contracts_asts: Option<&BTreeMap>, ) -> DeploymentGenerationArtifacts { - let mut session = 
initiate_session_from_deployment(&manifest); + let mut session = initiate_session_from_deployment(manifest); update_session_with_genesis_accounts(&mut session, deployment); let results = update_session_with_contracts_executions( &mut session, @@ -61,12 +61,9 @@ pub fn setup_session_with_deployment( match res { Ok(execution_result) => { diags.insert(contract_id.clone(), execution_result.diagnostics); - match execution_result.result { - EvaluationResult::Contract(contract_result) => { - asts.insert(contract_id.clone(), contract_result.contract.ast); - contracts_analysis.insert(contract_id, contract_result.contract.analysis); - } - _ => (), + if let EvaluationResult::Contract(contract_result) = execution_result.result { + asts.insert(contract_id.clone(), contract_result.contract.ast); + contracts_analysis.insert(contract_id, contract_result.contract.analysis); } } Err(errors) => { @@ -76,23 +73,23 @@ pub fn setup_session_with_deployment( } } - let artifacts = DeploymentGenerationArtifacts { + DeploymentGenerationArtifacts { asts, deps, diags, success, session, analysis: contracts_analysis, - }; - artifacts + } } pub fn initiate_session_from_deployment(manifest: &ProjectManifest) -> Session { - let mut settings = SessionSettings::default(); - settings.repl_settings = manifest.repl_settings.clone(); - settings.disk_cache_enabled = true; - let session = Session::new(settings); - session + let settings = SessionSettings { + repl_settings: manifest.repl_settings.clone(), + disk_cache_enabled: true, + ..Default::default() + }; + Session::new(settings) } pub fn update_session_with_genesis_accounts( @@ -135,7 +132,7 @@ pub fn update_session_with_contracts_executions( (None, Some(min_epoch)) => std::cmp::max(min_epoch, DEFAULT_EPOCH), _ => DEFAULT_EPOCH, }; - session.update_epoch(epoch.clone()); + session.update_epoch(epoch); for transaction in batch.transactions.iter() { match transaction { TransactionSpecification::RequirementPublish(_) @@ -161,7 +158,7 @@ pub fn update_session_with_contracts_executions( let mut contract_ast = contracts_asts .as_ref() .and_then(|m| m.get(&contract_id)) - .and_then(|c| Some(c.clone())); + .cloned(); let contract = ClarityContract { code_source: ClarityCodeSource::ContractInMemory(tx.source.clone()), deployer: ContractDeployer::Address(tx.emulated_sender.to_string()), @@ -203,7 +200,7 @@ pub async fn generate_default_deployment( manifest: &ProjectManifest, network: &StacksNetwork, no_batch: bool, - file_accessor: Option<&Box>, + file_accessor: Option<&dyn FileAccessor>, forced_min_epoch: Option, ) -> Result<(DeploymentSpecification, DeploymentGenerationArtifacts), String> { let network_manifest = match file_accessor { @@ -238,8 +235,8 @@ pub async fn generate_default_deployment( (stacks_node, bitcoin_node) } None => { - let stacks_node = format!("http://localhost:20443"); - let bitcoin_node = format!("http://devnet:devnet@localhost:18443"); + let stacks_node = "http://localhost:20443".to_string(); + let bitcoin_node = "http://devnet:devnet@localhost:18443".to_string(); (stacks_node, bitcoin_node) } }; @@ -250,13 +247,9 @@ pub async fn generate_default_deployment( .network .stacks_node_rpc_address .unwrap_or("https://api.testnet.hiro.so".to_string()); - let bitcoin_node = - network_manifest - .network - .bitcoin_node_rpc_address - .unwrap_or(format!( - "http://blockstack:blockstacksystem@bitcoind.testnet.stacks.co:18332" - )); + let bitcoin_node = network_manifest.network.bitcoin_node_rpc_address.unwrap_or( + 
"http://blockstack:blockstacksystem@bitcoind.testnet.stacks.co:18332".to_string(), + ); (Some(stacks_node), Some(bitcoin_node)) } StacksNetwork::Mainnet => { @@ -276,7 +269,7 @@ pub async fn generate_default_deployment( let default_deployer = match network_manifest.accounts.get("deployer") { Some(deployer) => deployer, None => { - return Err(format!("unable to retrieve default deployer account")); + return Err("unable to retrieve default deployer account".to_string()); } }; let default_deployer_address = @@ -295,8 +288,10 @@ pub async fn generate_default_deployment( let mut requirements_data = BTreeMap::new(); let mut requirements_deps = BTreeMap::new(); - let mut settings = SessionSettings::default(); - settings.repl_settings = manifest.repl_settings.clone(); + let settings = SessionSettings { + repl_settings: manifest.repl_settings.clone(), + ..Default::default() + }; let session = Session::new(settings.clone()); @@ -362,7 +357,7 @@ pub async fn generate_default_deployment( let (source, epoch, clarity_version, contract_location) = requirements::retrieve_contract( &contract_id, - &cache_location, + cache_location, &file_accessor, ) .await?; @@ -372,7 +367,7 @@ pub async fn generate_default_deployment( None => epoch, }; - contract_epochs.insert(contract_id.clone(), epoch.clone()); + contract_epochs.insert(contract_id.clone(), epoch); // Build the struct representing the requirement in the deployment if network.is_simnet() { @@ -381,7 +376,7 @@ pub async fn generate_default_deployment( emulated_sender: contract_id.issuer.clone(), source: source.clone(), location: contract_location, - clarity_version: clarity_version.clone(), + clarity_version, }; emulated_contracts_publish.insert(contract_id.clone(), data); } else if network.either_devnet_or_testnet() { @@ -435,7 +430,7 @@ pub async fn generate_default_deployment( None => { let (_, _, clarity_version, _) = requirements::retrieve_contract( &contract_id, - &cache_location, + cache_location, &file_accessor, ) .await?; @@ -452,15 +447,19 @@ pub async fn generate_default_deployment( // Extract the known / unknown dependencies match dependencies { Ok(inferable_dependencies) => { - // Looping could be confusing - in this case, we submitted a HashMap with one contract, so we have at most one - // result in the `inferable_dependencies` map. We will just extract and keep the associated data (source, ast, deps). - for (contract_id, dependencies) in inferable_dependencies.into_iter() { + if inferable_dependencies.len() > 1 { + println!("warning: inferable_dependencies contains more than one entry"); + } + // We submitted a HashMap with one contract, so we have at most one result in the `inferable_dependencies` map. + // We will extract and keep the associated data (source, ast, deps). 
+ if let Some((contract_id, dependencies)) = + inferable_dependencies.into_iter().next() + { for dependency in dependencies.iter() { queue.push_back((dependency.contract_id.clone(), None)); } requirements_deps.insert(contract_id.clone(), dependencies); requirements_data.insert(contract_id.clone(), (clarity_version, ast)); - break; } } Err((inferable_dependencies, non_inferable_dependencies)) => { @@ -534,7 +533,7 @@ pub async fn generate_default_deployment( for (_, contract_config) in manifest.contracts.iter() { let mut contract_location = base_location.clone(); contract_location - .append_path(&contract_config.expect_contract_path_as_str()) + .append_path(contract_config.expect_contract_path_as_str()) .map_err(|_| { format!( "unable to build path for contract {}", @@ -542,12 +541,9 @@ pub async fn generate_default_deployment( ) })?; - let source = contract_location.read_content_as_utf8().map_err(|_| { - format!( - "unable to find contract at {}", - contract_location.to_string() - ) - })?; + let source = contract_location + .read_content_as_utf8() + .map_err(|_| format!("unable to find contract at {}", contract_location))?; sources.insert(contract_location.to_string(), source); } sources @@ -555,11 +551,11 @@ pub async fn generate_default_deployment( Some(file_accessor) => { let contracts_location = manifest .contracts - .iter() - .map(|(_, contract_config)| { + .values() + .map(|contract_config| { let mut contract_location = base_location.clone(); contract_location - .append_path(&contract_config.expect_contract_path_as_str()) + .append_path(contract_config.expect_contract_path_as_str()) .unwrap(); contract_location.to_string() }) @@ -601,7 +597,7 @@ pub async fn generate_default_deployment( }; let mut contract_location = base_location.clone(); - contract_location.append_path(&contract_config.expect_contract_path_as_str())?; + contract_location.append_path(contract_config.expect_contract_path_as_str())?; let source = sources .get(&contract_location.to_string()) .ok_or(format!( @@ -698,11 +694,11 @@ pub async fn generate_default_deployment( // Track the latest epoch that a contract is deployed in, so that we can // ensure that all contracts are deployed after their dependencies. 
for contract_id in ordered_contracts_ids.into_iter() { - if requirements_data.contains_key(&contract_id) { + if requirements_data.contains_key(contract_id) { continue; } let tx = contracts - .remove(&contract_id) + .remove(contract_id) .expect("unable to retrieve contract"); match tx { @@ -763,7 +759,7 @@ pub async fn generate_default_deployment( } let name = match network { - StacksNetwork::Simnet => format!("Simulated deployment, used as a default for `clarinet console`, `clarinet test` and `clarinet check`"), + StacksNetwork::Simnet => "Simulated deployment, used as a default for `clarinet console`, `clarinet test` and `clarinet check`".to_string(), _ => format!("{:?} deployment", network) }; @@ -833,15 +829,14 @@ pub fn load_deployment( ) -> Result { let project_root_location = manifest.location.get_project_root_location()?; let spec = match DeploymentSpecification::from_config_file( - &deployment_plan_location, + deployment_plan_location, &project_root_location, ) { Ok(spec) => spec, Err(msg) => { return Err(format!( "error: {} syntax incorrect\n{}", - deployment_plan_location.to_string(), - msg + deployment_plan_location, msg )); } }; diff --git a/components/clarinet-deployments/src/onchain/bitcoin_deployment.rs b/components/clarinet-deployments/src/onchain/bitcoin_deployment.rs index a24c64612..3bff105cd 100644 --- a/components/clarinet-deployments/src/onchain/bitcoin_deployment.rs +++ b/components/clarinet-deployments/src/onchain/bitcoin_deployment.rs @@ -113,7 +113,7 @@ pub fn sign_transaction( Message::from_slice(&sig_hash_bytes[..]).expect("Unable to create Message"); let secp = Secp256k1::new(); let signature = secp.sign_ecdsa_recoverable(&message, signer); - let public_key = PublicKey::from_secret_key(&secp, &signer); + let public_key = PublicKey::from_secret_key(&secp, signer); let sig_der = signature.to_standard().serialize_der(); (sig_der, public_key) }; diff --git a/components/clarinet-deployments/src/onchain/mod.rs b/components/clarinet-deployments/src/onchain/mod.rs index 3931bdbcf..979c44356 100644 --- a/components/clarinet-deployments/src/onchain/mod.rs +++ b/components/clarinet-deployments/src/onchain/mod.rs @@ -27,6 +27,7 @@ use clarity_repl::repl::{Session, SessionSettings}; use reqwest::Url; use stacks_rpc_client::StacksRpc; use std::collections::{BTreeMap, HashSet, VecDeque}; +use std::str::FromStr; use std::sync::mpsc::{Receiver, Sender}; use tiny_hderive::bip32::ExtendedPrivKey; @@ -73,7 +74,7 @@ fn get_stacks_address(public_key: &PublicKey, network: &StacksNetwork) -> Stacks let wrapped_public_key = Secp256k1PublicKey::from_slice(&public_key.serialize_compressed()).unwrap(); - let signer_addr = StacksAddress::from_public_keys( + StacksAddress::from_public_keys( match network { StacksNetwork::Mainnet => C32_ADDRESS_VERSION_MAINNET_SINGLESIG, _ => C32_ADDRESS_VERSION_TESTNET_SINGLESIG, @@ -82,9 +83,7 @@ fn get_stacks_address(public_key: &PublicKey, network: &StacksNetwork) -> Stacks 1, &vec![wrapped_public_key], ) - .unwrap(); - - signer_addr + .unwrap() } fn sign_transaction_payload( @@ -99,9 +98,9 @@ fn sign_transaction_payload( let signer_addr = get_stacks_address(&public_key, network); let spending_condition = TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition { - signer: signer_addr.bytes.clone(), - nonce: nonce, - tx_fee: tx_fee, + signer: signer_addr.bytes, + nonce, + tx_fee, hash_mode: SinglesigHashMode::P2PKH, key_encoding: TransactionPublicKeyEncoding::Compressed, signature: MessageSignature::empty(), @@ -117,11 +116,11 @@ fn 
sign_transaction_payload( StacksNetwork::Mainnet => 0x00000001, _ => 0x80000000, }, - auth: auth, - anchor_mode: anchor_mode, + auth, + anchor_mode, post_condition_mode: TransactionPostConditionMode::Allow, post_conditions: vec![], - payload: payload, + payload, }; let mut unsigned_tx_bytes = vec![]; @@ -242,7 +241,7 @@ pub fn update_deployment_costs( .stacks_node .as_ref() .expect("unable to get stacks node rcp address"); - let stacks_rpc = StacksRpc::new(&stacks_node_url); + let stacks_rpc = StacksRpc::new(stacks_node_url); let mut session = Session::new(SessionSettings::default()); for batch_spec in deployment.plan.batches.iter_mut() { @@ -252,7 +251,7 @@ pub fn update_deployment_costs( let transaction_payload = TransactionPayload::TokenTransfer( tx.recipient.clone(), tx.mstx_amount, - TokenTransferMemo(tx.memo.clone()), + TokenTransferMemo(tx.memo), ); match stacks_rpc.estimate_transaction_fee(&transaction_payload, priority) { @@ -260,7 +259,7 @@ pub fn update_deployment_costs( tx.cost = fee; } Err(e) => { - println!("unable to estimate fee for transaction: {}", e.to_string()); + println!("unable to estimate fee for transaction: {}", e); continue; } }; @@ -283,7 +282,7 @@ pub fn update_deployment_costs( contract_name: tx.contract_id.name.clone(), address: StacksAddress::from(tx.contract_id.issuer.clone()), function_name: tx.method.clone(), - function_args: function_args, + function_args, }); match stacks_rpc.estimate_transaction_fee(&transaction_payload, priority) { @@ -291,7 +290,7 @@ pub fn update_deployment_costs( tx.cost = fee; } Err(e) => { - println!("unable to estimate fee for transaction: {}", e.to_string()); + println!("unable to estimate fee for transaction: {}", e); continue; } }; @@ -310,7 +309,7 @@ pub fn update_deployment_costs( tx.cost = fee; } Err(e) => { - println!("unable to estimate fee for transaction: {}", e.to_string()); + println!("unable to estimate fee for transaction: {}", e); continue; } }; @@ -419,7 +418,7 @@ pub fn apply_on_chain_deployment( TransactionSpecification::StxTransfer(tx) => { let issuer_address = tx.expected_sender.to_address(); let nonce = match accounts_cached_nonces.get(&issuer_address) { - Some(cached_nonce) => cached_nonce.clone(), + Some(cached_nonce) => *cached_nonce, None => stacks_rpc .get_nonce(&issuer_address) .expect("Unable to retrieve account"), @@ -435,7 +434,7 @@ pub fn apply_on_chain_deployment( tx.recipient.clone(), tx.mstx_amount, tx.memo, - *account, + account, nonce, tx.cost, anchor_mode, @@ -453,9 +452,7 @@ pub fn apply_on_chain_deployment( accounts_cached_nonces.insert(issuer_address.clone(), nonce + 1); let name = format!( "STX transfer ({}µSTX from {} to {})", - tx.mstx_amount, - issuer_address, - tx.recipient.to_string(), + tx.mstx_amount, issuer_address, tx.recipient, ); let check = TransactionCheck::NonceCheck(tx.expected_sender.clone(), nonce); TransactionTracker { @@ -502,7 +499,7 @@ pub fn apply_on_chain_deployment( TransactionSpecification::ContractCall(tx) => { let issuer_address = tx.expected_sender.to_address(); let nonce = match accounts_cached_nonces.get(&issuer_address) { - Some(cached_nonce) => cached_nonce.clone(), + Some(cached_nonce) => *cached_nonce, None => stacks_rpc .get_nonce(&issuer_address) .expect("Unable to retrieve account"), @@ -538,7 +535,7 @@ pub fn apply_on_chain_deployment( &tx.contract_id, tx.method.clone(), function_args, - *account, + account, nonce, tx.cost, anchor_mode, @@ -575,7 +572,7 @@ pub fn apply_on_chain_deployment( // Retrieve nonce for issuer let issuer_address = 
tx.expected_sender.to_address(); let nonce = match accounts_cached_nonces.get(&issuer_address) { - Some(cached_nonce) => cached_nonce.clone(), + Some(cached_nonce) => *cached_nonce, None => stacks_rpc .get_nonce(&issuer_address) .expect("Unable to retrieve account"), @@ -608,7 +605,7 @@ pub fn apply_on_chain_deployment( }; let clarity_version = if epoch >= EpochSpec::Epoch2_1 { - Some(tx.clarity_version.clone()) + Some(tx.clarity_version) } else { None }; @@ -617,7 +614,7 @@ pub fn apply_on_chain_deployment( &tx.contract_name, &source, clarity_version, - *account, + account, nonce, tx.cost, anchor_mode, @@ -635,11 +632,7 @@ pub fn apply_on_chain_deployment( }; accounts_cached_nonces.insert(issuer_address.clone(), nonce + 1); - let name = format!( - "Publish {}.{}", - tx.expected_sender.to_string(), - tx.contract_name - ); + let name = format!("Publish {}.{}", tx.expected_sender, tx.contract_name); let check = TransactionCheck::ContractPublish( tx.expected_sender.clone(), tx.contract_name.clone(), @@ -676,7 +669,7 @@ pub fn apply_on_chain_deployment( // Retrieve nonce for issuer let issuer_address = tx.remap_sender.to_address(); let nonce = match accounts_cached_nonces.get(&issuer_address) { - Some(cached_nonce) => cached_nonce.clone(), + Some(cached_nonce) => *cached_nonce, None => stacks_rpc .get_nonce(&issuer_address) .expect("Unable to retrieve account"), @@ -713,7 +706,7 @@ pub fn apply_on_chain_deployment( &tx.contract_id.name, &source, None, - *account, + account, nonce, tx.cost, anchor_mode, @@ -727,11 +720,7 @@ pub fn apply_on_chain_deployment( }; accounts_cached_nonces.insert(issuer_address.clone(), nonce + 1); - let name = format!( - "Publish {}.{}", - tx.remap_sender.to_string(), - tx.contract_id.name - ); + let name = format!("Publish {}.{}", tx.remap_sender, tx.contract_id.name); let check = TransactionCheck::ContractPublish( tx.remap_sender.clone(), tx.contract_id.name.clone(), @@ -788,16 +777,14 @@ pub fn apply_on_chain_deployment( if info.stacks_tip_height == 0 { // Always loop if we have not yet seen the genesis block. std::thread::sleep(std::time::Duration::from_secs( - delay_between_checks.into(), + delay_between_checks, )); continue; } (info.burn_block_height, info.stacks_tip_height) } Err(_e) => { - std::thread::sleep(std::time::Duration::from_secs( - delay_between_checks.into(), - )); + std::thread::sleep(std::time::Duration::from_secs(delay_between_checks)); continue; } }; @@ -805,7 +792,7 @@ pub fn apply_on_chain_deployment( // If no bitcoin block has been mined since `delay_between_checks`, // avoid flooding the stacks-node with status update requests. if bitcoin_block_tip <= current_bitcoin_block_height { - std::thread::sleep(std::time::Duration::from_secs(delay_between_checks.into())); + std::thread::sleep(std::time::Duration::from_secs(delay_between_checks)); continue; } @@ -814,7 +801,7 @@ pub fn apply_on_chain_deployment( // If no stacks block has been mined despite the new bitcoin block, // avoid flooding the stacks-node with status update requests. 
if stacks_block_tip <= current_block_height { - std::thread::sleep(std::time::Duration::from_secs(delay_between_checks.into())); + std::thread::sleep(std::time::Duration::from_secs(delay_between_checks)); continue; } @@ -823,7 +810,7 @@ pub fn apply_on_chain_deployment( if current_bitcoin_block_height > after_bitcoin_block { epoch_transition_successful = true; } else { - std::thread::sleep(std::time::Duration::from_secs(delay_between_checks.into())); + std::thread::sleep(std::time::Duration::from_secs(delay_between_checks)); } } } @@ -834,7 +821,7 @@ pub fn apply_on_chain_deployment( TransactionStatus::Encoded(transaction, check) => (transaction, check), _ => unreachable!(), }; - let _ = match stacks_rpc.post_transaction(&transaction) { + match stacks_rpc.post_transaction(&transaction) { Ok(res) => { tracker.status = TransactionStatus::Broadcasted(check); @@ -843,7 +830,7 @@ pub fn apply_on_chain_deployment( ongoing_batch.insert(res.txid, tracker); } Err(e) => { - let message = format!("unable to post transaction\n{}", e.to_string()); + let message = format!("unable to post transaction\n{}", e); tracker.status = TransactionStatus::Error(message.clone()); let _ = deployment_event_tx @@ -858,7 +845,7 @@ pub fn apply_on_chain_deployment( let (burn_block_height, stacks_tip_height) = match stacks_rpc.get_info() { Ok(info) => (info.burn_block_height, info.stacks_tip_height), _ => { - std::thread::sleep(std::time::Duration::from_secs(delay_between_checks.into())); + std::thread::sleep(std::time::Duration::from_secs(delay_between_checks)); continue; } }; @@ -866,7 +853,7 @@ pub fn apply_on_chain_deployment( // If no block has been mined since `delay_between_checks`, // avoid flooding the stacks-node with status update requests. if burn_block_height <= current_bitcoin_block_height { - std::thread::sleep(std::time::Duration::from_secs(delay_between_checks.into())); + std::thread::sleep(std::time::Duration::from_secs(delay_between_checks)); continue; } @@ -882,7 +869,7 @@ pub fn apply_on_chain_deployment( contract_name, )) => { let deployer_address = deployer.to_address(); - let res = stacks_rpc.get_contract_source(&deployer_address, &contract_name); + let res = stacks_rpc.get_contract_source(&deployer_address, contract_name); match res { Ok(_contract) => { tracker.status = TransactionStatus::Confirmed; @@ -974,7 +961,7 @@ pub fn get_initial_transactions_trackers( "STX transfer {} send {} µSTC to {}", tx.expected_sender.to_address(), tx.mstx_amount, - tx.recipient.to_string() + tx.recipient, ), status: TransactionStatus::Queued, }, diff --git a/components/clarinet-deployments/src/requirements.rs b/components/clarinet-deployments/src/requirements.rs index 78a56ef29..3161f00ae 100644 --- a/components/clarinet-deployments/src/requirements.rs +++ b/components/clarinet-deployments/src/requirements.rs @@ -21,7 +21,7 @@ impl Default for ContractMetadata { pub async fn retrieve_contract( contract_id: &QualifiedContractIdentifier, cache_location: &FileLocation, - file_accessor: &Option<&Box>, + file_accessor: &Option<&dyn FileAccessor>, ) -> Result<(String, StacksEpochId, ClarityVersion, FileLocation), String> { let contract_deployer = contract_id.issuer.to_address(); let contract_name = contract_id.name.to_string(); @@ -43,21 +43,16 @@ pub async fn retrieve_contract( ), }; - match (contract_source, metadata_json) { - // If both files are present in the cache, return the contract source and metadata. 
- (Ok(contract_source), Ok(metadata_json)) => { - let metadata: ContractMetadata = serde_json::from_str(&metadata_json) - .map_err(|e| format!("Unable to parse metadata file: {}", e))?; - - return Ok(( - contract_source, - metadata.epoch, - metadata.clarity_version, - contract_location, - )); - } - // Else, we'll fetch the contract source from the Stacks node. - _ => {} + if let (Ok(contract_source), Ok(metadata_json)) = (contract_source, metadata_json) { + let metadata: ContractMetadata = serde_json::from_str(&metadata_json) + .map_err(|e| format!("Unable to parse metadata file: {}", e))?; + + return Ok(( + contract_source, + metadata.epoch, + metadata.clarity_version, + contract_location, + )); } let is_mainnet = contract_deployer.starts_with("SP"); @@ -80,9 +75,7 @@ pub async fn retrieve_contract( Some(1) => ClarityVersion::Clarity1, Some(2) => ClarityVersion::Clarity2, Some(_) => { - return Err(format!( - "unable to parse clarity_version (can either be '1' or '2'", - )) + return Err("unable to parse clarity_version (can either be '1' or '2'".to_string()) } None => ClarityVersion::default_for_epoch(epoch), }; diff --git a/components/clarinet-deployments/src/types.rs b/components/clarinet-deployments/src/types.rs index ea1ca9239..959c21658 100644 --- a/components/clarinet-deployments/src/types.rs +++ b/components/clarinet-deployments/src/types.rs @@ -47,9 +47,9 @@ impl From for EpochSpec { } } -impl Into for EpochSpec { - fn into(self) -> StacksEpochId { - match self { +impl From for StacksEpochId { + fn from(val: EpochSpec) -> Self { + match val { EpochSpec::Epoch2_0 => StacksEpochId::Epoch20, EpochSpec::Epoch2_05 => StacksEpochId::Epoch2_05, EpochSpec::Epoch2_1 => StacksEpochId::Epoch21, @@ -263,7 +263,7 @@ pub mod memo_serde { match hex_bytes(&hex_memo[2..]) { Ok(ref mut bytes) => { bytes.resize(34, 0); - memo.copy_from_slice(&bytes); + memo.copy_from_slice(bytes); } Err(_) => { return Err(serde::de::Error::custom( @@ -322,16 +322,14 @@ impl StxTransferSpecification { let mut memo = [0u8; 34]; if let Some(ref hex_memo) = specs.memo { if !hex_memo.is_empty() && !hex_memo.starts_with("0x") { - return Err(format!( - "unable to parse memo (up to 34 bytes, starting with '0x')", - )); + return Err("unable to parse memo (up to 34 bytes, starting with '0x')".to_string()); } match hex_bytes(&hex_memo[2..]) { Ok(ref mut bytes) => { bytes.resize(34, 0); - memo.copy_from_slice(&bytes); + memo.copy_from_slice(bytes); } - Err(_) => return Err(format!("unable to parse memo (up to 34 bytes)",)), + Err(_) => return Err("unable to parse memo (up to 34 bytes)".to_string()), } } @@ -472,9 +470,7 @@ impl ContractPublishSpecification { } _ => None, } - .ok_or(format!( - "unable to parse file location (can either be 'path' or 'url'", - ))?; + .ok_or("unable to parse file location (can either be 'path' or 'url'".to_string())?; let source = location.read_content_as_utf8()?; @@ -485,9 +481,7 @@ impl ContractPublishSpecification { } else if clarity_version.eq(&2) { Ok(ClarityVersion::Clarity2) } else { - Err(format!( - "unable to parse clarity_version (can either be '1' or '2'", - )) + Err("unable to parse clarity_version (can either be '1' or '2'".to_string()) } } _ => Ok(DEFAULT_CLARITY_VERSION), @@ -497,7 +491,7 @@ impl ContractPublishSpecification { contract_name, expected_sender, source, - location: location, + location, cost: specs.cost, anchor_block_only: specs.anchor_block_only.unwrap_or(true), clarity_version, @@ -530,7 +524,7 @@ pub mod source_serde { where S: Serializer, { - let enc = b64.encode(&x); 
+ let enc = b64.encode(x); s.serialize_str(&enc) } @@ -544,12 +538,12 @@ pub mod source_serde { pub fn base64_decode(encoded: &str) -> Result { let bytes = b64 - .decode(&encoded) - .map_err(|e| format!("unable to decode contract source: {}", e.to_string()))?; + .decode(encoded) + .map_err(|e| format!("unable to decode contract source: {}", e))?; let decoded = from_utf8(&bytes).map_err(|e| { format!( "invalid UTF-8 sequence when decoding contract source: {}", - e.to_string() + e ) })?; Ok(decoded.to_owned()) @@ -580,7 +574,7 @@ pub mod qualified_contract_identifier_serde { use clarity_repl::clarity::vm::types::QualifiedContractIdentifier; use serde::{Deserializer, Serializer}; - pub fn serialize<'ser, S>(x: &'ser QualifiedContractIdentifier, s: S) -> Result + pub fn serialize(x: &QualifiedContractIdentifier, s: S) -> Result where S: Serializer, { @@ -602,8 +596,8 @@ pub mod remap_principals_serde { use serde::{ser::SerializeMap, Deserializer, Serializer}; use std::collections::{BTreeMap, HashMap}; - pub fn serialize<'ser, S>( - target: &'ser BTreeMap, + pub fn serialize( + target: &BTreeMap, serializer: S, ) -> Result where @@ -690,7 +684,7 @@ impl RequirementPublishSpecification { let mut remap_principals = BTreeMap::new(); if let Some(ref remap_principals_spec) = specs.remap_principals { for (src_spec, dst_spec) in remap_principals_spec { - let src = match PrincipalData::parse_standard_principal(&src_spec) { + let src = match PrincipalData::parse_standard_principal(src_spec) { Ok(res) => res, Err(_) => { return Err(format!( @@ -699,7 +693,7 @@ impl RequirementPublishSpecification { )) } }; - let dst = match PrincipalData::parse_standard_principal(&dst_spec) { + let dst = match PrincipalData::parse_standard_principal(dst_spec) { Ok(res) => res, Err(_) => { return Err(format!( @@ -718,9 +712,7 @@ impl RequirementPublishSpecification { } _ => None, } - .ok_or(format!( - "unable to parse file location (can either be 'path' or 'url'", - ))?; + .ok_or("unable to parse file location (can either be 'path' or 'url'".to_string())?; let source = location.read_content_as_utf8()?; @@ -731,9 +723,7 @@ impl RequirementPublishSpecification { } else if clarity_version.eq(&2) { Ok(ClarityVersion::Clarity2) } else { - Err(format!( - "unable to parse clarity_version (can either be '1' or '2'", - )) + Err("unable to parse clarity_version (can either be '1' or '2'".to_string()) } } _ => Ok(DEFAULT_CLARITY_VERSION), @@ -745,7 +735,7 @@ impl RequirementPublishSpecification { remap_principals, source, clarity_version, - location: location, + location, cost: specs.cost, }) } @@ -847,9 +837,7 @@ impl EmulatedContractPublishSpecification { } _ => None, } - .ok_or(format!( - "unable to parse file location (can either be 'path' or 'url'", - ))?; + .ok_or("unable to parse file location (can either be 'path' or 'url'".to_string())?; let clarity_version = match specs.clarity_version { Some(clarity_version) => { @@ -858,9 +846,7 @@ impl EmulatedContractPublishSpecification { } else if clarity_version.eq(&2) { Ok(ClarityVersion::Clarity2) } else { - Err(format!( - "unable to parse clarity_version (can either be '1' or '2'", - )) + Err("unable to parse clarity_version (can either be '1' or '2'".to_string()) } } _ => Ok(DEFAULT_CLARITY_VERSION), @@ -902,8 +888,8 @@ pub mod contracts_serde { use super::source_serde; - pub fn serialize<'ser, S>( - target: &'ser BTreeMap, + pub fn serialize( + target: &BTreeMap, serializer: S, ) -> Result where @@ -911,7 +897,7 @@ pub mod contracts_serde { { let mut out = 
serializer.serialize_seq(Some(target.len()))?; for (contract_id, (source, file_location)) in target { - let encoded = b64.encode(&source); + let encoded = b64.encode(source); let mut map = BTreeMap::new(); map.insert("contract_id", contract_id.to_string()); map.insert("source", encoded); @@ -938,11 +924,9 @@ pub mod contracts_serde { for entry in container { let contract_id = match entry.get("contract_id") { - Some(contract_id) => { - QualifiedContractIdentifier::parse(&contract_id).map_err(|e| { - serde::de::Error::custom(format!("failed to parse contract id: {}", e)) - }) - } + Some(contract_id) => QualifiedContractIdentifier::parse(contract_id).map_err(|e| { + serde::de::Error::custom(format!("failed to parse contract id: {}", e)) + }), None => Err(serde::de::Error::custom( "Contract entry must have `contract_id` field", )), @@ -959,7 +943,7 @@ pub mod contracts_serde { let source = match entry.get("source") { Some(source) => { - source_serde::base64_decode(&source).map_err(serde::de::Error::custom) + source_serde::base64_decode(source).map_err(serde::de::Error::custom) } None => Err(serde::de::Error::custom( "Contract entry must have `source` field", @@ -1137,10 +1121,7 @@ impl DeploymentSpecification { stacks_node: self.stacks_node.clone(), bitcoin_node: self.bitcoin_node.clone(), node: None, - genesis: match self.genesis { - Some(ref g) => Some(g.to_specification_file()), - None => None, - }, + genesis: self.genesis.as_ref().map(|g| g.to_specification_file()), plan: Some(self.plan.to_specification_file()), } } @@ -1238,7 +1219,7 @@ impl WalletSpecification { } }; - let balance = match u128::from_str_radix(&specs.balance, 10) { + let balance = match specs.balance.parse::() { Ok(res) => res, Err(_) => { return Err(format!( diff --git a/components/clarinet-files/src/lib.rs b/components/clarinet-files/src/lib.rs index fb9dbcfef..7dc1f085c 100644 --- a/components/clarinet-files/src/lib.rs +++ b/components/clarinet-files/src/lib.rs @@ -32,7 +32,9 @@ pub use project_manifest::{ }; use serde::ser::{Serialize, SerializeMap, Serializer}; use std::collections::HashMap; +use std::fmt; use std::future::Future; +use std::path::Path; use std::pin::Pin; use std::{borrow::BorrowMut, path::PathBuf, str::FromStr}; use url::Url; @@ -56,6 +58,15 @@ pub enum FileLocation { Url { url: Url }, } +impl fmt::Display for FileLocation { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + FileLocation::FileSystem { path } => write!(f, "{}", path.display()), + FileLocation::Url { url } => write!(f, "{}", url), + } + } +} + impl FileLocation { pub fn try_parse( location_string: &str, @@ -119,7 +130,7 @@ impl FileLocation { FileLocation::Url { url } => { let mut paths_segments = url .path_segments_mut() - .map_err(|_| format!("unable to mutate url"))?; + .map_err(|_| "unable to mutate url".to_string())?; for component in path_to_append.components() { let segment = component .as_os_str() @@ -134,20 +145,15 @@ impl FileLocation { pub fn read_content_as_utf8(&self) -> Result { let content = self.read_content()?; - let contract_as_utf8 = String::from_utf8(content).map_err(|e| { - format!( - "unable to read content as utf8 {}\n{:?}", - self.to_string(), - e - ) - })?; + let contract_as_utf8 = String::from_utf8(content) + .map_err(|e| format!("unable to read content as utf8 {}\n{:?}", self, e))?; Ok(contract_as_utf8) } - fn fs_read_content(path: &PathBuf) -> Result, String> { + fn fs_read_content(path: &Path) -> Result, String> { use std::fs::File; use std::io::{BufReader, Read}; - let file = 
File::open(path.clone()) + let file = File::open(path) .map_err(|e| format!("unable to read file {}\n{:?}", path.display(), e))?; let mut file_reader = BufReader::new(file); let mut file_buffer = vec![]; @@ -157,7 +163,7 @@ impl FileLocation { Ok(file_buffer) } - fn fs_exists(path: &PathBuf) -> bool { + fn fs_exists(path: &Path) -> bool { path.exists() } @@ -173,7 +179,7 @@ impl FileLocation { e ) })?; - let mut file = File::create(&file_path) + let mut file = File::create(file_path) .map_err(|e| format!("unable to open file {}\n{}", file_path.display(), e))?; file.write_all(content) .map_err(|e| format!("unable to write file {}\n{}", file_path.display(), e))?; @@ -182,7 +188,7 @@ impl FileLocation { pub async fn get_project_manifest_location( &self, - file_accessor: Option<&Box>, + file_accessor: Option<&dyn FileAccessor>, ) -> Result { match file_accessor { None => { @@ -236,10 +242,7 @@ impl FileLocation { match manifest_found { true => Ok(project_root_location), - false => Err(format!( - "unable to find root location from {}", - self.to_string() - )), + false => Err(format!("unable to find root location from {}", self)), } } _ => { @@ -262,7 +265,7 @@ impl FileLocation { FileLocation::Url { url } => { let mut segments = url .path_segments_mut() - .map_err(|_| format!("unable to mutate url"))?; + .map_err(|_| "unable to mutate url".to_string())?; segments.pop(); } } @@ -292,9 +295,7 @@ impl FileLocation { } pub fn get_relative_location(&self) -> Result { - let base = self - .get_project_root_location() - .and_then(|l| Ok(l.to_string()))?; + let base = self.get_project_root_location().map(|l| l.to_string())?; let file = self.to_string(); Ok(file[(base.len() + 1)..].to_string()) } @@ -314,7 +315,7 @@ impl FileLocation { impl FileLocation { pub fn read_content(&self) -> Result, String> { let bytes = match &self { - FileLocation::FileSystem { path } => FileLocation::fs_read_content(&path), + FileLocation::FileSystem { path } => FileLocation::fs_read_content(path), FileLocation::Url { url } => match url.scheme() { #[cfg(not(feature = "wasm"))] "file" => { @@ -362,15 +363,6 @@ impl FileLocation { _ => unreachable!(), } } - - pub fn to_string(&self) -> String { - match self { - FileLocation::FileSystem { path } => { - format!("{}", path.display()) - } - FileLocation::Url { url } => url.to_string(), - } - } } impl Serialize for FileLocation { diff --git a/components/clarinet-files/src/network_manifest.rs b/components/clarinet-files/src/network_manifest.rs index b8fdf026f..5672e01e0 100644 --- a/components/clarinet-files/src/network_manifest.rs +++ b/components/clarinet-files/src/network_manifest.rs @@ -184,7 +184,7 @@ pub mod accounts_serde { S: Serializer, { let mut seq = serializer.serialize_seq(Some(target.len()))?; - for (_, account) in target { + for account in target.values() { seq.serialize_element(account)?; } seq.end() @@ -338,7 +338,7 @@ impl NetworkManifest { pub async fn from_project_manifest_location_using_file_accessor( location: &FileLocation, networks: &(BitcoinNetwork, StacksNetwork), - file_accessor: &Box, + file_accessor: &dyn FileAccessor, ) -> Result { let mut network_manifest_location = location.get_parent_location()?; network_manifest_location.append_path("settings/Devnet.toml")?; @@ -347,7 +347,7 @@ impl NetworkManifest { .await?; let mut network_manifest_file: NetworkManifestFile = - toml::from_slice(&content.as_bytes()).unwrap(); + toml::from_slice(content.as_bytes()).unwrap(); NetworkManifest::from_network_manifest_file( &mut network_manifest_file, networks, @@ -388,7 
+388,7 @@ impl NetworkManifest { }; let network = NetworkConfig { name: network_manifest_file.network.name.clone(), - stacks_node_rpc_address: stacks_node_rpc_address, + stacks_node_rpc_address, bitcoin_node_rpc_address: network_manifest_file .network .bitcoin_node_rpc_address @@ -403,64 +403,57 @@ impl NetworkManifest { let mut accounts = BTreeMap::new(); let is_mainnet = networks.1.is_mainnet(); - match &network_manifest_file.accounts { - Some(Value::Table(entries)) => { - for (account_name, account_settings) in entries.iter() { - match account_settings { - Value::Table(account_settings) => { - let balance = match account_settings.get("balance") { - Some(Value::Integer(balance)) => *balance as u64, - _ => 0, - }; - - let mnemonic = match account_settings.get("mnemonic") { - Some(Value::String(words)) => { - match Mnemonic::parse_in_normalized(Language::English, words) { - Ok(result) => result.to_string(), - Err(e) => { - return Err(format!( - "mnemonic for wallet '{}' invalid: {}", - account_name, - e.to_string() - )); - } - } + if let Some(Value::Table(entries)) = &network_manifest_file.accounts { + for (account_name, account_settings) in entries.iter() { + if let Value::Table(account_settings) = account_settings { + let balance = match account_settings.get("balance") { + Some(Value::Integer(balance)) => *balance as u64, + _ => 0, + }; + + let mnemonic = match account_settings.get("mnemonic") { + Some(Value::String(words)) => { + match Mnemonic::parse_in_normalized(Language::English, words) { + Ok(result) => result.to_string(), + Err(e) => { + return Err(format!( + "mnemonic for wallet '{}' invalid: {}", + account_name, e + )); } - _ => { - let entropy = &[ - 0x33, 0xE4, 0x6B, 0xB1, 0x3A, 0x74, 0x6E, 0xA4, 0x1C, 0xDD, - 0xE4, 0x5C, 0x90, 0x84, 0x6A, 0x79, - ]; // TODO(lgalabru): rand - Mnemonic::from_entropy(entropy).unwrap().to_string() - } - }; - - let derivation = match account_settings.get("derivation") { - Some(Value::String(derivation)) => derivation.to_string(), - _ => DEFAULT_DERIVATION_PATH.to_string(), - }; - - let (stx_address, btc_address, _) = - compute_addresses(&mnemonic, &derivation, networks); - - accounts.insert( - account_name.to_string(), - AccountConfig { - label: account_name.to_string(), - mnemonic: mnemonic.to_string(), - derivation, - balance, - stx_address, - btc_address, - is_mainnet, - }, - ); + } + } + _ => { + let entropy = &[ + 0x33, 0xE4, 0x6B, 0xB1, 0x3A, 0x74, 0x6E, 0xA4, 0x1C, 0xDD, 0xE4, + 0x5C, 0x90, 0x84, 0x6A, 0x79, + ]; // TODO(lgalabru): rand + Mnemonic::from_entropy(entropy).unwrap().to_string() } - _ => {} - } + }; + + let derivation = match account_settings.get("derivation") { + Some(Value::String(derivation)) => derivation.to_string(), + _ => DEFAULT_DERIVATION_PATH.to_string(), + }; + + let (stx_address, btc_address, _) = + compute_addresses(&mnemonic, &derivation, networks); + + accounts.insert( + account_name.to_string(), + AccountConfig { + label: account_name.to_string(), + mnemonic: mnemonic.to_string(), + derivation, + balance, + stx_address, + btc_address, + is_mainnet, + }, + ); } } - _ => {} }; let devnet = if networks.1.is_devnet() { @@ -647,31 +640,31 @@ impl NetworkManifest { } if let Some(ref val) = devnet_override.epoch_2_0 { - devnet_config.epoch_2_0 = Some(val.clone()); + devnet_config.epoch_2_0 = Some(*val); } if let Some(ref val) = devnet_override.epoch_2_05 { - devnet_config.epoch_2_05 = Some(val.clone()); + devnet_config.epoch_2_05 = Some(*val); } if let Some(ref val) = devnet_override.epoch_2_1 { - 
devnet_config.epoch_2_1 = Some(val.clone()); + devnet_config.epoch_2_1 = Some(*val); } if let Some(ref val) = devnet_override.epoch_2_2 { - devnet_config.epoch_2_2 = Some(val.clone()); + devnet_config.epoch_2_2 = Some(*val); } if let Some(ref val) = devnet_override.epoch_2_3 { - devnet_config.epoch_2_3 = Some(val.clone()); + devnet_config.epoch_2_3 = Some(*val); } if let Some(ref val) = devnet_override.epoch_2_4 { - devnet_config.epoch_2_4 = Some(val.clone()); + devnet_config.epoch_2_4 = Some(*val); } if let Some(ref val) = devnet_override.pox_2_activation { - devnet_config.pox_2_activation = Some(val.clone()); + devnet_config.pox_2_activation = Some(*val); } if let Some(val) = devnet_override.network_id { @@ -762,7 +755,7 @@ impl NetworkManifest { let remapped_subnet_contract_id = format!("{}.{}", default_deployer.stx_address, contract_id.name); - let mut config = DevnetConfig { + let config = DevnetConfig { name: devnet_config.name.take().unwrap_or("devnet".into()), network_id: devnet_config.network_id, orchestrator_ingestion_port: devnet_config.orchestrator_port.unwrap_or(20445), @@ -919,9 +912,6 @@ impl NetworkManifest { .docker_platform .unwrap_or(DEFAULT_DOCKER_PLATFORM.to_string()), }; - if !config.disable_stacks_api && config.disable_stacks_api { - config.disable_stacks_api = false; - } Some(config) } else { None @@ -941,7 +931,7 @@ pub fn compute_addresses( derivation_path: &str, networks: &(BitcoinNetwork, StacksNetwork), ) -> (String, String, String) { - let bip39_seed = match get_bip39_seed_from_mnemonic(&mnemonic, "") { + let bip39_seed = match get_bip39_seed_from_mnemonic(mnemonic, "") { Ok(bip39_seed) => bip39_seed, Err(_) => panic!(), }; @@ -976,9 +966,6 @@ pub fn compute_addresses( (stx_address.to_string(), btc_address, miner_secret_key_hex) } -#[cfg(not(feature = "wasm"))] -use bitcoin; - #[cfg(not(feature = "wasm"))] fn compute_btc_address(public_key: &PublicKey, network: &BitcoinNetwork) -> String { let public_key = bitcoin::PublicKey::from_slice(&public_key.serialize_compressed()) @@ -996,5 +983,5 @@ fn compute_btc_address(public_key: &PublicKey, network: &BitcoinNetwork) -> Stri #[cfg(feature = "wasm")] fn compute_btc_address(_public_key: &PublicKey, _network: &BitcoinNetwork) -> String { - format!("__not_implemented__") + "__not_implemented__".to_string() } diff --git a/components/clarinet-files/src/project_manifest.rs b/components/clarinet-files/src/project_manifest.rs index 9fa7acc7f..ad2888393 100644 --- a/components/clarinet-files/src/project_manifest.rs +++ b/components/clarinet-files/src/project_manifest.rs @@ -233,18 +233,18 @@ pub struct RequirementConfig { impl ProjectManifest { pub async fn from_file_accessor( location: &FileLocation, - file_accessor: &Box, + file_accessor: &dyn FileAccessor, ) -> Result { let content = file_accessor.read_file(location.to_string()).await?; - let project_manifest_file: ProjectManifestFile = match toml::from_slice(&content.as_bytes()) + let project_manifest_file: ProjectManifestFile = match toml::from_slice(content.as_bytes()) { Ok(s) => s, Err(e) => { return Err(format!("Clarinet.toml file malformatted {:?}", e)); } }; - ProjectManifest::from_project_manifest_file(project_manifest_file, &location) + ProjectManifest::from_project_manifest_file(project_manifest_file, location) } pub fn from_location(location: &FileLocation) -> Result { @@ -294,7 +294,7 @@ impl ProjectManifest { .project .description .unwrap_or("".into()), - authors: project_manifest_file.project.authors.unwrap_or(vec![]), + authors: 
project_manifest_file.project.authors.unwrap_or_default(), telemetry: project_manifest_file.project.telemetry.unwrap_or(false), cache_location, boot_contracts: vec![ @@ -321,81 +321,66 @@ impl ProjectManifest { let mut contracts_settings = HashMap::new(); let mut config_requirements: Vec = Vec::new(); - match project_manifest_file.project.requirements { - Some(Value::Array(requirements)) => { - for link_settings in requirements.iter() { - match link_settings { - Value::Table(link_settings) => { - let contract_id = match link_settings.get("contract_id") { - Some(Value::String(contract_id)) => contract_id.to_string(), - _ => continue, - }; - config_requirements.push(RequirementConfig { contract_id }); - } - _ => {} - } + if let Some(Value::Array(requirements)) = project_manifest_file.project.requirements { + for link_settings in requirements.iter() { + if let Value::Table(link_settings) = link_settings { + let contract_id = match link_settings.get("contract_id") { + Some(Value::String(contract_id)) => contract_id.to_string(), + _ => continue, + }; + config_requirements.push(RequirementConfig { contract_id }); } } - _ => {} }; - match project_manifest_file.contracts { - Some(Value::Table(contracts)) => { - for (contract_name, contract_settings) in contracts.iter() { - match contract_settings { - Value::Table(contract_settings) => { - let contract_path = match contract_settings.get("path") { - Some(Value::String(path)) => path, - _ => continue, - }; - let code_source = match PathBuf::from_str(contract_path) { - Ok(path) => ClarityCodeSource::ContractOnDisk(path), - Err(e) => { - return Err(format!( - "unable to parse path {} ({})", - contract_path, e - )) - } - }; - let deployer = match contract_settings.get("deployer") { - Some(Value::String(path)) => { - ContractDeployer::LabeledDeployer(path.clone()) - } - _ => ContractDeployer::DefaultDeployer, - }; - - let (epoch, clarity_version) = get_epoch_and_clarity_version( - contract_settings.get("epoch"), - contract_settings.get("clarity_version"), - )?; - - config_contracts.insert( - contract_name.to_string(), - ClarityContract { - name: contract_name.to_string(), - deployer: deployer.clone(), - code_source, - clarity_version, - epoch, - }, - ); - - let mut contract_location = project_root_location.clone(); - contract_location.append_path(contract_path)?; - contracts_settings.insert( - contract_location, - ClarityContractMetadata { - name: contract_name.to_string(), - deployer, - clarity_version, - epoch, - }, - ); + if let Some(Value::Table(contracts)) = project_manifest_file.contracts { + for (contract_name, contract_settings) in contracts.iter() { + if let Value::Table(contract_settings) = contract_settings { + let contract_path = match contract_settings.get("path") { + Some(Value::String(path)) => path, + _ => continue, + }; + let code_source = match PathBuf::from_str(contract_path) { + Ok(path) => ClarityCodeSource::ContractOnDisk(path), + Err(e) => { + return Err(format!("unable to parse path {} ({})", contract_path, e)) + } + }; + let deployer = match contract_settings.get("deployer") { + Some(Value::String(path)) => { + ContractDeployer::LabeledDeployer(path.clone()) } - _ => {} - } + _ => ContractDeployer::DefaultDeployer, + }; + + let (epoch, clarity_version) = get_epoch_and_clarity_version( + contract_settings.get("epoch"), + contract_settings.get("clarity_version"), + )?; + + config_contracts.insert( + contract_name.to_string(), + ClarityContract { + name: contract_name.to_string(), + deployer: deployer.clone(), + code_source, + 
clarity_version, + epoch, + }, + ); + + let mut contract_location = project_root_location.clone(); + contract_location.append_path(contract_path)?; + contracts_settings.insert( + contract_location, + ClarityContractMetadata { + name: contract_name.to_string(), + deployer, + clarity_version, + epoch, + }, + ); } } - _ => {} }; config.contracts = config_contracts; config.contracts_settings = contracts_settings; diff --git a/components/clarinet-files/src/wasm_fs_accessor.rs b/components/clarinet-files/src/wasm_fs_accessor.rs index 49b3e9d08..1097aa9c3 100644 --- a/components/clarinet-files/src/wasm_fs_accessor.rs +++ b/components/clarinet-files/src/wasm_fs_accessor.rs @@ -62,7 +62,7 @@ impl FileAccessor for WASMFileSystemAccessor { Box::pin(async move { file_exists_request .await - .and_then(|r| Ok(decode_from_js(r).map_err(|err| err.to_string())?)) + .and_then(|r| decode_from_js(r).map_err(|err| err.to_string())) }) } @@ -73,7 +73,7 @@ impl FileAccessor for WASMFileSystemAccessor { Box::pin(async move { read_file_promise .await - .and_then(|r| Ok(decode_from_js(r).map_err(|err| err.to_string())?)) + .and_then(|r| decode_from_js(r).map_err(|err| err.to_string())) }) } @@ -91,7 +91,7 @@ impl FileAccessor for WASMFileSystemAccessor { Box::pin(async move { read_contract_promise .await - .and_then(|r| Ok(decode_from_js(r).map_err(|err| err.to_string())?)) + .and_then(|r| decode_from_js(r).map_err(|err| err.to_string())) }) } @@ -104,6 +104,6 @@ impl FileAccessor for WASMFileSystemAccessor { }, ); - Box::pin(async move { write_file_promise.await.and_then(|_| Ok(())) }) + Box::pin(async move { write_file_promise.await.map(|_| ()) }) } } diff --git a/components/clarinet-sdk-wasm/src/core.rs b/components/clarinet-sdk-wasm/src/core.rs index a837cc1b3..056725b49 100644 --- a/components/clarinet-sdk-wasm/src/core.rs +++ b/components/clarinet-sdk-wasm/src/core.rs @@ -297,7 +297,7 @@ impl SDK { let manifest_location = FileLocation::try_parse(&manifest_path, Some(&project_root)) .ok_or("Failed to parse manifest location")?; let manifest = - ProjectManifest::from_file_accessor(&manifest_location, &self.file_accessor).await?; + ProjectManifest::from_file_accessor(&manifest_location, &*self.file_accessor).await?; let (deployment, artifacts) = match self.cache.get(&manifest_location) { Some(cache) => cache.clone(), @@ -306,7 +306,7 @@ impl SDK { &manifest, &StacksNetwork::Simnet, false, - Some(&self.file_accessor), + Some(&*self.file_accessor), Some(StacksEpochId::Epoch21), ) .await?; diff --git a/components/clarinet-sdk/clarinet-sdk.code-workspace b/components/clarinet-sdk/clarinet-sdk.code-workspace index 76769a8f1..81e627f4a 100644 --- a/components/clarinet-sdk/clarinet-sdk.code-workspace +++ b/components/clarinet-sdk/clarinet-sdk.code-workspace @@ -1,15 +1,15 @@ { "folders": [{ "path": "../../" }], "settings": { + "rust-analyzer.cargo.noDefaultFeatures": true, + "rust-analyzer.cargo.features": ["clarinet-sdk-wasm/wasm"], "rust-analyzer.check.overrideCommand": [ "cargo", "clippy", "--no-default-features", "--package=clarinet-sdk-wasm", "--features=wasm", - "--message-format=json", - "--", - "--no-deps" + "--message-format=json" ] } } diff --git a/components/clarity-lsp/src/common/backend.rs b/components/clarity-lsp/src/common/backend.rs index b2154eaa8..d62ed9599 100644 --- a/components/clarity-lsp/src/common/backend.rs +++ b/components/clarity-lsp/src/common/backend.rs @@ -26,7 +26,7 @@ impl EditorStateInput { F: FnOnce(&EditorState) -> R, { match self { - EditorStateInput::Owned(editor_state) => 
Ok(closure(&editor_state)), + EditorStateInput::Owned(editor_state) => Ok(closure(editor_state)), EditorStateInput::RwLock(editor_state_lock) => match editor_state_lock.try_read() { Ok(editor_state) => Ok(closure(&editor_state)), Err(_) => Err("failed to read editor_state".to_string()), @@ -58,20 +58,13 @@ pub enum LspNotification { ContractClosed(FileLocation), } -#[derive(Debug, PartialEq, Deserialize, Serialize)] +#[derive(Debug, Default, PartialEq, Deserialize, Serialize)] pub struct LspNotificationResponse { pub aggregated_diagnostics: Vec<(FileLocation, Vec)>, pub notification: Option<(MessageType, String)>, } impl LspNotificationResponse { - pub fn default() -> LspNotificationResponse { - LspNotificationResponse { - aggregated_diagnostics: vec![], - notification: None, - } - } - pub fn error(message: &str) -> LspNotificationResponse { LspNotificationResponse { aggregated_diagnostics: vec![], @@ -83,7 +76,7 @@ impl LspNotificationResponse { pub async fn process_notification( command: LspNotification, editor_state: &mut EditorStateInput, - file_accessor: Option<&Box>, + file_accessor: Option<&dyn FileAccessor>, ) -> Result { match command { LspNotification::ManifestOpened(manifest_location) => { @@ -100,13 +93,13 @@ pub async fn process_notification( .try_write(|es| es.index_protocol(manifest_location, protocol_state))?; let (aggregated_diagnostics, notification) = editor_state.try_read(|es| es.get_aggregated_diagnostics())?; - return Ok(LspNotificationResponse { + Ok(LspNotificationResponse { aggregated_diagnostics, notification, - }); + }) } - Err(e) => return Ok(LspNotificationResponse::error(&e)), - }; + Err(e) => Ok(LspNotificationResponse::error(&e)), + } } LspNotification::ManifestSaved(manifest_location) => { @@ -118,13 +111,13 @@ pub async fn process_notification( .try_write(|es| es.index_protocol(manifest_location, protocol_state))?; let (aggregated_diagnostics, notification) = editor_state.try_read(|es| es.get_aggregated_diagnostics())?; - return Ok(LspNotificationResponse { + Ok(LspNotificationResponse { aggregated_diagnostics, notification, - }); + }) } - Err(e) => return Ok(LspNotificationResponse::error(&e)), - }; + Err(e) => Ok(LspNotificationResponse::error(&e)), + } } LspNotification::ContractOpened(contract_location) => { @@ -142,12 +135,9 @@ pub async fn process_notification( }?; let metadata = editor_state.try_read(|es| { - match es.contracts_lookup.get(&contract_location) { - Some(metadata) => { - Some((metadata.clarity_version, metadata.deployer.clone())) - } - None => None, - } + es.contracts_lookup + .get(&contract_location) + .map(|metadata| (metadata.clarity_version, metadata.deployer.clone())) })?; // if the contract isn't in lookup yet, fallback on manifest, to be improved in #668 @@ -202,13 +192,13 @@ pub async fn process_notification( .try_write(|es| es.index_protocol(manifest_location, protocol_state))?; let (aggregated_diagnostics, notification) = editor_state.try_read(|es| es.get_aggregated_diagnostics())?; - return Ok(LspNotificationResponse { + Ok(LspNotificationResponse { aggregated_diagnostics, notification, - }); + }) } - Err(e) => return Ok(LspNotificationResponse::error(&e)), - }; + Err(e) => Ok(LspNotificationResponse::error(&e)), + } } LspNotification::ContractSaved(contract_location) => { @@ -236,13 +226,13 @@ pub async fn process_notification( let (aggregated_diagnostics, notification) = editor_state.try_read(|es| es.get_aggregated_diagnostics())?; - return Ok(LspNotificationResponse { + Ok(LspNotificationResponse { aggregated_diagnostics, 
notification, - }); + }) } - Err(e) => return Ok(LspNotificationResponse::error(&e)), - }; + Err(e) => Ok(LspNotificationResponse::error(&e)), + } } LspNotification::ContractChanged(contract_location, contract_source) => { diff --git a/components/clarity-lsp/src/common/requests/api_ref.rs b/components/clarity-lsp/src/common/requests/api_ref.rs index 20cb60c11..00921a81e 100644 --- a/components/clarity-lsp/src/common/requests/api_ref.rs +++ b/components/clarity-lsp/src/common/requests/api_ref.rs @@ -8,7 +8,7 @@ use lazy_static::lazy_static; use std::collections::HashMap; fn code(code: &str) -> String { - vec!["```clarity", code.trim(), "```"].join("\n") + ["```clarity", code.trim(), "```"].join("\n") } lazy_static! { @@ -62,7 +62,7 @@ lazy_static! { &reference.description, separator, "**Example**", - &code(&reference.example), + &code(reference.example), separator, &format!("**Introduced in:** {}", &reference.version), ]) diff --git a/components/clarity-lsp/src/common/requests/completion.rs b/components/clarity-lsp/src/common/requests/completion.rs index 095f49efc..233f709ef 100644 --- a/components/clarity-lsp/src/common/requests/completion.rs +++ b/components/clarity-lsp/src/common/requests/completion.rs @@ -81,12 +81,12 @@ pub struct ContractDefinedData { } impl<'a> ContractDefinedData { - pub fn new(expressions: &Vec, position: &Position) -> Self { + pub fn new(expressions: &[SymbolicExpression], position: &Position) -> Self { let mut defined_data = ContractDefinedData { - position: position.clone(), + position: *position, ..Default::default() }; - traverse(&mut defined_data, &expressions); + traverse(&mut defined_data, expressions); defined_data } @@ -97,7 +97,7 @@ impl<'a> ContractDefinedData { &mut self, expr: &SymbolicExpression, name: &ClarityName, - parameters: &Vec>, + parameters: &[TypedVar<'a>], ) { let mut completion_args: Vec = vec![]; for (i, typed_var) in parameters.iter().enumerate() { @@ -131,32 +131,32 @@ impl<'a> ContractDefinedData { &self, version: &ClarityVersion, name: &String, - snippet: &String, + snippet: &str, ) -> Option { - if VAR_FUNCTIONS.contains(name) && self.vars.len() > 0 { + if VAR_FUNCTIONS.contains(name) && !self.vars.is_empty() { let choices = self.vars.join(","); return Some(snippet.replace("${1:var}", &format!("${{1|{}|}}", choices))); } - if MAP_FUNCTIONS.contains(name) && self.maps.len() > 0 { + if MAP_FUNCTIONS.contains(name) && !self.maps.is_empty() { let choices = self.maps.join(","); return Some(snippet.replace("${1:map-name}", &format!("${{1|{}|}}", choices))); } - if FT_FUNCTIONS.contains(name) && self.fts.len() > 0 { + if FT_FUNCTIONS.contains(name) && !self.fts.is_empty() { let choices = self.fts.join(","); return Some(snippet.replace("${1:token-name}", &format!("${{1|{}|}}", choices))); } - if NFT_FUNCTIONS.contains(name) && self.nfts.len() > 0 { + if NFT_FUNCTIONS.contains(name) && !self.nfts.is_empty() { let choices = self.nfts.join(","); return Some(snippet.replace("${1:asset-name}", &format!("${{1|{}|}}", choices))); } - if ITERATOR_FUNCTIONS.contains(name) && self.functions_completion_items.len() > 0 { + if ITERATOR_FUNCTIONS.contains(name) && !self.functions_completion_items.is_empty() { let mut choices = self .functions_completion_items .iter() .map(|f| f.label.to_string()) .collect::>() .join(","); - choices.push_str(","); + choices.push(','); choices.push_str( &get_iterator_cb_completion_item(version, name) .iter() @@ -240,7 +240,7 @@ impl<'a> ASTVisitor<'a> for ContractDefinedData { parameters: Option>>, _body: &'a 
SymbolicExpression, ) -> bool { - self.set_function_completion_with_bindings(expr, name, ¶meters.unwrap_or(vec![])); + self.set_function_completion_with_bindings(expr, name, ¶meters.unwrap_or_default()); true } @@ -251,7 +251,7 @@ impl<'a> ASTVisitor<'a> for ContractDefinedData { parameters: Option>>, _body: &'a SymbolicExpression, ) -> bool { - self.set_function_completion_with_bindings(expr, name, ¶meters.unwrap_or(vec![])); + self.set_function_completion_with_bindings(expr, name, ¶meters.unwrap_or_default()); true } @@ -262,7 +262,7 @@ impl<'a> ASTVisitor<'a> for ContractDefinedData { parameters: Option>>, _body: &'a SymbolicExpression, ) -> bool { - self.set_function_completion_with_bindings(expr, name, ¶meters.unwrap_or(vec![])); + self.set_function_completion_with_bindings(expr, name, ¶meters.unwrap_or_default()); true } @@ -303,8 +303,7 @@ pub fn get_contract_calls(analysis: &ContractAnalysis) -> Vec { let (snippet_args, doc_args) = build_contract_calls_args(signature); let label = format!( "contract-call? .{} {}", - analysis.contract_identifier.name.to_string(), - name.to_string() + analysis.contract_identifier.name, name, ); let documentation = MarkupContent { kind: MarkupKind::Markdown, @@ -314,9 +313,9 @@ pub fn get_contract_calls(analysis: &ContractAnalysis) -> Vec { }; let insert_text = format!( "contract-call? .{} {} {}", - analysis.contract_identifier.name.to_string(), - name.to_string(), - snippet_args.join(" ") + analysis.contract_identifier.name, + name, + snippet_args.join(" "), ); inter_contract.push(CompletionItem { @@ -341,7 +340,7 @@ pub fn build_completion_item_list( should_wrap: bool, include_native_placeholders: bool, ) -> Vec { - if let Some((function_name, param)) = get_function_at_position(position, expressions.as_ref()) { + if let Some((function_name, param)) = get_function_at_position(position, expressions) { // - for var-*, map-*, ft-* or nft-* methods, return the corresponding data names let mut completion_strings: Option> = None; if VAR_FUNCTIONS.contains(&function_name.to_string()) && param == Some(0) { @@ -368,9 +367,7 @@ pub fn build_completion_item_list( &mut active_contract_defined_data .functions_completion_items .iter() - .map(|f| { - CompletionItem::new_simple(String::from(f.label.clone()), String::from("")) - }) + .map(|f| CompletionItem::new_simple(f.label.clone(), String::from(""))) .collect::>(), ); completion_items.append(&mut get_iterator_cb_completion_item( @@ -508,7 +505,7 @@ pub fn build_default_native_keywords_list(version: ClarityVersion) -> Vec = NativeFunctions::ALL .iter() .filter_map(|func| { - let mut api = make_api_reference(&func); + let mut api = make_api_reference(func); if api.version > version { return None; } @@ -517,7 +514,7 @@ pub fn build_default_native_keywords_list(version: ClarityVersion) -> Vec Vec = DefineFunctions::ALL .iter() .filter_map(|func| { - let api = make_define_reference(&func); + let api = make_define_reference(func); if api.version > version { return None; } @@ -563,7 +560,7 @@ pub fn build_default_native_keywords_list(version: ClarityVersion) -> Vec = NativeVariables::ALL .iter() .filter_map(|var| { - if let Some(api) = make_keyword_reference(&var) { + if let Some(api) = make_keyword_reference(var) { if api.version > version { return None; } @@ -601,7 +598,7 @@ pub fn build_default_native_keywords_list(version: ClarityVersion) -> Vec Vec Vec { - vec![ + [ NativeFunctions::Add, NativeFunctions::Subtract, NativeFunctions::Multiply, @@ -685,7 +682,7 @@ pub fn build_map_valid_cb_completion_items(version: 
ClarityVersion) -> Vec Vec { - vec![ + [ NativeFunctions::And, NativeFunctions::Or, NativeFunctions::Not, @@ -696,7 +693,7 @@ pub fn build_filter_valid_cb_completion_items(version: ClarityVersion) -> Vec Vec { - vec![ + [ NativeFunctions::Add, NativeFunctions::Subtract, NativeFunctions::Multiply, @@ -732,7 +729,7 @@ fn build_iterator_cb_completion_item( func: &NativeFunctions, version: ClarityVersion, ) -> Option { - let api = make_api_reference(&func); + let api = make_api_reference(func); if api.version > version { return None; } @@ -766,10 +763,10 @@ fn get_iterator_cb_completion_item(version: &ClarityVersion, func: &str) -> Vec< ClarityVersion::Clarity2 => VALID_FILTER_FUNCTIONS_CLARITY_1.to_vec(), }; } - return match version { + match version { ClarityVersion::Clarity1 => VALID_FOLD_FUNCTIONS_CLARITY_1.to_vec(), ClarityVersion::Clarity2 => VALID_FOLD_FUNCTIONS_CLARITY_1.to_vec(), - }; + } } #[cfg(test)] diff --git a/components/clarity-lsp/src/common/requests/definitions.rs b/components/clarity-lsp/src/common/requests/definitions.rs index 70a02befb..e038c0cb5 100644 --- a/components/clarity-lsp/src/common/requests/definitions.rs +++ b/components/clarity-lsp/src/common/requests/definitions.rs @@ -37,8 +37,8 @@ impl<'a> Definitions { } } - pub fn run(&mut self, expressions: &'a Vec) { - traverse(self, &expressions); + pub fn run(&mut self, expressions: &'a [SymbolicExpression]) { + traverse(self, expressions); } fn set_function_parameters_scope(&mut self, expr: &SymbolicExpression) -> Option<()> { @@ -82,7 +82,7 @@ impl<'a> ASTVisitor<'a> for Definitions { AtomValue(value) => self.visit_atom_value(expr, value), Atom(name) => self.visit_atom(expr, name), List(exprs) => { - let result = self.traverse_list(expr, &exprs); + let result = self.traverse_list(expr, exprs); // clear local scope after traversing it self.local.remove(&expr.id); result @@ -490,8 +490,8 @@ impl<'a> ASTVisitor<'a> for Definitions { self.local.insert(expr.id, local_scope); } - for (_, val) in bindings { - if !self.traverse_expr(val) { + for binding in bindings.values() { + if !self.traverse_expr(binding) { return false; } } @@ -546,7 +546,7 @@ impl<'a> ASTVisitor<'a> for Definitions { } pub fn get_definitions( - expressions: &Vec, + expressions: &[SymbolicExpression], issuer: Option, ) -> HashMap<(u32, u32), DefinitionLocation> { let mut definitions_visitor = Definitions::new(issuer); @@ -565,18 +565,16 @@ pub fn get_public_function_definitions( .and_then(|l| l.split_first()) .and_then(|(function_name, args)| Some((function_name.match_atom()?, args))) { - match DefineFunctions::lookup_by_name(function_name) { - Some(DefineFunctions::PublicFunction | DefineFunctions::ReadOnlyFunction) => { - if let Some(function_name) = args - .split_first() - .and_then(|(args_list, _)| args_list.match_list()?.split_first()) - .and_then(|(function_name, _)| function_name.match_atom()) - { - definitions - .insert(function_name.to_owned(), span_to_range(&expression.span)); - } + if let Some(DefineFunctions::PublicFunction | DefineFunctions::ReadOnlyFunction) = + DefineFunctions::lookup_by_name(function_name) + { + if let Some(function_name) = args + .split_first() + .and_then(|(args_list, _)| args_list.match_list()?.split_first()) + .and_then(|(function_name, _)| function_name.match_atom()) + { + definitions.insert(function_name.to_owned(), span_to_range(&expression.span)); } - _ => (), } } } diff --git a/components/clarity-lsp/src/common/requests/document_symbols.rs b/components/clarity-lsp/src/common/requests/document_symbols.rs index 
dc24b798c..a84ea21d1 100644 --- a/components/clarity-lsp/src/common/requests/document_symbols.rs +++ b/components/clarity-lsp/src/common/requests/document_symbols.rs @@ -11,12 +11,10 @@ use super::helpers::span_to_range; fn symbolic_expression_to_name(symbolic_expr: &SymbolicExpression) -> String { match &symbolic_expr.expr { - SymbolicExpressionType::Atom(name) => return name.to_string(), - SymbolicExpressionType::List(list) => { - return symbolic_expression_to_name(&(*list).to_vec()[0]) - } - _ => return "".to_string(), - }; + SymbolicExpressionType::Atom(name) => name.to_string(), + SymbolicExpressionType::List(list) => symbolic_expression_to_name(&(*list).to_vec()[0]), + _ => "".to_string(), + } } #[derive(Eq, PartialEq, Copy, Clone, Serialize, Deserialize)] @@ -55,7 +53,7 @@ fn build_symbol( detail, tags: None, deprecated: None, - selection_range: range.clone(), + selection_range: range, range, children, } @@ -75,8 +73,8 @@ impl<'a> ASTSymbols { } } - pub fn get_symbols(mut self, expressions: &'a Vec) -> Vec { - traverse(&mut self, &expressions); + pub fn get_symbols(mut self, expressions: &'a [SymbolicExpression]) -> Vec { + traverse(&mut self, expressions); self.symbols } } @@ -88,7 +86,7 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { trait_identifier: &clarity_repl::clarity::vm::types::TraitIdentifier, ) -> bool { self.symbols.push(build_symbol( - &"impl-trait", + "impl-trait", Some(trait_identifier.name.to_string()), ClaritySymbolKind::IMPL_TRAIT, &expr.span, @@ -104,7 +102,7 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { data_type: &'a SymbolicExpression, initial: &'a SymbolicExpression, ) -> bool { - let symbol_type = symbolic_expression_to_name(&data_type); + let symbol_type = symbolic_expression_to_name(data_type); self.symbols.push(build_symbol( &name.to_owned(), Some(symbol_type), @@ -123,19 +121,14 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { ) -> bool { let mut symbols: Vec = Vec::new(); for (name, expr) in values.iter() { - match name { - Some(name) => { - symbols.push(build_symbol( - name.as_str(), - None, - ClaritySymbolKind::VALUE, - &expr.span, - self.children_map.remove(&expr.id), - )); - } - None => { - (); - } + if let Some(name) = name { + symbols.push(build_symbol( + name.as_str(), + None, + ClaritySymbolKind::VALUE, + &expr.span, + self.children_map.remove(&expr.id), + )); } } self.children_map.insert(expr.id, symbols); @@ -165,21 +158,22 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { key_type: &'a SymbolicExpression, value_type: &'a SymbolicExpression, ) -> bool { - let mut children = Vec::new(); - children.push(build_symbol( - "key", - Some(symbolic_expression_to_name(&key_type)), - ClaritySymbolKind::KEY, - &key_type.span, - None, - )); - children.push(build_symbol( - "value", - Some(symbolic_expression_to_name(&value_type)), - ClaritySymbolKind::VALUE, - &value_type.span, - None, - )); + let children = vec![ + build_symbol( + "key", + Some(symbolic_expression_to_name(key_type)), + ClaritySymbolKind::KEY, + &key_type.span, + None, + ), + build_symbol( + "value", + Some(symbolic_expression_to_name(value_type)), + ClaritySymbolKind::VALUE, + &value_type.span, + None, + ), + ]; self.symbols.push(build_symbol( &name.to_owned(), @@ -317,11 +311,8 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { ) -> bool { let mut children = Vec::new(); for statement in statements.iter() { - match self.children_map.remove(&statement.id) { - Some(mut child) => { - children.append(&mut child); - } - None => (), + if let Some(mut child) = self.children_map.remove(&statement.id) { + 
children.append(&mut child); } } @@ -347,7 +338,7 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { let mut children: Vec = Vec::new(); let mut bindings_children: Vec = Vec::new(); - for (name, expr) in bindings.into_iter() { + for (name, expr) in bindings.iter() { bindings_children.push(build_symbol( name.as_str(), None, @@ -356,7 +347,7 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { self.children_map.remove(&expr.id), )) } - if bindings_children.len() > 0 { + if !bindings_children.is_empty() { let start = bindings_children.first().unwrap().range.start; let end = bindings_children.last().unwrap().range.start; let bindings_span = Span { @@ -376,16 +367,13 @@ impl<'a> ASTVisitor<'a> for ASTSymbols { let mut body_children = Vec::new(); for statement in body.iter() { - match self.children_map.remove(&statement.id) { - Some(children) => { - for child in children { - body_children.push(child); - } + if let Some(children) = self.children_map.remove(&statement.id) { + for child in children { + body_children.push(child); } - None => (), } } - if body_children.len() > 0 { + if !body_children.is_empty() { let start = body_children.first().unwrap().range.start; let end = body_children.last().unwrap().range.start; let body_span = Span { diff --git a/components/clarity-lsp/src/common/requests/helpers.rs b/components/clarity-lsp/src/common/requests/helpers.rs index a71c7c78e..c8a10b625 100644 --- a/components/clarity-lsp/src/common/requests/helpers.rs +++ b/components/clarity-lsp/src/common/requests/helpers.rs @@ -1,3 +1,5 @@ +use std::cmp::Ordering; + use clarity_repl::clarity::{representations::Span, ClarityName, SymbolicExpression}; use lsp_types::{Position, Range}; @@ -26,7 +28,7 @@ pub fn is_position_within_span(position: &Position, span: &Span, end_offset: u32 return false; } - return true; + true } pub fn get_expression_name_at_position( @@ -59,21 +61,23 @@ pub fn get_function_at_position( let mut position_in_parameters: i32 = -1; for parameter in expressions { - if position.line == parameter.span.end_line { - if position.character > parameter.span.end_column + 1 { - position_in_parameters += 1 + match position.line.cmp(¶meter.span.end_line) { + Ordering::Equal => { + if position.character > parameter.span.end_column + 1 { + position_in_parameters += 1 + } } - } else if position.line > parameter.span.end_line { - position_in_parameters += 1 + Ordering::Greater => position_in_parameters += 1, + _ => {} } } let (function_name, _) = expressions.split_first()?; - return Some(( + Some(( function_name.match_atom()?.to_owned(), position_in_parameters.try_into().ok(), - )); + )) } pub fn get_atom_start_at_position( diff --git a/components/clarity-lsp/src/common/requests/signature_help.rs b/components/clarity-lsp/src/common/requests/signature_help.rs index 5b82c6910..a4afd2683 100644 --- a/components/clarity-lsp/src/common/requests/signature_help.rs +++ b/components/clarity-lsp/src/common/requests/signature_help.rs @@ -48,7 +48,7 @@ pub fn get_signatures( signature_without_parenthesis.next_back(); let signature_without_parenthesis = signature_without_parenthesis.as_str(); let parameters = signature_without_parenthesis - .split(" ") + .split(' ') .collect::>(); let (_, parameters) = parameters.split_first().expect("invalid signature format"); diff --git a/components/clarity-lsp/src/common/state.rs b/components/clarity-lsp/src/common/state.rs index d73fc664c..40982344b 100644 --- a/components/clarity-lsp/src/common/state.rs +++ b/components/clarity-lsp/src/common/state.rs @@ -110,7 +110,7 @@ impl ActiveContractData 
{ pub fn update_definitions(&mut self) { if let Some(expressions) = &self.expressions { - self.definitions = Some(get_definitions(&expressions, self.issuer.clone())); + self.definitions = Some(get_definitions(expressions, self.issuer.clone())); } } @@ -283,7 +283,7 @@ impl EditorState { &mut self, contract_location: &FileLocation, ) -> Option { - match self.contracts_lookup.get(&contract_location) { + match self.contracts_lookup.get(contract_location) { Some(contract_metadata) => { let manifest_location = contract_metadata.manifest_location.clone(); self.clear_protocol(&manifest_location); @@ -298,7 +298,7 @@ impl EditorState { contract_location: &FileLocation, position: &Position, ) -> Vec { - let active_contract = match self.active_contracts.get(&contract_location) { + let active_contract = match self.active_contracts.get(contract_location) { Some(contract) => contract, None => return vec![], }; @@ -307,7 +307,7 @@ impl EditorState { .contracts_lookup .get(contract_location) .and_then(|d| self.protocols.get(&d.manifest_location)) - .and_then(|p| Some(p.get_contract_calls_for_contract(contract_location))) + .map(|p| p.get_contract_calls_for_contract(contract_location)) .unwrap_or_default(); let expressions = active_contract.expressions.as_ref(); @@ -320,7 +320,7 @@ impl EditorState { build_completion_item_list( &active_contract.clarity_version, - &expressions.unwrap_or(&vec![]), + expressions.unwrap_or(&vec![]), &Position { line: position.line + 1, character: position.character + 1, @@ -336,7 +336,7 @@ impl EditorState { &self, contract_location: &FileLocation, ) -> Vec { - let active_contract = match self.active_contracts.get(&contract_location) { + let active_contract = match self.active_contracts.get(contract_location) { Some(contract) => contract, None => return vec![], }; @@ -347,7 +347,7 @@ impl EditorState { }; let ast_symbols = ASTSymbols::new(); - ast_symbols.get_symbols(&expressions) + ast_symbols.get_symbols(expressions) } pub fn get_definition_location( @@ -355,7 +355,7 @@ impl EditorState { contract_location: &FileLocation, position: &Position, ) -> Option { - let contract = self.active_contracts.get(&contract_location)?; + let contract = self.active_contracts.get(contract_location)?; let position = Position { line: position.line + 1, character: position.character + 1, @@ -368,12 +368,10 @@ impl EditorState { }; match definitions.get(&position_hash)? { - DefinitionLocation::Internal(range) => { - return Some(Location { - uri: Url::parse(&contract_location.to_string()).ok()?, - range: range.clone(), - }); - } + DefinitionLocation::Internal(range) => Some(Location { + uri: Url::parse(&contract_location.to_string()).ok()?, + range: *range, + }), DefinitionLocation::External(contract_identifier, function_name) => { let metadata = self.contracts_lookup.get(contract_location)?; let protocol = self.protocols.get(&metadata.manifest_location)?; @@ -387,23 +385,23 @@ impl EditorState { .get(definition_contract_location) .and_then(|c| c.expressions.as_ref()) { - let public_definitions = get_public_function_definitions(&expressions); + let public_definitions = get_public_function_definitions(expressions); return Some(Location { range: *public_definitions.get(function_name)?, uri: Url::parse(&definition_contract_location.to_string()).ok()?, }); }; - return Some(Location { + Some(Location { range: *protocol .contracts .get(definition_contract_location)? 
.definitions .get(function_name)?, uri: Url::parse(&definition_contract_location.to_string()).ok()?, - }); + }) } - }; + } } pub fn get_hover_data( @@ -411,7 +409,7 @@ impl EditorState { contract_location: &FileLocation, position: &lsp_types::Position, ) -> Option { - let contract = self.active_contracts.get(&contract_location)?; + let contract = self.active_contracts.get(contract_location)?; let position = Position { line: position.line + 1, character: position.character + 1, @@ -437,7 +435,7 @@ impl EditorState { position: &lsp_types::Position, active_signature: Option, ) -> Option { - let contract = self.active_contracts.get(&contract_location)?; + let contract = self.active_contracts.get(contract_location)?; let position = Position { line: position.line + 1, character: position.character + 1, @@ -576,7 +574,7 @@ impl ProtocolState { // Add / Replace new paths for (contract_id, contract_location) in locations.iter() { - let (contract_id, ast) = match asts.remove_entry(&contract_id) { + let (contract_id, ast) = match asts.remove_entry(contract_id) { Some(ast) => ast, None => continue, }; @@ -636,7 +634,7 @@ impl ProtocolState { pub async fn build_state( manifest_location: &FileLocation, protocol_state: &mut ProtocolState, - file_accessor: Option<&Box>, + file_accessor: Option<&dyn FileAccessor>, ) -> Result<(), String> { let mut locations = HashMap::new(); let mut analyses = HashMap::new(); @@ -688,18 +686,14 @@ pub async fn build_state( entry.append(&mut execution_result.diagnostics); } - match execution_result.result { - EvaluationResult::Contract(contract_result) => { - if let Some(ast) = artifacts.asts.get(&contract_id) { - definitions.insert( - contract_id.clone(), - get_public_function_definitions(&ast.expressions), - ); - } - analyses - .insert(contract_id.clone(), Some(contract_result.contract.analysis)); + if let EvaluationResult::Contract(contract_result) = execution_result.result { + if let Some(ast) = artifacts.asts.get(&contract_id) { + definitions.insert( + contract_id.clone(), + get_public_function_definitions(&ast.expressions), + ); } - _ => (), + analyses.insert(contract_id.clone(), Some(contract_result.contract.analysis)); }; } Err(ref mut diags) => { diff --git a/components/clarity-lsp/src/vscode_bridge.rs b/components/clarity-lsp/src/vscode_bridge.rs index 7b345edf0..3328b4c69 100644 --- a/components/clarity-lsp/src/vscode_bridge.rs +++ b/components/clarity-lsp/src/vscode_bridge.rs @@ -140,7 +140,7 @@ impl LspVscodeBridge { future_to_promise(async move { let mut result = - process_notification(command, &mut editor_state_lock, Some(&file_accessor)).await; + process_notification(command, &mut editor_state_lock, Some(&*file_accessor)).await; let mut aggregated_diagnostics = vec![]; if let Err(err) = result { @@ -161,13 +161,13 @@ impl LspVscodeBridge { aggregated_diagnostics.append(&mut response.aggregated_diagnostics); } - for (location, mut diags) in aggregated_diagnostics.into_iter() { + for (location, diags) in aggregated_diagnostics.into_iter() { if let Ok(uri) = Url::parse(&location.to_string()) { send_diagnostic.call1( &JsValue::NULL, &encode_to_js(&PublishDiagnosticsParams { uri, - diagnostics: clarity_diagnostics_to_lsp_type(&mut diags), + diagnostics: clarity_diagnostics_to_lsp_type(&diags), version: None, })?, )?; @@ -198,7 +198,7 @@ impl LspVscodeBridge { Completion::METHOD => { let lsp_response = process_request( LspRequest::Completion(decode_from_js(js_params)?), - &mut EditorStateInput::RwLock(self.editor_state_lock.clone()), + 
&EditorStateInput::RwLock(self.editor_state_lock.clone()), ); if let Ok(LspRequestResponse::CompletionItems(response)) = lsp_response { return response.serialize(&serializer).map_err(|_| JsValue::NULL); @@ -208,7 +208,7 @@ impl LspVscodeBridge { SignatureHelpRequest::METHOD => { let lsp_response = process_request( LspRequest::SignatureHelp(decode_from_js(js_params)?), - &mut EditorStateInput::RwLock(self.editor_state_lock.clone()), + &EditorStateInput::RwLock(self.editor_state_lock.clone()), ); if let Ok(LspRequestResponse::SignatureHelp(response)) = lsp_response { return response.serialize(&serializer).map_err(|_| JsValue::NULL); @@ -218,7 +218,7 @@ impl LspVscodeBridge { GotoDefinition::METHOD => { let lsp_response = process_request( LspRequest::Definition(decode_from_js(js_params)?), - &mut EditorStateInput::RwLock(self.editor_state_lock.clone()), + &EditorStateInput::RwLock(self.editor_state_lock.clone()), ); if let Ok(LspRequestResponse::Definition(response)) = lsp_response { return response.serialize(&serializer).map_err(|_| JsValue::NULL); @@ -228,7 +228,7 @@ impl LspVscodeBridge { DocumentSymbolRequest::METHOD => { let lsp_response = process_request( LspRequest::DocumentSymbol(decode_from_js(js_params)?), - &mut EditorStateInput::RwLock(self.editor_state_lock.clone()), + &EditorStateInput::RwLock(self.editor_state_lock.clone()), ); if let Ok(LspRequestResponse::DocumentSymbol(response)) = lsp_response { return response.serialize(&serializer).map_err(|_| JsValue::NULL); @@ -238,7 +238,7 @@ impl LspVscodeBridge { HoverRequest::METHOD => { let lsp_response = process_request( LspRequest::Hover(decode_from_js(js_params)?), - &mut EditorStateInput::RwLock(self.editor_state_lock.clone()), + &EditorStateInput::RwLock(self.editor_state_lock.clone()), ); if let Ok(LspRequestResponse::Hover(response)) = lsp_response { return response.serialize(&serializer).map_err(|_| JsValue::NULL); @@ -252,6 +252,6 @@ impl LspVscodeBridge { }; // expect for Initialize, the failing requests can be ignored - return Ok(JsValue::NULL); + Ok(JsValue::NULL) } } diff --git a/components/clarity-repl/src/analysis/annotation.rs b/components/clarity-repl/src/analysis/annotation.rs index fc08a8633..2cec97718 100644 --- a/components/clarity-repl/src/analysis/annotation.rs +++ b/components/clarity-repl/src/analysis/annotation.rs @@ -35,7 +35,7 @@ impl std::str::FromStr for AnnotationKind { .filter(|s| !s.is_empty()) .map(|s| ClarityName::try_from(s.trim()).unwrap()) .collect(); - if params.len() == 0 { + if params.is_empty() { Err("missing value for 'filter' annotation".to_string()) } else { Ok(AnnotationKind::Filter(params)) diff --git a/components/clarity-repl/src/analysis/ast_dependency_detector.rs b/components/clarity-repl/src/analysis/ast_dependency_detector.rs index f059d213c..a7994c29e 100644 --- a/components/clarity-repl/src/analysis/ast_dependency_detector.rs +++ b/components/clarity-repl/src/analysis/ast_dependency_detector.rs @@ -76,6 +76,7 @@ impl PartialEq for Dependency { } } +#[allow(clippy::incorrect_partial_ord_impl_on_ord_type)] impl PartialOrd for Dependency { fn partial_cmp(&self, other: &Self) -> Option { self.contract_id.partial_cmp(&other.contract_id) @@ -88,16 +89,14 @@ impl Ord for Dependency { } } -#[derive(Debug, Clone)] +#[derive(Debug, Clone, Default)] pub struct DependencySet { pub set: BTreeSet, } impl DependencySet { - pub fn new() -> DependencySet { - DependencySet { - set: BTreeSet::new(), - } + pub fn new() -> Self { + Self::default() } pub fn add_dependency( @@ -120,14 +119,12 @@ impl 
DependencySet { } pub fn has_dependency(&self, contract_id: &QualifiedContractIdentifier) -> Option { - if let Some(dep) = self.set.get(&Dependency { - contract_id: contract_id.clone(), - required_before_publish: false, - }) { - Some(dep.required_before_publish) - } else { - None - } + self.set + .get(&Dependency { + contract_id: contract_id.clone(), + required_before_publish: false, + }) + .map(|dep| dep.required_before_publish) } } @@ -220,16 +217,13 @@ impl<'a> ASTDependencyDetector<'a> { let mut lookup = BTreeMap::new(); let mut reverse_lookup = Vec::new(); - let mut index: usize = 0; - if dependencies.is_empty() { return Ok(vec![]); } - for (contract, _) in dependencies { + for (index, (contract, _)) in dependencies.iter().enumerate() { lookup.insert(contract, index); reverse_lookup.push(contract); - index += 1; } let mut graph = Graph::new(); @@ -385,20 +379,19 @@ impl<'a> ASTDependencyDetector<'a> { fn check_callee_type( &self, - arg_types: &Vec, + arg_types: &[TypeSignature], args: &'a [SymbolicExpression], ) -> Vec { let mut dependencies = Vec::new(); for (i, arg) in arg_types.iter().enumerate() { if matches!(arg, TypeSignature::CallableType(CallableSubtype::Trait(_))) | matches!(arg, TypeSignature::TraitReferenceType(_)) + && args.len() > i { - if args.len() > i { - if let Some(Value::Principal(PrincipalData::Contract(contract))) = - args[i].match_literal_value() - { - dependencies.push(contract.clone()); - } + if let Some(Value::Principal(PrincipalData::Contract(contract))) = + args[i].match_literal_value() + { + dependencies.push(contract.clone()); } } } @@ -634,7 +627,7 @@ impl<'a> ASTVisitor<'a> for ASTDependencyDetector<'a> { self.check_trait_dependencies(trait_definition, function_name, args) } else { self.add_pending_trait_check( - &self.current_contract.unwrap(), + self.current_contract.unwrap(), trait_identifier, function_name, args, @@ -811,7 +804,7 @@ impl Graph { } fn has_node_descendants(&self, expr_index: usize) -> bool { - self.adjacency_list[expr_index].len() > 0 + !self.adjacency_list[expr_index].is_empty() } fn nodes_count(&self) -> usize { @@ -853,7 +846,7 @@ impl GraphWalker { self.seen.insert(tle_index); if let Some(list) = graph.adjacency_list.get(tle_index) { for neighbor in list.iter() { - self.sort_dependencies_recursion(neighbor.clone(), graph, branch); + self.sort_dependencies_recursion(*neighbor, graph, branch); } } branch.push(tle_index); @@ -885,7 +878,7 @@ impl GraphWalker { } let nodes = HashSet::from_iter(sorted_indexes.iter().cloned()); - let deps = nodes.difference(&tainted).map(|i| *i).collect(); + let deps = nodes.difference(&tainted).copied().collect(); Some(deps) } } diff --git a/components/clarity-repl/src/analysis/ast_visitor.rs b/components/clarity-repl/src/analysis/ast_visitor.rs index 7b2b9a288..2453d40d6 100644 --- a/components/clarity-repl/src/analysis/ast_visitor.rs +++ b/components/clarity-repl/src/analysis/ast_visitor.rs @@ -26,7 +26,7 @@ pub trait ASTVisitor<'a> { match &expr.expr { AtomValue(value) => self.visit_atom_value(expr, value), Atom(name) => self.visit_atom(expr, name), - List(exprs) => self.traverse_list(expr, &exprs), + List(exprs) => self.traverse_list(expr, exprs), LiteralValue(value) => self.visit_literal_value(expr, value), Field(field) => self.visit_field(expr, field), TraitReference(name, trait_def) => self.visit_trait_reference(expr, name, trait_def), @@ -119,7 +119,7 @@ pub trait ASTVisitor<'a> { args.get(2).unwrap_or(&DEFAULT_EXPR), ), DefineFunctions::Trait => { - let params = if args.len() >= 1 { &args[1..] 
} else { &[] }; + let params = if !args.is_empty() { &args[1..] } else { &[] }; self.traverse_define_trait( expr, args.get(0) @@ -161,7 +161,7 @@ pub trait ASTVisitor<'a> { use clarity::vm::functions::NativeFunctions::*; rv = match native_function { Add | Subtract | Multiply | Divide | Modulo | Power | Sqrti | Log2 => { - self.traverse_arithmetic(expr, native_function, &args) + self.traverse_arithmetic(expr, native_function, args) } BitwiseXor => self.traverse_binary_bitwise( expr, @@ -170,10 +170,10 @@ pub trait ASTVisitor<'a> { args.get(1).unwrap_or(&DEFAULT_EXPR), ), CmpLess | CmpLeq | CmpGreater | CmpGeq | Equals => { - self.traverse_comparison(expr, native_function, &args) + self.traverse_comparison(expr, native_function, args) } - And | Or => self.traverse_lazy_logical(expr, native_function, &args), - Not => self.traverse_logical(expr, native_function, &args), + And | Or => self.traverse_lazy_logical(expr, native_function, args), + Not => self.traverse_logical(expr, native_function, args), ToInt | ToUInt => { self.traverse_int_cast(expr, args.get(0).unwrap_or(&DEFAULT_EXPR)) } @@ -186,7 +186,7 @@ pub trait ASTVisitor<'a> { Let => { let bindings = match_pairs(args.get(0).unwrap_or(&DEFAULT_EXPR)) .unwrap_or_default(); - let params = if args.len() >= 1 { &args[1..] } else { &[] }; + let params = if !args.is_empty() { &args[1..] } else { &[] }; self.traverse_let(expr, &bindings, params) } ElementAt | ElementAtAlias => self.traverse_element_at( @@ -205,7 +205,7 @@ pub trait ASTVisitor<'a> { .unwrap_or(&DEFAULT_EXPR) .match_atom() .unwrap_or(&DEFAULT_NAME); - let params = if args.len() >= 1 { &args[1..] } else { &[] }; + let params = if !args.is_empty() { &args[1..] } else { &[] }; self.traverse_map(expr, name, params) } Fold => { @@ -242,7 +242,7 @@ pub trait ASTVisitor<'a> { } } Len => self.traverse_len(expr, args.get(0).unwrap_or(&DEFAULT_EXPR)), - ListCons => self.traverse_list_cons(expr, &args), + ListCons => self.traverse_list_cons(expr, args), FetchVar => self.traverse_var_get( expr, args.get(0) @@ -342,7 +342,7 @@ pub trait ASTVisitor<'a> { args.get(0).unwrap_or(&DEFAULT_EXPR), args.get(1).unwrap_or(&DEFAULT_EXPR), ), - Begin => self.traverse_begin(expr, &args), + Begin => self.traverse_begin(expr, args), Hash160 | Sha256 | Sha512 | Sha512Trunc256 | Keccak256 => self .traverse_hash( expr, @@ -627,7 +627,7 @@ pub trait ASTVisitor<'a> { args.get(2).unwrap_or(&DEFAULT_EXPR), ), BitwiseAnd | BitwiseOr | BitwiseNot | BitwiseXor2 => { - self.traverse_bitwise(expr, native_function, &args) + self.traverse_bitwise(expr, native_function, args) } BitwiseLShift | BitwiseRShift => self.traverse_bit_shift( expr, @@ -1073,7 +1073,7 @@ pub trait ASTVisitor<'a> { name: &'a ClarityName, key: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, val) in key { + for val in key.values() { if !self.traverse_expr(val) { return false; } @@ -1097,12 +1097,12 @@ pub trait ASTVisitor<'a> { key: &HashMap, &'a SymbolicExpression>, value: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, key_val) in key { + for key_val in key.values() { if !self.traverse_expr(key_val) { return false; } } - for (_, val_val) in value { + for val_val in value.values() { if !self.traverse_expr(val_val) { return false; } @@ -1127,12 +1127,12 @@ pub trait ASTVisitor<'a> { key: &HashMap, &'a SymbolicExpression>, value: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, key_val) in key { + for key_val in key.values() { if !self.traverse_expr(key_val) { return false; } } - for (_, val_val) in value { + for val_val in 
value.values() { if !self.traverse_expr(val_val) { return false; } @@ -1156,7 +1156,7 @@ pub trait ASTVisitor<'a> { name: &'a ClarityName, key: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, val) in key { + for val in key.values() { if !self.traverse_expr(val) { return false; } @@ -1178,7 +1178,7 @@ pub trait ASTVisitor<'a> { expr: &'a SymbolicExpression, values: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, val) in values { + for val in values.values() { if !self.traverse_expr(val) { return false; } @@ -2006,7 +2006,7 @@ pub trait ASTVisitor<'a> { bindings: &HashMap<&'a ClarityName, &'a SymbolicExpression>, body: &'a [SymbolicExpression], ) -> bool { - for (_, val) in bindings { + for val in bindings.values() { if !self.traverse_expr(val) { return false; } @@ -2540,22 +2540,21 @@ fn match_pairs(expr: &SymbolicExpression) -> Option(list: &'a [SymbolicExpression]) -> Option>> { +fn match_pairs_list(list: &[SymbolicExpression]) -> Option>> { let mut vars = Vec::new(); for pair_list in list { let pair = pair_list.match_list()?; if pair.len() != 2 { return None; } - let name = pair[0].match_atom()?; vars.push(TypedVar { - name: name, + name: pair[0].match_atom()?, type_expr: &pair[1], decl_span: pair[0].span.clone(), }); } - return Some(vars); + Some(vars) } diff --git a/components/clarity-repl/src/analysis/call_checker.rs b/components/clarity-repl/src/analysis/call_checker.rs index fd01b2271..55829155e 100644 --- a/components/clarity-repl/src/analysis/call_checker.rs +++ b/components/clarity-repl/src/analysis/call_checker.rs @@ -33,7 +33,7 @@ impl<'a> CallChecker<'a> { traverse(&mut self, &contract_analysis.expressions); self.check_user_calls(); - if self.diagnostics.len() > 0 { + if !self.diagnostics.is_empty() { Err(self.diagnostics) } else { Ok(vec![]) diff --git a/components/clarity-repl/src/analysis/check_checker/mod.rs b/components/clarity-repl/src/analysis/check_checker/mod.rs index a779cff2f..80e9c2b8e 100644 --- a/components/clarity-repl/src/analysis/check_checker/mod.rs +++ b/components/clarity-repl/src/analysis/check_checker/mod.rs @@ -15,7 +15,7 @@ use std::collections::{HashMap, HashSet}; use std::convert::TryFrom; use std::hash::{Hash, Hasher}; -#[derive(Debug, Clone, Copy, Serialize, Deserialize)] +#[derive(Debug, Default, Clone, Copy, Serialize, Deserialize)] pub struct Settings { // Strict mode sets all other options to false strict: bool, @@ -39,17 +39,6 @@ pub struct SettingsFile { callee_filter: Option, } -impl Default for Settings { - fn default() -> Self { - Self { - strict: false, - trusted_sender: false, - trusted_caller: false, - callee_filter: false, - } - } -} - impl From for Settings { fn from(from_file: SettingsFile) -> Self { if from_file.strict.unwrap_or(false) { @@ -162,7 +151,7 @@ impl<'a, 'b> CheckChecker<'a, 'b> { let source_node = self.taint_sources.insert( node, TaintSource { - span: span, + span, children: HashSet::new(), }, ); @@ -208,24 +197,24 @@ impl<'a, 'b> CheckChecker<'a, 'b> { fn filter_source(&mut self, source_node: &Node<'a>, rollback: bool) { if let Some(source) = self.taint_sources.remove(source_node) { - self.tainted_nodes.remove(&source_node); + self.tainted_nodes.remove(source_node); // Remove each taint source from its children for child in &source.children { if let Some(mut child_node) = self.tainted_nodes.remove(child) { - child_node.sources.remove(&source_node); + child_node.sources.remove(source_node); // If the child is still tainted (by another source), add it back to the set - if child_node.sources.len() > 0 { - 
self.tainted_nodes.insert(child.clone(), child_node); + if !child_node.sources.is_empty() { + self.tainted_nodes.insert(*child, child_node); } else if rollback { if let Node::Expr(id) = child { // Remove any prior diagnostics for this node - self.diagnostics.remove(&id); + self.diagnostics.remove(id); } } } else if rollback { if let Node::Expr(id) = child { // Remove any prior diagnostics for this node - self.diagnostics.remove(&id); + self.diagnostics.remove(id); } } } @@ -268,10 +257,10 @@ impl<'a, 'b> CheckChecker<'a, 'b> { fn allow_unchecked_data(&self) -> bool { if let Some(idx) = self.active_annotation { let annotation = &self.annotations[idx]; - return match annotation.kind { - AnnotationKind::Allow(WarningKind::UncheckedData) => true, - _ => false, - }; + return matches!( + annotation.kind, + AnnotationKind::Allow(WarningKind::UncheckedData) + ); } false } @@ -280,10 +269,10 @@ impl<'a, 'b> CheckChecker<'a, 'b> { fn allow_unchecked_params(&self) -> bool { if let Some(idx) = self.active_annotation { let annotation = &self.annotations[idx]; - return match annotation.kind { - AnnotationKind::Allow(WarningKind::UncheckedParams) => true, - _ => false, - }; + return matches!( + annotation.kind, + AnnotationKind::Allow(WarningKind::UncheckedParams) + ); } false } @@ -347,7 +336,7 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { let result = match &expr.expr { AtomValue(value) => self.visit_atom_value(expr, value), Atom(name) => self.visit_atom(expr, name), - List(exprs) => self.traverse_list(expr, &exprs), + List(exprs) => self.traverse_list(expr, exprs), LiteralValue(value) => self.visit_literal_value(expr, value), Field(field) => self.visit_field(expr, field), TraitReference(name, trait_def) => self.visit_trait_reference(expr, name, trait_def), @@ -414,10 +403,10 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { let mut unchecked_params = vec![false; params.len()]; for (i, param) in params.iter().enumerate() { unchecked_params[i] = allow; - if allow || self.settings.callee_filter { - if !is_param_type_excluded_from_checked_requirement(param) { - self.add_taint_source(Node::Symbol(param.name), param.decl_span.clone()); - } + if (allow || self.settings.callee_filter) + && !is_param_type_excluded_from_checked_requirement(param) + { + self.add_taint_source(Node::Symbol(param.name), param.decl_span.clone()); } } info.unchecked_params = unchecked_params; @@ -592,7 +581,7 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { sources.extend(tainted.sources.clone()); } } - if sources.len() > 0 { + if !sources.is_empty() { self.add_tainted_expr(expr, sources); } true @@ -746,10 +735,10 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { key: &HashMap, &'a SymbolicExpression>, value: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, key_val) in key { + for key_val in key.values() { self.taint_check(key_val); } - for (_, val_val) in value { + for val_val in value.values() { self.taint_check(val_val); } true @@ -762,10 +751,10 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { key: &HashMap, &'a SymbolicExpression>, value: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, key_val) in key { + for key_val in key.values() { self.taint_check(key_val); } - for (_, val_val) in value { + for val_val in value.values() { self.taint_check(val_val); } true @@ -777,7 +766,7 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { name: &'a ClarityName, key: &HashMap, &'a SymbolicExpression>, ) -> bool { - for (_, val) in key { + for val in key.values() { self.taint_check(val); } true @@ 
-800,7 +789,7 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { name: &'a ClarityName, args: &'a [SymbolicExpression], ) -> bool { - if args.len() > 0 { + if !args.is_empty() { let default = vec![false; args.len()]; if let Some(info) = self.user_funcs.get(name) { let unchecked_args = &info.unchecked_params.clone(); @@ -854,10 +843,10 @@ impl<'a> ASTVisitor<'a> for CheckChecker<'a, '_> { } fn is_param_type_excluded_from_checked_requirement(param: &TypedVar) -> bool { - match TypeSignature::parse_type_repr(DEFAULT_EPOCH, param.type_expr, &mut ()) { - Ok(TypeSignature::BoolType) => true, - _ => false, - } + matches!( + TypeSignature::parse_type_repr(DEFAULT_EPOCH, param.type_expr, &mut ()), + Ok(TypeSignature::BoolType) + ) } fn is_tx_sender(expr: &SymbolicExpression) -> bool { diff --git a/components/clarity-repl/src/analysis/coverage.rs b/components/clarity-repl/src/analysis/coverage.rs index 61aaee789..569a9ba2a 100644 --- a/components/clarity-repl/src/analysis/coverage.rs +++ b/components/clarity-repl/src/analysis/coverage.rs @@ -136,7 +136,7 @@ impl CoverageReporter { let mut counts = vec![]; for id in expr_ids { if let Some(c) = coverage.get(id) { - counts.push(c.clone()); + counts.push(*c); } } let count = counts.iter().max().unwrap_or(&0); @@ -213,7 +213,7 @@ impl CoverageReporter { file_content } - fn retrieve_functions(&self, exprs: &Vec) -> Vec<(String, u32, u32)> { + fn retrieve_functions(&self, exprs: &[SymbolicExpression]) -> Vec<(String, u32, u32)> { let mut functions = vec![]; for cur_expr in exprs.iter() { if let Some(define_expr) = DefineFunctionsParsed::try_parse(cur_expr).ok().flatten() { @@ -240,7 +240,7 @@ impl CoverageReporter { fn retrieve_executable_lines_and_branches( &self, - exprs: &Vec, + exprs: &[SymbolicExpression], ) -> (ExecutableLines, ExecutableBranches) { let mut lines: ExecutableLines = HashMap::new(); let mut branches: ExecutableBranches = HashMap::new(); @@ -407,5 +407,5 @@ fn extract_expr_from_list(expr: &SymbolicExpression) -> SymbolicExpression { if let Some(first) = expr.match_list().and_then(|l| l.first()) { return extract_expr_from_list(first); } - return expr.to_owned(); + expr.to_owned() } diff --git a/components/clarity-repl/src/analysis/mod.rs b/components/clarity-repl/src/analysis/mod.rs index 2a1aab5fe..88f9f165b 100644 --- a/components/clarity-repl/src/analysis/mod.rs +++ b/components/clarity-repl/src/analysis/mod.rs @@ -105,6 +105,7 @@ impl From for Settings { } pub trait AnalysisPass { + #[allow(clippy::ptr_arg)] fn run_pass( contract_analysis: &mut ContractAnalysis, analysis_db: &mut AnalysisDatabase, @@ -138,7 +139,7 @@ pub fn run_analysis( analysis_db.execute(|db| { for pass in passes { // Collect warnings and continue, or if there is an error, return. - match pass(contract_analysis, db, annotations, &settings) { + match pass(contract_analysis, db, annotations, settings) { Ok(mut w) => errors.append(&mut w), Err(mut e) => { errors.append(&mut e); diff --git a/components/clarity-repl/src/codec/mod.rs b/components/clarity-repl/src/codec/mod.rs index 3475c9873..c5ec42201 100644 --- a/components/clarity-repl/src/codec/mod.rs +++ b/components/clarity-repl/src/codec/mod.rs @@ -33,6 +33,7 @@ use std::fmt; use std::io::{Read, Write}; use std::ops::Deref; use std::ops::DerefMut; +use std::str::FromStr; pub const MAX_BLOCK_LEN: u32 = 2 * 1024 * 1024; pub const MAX_TRANSACTION_LEN: u32 = MAX_BLOCK_LEN; @@ -178,7 +179,7 @@ macro_rules! 
impl_array_newtype { impl Clone for $thing { #[inline] fn clone(&self) -> $thing { - $thing::from(&self[..]) + *self } } @@ -309,21 +310,16 @@ pub enum TransactionAuthField { impl TransactionAuthField { pub fn is_public_key(&self) -> bool { - match *self { - TransactionAuthField::PublicKey(_) => true, - _ => false, - } + matches!(*self, TransactionAuthField::PublicKey(_)) } pub fn is_signature(&self) -> bool { - match *self { - TransactionAuthField::Signature(_, _) => true, - _ => false, - } + matches!(*self, TransactionAuthField::Signature(_, _)) } pub fn as_public_key(&self) -> Option { match *self { + #[allow(clippy::clone_on_copy)] TransactionAuthField::PublicKey(ref pubk) => Some(pubk.clone()), _ => None, } @@ -331,9 +327,7 @@ impl TransactionAuthField { pub fn as_signature(&self) -> Option<(TransactionPublicKeyEncoding, MessageSignature)> { match *self { - TransactionAuthField::Signature(ref key_fmt, ref sig) => { - Some((key_fmt.clone(), sig.clone())) - } + TransactionAuthField::Signature(ref key_fmt, ref sig) => Some((*key_fmt, *sig)), _ => None, } } @@ -341,15 +335,13 @@ impl TransactionAuthField { // TODO: enforce u8; 32 pub fn get_public_key(&self, sighash_bytes: &[u8]) -> Result { match *self { + // wasm does not compile with *pubk instead of pubk.clone() + #[allow(clippy::clone_on_copy)] TransactionAuthField::PublicKey(ref pubk) => Ok(pubk.clone()), TransactionAuthField::Signature(ref key_fmt, ref sig) => { let mut pubk = Secp256k1PublicKey::recover_to_pubkey(sighash_bytes, sig) .map_err(|e| CodecError::SigningError(e.to_string()))?; - pubk.set_compressed(if *key_fmt == TransactionPublicKeyEncoding::Compressed { - true - } else { - false - }); + pubk.set_compressed(*key_fmt == TransactionPublicKeyEncoding::Compressed); Ok(pubk) } } @@ -441,12 +433,12 @@ impl MultisigSpendingCondition { &mut self, key_encoding: TransactionPublicKeyEncoding, signature: MessageSignature, - ) -> () { + ) { self.fields .push(TransactionAuthField::Signature(key_encoding, signature)); } - pub fn push_public_key(&mut self, public_key: Secp256k1PublicKey) -> () { + pub fn push_public_key(&mut self, public_key: Secp256k1PublicKey) { self.fields .push(TransactionAuthField::PublicKey(public_key)); } @@ -458,14 +450,14 @@ impl MultisigSpendingCondition { pub fn address_mainnet(&self) -> StacksAddress { StacksAddress { version: C32_ADDRESS_VERSION_MAINNET_MULTISIG, - bytes: self.signer.clone(), + bytes: self.signer, } } pub fn address_testnet(&self) -> StacksAddress { StacksAddress { version: C32_ADDRESS_VERSION_TESTNET_MULTISIG, - bytes: self.signer.clone(), + bytes: self.signer, } } @@ -478,7 +470,7 @@ impl MultisigSpendingCondition { cond_code: &TransactionAuthFlags, ) -> Result { let mut pubkeys = vec![]; - let mut cur_sighash = initial_sighash.clone(); + let mut cur_sighash = *initial_sighash; let mut num_sigs: u16 = 0; let mut have_uncompressed = false; for field in self.fields.iter() { @@ -487,6 +479,7 @@ impl MultisigSpendingCondition { if !pubkey.compressed() { have_uncompressed = true; } + #[allow(clippy::clone_on_copy)] pubkey.clone() } TransactionAuthField::Signature(ref pubkey_encoding, ref sigbuf) => { @@ -560,7 +553,7 @@ pub struct SinglesigSpendingCondition { } impl SinglesigSpendingCondition { - pub fn set_signature(&mut self, signature: MessageSignature) -> () { + pub fn set_signature(&mut self, signature: MessageSignature) { self.signature = signature; } @@ -569,13 +562,10 @@ impl SinglesigSpendingCondition { return None; } - let ret = self.signature.clone(); + let ret = self.signature; 
self.signature = MessageSignature::empty(); - return Some(TransactionAuthField::Signature( - self.key_encoding.clone(), - ret, - )); + Some(TransactionAuthField::Signature(self.key_encoding, ret)) } pub fn address_mainnet(&self) -> StacksAddress { @@ -584,8 +574,8 @@ impl SinglesigSpendingCondition { SinglesigHashMode::P2WPKH => C32_ADDRESS_VERSION_MAINNET_MULTISIG, }; StacksAddress { - version: version, - bytes: self.signer.clone(), + version, + bytes: self.signer, } } @@ -595,8 +585,8 @@ impl SinglesigSpendingCondition { SinglesigHashMode::P2WPKH => C32_ADDRESS_VERSION_TESTNET_MULTISIG, }; StacksAddress { - version: version, - bytes: self.signer.clone(), + version, + bytes: self.signer, } } @@ -660,11 +650,11 @@ impl TransactionSpendingCondition { Some(TransactionSpendingCondition::Singlesig( SinglesigSpendingCondition { - signer: signer_addr.bytes.clone(), + signer: signer_addr.bytes, nonce: 0, tx_fee: 0, hash_mode: SinglesigHashMode::P2PKH, - key_encoding: key_encoding, + key_encoding, signature: MessageSignature::empty(), }, )) @@ -682,7 +672,7 @@ impl TransactionSpendingCondition { Some(TransactionSpendingCondition::Singlesig( SinglesigSpendingCondition { - signer: signer_addr.bytes.clone(), + signer: signer_addr.bytes, nonce: 0, tx_fee: 0, hash_mode: SinglesigHashMode::P2WPKH, @@ -705,7 +695,7 @@ impl TransactionSpendingCondition { Some(TransactionSpendingCondition::Multisig( MultisigSpendingCondition { - signer: signer_addr.bytes.clone(), + signer: signer_addr.bytes, nonce: 0, tx_fee: 0, hash_mode: MultisigHashMode::P2SH, @@ -728,7 +718,7 @@ impl TransactionSpendingCondition { Some(TransactionSpendingCondition::Multisig( MultisigSpendingCondition { - signer: signer_addr.bytes.clone(), + signer: signer_addr.bytes, nonce: 0, tx_fee: 0, hash_mode: MultisigHashMode::P2WSH, @@ -798,7 +788,7 @@ impl TransactionSpendingCondition { } } - pub fn set_nonce(&mut self, n: u64) -> () { + pub fn set_nonce(&mut self, n: u64) { match *self { TransactionSpendingCondition::Singlesig(ref mut singlesig_data) => { singlesig_data.nonce = n; @@ -809,7 +799,7 @@ impl TransactionSpendingCondition { } } - pub fn set_tx_fee(&mut self, tx_fee: u64) -> () { + pub fn set_tx_fee(&mut self, tx_fee: u64) { match *self { TransactionSpendingCondition::Singlesig(ref mut singlesig_data) => { singlesig_data.tx_fee = tx_fee; @@ -844,7 +834,7 @@ impl TransactionSpendingCondition { } /// Clear fee rate, nonces, signatures, and public keys - pub fn clear(&mut self) -> () { + pub fn clear(&mut self) { match *self { TransactionSpendingCondition::Singlesig(ref mut singlesig_data) => { singlesig_data.tx_fee = 0; @@ -881,8 +871,7 @@ impl TransactionSpendingCondition { assert!(new_tx_hash_bits.len() == new_tx_hash_bits_len as usize); - let next_sighash = Txid::from_sighash_bytes(&new_tx_hash_bits); - next_sighash + Txid::from_sighash_bytes(&new_tx_hash_bits) } pub fn make_sighash_postsign( @@ -908,8 +897,7 @@ impl TransactionSpendingCondition { assert!(new_tx_hash_bits.len() == new_tx_hash_bits_len as usize); - let next_sighash = Txid::from_sighash_bytes(&new_tx_hash_bits); - next_sighash + Txid::from_sighash_bytes(&new_tx_hash_bits) } /// Linear-complexity signing algorithm -- we sign a rolling hash over all data committed to by @@ -1004,45 +992,33 @@ pub enum TransactionAuth { impl TransactionAuth { pub fn from_p2pkh(privk: &Secp256k1PrivateKey) -> Option { - match TransactionSpendingCondition::new_singlesig_p2pkh(Secp256k1PublicKey::from_private( - privk, - )) { - Some(auth) => Some(TransactionAuth::Standard(auth)), - None => 
None, - } + TransactionSpendingCondition::new_singlesig_p2pkh(Secp256k1PublicKey::from_private(privk)) + .map(TransactionAuth::Standard) } - pub fn from_p2sh(privks: &Vec, num_sigs: u16) -> Option { + pub fn from_p2sh(privks: &[Secp256k1PrivateKey], num_sigs: u16) -> Option { let mut pubks = vec![]; for privk in privks.iter() { pubks.push(Secp256k1PublicKey::from_private(privk)); } - match TransactionSpendingCondition::new_multisig_p2sh(num_sigs, pubks) { - Some(auth) => Some(TransactionAuth::Standard(auth)), - None => None, - } + TransactionSpendingCondition::new_multisig_p2sh(num_sigs, pubks) + .map(TransactionAuth::Standard) } pub fn from_p2wpkh(privk: &Secp256k1PrivateKey) -> Option { - match TransactionSpendingCondition::new_singlesig_p2wpkh(Secp256k1PublicKey::from_private( - privk, - )) { - Some(auth) => Some(TransactionAuth::Standard(auth)), - None => None, - } + TransactionSpendingCondition::new_singlesig_p2wpkh(Secp256k1PublicKey::from_private(privk)) + .map(TransactionAuth::Standard) } - pub fn from_p2wsh(privks: &Vec, num_sigs: u16) -> Option { + pub fn from_p2wsh(privks: &[Secp256k1PrivateKey], num_sigs: u16) -> Option { let mut pubks = vec![]; for privk in privks.iter() { pubks.push(Secp256k1PublicKey::from_private(privk)); } - match TransactionSpendingCondition::new_multisig_p2wsh(num_sigs, pubks) { - Some(auth) => Some(TransactionAuth::Standard(auth)), - None => None, - } + TransactionSpendingCondition::new_multisig_p2wsh(num_sigs, pubks) + .map(TransactionAuth::Standard) } /// merge two standard auths into a sponsored auth. @@ -1073,17 +1049,11 @@ impl TransactionAuth { } pub fn is_standard(&self) -> bool { - match *self { - TransactionAuth::Standard(_) => true, - _ => false, - } + matches!(*self, TransactionAuth::Standard(_)) } pub fn is_sponsored(&self) -> bool { - match *self { - TransactionAuth::Sponsored(_, _) => true, - _ => false, - } + matches!(*self, TransactionAuth::Sponsored(_, _)) } /// When beginning to sign a sponsored transaction, the origin account will not commit to any @@ -1116,7 +1086,7 @@ impl TransactionAuth { self.origin().nonce() } - pub fn set_origin_nonce(&mut self, n: u64) -> () { + pub fn set_origin_nonce(&mut self, n: u64) { match *self { TransactionAuth::Standard(ref mut s) => s.set_nonce(n), TransactionAuth::Sponsored(ref mut s, _) => s.set_nonce(n), @@ -1131,10 +1101,7 @@ impl TransactionAuth { } pub fn get_sponsor_nonce(&self) -> Option { - match self.sponsor() { - None => None, - Some(s) => Some(s.nonce()), - } + self.sponsor().map(|s| s.nonce()) } pub fn set_sponsor_nonce(&mut self, n: u64) -> Result<(), CodecError> { @@ -1149,7 +1116,7 @@ impl TransactionAuth { } } - pub fn set_tx_fee(&mut self, tx_fee: u64) -> () { + pub fn set_tx_fee(&mut self, tx_fee: u64) { match *self { TransactionAuth::Standard(ref mut s) => s.set_tx_fee(tx_fee), TransactionAuth::Sponsored(_, ref mut s) => s.set_tx_fee(tx_fee), @@ -1180,12 +1147,12 @@ impl TransactionAuth { TransactionAuth::Standard(_) => Ok(()), TransactionAuth::Sponsored(_, ref sponsor_condition) => sponsor_condition .verify(&origin_sighash, &TransactionAuthFlags::AuthSponsored) - .and_then(|_sigh| Ok(())), + .map(|_sigh| ()), } } /// Clear out all transaction auth fields, nonces, and fee rates from the spending condition(s). 
- pub fn clear(&mut self) -> () { + pub fn clear(&mut self) { match *self { TransactionAuth::Standard(ref mut origin_condition) => { origin_condition.clear(); @@ -1214,13 +1181,25 @@ pub struct StacksString(Vec); impl fmt::Display for StacksString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(String::from_utf8_lossy(&self).into_owned().as_str()) + // guaranteed to always succeed because the string is ASCII + f.write_str(String::from_utf8_lossy(self).into_owned().as_str()) } } impl fmt::Debug for StacksString { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.write_str(String::from_utf8_lossy(&self).into_owned().as_str()) + f.write_str(String::from_utf8_lossy(self).into_owned().as_str()) + } +} + +impl std::str::FromStr for StacksString { + type Err = String; + + fn from_str(s: &str) -> Result { + if !StacksString::is_valid_string(&String::from(s)) { + return Err("Invalid string".to_string()); + } + Ok(StacksString(s.as_bytes().to_vec())) } } @@ -1265,7 +1244,7 @@ impl StacksString { // This is 0x20 through 0x7e, inclusive, as well as '\t' and '\n' // TODO: DRY up with vm::representations for c in s.as_bytes().iter() { - if (*c < 0x20 && *c != ('\t' as u8) && *c != ('\n' as u8)) || (*c > 0x7e) { + if (*c < 0x20 && *c != b'\t' && *c != b'\n') || (*c > 0x7e) { return false; } } @@ -1282,18 +1261,13 @@ impl StacksString { } Some(StacksString(s.as_bytes().to_vec())) } +} - pub fn from_str(s: &str) -> Option { - if !StacksString::is_valid_string(&String::from(s)) { - return None; - } - Some(StacksString(s.as_bytes().to_vec())) - } - - pub fn to_string(&self) -> String { - // guaranteed to always succeed because the string is ASCII - String::from_utf8(self.0.clone()).unwrap() - } +#[test] +fn test_display() { + let stxstr = StacksString::from_string(&"hello".to_string()).unwrap(); + println!("log: {:#?}", stxstr); + println!("log: {:#?}", stxstr.to_string()); } impl StacksMessageCodec for StacksString { @@ -1457,17 +1431,17 @@ impl NonfungibleConditionCode { } } - pub fn was_sent(nft_sent_condition: &Value, nfts_sent: &Vec) -> bool { + pub fn was_sent(nft_sent_condition: &Value, nfts_sent: &[Value]) -> bool { for asset_sent in nfts_sent.iter() { if *asset_sent == *nft_sent_condition { // asset was sent, and is no longer owned by this principal return true; } } - return false; + false } - pub fn check(&self, nft_sent_condition: &Value, nfts_sent: &Vec) -> bool { + pub fn check(&self, nft_sent_condition: &Value, nfts_sent: &[Value]) -> bool { match *self { NonfungibleConditionCode::Sent => { NonfungibleConditionCode::was_sent(nft_sent_condition, nfts_sent) @@ -1492,11 +1466,11 @@ impl PostConditionPrincipal { match *self { PostConditionPrincipal::Origin => origin_principal.clone(), PostConditionPrincipal::Standard(ref addr) => { - PrincipalData::Standard(StandardPrincipalData::from(addr.clone())) + PrincipalData::Standard(StandardPrincipalData::from(*addr)) } PostConditionPrincipal::Contract(ref addr, ref contract_name) => { PrincipalData::Contract(QualifiedContractIdentifier::new( - StandardPrincipalData::from(addr.clone()), + StandardPrincipalData::from(*addr), contract_name.clone(), )) } @@ -1564,18 +1538,16 @@ impl StacksTransaction { auth: TransactionAuth, payload: TransactionPayload, ) -> StacksTransaction { - let anchor_mode = match payload { - _ => TransactionAnchorMode::Any, - }; + let anchor_mode = TransactionAnchorMode::Any; StacksTransaction { - version: version, + version, chain_id: 0, - auth: auth, - anchor_mode: anchor_mode, + auth, + 
anchor_mode, post_condition_mode: TransactionPostConditionMode::Deny, post_conditions: vec![], - payload: payload, + payload, } } @@ -1585,7 +1557,7 @@ impl StacksTransaction { } /// Set fee rate - pub fn set_tx_fee(&mut self, tx_fee: u64) -> () { + pub fn set_tx_fee(&mut self, tx_fee: u64) { self.auth.set_tx_fee(tx_fee); } @@ -1600,7 +1572,7 @@ impl StacksTransaction { } /// set origin nonce - pub fn set_origin_nonce(&mut self, n: u64) -> () { + pub fn set_origin_nonce(&mut self, n: u64) { self.auth.set_origin_nonce(n); } @@ -1610,17 +1582,17 @@ impl StacksTransaction { } /// Set anchor mode - pub fn set_anchor_mode(&mut self, anchor_mode: TransactionAnchorMode) -> () { + pub fn set_anchor_mode(&mut self, anchor_mode: TransactionAnchorMode) { self.anchor_mode = anchor_mode; } /// Set post-condition mode - pub fn set_post_condition_mode(&mut self, postcond_mode: TransactionPostConditionMode) -> () { + pub fn set_post_condition_mode(&mut self, postcond_mode: TransactionPostConditionMode) { self.post_condition_mode = postcond_mode; } /// Add a post-condition - pub fn add_post_condition(&mut self, post_condition: TransactionPostCondition) -> () { + pub fn add_post_condition(&mut self, post_condition: TransactionPostCondition) { self.post_conditions.push(post_condition); } @@ -1712,6 +1684,7 @@ impl StacksTransaction { ) -> Result<(), CodecError> { match condition { TransactionSpendingCondition::Multisig(ref mut cond) => { + #[allow(clippy::clone_on_copy)] cond.push_public_key(pubkey.clone()); Ok(()) } @@ -1814,19 +1787,19 @@ impl StacksTransaction { /// Get the origin account's address pub fn origin_address(&self) -> StacksAddress { match (&self.version, &self.auth) { - (&TransactionVersion::Mainnet, &TransactionAuth::Standard(ref origin_condition)) => { + (&TransactionVersion::Mainnet, TransactionAuth::Standard(origin_condition)) => { origin_condition.address_mainnet() } - (&TransactionVersion::Testnet, &TransactionAuth::Standard(ref origin_condition)) => { + (&TransactionVersion::Testnet, TransactionAuth::Standard(origin_condition)) => { origin_condition.address_testnet() } ( &TransactionVersion::Mainnet, - &TransactionAuth::Sponsored(ref origin_condition, ref _unused), + TransactionAuth::Sponsored(origin_condition, _unused), ) => origin_condition.address_mainnet(), ( &TransactionVersion::Testnet, - &TransactionAuth::Sponsored(ref origin_condition, ref _unused), + TransactionAuth::Sponsored(origin_condition, _unused), ) => origin_condition.address_testnet(), } } @@ -1834,15 +1807,15 @@ impl StacksTransaction { /// Get the sponsor account's address, if this transaction is sponsored pub fn sponsor_address(&self) -> Option { match (&self.version, &self.auth) { - (&TransactionVersion::Mainnet, &TransactionAuth::Standard(ref _unused)) => None, - (&TransactionVersion::Testnet, &TransactionAuth::Standard(ref _unused)) => None, + (&TransactionVersion::Mainnet, TransactionAuth::Standard(_unused)) => None, + (&TransactionVersion::Testnet, TransactionAuth::Standard(_unused)) => None, ( &TransactionVersion::Mainnet, - &TransactionAuth::Sponsored(ref _unused, ref sponsor_condition), + TransactionAuth::Sponsored(_unused, sponsor_condition), ) => Some(sponsor_condition.address_mainnet()), ( &TransactionVersion::Testnet, - &TransactionAuth::Sponsored(ref _unused, ref sponsor_condition), + TransactionAuth::Sponsored(_unused, sponsor_condition), ) => Some(sponsor_condition.address_testnet()), } } @@ -1855,17 +1828,14 @@ impl StacksTransaction { /// Get a copy of the sending condition that will pay the tx fee 
pub fn get_payer(&self) -> TransactionSpendingCondition { match self.auth.sponsor() { - Some(ref tsc) => (*tsc).clone(), + Some(tsc) => tsc.clone(), None => self.auth.origin().clone(), } } /// Is this a mainnet transaction? false means 'testnet' pub fn is_mainnet(&self) -> bool { - match self.version { - TransactionVersion::Mainnet => true, - _ => false, - } + matches!(self.version, TransactionVersion::Mainnet) } pub fn tx_len(&self) -> u64 { @@ -2008,11 +1978,11 @@ impl StacksTransactionSigner { }) } - pub fn resume(&mut self, tx: &StacksTransaction) -> () { + pub fn resume(&mut self, tx: &StacksTransaction) { self.tx = tx.clone() } - pub fn disable_checks(&mut self) -> () { + pub fn disable_checks(&mut self) { self.check_oversign = false; self.check_overlap = false; } @@ -2073,7 +2043,7 @@ impl StacksTransactionSigner { )); } } - _ => {} + TransactionAuth::Standard(_) => todo!(), } let next_sighash = self.tx.sign_next_sponsor(&self.sighash, privk)?; @@ -2287,7 +2257,7 @@ pub fn build_contrat_call_transaction( let payload = TransactionContractCall { address: contract_id.issuer.into(), - contract_name: contract_id.name.into(), + contract_name: contract_id.name, function_name: function_name.try_into().unwrap(), function_args: args, }; @@ -2302,8 +2272,8 @@ pub fn build_contrat_call_transaction( .unwrap(); let spending_condition = TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition { - signer: signer_addr.bytes.clone(), - nonce: nonce, + signer: signer_addr.bytes, + nonce, tx_fee: fee, hash_mode: SinglesigHashMode::P2PKH, key_encoding: TransactionPublicKeyEncoding::Compressed, @@ -2314,8 +2284,8 @@ pub fn build_contrat_call_transaction( let unsigned_tx = StacksTransaction { version: TransactionVersion::Testnet, chain_id: 0x80000000, // MAINNET=0x00000001 - auth: auth, - anchor_mode: anchor_mode, + auth, + anchor_mode, post_condition_mode: TransactionPostConditionMode::Allow, post_conditions: vec![], payload: TransactionPayload::ContractCall(payload), @@ -2328,9 +2298,8 @@ pub fn build_contrat_call_transaction( let mut tx_signer = StacksTransactionSigner::new(&unsigned_tx); tx_signer.sign_origin(&secret_key).unwrap(); - let signed_tx = tx_signer.get_tx().unwrap(); - signed_tx + tx_signer.get_tx().unwrap() } impl StacksMessageCodec for TransactionContractCall { @@ -2422,7 +2391,7 @@ impl StacksMessageCodec for TransactionPayload { if let Some(version) = version_opt { // caller requests a specific Clarity version write_next(fd, &(TransactionPayloadID::VersionedSmartContract as u8))?; - ClarityVersion_consensus_serialize(&version, fd)?; + ClarityVersion_consensus_serialize(version, fd)?; sc.consensus_serialize(fd)?; } else { // caller requests to use whatever the current clarity version is @@ -2731,7 +2700,7 @@ impl StacksMessageCodec for SinglesigSpendingCondition { write_next(fd, &self.signer)?; write_next(fd, &self.nonce)?; write_next(fd, &self.tx_fee)?; - write_next(fd, &(self.key_encoding.clone() as u8))?; + write_next(fd, &(self.key_encoding as u8))?; write_next(fd, &self.signature)?; Ok(()) } @@ -2769,12 +2738,12 @@ impl StacksMessageCodec for SinglesigSpendingCondition { } Ok(SinglesigSpendingCondition { - signer: signer, - nonce: nonce, - tx_fee: tx_fee, - hash_mode: hash_mode, - key_encoding: key_encoding, - signature: signature, + signer, + nonce, + tx_fee, + hash_mode, + key_encoding, + signature, }) } } diff --git a/components/clarity-repl/src/lib.rs b/components/clarity-repl/src/lib.rs index cdc17199c..4f1b624b7 100644 --- a/components/clarity-repl/src/lib.rs +++ 
b/components/clarity-repl/src/lib.rs
@@ -33,7 +33,7 @@ pub mod repl;
 pub mod utils;
 
 pub mod clarity {
-    #[allow(ambiguous_glob_reexports)]
+    #![allow(ambiguous_glob_reexports)]
     pub use ::clarity::stacks_common::*;
     pub use ::clarity::vm::*;
     pub use ::clarity::*;
@@ -60,9 +60,14 @@ pub async fn init_session() -> String {
     match WASM_GLOBAL_CONTEXT.session.take() {
         Some(session) => (session, "".to_string()),
         None => {
-            let mut settings = SessionSettings::default();
-            settings.include_boot_contracts =
-                vec!["costs".into(), "costs-2".into(), "costs-3".into()];
+            let settings = SessionSettings {
+                include_boot_contracts: vec![
+                    "costs".into(),
+                    "costs-2".into(),
+                    "costs-3".into(),
+                ],
+                ..Default::default()
+            };
             let mut session = Session::new(settings);
             let output = session.start_wasm().await;
             (session, output)
diff --git a/components/clarity-repl/src/repl/boot/mod.rs b/components/clarity-repl/src/repl/boot/mod.rs
index 5ad73d9a1..b33d68841 100644
--- a/components/clarity-repl/src/repl/boot/mod.rs
+++ b/components/clarity-repl/src/repl/boot/mod.rs
@@ -16,29 +16,29 @@
 // This code is copied from stacks-blockchain/src/chainstate/atacks/boot/mod.rs
 
-const BOOT_CODE_POX_BODY: &'static str = std::include_str!("pox.clar");
-const BOOT_CODE_POX_TESTNET_CONSTS: &'static str = std::include_str!("pox-testnet.clar");
-const BOOT_CODE_POX_MAINNET_CONSTS: &'static str = std::include_str!("pox-mainnet.clar");
-const BOOT_CODE_LOCKUP: &'static str = std::include_str!("lockup.clar");
-pub const BOOT_CODE_COSTS: &'static str = std::include_str!("costs.clar");
-pub const BOOT_CODE_COSTS_2: &'static str = std::include_str!("costs-2.clar");
-pub const BOOT_CODE_COSTS_3: &'static str = std::include_str!("costs-3.clar");
-pub const BOOT_CODE_COSTS_2_TESTNET: &'static str = std::include_str!("costs-2-testnet.clar");
-pub const BOOT_CODE_COSTS_3_TESTNET: &'static str = std::include_str!("costs-3.clar");
-const BOOT_CODE_COST_VOTING_MAINNET: &'static str = std::include_str!("cost-voting.clar");
-const BOOT_CODE_BNS: &'static str = std::include_str!("bns.clar");
-const BOOT_CODE_GENESIS: &'static str = std::include_str!("genesis.clar");
-pub const POX_1_NAME: &'static str = "pox";
-pub const POX_2_NAME: &'static str = "pox-2";
-pub const POX_3_NAME: &'static str = "pox-3";
+const BOOT_CODE_POX_BODY: &str = std::include_str!("pox.clar");
+const BOOT_CODE_POX_TESTNET_CONSTS: &str = std::include_str!("pox-testnet.clar");
+const BOOT_CODE_POX_MAINNET_CONSTS: &str = std::include_str!("pox-mainnet.clar");
+const BOOT_CODE_LOCKUP: &str = std::include_str!("lockup.clar");
+pub const BOOT_CODE_COSTS: &str = std::include_str!("costs.clar");
+pub const BOOT_CODE_COSTS_2: &str = std::include_str!("costs-2.clar");
+pub const BOOT_CODE_COSTS_3: &str = std::include_str!("costs-3.clar");
+pub const BOOT_CODE_COSTS_2_TESTNET: &str = std::include_str!("costs-2-testnet.clar");
+pub const BOOT_CODE_COSTS_3_TESTNET: &str = std::include_str!("costs-3.clar");
+const BOOT_CODE_COST_VOTING_MAINNET: &str = std::include_str!("cost-voting.clar");
+const BOOT_CODE_BNS: &str = std::include_str!("bns.clar");
+const BOOT_CODE_GENESIS: &str = std::include_str!("genesis.clar");
+pub const POX_1_NAME: &str = "pox";
+pub const POX_2_NAME: &str = "pox-2";
+pub const POX_3_NAME: &str = "pox-3";
 
-const POX_2_TESTNET_CONSTS: &'static str = std::include_str!("pox-testnet.clar");
-const POX_2_MAINNET_CONSTS: &'static str = std::include_str!("pox-mainnet.clar");
-const POX_2_BODY: &'static str = std::include_str!("pox-2.clar");
-const POX_3_BODY: &'static str = std::include_str!("pox-3.clar");
+const POX_2_TESTNET_CONSTS: &str = std::include_str!("pox-testnet.clar");
+const POX_2_MAINNET_CONSTS: &str = std::include_str!("pox-mainnet.clar");
+const POX_2_BODY: &str = std::include_str!("pox-2.clar");
+const POX_3_BODY: &str = std::include_str!("pox-3.clar");
 
-pub const COSTS_1_NAME: &'static str = "costs";
-pub const COSTS_2_NAME: &'static str = "costs-2";
+pub const COSTS_1_NAME: &str = "costs";
+pub const COSTS_2_NAME: &str = "costs-2";
 
 lazy_static! {
     pub static ref BOOT_CODE_POX_MAINNET: String =
diff --git a/components/clarity-repl/src/repl/datastore.rs b/components/clarity-repl/src/repl/datastore.rs
index 5b4b6f79f..90ca145c6 100644
--- a/components/clarity-repl/src/repl/datastore.rs
+++ b/components/clarity-repl/src/repl/datastore.rs
@@ -88,22 +88,22 @@ fn height_to_block(height: u32, genesis_time: Option<u64>) -> BlockInfo {
     let genesis_time = genesis_time.unwrap_or(0);
 
     let block_header_hash = {
-        let mut buffer = bytes.clone();
+        let mut buffer = bytes;
         buffer[0] = 1;
         BlockHeaderHash(buffer)
     };
     let burn_block_header_hash = {
-        let mut buffer = bytes.clone();
+        let mut buffer = bytes;
         buffer[0] = 2;
         BurnchainHeaderHash(buffer)
     };
     let consensus_hash = {
-        let mut buffer = bytes.clone();
+        let mut buffer = bytes;
         buffer[0] = 3;
         ConsensusHash::from_bytes(&buffer[0..20]).unwrap()
     };
     let vrf_seed = {
-        let mut buffer = bytes.clone();
+        let mut buffer = bytes;
         buffer[0] = 4;
         VRFSeed(buffer)
     };
@@ -131,8 +131,14 @@ fn height_to_block(height: u32, genesis_time: Option<u64>) -> BlockInfo {
     }
 }
 
+impl Default for Datastore {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl Datastore {
-    pub fn new() -> Datastore {
+    pub fn new() -> Self {
         let id = height_to_id(0);
 
         let mut store = HashMap::new();
@@ -144,7 +150,7 @@ impl Datastore {
         let mut id_height_map = HashMap::new();
         id_height_map.insert(id, 0);
 
-        Datastore {
+        Self {
             store,
             block_id_lookup,
             metadata: HashMap::new(),
@@ -157,11 +163,10 @@ impl Datastore {
     pub fn advance_chain_tip(&mut self, count: u32) -> u32 {
         let cur_height = self.chain_height;
-        let current_lookup_id = self
+        let current_lookup_id = *self
             .block_id_lookup
             .get(&self.open_chain_tip)
-            .expect("Open chain tip missing in block id lookup table")
-            .clone();
+            .expect("Open chain tip missing in block id lookup table");
 
         for i in 1..=count {
             let height = cur_height + i;
@@ -171,7 +176,7 @@ impl Datastore {
             self.height_at_chain_tip.insert(id, height);
         }
 
-        self.chain_height = self.chain_height + count;
+        self.chain_height += count;
         self.open_chain_tip = height_to_id(self.chain_height);
         self.current_chain_tip = self.open_chain_tip;
         self.chain_height
@@ -193,7 +198,7 @@ impl ClarityBackingStore for Datastore {
             .expect("Could not find current chain tip in block_id_lookup map");
 
         if let Some(map) = self.store.get(lookup_id) {
-            map.get(key).map(|v| v.clone())
+            map.get(key).cloned()
         } else {
             panic!("Block does not exist for current chain tip");
         }
@@ -220,18 +225,18 @@ impl ClarityBackingStore for Datastore {
     /// i.e., it changes on time-shifted evaluation. the open_chain_tip functions always
     /// return data about the chain tip that is currently open for writing.
     fn get_current_block_height(&mut self) -> u32 {
-        self.height_at_chain_tip
+        *self
+            .height_at_chain_tip
             .get(self.get_chain_tip())
             .unwrap_or(&u32::MAX)
-            .clone()
     }
 
     fn get_open_chain_tip_height(&mut self) -> u32 {
-        self.chain_height.clone()
+        self.chain_height
     }
 
     fn get_open_chain_tip(&mut self) -> StacksBlockId {
-        self.open_chain_tip.clone()
+        self.open_chain_tip
     }
 
     /// The contract commitment is the hash of the contract, plus the block height in
@@ -263,7 +268,7 @@ impl ClarityBackingStore for Datastore {
     }
 
     fn get_with_proof(&mut self, key: &str) -> Option<(String, Vec<u8>)> {
-        return None;
+        None
     }
 
     fn get_contract_hash(
@@ -291,7 +296,7 @@ impl BurnDatastore {
     pub fn new(constants: StacksConstants) -> BurnDatastore {
         let bytes = height_to_hashed_bytes(0);
-        let id = StacksBlockId(bytes.clone());
+        let id = StacksBlockId(bytes);
         let sortition_id = SortitionId(bytes);
         let genesis_time = chrono::Utc::now().timestamp() as u64;
 
@@ -316,7 +321,7 @@ impl BurnDatastore {
         sortition_lookup.insert(sortition_id, id);
 
         let mut consensus_hash_lookup = HashMap::new();
-        consensus_hash_lookup.insert(genesis_block.consensus_hash.clone(), sortition_id);
+        consensus_hash_lookup.insert(genesis_block.consensus_hash, sortition_id);
 
         let mut store = HashMap::new();
         store.insert(id, genesis_block);
@@ -343,28 +348,27 @@ impl BurnDatastore {
     pub fn advance_chain_tip(&mut self, count: u32) {
         let cur_height = self.chain_height;
-        let current_lookup_id = self
+        let current_lookup_id = *self
             .block_id_lookup
             .get(&self.open_chain_tip)
-            .expect("Open chain tip missing in block id lookup table")
-            .clone();
+            .expect("Open chain tip missing in block id lookup table");
         let genesis_time = self.genesis_time;
 
         for i in 1..=count {
             let height = cur_height + i;
             let bytes = height_to_hashed_bytes(height);
-            let id = StacksBlockId(bytes.clone());
-            let sortition_id = SortitionId(bytes.clone());
+            let id = StacksBlockId(bytes);
+            let sortition_id = SortitionId(bytes);
             let block_info = height_to_block(height, Some(genesis_time));
             self.block_id_lookup.insert(id, current_lookup_id);
             self.height_at_chain_tip.insert(id, height);
             self.sortition_lookup.insert(sortition_id, id);
             self.consensus_hash_lookup
-                .insert(block_info.consensus_hash.clone(), sortition_id);
+                .insert(block_info.consensus_hash, sortition_id);
             self.store.insert(id, block_info);
         }
 
-        self.chain_height = self.chain_height + count;
+        self.chain_height += count;
         self.open_chain_tip = height_to_id(self.chain_height);
         self.current_chain_tip = self.open_chain_tip;
     }
@@ -388,55 +392,43 @@ impl HeadersDB for BurnDatastore {
         &self,
         id_bhh: &StacksBlockId,
     ) -> Option<BlockHeaderHash> {
-        self.store
-            .get(id_bhh)
-            .and_then(|id| Some(id.block_header_hash))
+        self.store.get(id_bhh).map(|id| id.block_header_hash)
     }
 
     fn get_burn_header_hash_for_block(
         &self,
         id_bhh: &StacksBlockId,
     ) -> Option<BurnchainHeaderHash> {
-        self.store
-            .get(id_bhh)
-            .and_then(|id| Some(id.burn_block_header_hash))
+        self.store.get(id_bhh).map(|id| id.burn_block_header_hash)
    }
 
     fn get_consensus_hash_for_block(&self, id_bhh: &StacksBlockId) -> Option<ConsensusHash> {
-        self.store
-            .get(id_bhh)
-            .and_then(|id| Some(id.consensus_hash))
+        self.store.get(id_bhh).map(|id| id.consensus_hash)
     }
 
     fn get_vrf_seed_for_block(&self, id_bhh: &StacksBlockId) -> Option<VRFSeed> {
-        self.store.get(id_bhh).and_then(|id| Some(id.vrf_seed))
+        self.store.get(id_bhh).map(|id| id.vrf_seed)
     }
 
     fn get_burn_block_time_for_block(&self, id_bhh: &StacksBlockId) -> Option<u64> {
-        self.store
-            .get(id_bhh)
-            .and_then(|id| Some(id.burn_block_time))
+        self.store.get(id_bhh).map(|id| id.burn_block_time)
     }
 
     fn get_burn_block_height_for_block(&self, id_bhh: &StacksBlockId) -> Option<u32> {
-        self.store
-            .get(id_bhh)
-            .and_then(|id| Some(id.burn_block_height))
+        self.store.get(id_bhh).map(|id| id.burn_block_height)
     }
 
     fn get_miner_address(&self, id_bhh: &StacksBlockId) -> Option<StacksAddress> {
-        self.store.get(id_bhh).and_then(|id| Some(id.miner))
+        self.store.get(id_bhh).map(|id| id.miner)
     }
 
     fn get_burnchain_tokens_spent_for_block(&self, id_bhh: &StacksBlockId) -> Option<u128> {
         self.store
             .get(id_bhh)
-            .and_then(|id| Some(id.burnchain_tokens_spent_for_block))
+            .map(|id| id.burnchain_tokens_spent_for_block)
     }
 
     fn get_burnchain_tokens_spent_for_winning_block(&self, id_bhh: &StacksBlockId) -> Option<u128> {
         self.store
             .get(id_bhh)
-            .and_then(|id| Some(id.get_burnchain_tokens_spent_for_winning_block))
+            .map(|id| id.get_burnchain_tokens_spent_for_winning_block)
     }
 
     fn get_tokens_earned_for_block(&self, id_bhh: &StacksBlockId) -> Option<u128> {
-        self.store
-            .get(id_bhh)
-            .and_then(|id| Some(id.tokens_earned_for_block))
+        self.store.get(id_bhh).map(|id| id.tokens_earned_for_block)
     }
 }
 
@@ -458,7 +450,7 @@ impl BurnStateDB for BurnDatastore {
         self.sortition_lookup
             .get(sortition_id)
             .and_then(|id| self.store.get(id))
-            .and_then(|block_info| Some(block_info.burn_block_height))
+            .map(|block_info| block_info.burn_block_height)
     }
 
     /// Returns the height of the burnchain when the Stacks chain started running.
@@ -489,7 +481,7 @@ impl BurnStateDB for BurnDatastore {
         self.sortition_lookup
             .get(sortition_id)
             .and_then(|id| self.store.get(id))
-            .and_then(|block_info| Some(block_info.burn_block_header_hash))
+            .map(|block_info| block_info.burn_block_header_hash)
     }
 
     /// Lookup a `SortitionId` keyed to a `ConsensusHash`.
@@ -499,9 +491,7 @@ impl BurnStateDB for BurnDatastore {
         &self,
         consensus_hash: &ConsensusHash,
     ) -> Option<SortitionId> {
-        self.consensus_hash_lookup
-            .get(consensus_hash)
-            .and_then(|id| Some(id.clone()))
+        self.consensus_hash_lookup.get(consensus_hash).copied()
     }
 
     /// The epoch is defined as by a start and end height.
This returns @@ -523,7 +513,7 @@ impl BurnStateDB for BurnDatastore { self.sortition_lookup .get(sortition_id) .and_then(|id| self.store.get(id)) - .and_then(|block_info| Some(block_info.pox_payout_addrs.clone())) + .map(|block_info| block_info.pox_payout_addrs.clone()) } fn get_ast_rules(&self, height: u32) -> clarity::vm::ast::ASTRules { @@ -536,7 +526,7 @@ impl Datastore { Ok(Datastore::new()) } - pub fn as_analysis_db<'a>(&'a mut self) -> AnalysisDatabase<'a> { + pub fn as_analysis_db(&mut self) -> AnalysisDatabase<'_> { AnalysisDatabase::new(self) } @@ -587,7 +577,7 @@ impl Datastore { } pub fn set_chain_tip(&mut self, bhh: &StacksBlockId) { - self.current_chain_tip = bhh.clone(); + self.current_chain_tip = *bhh; } pub fn put(&mut self, key: &str, value: &str) { @@ -603,7 +593,7 @@ impl Datastore { self.open_chain_tip, self.store .get(lookup_id) - .expect(format!("Block with ID {:?} does not exist", lookup_id).as_str()) + .unwrap_or_else(|| panic!("Block with ID {:?} does not exist", lookup_id)) .clone(), ); diff --git a/components/clarity-repl/src/repl/debug/cli.rs b/components/clarity-repl/src/repl/debug/cli.rs index 1d067858d..28dbb883a 100644 --- a/components/clarity-repl/src/repl/debug/cli.rs +++ b/components/clarity-repl/src/repl/debug/cli.rs @@ -44,9 +44,8 @@ impl CLIDebugger { let resume = match readline { Ok(mut command) => { if command.is_empty() { - match self.editor.history().last() { - Some(prev) => command = prev.clone(), - None => (), + if let Some(prev) = self.editor.history().last() { + command = prev.clone() } } self.editor.add_history_entry(&command); @@ -82,6 +81,7 @@ impl CLIDebugger { let lines: Vec<&str> = contract_source.lines().collect(); let first_line = (span.start_line - 1).saturating_sub(3) as usize; let last_line = std::cmp::min(lines.len(), span.start_line as usize + 3); + #[allow(clippy::needless_range_loop)] for line in first_line..last_line { if line == (span.start_line as usize - 1) { print!("{}", blue!("-> ")); @@ -129,7 +129,6 @@ impl CLIDebugger { "{}:{}:{}", contract_id, expr.span.start_line, expr.span.start_column ); - return; } } } @@ -144,7 +143,7 @@ impl CLIDebugger { context: &LocalContext, expr: &SymbolicExpression, ) -> bool { - let (cmd, args) = match command.split_once(" ") { + let (cmd, args) = match command.split_once(' ') { None => (command, ""), Some((cmd, args)) => (cmd, args), }; @@ -223,7 +222,7 @@ impl CLIDebugger { if self.state.breakpoints.is_empty() { println!("No breakpoints set.") } else { - for (_, breakpoint) in &self.state.breakpoints { + for breakpoint in self.state.breakpoints.values() { println!("{}", breakpoint); } } @@ -418,7 +417,7 @@ impl CLIDebugger { if self.state.watchpoints.is_empty() { println!("No watchpoints set.") } else { - for (_, watchpoint) in &self.state.watchpoints { + for watchpoint in self.state.watchpoints.values() { println!("{}", watchpoint); } } @@ -462,7 +461,6 @@ impl CLIDebugger { Err(e) => { println!("{}", format_err!(e)); print_help_watchpoint(); - return; } }; } diff --git a/components/clarity-repl/src/repl/debug/dap/mod.rs b/components/clarity-repl/src/repl/debug/dap/mod.rs index 3c4f78fdc..aca2af6b4 100644 --- a/components/clarity-repl/src/repl/debug/dap/mod.rs +++ b/components/clarity-repl/src/repl/debug/dap/mod.rs @@ -81,6 +81,12 @@ pub struct DAPDebugger { variables: HashMap>, } +impl Default for DAPDebugger { + fn default() -> Self { + Self::new() + } +} + impl DAPDebugger { pub fn new() -> Self { let stdin = tokio::io::stdin(); @@ -199,7 +205,8 @@ impl DAPDebugger { command: 
RequestCommand, ) -> bool { use debug_types::requests::RequestCommand::*; - let proceed = match command { + + match command { Initialize(arguments) => self.initialize(seq, arguments), Launch(arguments) => self.launch(seq, arguments), ConfigurationDone => self.configuration_done(seq), @@ -225,9 +232,7 @@ impl DAPDebugger { }); false } - }; - - proceed + } } pub fn handle_event(&mut self, seq: i64, event: Event) { @@ -403,66 +408,60 @@ impl DAPDebugger { fn set_breakpoints(&mut self, seq: i64, arguments: SetBreakpointsArguments) -> bool { let mut results = vec![]; - match arguments.breakpoints { - Some(breakpoints) => { - let contract_id = match self - .path_to_contract_id - .get(&PathBuf::from(arguments.source.path.as_ref().unwrap())) - { - Some(contract_id) => contract_id, - None => { - self.send_response(Response { - request_seq: seq, - success: false, - message: Some(format!( - "contract not found for path {}\nmap: {:?}", - arguments.source.path.clone().unwrap(), - self.path_to_contract_id - )), - body: None, - }); - return false; - } - }; - let source = super::Source { - name: contract_id.clone(), - }; - for breakpoint in breakpoints { - let column = match breakpoint.column { - Some(column) => column, - None => 0, - }; - let source_breakpoint = super::Breakpoint { - id: 0, - verified: true, - data: super::BreakpointData::Source(super::SourceBreakpoint { - line: breakpoint.line, - column: breakpoint.column, - }), - source: source.clone(), - span: Some(Span { - start_line: breakpoint.line, - start_column: column, - end_line: breakpoint.line, - end_column: column, - }), - }; - let id = self.get_state().add_breakpoint(source_breakpoint); - results.push(Breakpoint { - id: Some(id), - verified: true, - message: breakpoint.log_message, - source: Some(arguments.source.clone()), - line: Some(breakpoint.line), - column: breakpoint.column, - end_line: Some(breakpoint.line), - end_column: breakpoint.column, - instruction_reference: None, - offset: None, + if let Some(breakpoints) = arguments.breakpoints { + let contract_id = match self + .path_to_contract_id + .get(&PathBuf::from(arguments.source.path.as_ref().unwrap())) + { + Some(contract_id) => contract_id, + None => { + self.send_response(Response { + request_seq: seq, + success: false, + message: Some(format!( + "contract not found for path {}\nmap: {:?}", + arguments.source.path.clone().unwrap(), + self.path_to_contract_id + )), + body: None, }); + return false; } + }; + let source = super::Source { + name: contract_id.clone(), + }; + for breakpoint in breakpoints { + let column = breakpoint.column.unwrap_or(0); + let source_breakpoint = super::Breakpoint { + id: 0, + verified: true, + data: super::BreakpointData::Source(super::SourceBreakpoint { + line: breakpoint.line, + column: breakpoint.column, + }), + source: source.clone(), + span: Some(Span { + start_line: breakpoint.line, + start_column: column, + end_line: breakpoint.line, + end_column: column, + }), + }; + let id = self.get_state().add_breakpoint(source_breakpoint); + results.push(Breakpoint { + id: Some(id), + verified: true, + message: breakpoint.log_message, + source: Some(arguments.source.clone()), + line: Some(breakpoint.line), + column: breakpoint.column, + end_line: Some(breakpoint.line), + end_column: breakpoint.column, + instruction_reference: None, + offset: None, + }); } - None => (), }; self.send_response(Response { @@ -689,11 +688,7 @@ impl DAPDebugger { .watch_variables .get(&(contract_id.clone(), name.to_string())) { - if set.is_empty() { - false - } else { - true - 
} + !set.is_empty() } else { false }; @@ -707,7 +702,7 @@ impl DAPDebugger { let value = env .global_context .database - .lookup_variable(contract_id, &name, data_types) + .lookup_variable(contract_id, name, data_types) .unwrap(); Response { request_seq: seq, @@ -1008,8 +1003,8 @@ impl EvalHook for DAPDebugger { self.save_scopes_for_frame( &stack_top, context, - &env.contract_context, - &mut env.global_context, + env.contract_context, + env.global_context, ); self.stack_frames .insert(current_function.clone(), stack_top); @@ -1030,8 +1025,8 @@ impl EvalHook for DAPDebugger { self.save_scopes_for_frame( &stack_frame, context, - &env.contract_context, - &mut env.global_context, + env.contract_context, + env.global_context, ); self.stack_frames diff --git a/components/clarity-repl/src/repl/debug/mod.rs b/components/clarity-repl/src/repl/debug/mod.rs index b5eca01ae..1f65dc984 100644 --- a/components/clarity-repl/src/repl/debug/mod.rs +++ b/components/clarity-repl/src/repl/debug/mod.rs @@ -353,7 +353,7 @@ impl DebugState { return Err(errors); } - match eval(&ast.expressions[0], env, &context) { + match eval(&ast.expressions[0], env, context) { Ok(value) => Ok(value), Err(e) => Err(vec![format_err!(e)]), } @@ -507,12 +507,7 @@ impl DebugState { match self.state { State::Continue | State::Quit | State::Finish(_) => return true, State::StepOver(step_over_id) => { - if self - .stack - .iter() - .find(|&state| state.id == step_over_id) - .is_some() - { + if self.stack.iter().any(|state| state.id == step_over_id) { // We're still inside the expression which should be stepped over, // so return to execution. return true; @@ -569,7 +564,7 @@ pub fn extract_watch_variable<'a>( let (contract_id, name) = match parts.len() { 1 => { if default_sender.is_some() { - return Err(format!("must use qualified name")); + return Err("must use qualified name".to_string()); } else { (env.contract_context.contract_identifier.clone(), parts[0]) } diff --git a/components/clarity-repl/src/repl/diagnostic.rs b/components/clarity-repl/src/repl/diagnostic.rs index 2c3ffd5fa..7b4a74e37 100644 --- a/components/clarity-repl/src/repl/diagnostic.rs +++ b/components/clarity-repl/src/repl/diagnostic.rs @@ -6,17 +6,17 @@ use std::fmt; fn level_to_string(level: &Level) -> String { match level { - Level::Note => format!("{}", blue!("note:")), - Level::Warning => format!("{}", yellow!("warning:")), - Level::Error => format!("{}", red!("error:")), + Level::Note => blue!("note:").to_string(), + Level::Warning => yellow!("warning:").to_string(), + Level::Error => red!("error:").to_string(), } } // Generate the formatted output for this diagnostic, given the source code. // TODO: Preferably a filename would be saved in the Span, but for now, pass a name here. 
-pub fn output_diagnostic(diagnostic: &Diagnostic, name: &str, lines: &Vec) -> Vec { +pub fn output_diagnostic(diagnostic: &Diagnostic, name: &str, lines: &[String]) -> Vec { let mut output = Vec::new(); - if diagnostic.spans.len() > 0 { + if !diagnostic.spans.is_empty() { output.push(format!( "{}:{}:{}: {} {}", name, // diagnostic.spans[0].filename, @@ -36,7 +36,7 @@ pub fn output_diagnostic(diagnostic: &Diagnostic, name: &str, lines: &Vec) -> Vec { +pub fn output_code(diagnostic: &Diagnostic, lines: &[String]) -> Vec { let mut output = Vec::new(); if diagnostic.spans.is_empty() { return output; @@ -59,7 +59,7 @@ pub fn output_code(diagnostic: &Diagnostic, lines: &Vec) -> Vec (span.end_column - span.start_column) as usize ); } - pointer = format!("{}", pointer); + pointer = pointer.to_string(); output.push(pointer); for span in diagnostic.spans.iter().skip(1) { @@ -80,6 +80,7 @@ pub fn output_code(diagnostic: &Diagnostic, lines: &Vec) -> Vec (span.end_column - span.start_column) as usize ); } else { + #[allow(clippy::needless_range_loop)] for line_num in (first_line + 1)..last_line { output.push(lines[line_num].clone()); } diff --git a/components/clarity-repl/src/repl/interpreter.rs b/components/clarity-repl/src/repl/interpreter.rs index d948e6364..1f653df3a 100644 --- a/components/clarity-repl/src/repl/interpreter.rs +++ b/components/clarity-repl/src/repl/interpreter.rs @@ -129,7 +129,7 @@ impl ClarityInterpreter { } } - pub fn run<'hooks>( + pub fn run( &mut self, contract: &ClarityContract, cost_track: bool, @@ -141,7 +141,7 @@ impl ClarityInterpreter { self.collect_annotations(&ast, contract.expect_in_memory_code_source()); diagnostics.append(&mut annotation_diagnostics); let (analysis, mut analysis_diagnostics) = - match self.run_analysis(&contract, &mut ast, &annotations) { + match self.run_analysis(contract, &mut ast, &annotations) { Ok((analysis, diagnostics)) => (analysis, diagnostics), Err((_, Some(diagnostic), _)) => { diagnostics.push(diagnostic); @@ -156,7 +156,7 @@ impl ClarityInterpreter { return Err(diagnostics); } - let mut result = match self.execute(&contract, &mut ast, analysis, cost_track, eval_hooks) { + let mut result = match self.execute(contract, &mut ast, analysis, cost_track, eval_hooks) { Ok(result) => result, Err((_, Some(diagnostic), _)) => { diagnostics.push(diagnostic); @@ -182,15 +182,15 @@ impl ClarityInterpreter { Ok(result) } - pub fn run_ast<'a, 'hooks>( - &'a mut self, + pub fn run_ast( + &mut self, contract: &ClarityContract, ast: &mut ContractAST, cost_track: bool, eval_hooks: Option>, ) -> Result> { let code_source = contract.expect_in_memory_code_source(); - let (annotations, mut diagnostics) = self.collect_annotations(&ast, &code_source); + let (annotations, mut diagnostics) = self.collect_annotations(ast, code_source); let (analysis, mut analysis_diagnostics) = match self.run_analysis(contract, ast, &annotations) { @@ -270,10 +270,10 @@ impl ClarityInterpreter { contract.expect_resolved_contract_identifier(Some(&self.tx_sender)); build_ast_with_diagnostics( &contract_identifier, - &source_code, + source_code, &mut (), - contract.clarity_version.clone(), - contract.epoch.clone(), + contract.clarity_version, + contract.epoch, ) } @@ -294,13 +294,13 @@ impl ClarityInterpreter { end_line: (n + 1) as u32, end_column: line.len() as u32, }; - if let Some(annotation_string) = annotation_string.strip_suffix("]") { + if let Some(annotation_string) = annotation_string.strip_suffix(']') { let kind: AnnotationKind = match annotation_string.trim().parse() { 
Ok(kind) => kind, Err(e) => { diagnostics.push(Diagnostic { level: Level::Warning, - message: format!("{}", e), + message: e.to_string(), spans: vec![span.clone()], suggestion: None, }); @@ -338,8 +338,8 @@ impl ClarityInterpreter { &mut analysis_db, false, LimitedCostTracker::new_free(), - contract.epoch.clone(), - contract.clarity_version.clone(), + contract.epoch, + contract.clarity_version, ) { Ok(res) => res, Err((error, cost_tracker)) => { @@ -442,8 +442,8 @@ impl ClarityInterpreter { } #[allow(unused_assignments)] - pub fn execute<'a, 'hooks>( - &'a mut self, + pub fn execute( + &mut self, contract: &ClarityContract, contract_ast: &mut ContractAST, contract_analysis: ContractAnalysis, @@ -502,7 +502,7 @@ impl ClarityInterpreter { let mut call_stack = CallStack::new(); let mut env = Environment::new( g, - &mut contract_context, + &contract_context, &mut call_stack, Some(tx_sender.clone()), Some(tx_sender.clone()), @@ -537,11 +537,9 @@ impl ClarityInterpreter { )?; Ok(Some(res)) } - _ => eval(&contract_ast.expressions[0], &mut env, &context) - .and_then(|r| Ok(Some(r))), + _ => eval(&contract_ast.expressions[0], &mut env, &context).map(Some), }, - _ => eval(&contract_ast.expressions[0], &mut env, &context) - .and_then(|r| Ok(Some(r))), + _ => eval(&contract_ast.expressions[0], &mut env, &context).map(Some), }; result } else { @@ -584,12 +582,12 @@ impl ClarityInterpreter { accounts_to_debit.push(( event_data.sender.to_string(), "STX".to_string(), - event_data.amount.clone(), + event_data.amount, )); accounts_to_credit.push(( event_data.recipient.to_string(), "STX".to_string(), - event_data.amount.clone(), + event_data.amount, )); } StacksTransactionEvent::STXEvent(STXEventType::STXMintEvent( @@ -598,7 +596,7 @@ impl ClarityInterpreter { accounts_to_credit.push(( event_data.recipient.to_string(), "STX".to_string(), - event_data.amount.clone(), + event_data.amount, )); } StacksTransactionEvent::STXEvent(STXEventType::STXBurnEvent( @@ -607,7 +605,7 @@ impl ClarityInterpreter { accounts_to_debit.push(( event_data.sender.to_string(), "STX".to_string(), - event_data.amount.clone(), + event_data.amount, )); } StacksTransactionEvent::FTEvent(FTEventType::FTTransferEvent( @@ -616,26 +614,26 @@ impl ClarityInterpreter { accounts_to_credit.push(( event_data.recipient.to_string(), event_data.asset_identifier.sugared(), - event_data.amount.clone(), + event_data.amount, )); accounts_to_debit.push(( event_data.sender.to_string(), event_data.asset_identifier.sugared(), - event_data.amount.clone(), + event_data.amount, )); } StacksTransactionEvent::FTEvent(FTEventType::FTMintEvent(ref event_data)) => { accounts_to_credit.push(( event_data.recipient.to_string(), event_data.asset_identifier.sugared(), - event_data.amount.clone(), + event_data.amount, )); } StacksTransactionEvent::FTEvent(FTEventType::FTBurnEvent(ref event_data)) => { accounts_to_debit.push(( event_data.sender.to_string(), event_data.asset_identifier.sugared(), - event_data.amount.clone(), + event_data.amount, )); } StacksTransactionEvent::NFTEvent(NFTEventType::NFTTransferEvent( @@ -677,8 +675,8 @@ impl ClarityInterpreter { events.push(event); } - contract_saved = - contract_context.functions.len() > 0 || contract_context.defined_traits.len() > 0; + contract_saved = !contract_context.functions.is_empty() + || !contract_context.defined_traits.is_empty(); let eval_result = if contract_saved { let mut functions = BTreeMap::new(); @@ -706,7 +704,7 @@ impl ClarityInterpreter { global_context .database - 
.insert_contract_hash(&contract_identifier, &snippet) + .insert_contract_hash(&contract_identifier, snippet) .unwrap(); let contract = Contract { contract_context }; global_context @@ -755,7 +753,7 @@ impl ClarityInterpreter { if contract_saved { let mut analysis_db = AnalysisDatabase::new(&mut self.datastore); - let _ = analysis_db + analysis_db .execute(|db| db.insert_contract(&contract_identifier, &contract_analysis)) .expect("Unable to save data"); } @@ -866,7 +864,7 @@ impl ClarityInterpreter { pub fn get_balance_for_account(&self, account: &str, token: &str) -> u128 { match self.tokens.get(token) { Some(balances) => match balances.get(account) { - Some(value) => value.clone(), + Some(value) => *value, _ => 0, }, _ => 0, diff --git a/components/clarity-repl/src/repl/mod.rs b/components/clarity-repl/src/repl/mod.rs index 1ed28fc67..d2c2886dc 100644 --- a/components/clarity-repl/src/repl/mod.rs +++ b/components/clarity-repl/src/repl/mod.rs @@ -124,7 +124,7 @@ impl ClarityContract { } ContractDeployer::Transient => StandardPrincipalData::transient(), ContractDeployer::Address(address) => { - PrincipalData::parse_standard_principal(&address).expect("unable to parse address") + PrincipalData::parse_standard_principal(address).expect("unable to parse address") } ContractDeployer::DefaultDeployer => default_deployer .expect("default provider should have been provided") diff --git a/components/clarity-repl/src/repl/session.rs b/components/clarity-repl/src/repl/session.rs index 768e75fa6..5db7f4fbb 100644 --- a/components/clarity-repl/src/repl/session.rs +++ b/components/clarity-repl/src/repl/session.rs @@ -52,9 +52,9 @@ pub static V3_BOOT_CONTRACTS: &[&str] = &["pox-3"]; lazy_static! { static ref BOOT_TESTNET_PRINCIPAL: StandardPrincipalData = - PrincipalData::parse_standard_principal(&BOOT_TESTNET_ADDRESS).unwrap(); + PrincipalData::parse_standard_principal(BOOT_TESTNET_ADDRESS).unwrap(); static ref BOOT_MAINNET_PRINCIPAL: StandardPrincipalData = - PrincipalData::parse_standard_principal(&BOOT_MAINNET_ADDRESS).unwrap(); + PrincipalData::parse_standard_principal(BOOT_MAINNET_ADDRESS).unwrap(); } lazy_static! { @@ -219,13 +219,13 @@ impl Session { let default_tx_sender = self.interpreter.get_tx_sender(); let boot_testnet_deployer = - PrincipalData::parse_standard_principal(&BOOT_TESTNET_ADDRESS) + PrincipalData::parse_standard_principal(BOOT_TESTNET_ADDRESS) .expect("Unable to parse deployer's address"); self.interpreter.set_tx_sender(boot_testnet_deployer); self.include_boot_contracts(false); let boot_mainnet_deployer = - PrincipalData::parse_standard_principal(&BOOT_MAINNET_ADDRESS) + PrincipalData::parse_standard_principal(BOOT_MAINNET_ADDRESS) .expect("Unable to parse deployer's address"); self.interpreter.set_tx_sender(boot_mainnet_deployer); self.include_boot_contracts(true); @@ -297,12 +297,9 @@ impl Session { None, ) { Ok((mut output, result)) => { - match result.result { - EvaluationResult::Contract(contract_result) => { - let snippet = format!("→ .{} contract successfully stored. Use (contract-call? ...) for invoking the public functions:", contract_result.contract.contract_identifier.clone()); - output.push(green!(snippet)); - } - _ => (), + if let EvaluationResult::Contract(contract_result) = result.result { + let snippet = format!("→ .{} contract successfully stored. Use (contract-call? ...) 
for invoking the public functions:", contract_result.contract.contract_identifier.clone()); + output.push(green!(snippet)); }; (output, result.cost.clone()) } @@ -318,7 +315,7 @@ impl Session { ]; let mut headers_cells = vec![]; for header in headers.iter() { - headers_cells.push(Cell::new(&header)); + headers_cells.push(Cell::new(header)); } let mut table = Table::new(); table.add_row(Row::new(headers_cells)); @@ -374,8 +371,8 @@ impl Session { format!("{calc:.2} %") } - pub fn formatted_interpretation<'a, 'hooks>( - &'a mut self, + pub fn formatted_interpretation( + &mut self, snippet: String, name: Option, cost_track: bool, @@ -399,7 +396,7 @@ impl Session { &formatted_lines, )); } - if result.events.len() > 0 { + if !result.events.is_empty() { output.push(black!("Events emitted")); for event in result.events.iter() { output.push(black!(format!("{}", utils::serialize_event(event)))); @@ -430,7 +427,7 @@ impl Session { pub fn debug(&mut self, output: &mut Vec, cmd: &str) { use crate::repl::debug::cli::CLIDebugger; - let snippet = match cmd.split_once(" ") { + let snippet = match cmd.split_once(' ') { Some((_, snippet)) => snippet, _ => return output.push(red!("Usage: ::debug ")), }; @@ -445,12 +442,9 @@ impl Session { None, ) { Ok((mut output, result)) => { - match result.result { - EvaluationResult::Contract(contract_result) => { - let snippet = format!("→ .{} contract successfully stored. Use (contract-call? ...) for invoking the public functions:", contract_result.contract.contract_identifier.clone()); - output.push(green!(snippet)); - } - _ => (), + if let EvaluationResult::Contract(contract_result) = result.result { + let snippet = format!("→ .{} contract successfully stored. Use (contract-call? ...) for invoking the public functions:", contract_result.contract.contract_identifier.clone()); + output.push(green!(snippet)); }; output } @@ -463,7 +457,7 @@ impl Session { pub fn trace(&mut self, output: &mut Vec, cmd: &str) { use super::tracer::Tracer; - let snippet = match cmd.split_once(" ") { + let snippet = match cmd.split_once(' ') { Some((_, snippet)) => snippet, _ => return output.push(red!("Usage: ::trace ")), }; @@ -492,7 +486,7 @@ impl Session { self.load_boot_contracts(); } - if self.settings.initial_accounts.len() > 0 { + if !self.settings.initial_accounts.is_empty() { let mut initial_accounts = self.settings.initial_accounts.clone(); for account in initial_accounts.drain(..) 
{ let recipient = match PrincipalData::parse(&account.address) { @@ -521,7 +515,7 @@ impl Session { #[cfg(feature = "cli")] pub fn read(&mut self, output: &mut Vec, cmd: &str) { - let filename = match cmd.split_once(" ") { + let filename = match cmd.split_once(' ') { Some((_, filename)) => filename, _ => return output.push(red!("Usage: ::read ")), }; @@ -564,11 +558,7 @@ impl Session { return Err(vec![diagnostic]); } let mut hooks: Vec<&mut dyn EvalHook> = Vec::new(); - let mut coverage = if let Some(test_name) = test_name { - Some(TestCoverageReport::new(test_name.into())) - } else { - None - }; + let mut coverage = test_name.map(TestCoverageReport::new); if let Some(coverage) = &mut coverage { hooks.push(coverage); }; @@ -594,16 +584,13 @@ impl Session { if let Some(ref coverage) = coverage { self.coverage_reports.push(coverage.clone()); } - match &result.result { - EvaluationResult::Contract(contract_result) => { - self.asts - .insert(contract_id.clone(), contract_result.contract.ast.clone()); - self.contracts.insert( - contract_id.to_string(), - contract_result.contract.function_args.clone(), - ); - } - _ => (), + if let EvaluationResult::Contract(contract_result) = &result.result { + self.asts + .insert(contract_id.clone(), contract_result.contract.ast.clone()); + self.contracts.insert( + contract_id.to_string(), + contract_result.contract.function_args.clone(), + ); }; Ok(result) } @@ -615,13 +602,13 @@ impl Session { &mut self, contract: &str, method: &str, - args: &Vec, + args: &[String], sender: &str, test_name: String, ) -> Result<(ExecutionResult, QualifiedContractIdentifier), Vec> { let initial_tx_sender = self.get_tx_sender(); // Handle fully qualified contract_id and sugared syntax - let contract_id = if contract.starts_with("S") { + let contract_id = if contract.starts_with('S') { contract.to_string() } else { format!("{}.{}", initial_tx_sender, contract,) @@ -641,7 +628,7 @@ impl Session { code_source: ClarityCodeSource::ContractInMemory(contract_call), name: "contract-call".to_string(), deployer: ContractDeployer::Address(sender.to_string()), - epoch: self.current_epoch.clone(), + epoch: self.current_epoch, clarity_version: ClarityVersion::default_for_epoch(self.current_epoch), }; @@ -670,8 +657,8 @@ impl Session { Ok((execution, contract_identifier)) } - pub fn eval<'a>( - &'a mut self, + pub fn eval( + &mut self, snippet: String, eval_hooks: Option>, cost_track: bool, @@ -690,18 +677,15 @@ impl Session { match result { Ok(result) => { - match &result.result { - EvaluationResult::Contract(contract_result) => { - self.asts.insert( - contract_identifier.clone(), - contract_result.contract.ast.clone(), - ); - self.contracts.insert( - contract_result.contract.contract_identifier.clone(), - contract_result.contract.function_args.clone(), - ); - } - _ => (), + if let EvaluationResult::Contract(contract_result) = &result.result { + self.asts.insert( + contract_identifier.clone(), + contract_result.contract.ast.clone(), + ); + self.contracts.insert( + contract_result.contract.contract_identifier.clone(), + contract_result.contract.function_args.clone(), + ); }; Ok(result) } @@ -724,8 +708,8 @@ impl Session { pub fn get_api_reference_index(&self) -> Vec { let mut keys = self .api_reference - .iter() - .map(|(k, _)| k.to_string()) + .keys() + .map(|k| k.to_string()) .collect::>(); keys.sort(); keys @@ -734,8 +718,8 @@ impl Session { pub fn get_clarity_keywords(&self) -> Vec { let mut keys = self .keywords_reference - .iter() - .map(|(k, _)| k.to_string()) + .keys() + .map(|k| 
k.to_string()) .collect::>(); keys.sort(); keys @@ -833,14 +817,14 @@ impl Session { } #[cfg(not(feature = "wasm"))] - fn easter_egg(&self, output: &mut Vec) { + fn easter_egg(&self, output: &mut [String]) { let result = hiro_system_kit::nestable_block_on(fetch_message()); let message = result.unwrap_or("You found it!".to_string()); println!("{}", message); } #[cfg(feature = "wasm")] - fn easter_egg(&self, output: &mut Vec) {} + fn easter_egg(&self, output: &mut [String]) {} fn parse_and_advance_chain_tip(&mut self, output: &mut Vec, command: &str) { let args: Vec<_> = command.split(' ').collect(); @@ -877,7 +861,7 @@ impl Session { return; } - let tx_sender = match PrincipalData::parse_standard_principal(&args[1]) { + let tx_sender = match PrincipalData::parse_standard_principal(args[1]) { Ok(address) => address, _ => { output.push(red!("Unable to parse the address")); @@ -927,7 +911,7 @@ impl Session { } pub fn set_epoch(&mut self, output: &mut Vec, cmd: &str) { - let epoch = match cmd.split_once(" ") { + let epoch = match cmd.split_once(' ') { Some((_, epoch)) if epoch.eq("2.0") => StacksEpochId::Epoch20, Some((_, epoch)) if epoch.eq("2.05") => StacksEpochId::Epoch2_05, Some((_, epoch)) if epoch.eq("2.1") => StacksEpochId::Epoch21, @@ -949,7 +933,7 @@ impl Session { } pub fn encode(&mut self, output: &mut Vec, cmd: &str) { - let snippet = match cmd.split_once(" ") { + let snippet = match cmd.split_once(' ') { Some((_, snippet)) => snippet, _ => return output.push(red!("Usage: ::encode ")), }; @@ -968,9 +952,8 @@ impl Session { } EvaluationResult::Snippet(snippet_result) => snippet_result.result, }; - match value.consensus_serialize(&mut tx_bytes) { - Err(e) => return output.push(red!(format!("{}", e))), - _ => (), + if let Err(e) = value.consensus_serialize(&mut tx_bytes) { + return output.push(red!(format!("{}", e))); }; let mut s = String::with_capacity(2 * tx_bytes.len()); for byte in tx_bytes { @@ -981,7 +964,7 @@ impl Session { Err(diagnostics) => { let lines: Vec = snippet.split('\n').map(|s| s.to_string()).collect(); for d in diagnostics { - output.append(&mut output_diagnostic(&d, &"encode".to_string(), &lines)); + output.append(&mut output_diagnostic(&d, "encode", &lines)); } red!("encoding failed") } @@ -990,7 +973,7 @@ impl Session { } pub fn decode(&mut self, output: &mut Vec, cmd: &str) { - let byteString = match cmd.split_once(" ") { + let byteString = match cmd.split_once(' ') { Some((_, bytes)) => bytes, _ => return output.push(red!("Usage: ::decode ")), }; @@ -1009,7 +992,7 @@ impl Session { pub fn get_costs(&mut self, output: &mut Vec, cmd: &str) { let command: String = cmd.to_owned(); - let expr = match cmd.split_once(" ") { + let expr = match cmd.split_once(' ') { Some((_, expr)) => expr, _ => return output.push(red!("Usage: ::get_costs ")), }; @@ -1020,7 +1003,7 @@ impl Session { #[cfg(feature = "cli")] fn get_accounts(&self, output: &mut Vec) { let accounts = self.interpreter.get_accounts(); - if accounts.len() > 0 { + if !accounts.is_empty() { let tokens = self.interpreter.get_tokens(); let mut headers = vec!["Address".to_string()]; for token in tokens.iter() { @@ -1033,7 +1016,7 @@ impl Session { let mut headers_cells = vec![]; for header in headers.iter() { - headers_cells.push(Cell::new(&header)); + headers_cells.push(Cell::new(header)); } let mut table = Table::new(); table.add_row(Row::new(headers_cells)); @@ -1058,18 +1041,18 @@ impl Session { #[cfg(feature = "cli")] fn get_contracts(&self, output: &mut Vec) { - if self.contracts.len() > 0 { + if 
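Several signatures in the hunks above switch from `&Vec<T>` / `&mut Vec<String>` to slices, which is what clippy::ptr_arg asks for: callers holding a `Vec` can still pass `&v` or `&mut v` through deref coercion, and the function stops promising anything about the container type. A standalone sketch with hypothetical function names:

```rust
// clippy::ptr_arg: take slices, not references to owned containers.
fn contract_call(contract: &str, method: &str, args: &[String]) -> String {
    format!("(contract-call? .{contract} {method} {})", args.join(" "))
}

fn annotate(output: &mut [String]) {
    // A mutable slice still allows editing elements in place;
    // it just cannot grow or shrink the underlying Vec.
    for line in output.iter_mut() {
        line.insert_str(0, "> ");
    }
}

fn main() {
    let args = vec!["u1".to_string(), "u2".to_string()];
    println!("{}", contract_call("counter", "add", &args));

    let mut output = vec!["hello".to_string()];
    annotate(&mut output);
    println!("{}", output[0]);
}
```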
!self.contracts.is_empty() { let mut table = Table::new(); table.add_row(row!["Contract identifier", "Public functions"]); let contracts = self.contracts.clone(); for (contract_id, methods) in contracts.iter() { - if !contract_id.starts_with(&BOOT_TESTNET_ADDRESS) - && !contract_id.starts_with(&BOOT_MAINNET_ADDRESS) + if !contract_id.starts_with(BOOT_TESTNET_ADDRESS) + && !contract_id.starts_with(BOOT_MAINNET_ADDRESS) { let mut formatted_methods = vec![]; for (method_name, method_args) in methods.iter() { - let formatted_args = if method_args.len() == 0 { - format!("") + let formatted_args = if method_args.is_empty() { + String::new() } else if method_args.len() == 1 { format!(" {}", method_args.join(" ")) } else { @@ -1077,9 +1060,9 @@ impl Session { }; formatted_methods.push(format!("({}{})", method_name, formatted_args)); } - let formatted_spec = format!("{}", formatted_methods.join("\n")); + let formatted_spec = formatted_methods.join("\n").to_string(); table.add_row(Row::new(vec![ - Cell::new(&contract_id), + Cell::new(contract_id), Cell::new(&formatted_spec), ])); } @@ -1107,7 +1090,7 @@ impl Session { #[cfg(not(feature = "cli"))] fn get_accounts(&self, output: &mut Vec) { - if self.settings.initial_accounts.len() > 0 { + if !self.settings.initial_accounts.is_empty() { let mut initial_accounts = self.settings.initial_accounts.clone(); for account in initial_accounts.drain(..) { output.push(format!( @@ -1125,7 +1108,7 @@ impl Session { && !contract_id.ends_with(".bns") && !contract_id.ends_with(".costs") { - output.push(format!("{}", contract_id)); + output.push(contract_id.to_string()); } } } @@ -1138,7 +1121,7 @@ impl Session { return; } - let recipient = match PrincipalData::parse(&args[1]) { + let recipient = match PrincipalData::parse(args[1]) { Ok(address) => address, _ => { output.push(red!("Unable to parse the address")); @@ -1175,7 +1158,7 @@ impl Session { let keyword = { let mut s = command.to_string(); s = s.replace("::describe", ""); - s = s.replace(" ", ""); + s = s.replace(' ', ""); s }; @@ -1247,10 +1230,10 @@ fn decode_hex(byteString: &str) -> Result, DecodeHexError> { fn build_api_reference() -> HashMap { let mut api_reference = HashMap::new(); for func in NativeFunctions::ALL.iter() { - let api = make_api_reference(&func); + let api = make_api_reference(func); let description = { let mut s = api.description.to_string(); - s = s.replace("\n", " "); + s = s.replace('\n', " "); s }; let doc = format!( @@ -1261,10 +1244,10 @@ fn build_api_reference() -> HashMap { } for func in DefineFunctions::ALL.iter() { - let api = make_define_reference(&func); + let api = make_define_reference(func); let description = { let mut s = api.description.to_string(); - s = s.replace("\n", " "); + s = s.replace('\n', " "); s }; let doc = format!( @@ -1281,10 +1264,10 @@ fn clarity_keywords() -> HashMap { let mut keywords = HashMap::new(); for func in NativeVariables::ALL.iter() { - if let Some(key) = make_keyword_reference(&func) { + if let Some(key) = make_keyword_reference(func) { let description = { let mut s = key.description.to_string(); - s = s.replace("\n", " "); + s = s.replace('\n', " "); s }; let doc = format!("Description\n{}\n\nExamples\n{}", description, key.example); diff --git a/components/clarity-repl/src/repl/settings.rs b/components/clarity-repl/src/repl/settings.rs index cb2193601..f3bad4c56 100644 --- a/components/clarity-repl/src/repl/settings.rs +++ b/components/clarity-repl/src/repl/settings.rs @@ -20,13 +20,10 @@ pub struct InitialContract { impl InitialContract { 
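The get_contracts hunk above also drops two flavours of clippy::useless_format: `format!("")` is just `String::new()`, and `format!("{}", x)` is `x.to_string()` (and when `x` is already a `String`, as with the result of `join`, even the `.to_string()` is redundant). A quick sketch:

```rust
fn main() {
    let method_args: Vec<String> = vec![];

    // clippy::useless_format: format!("") goes through the formatting
    // machinery just to produce an empty string.
    let formatted_args = if method_args.is_empty() {
        String::new()
    } else {
        format!(" {}", method_args.join(" "))
    };
    println!("(get-counter{formatted_args})");

    // format!("{}", s) on something already Display is s.to_string();
    // join already yields a String, so no conversion is needed at all.
    let formatted_methods = vec!["(add (n uint))".to_string(), "(get-counter)".to_string()];
    let formatted_spec = formatted_methods.join("\n");
    println!("{formatted_spec}");
}
```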
pub fn get_contract_identifier(&self, is_mainnet: bool) -> Option { - match self.name { - Some(ref name) => Some(QualifiedContractIdentifier { - issuer: self.get_deployer_principal(is_mainnet).into(), - name: name.to_string().try_into().unwrap(), - }), - _ => None, - } + self.name.as_ref().map(|name| QualifiedContractIdentifier { + issuer: self.get_deployer_principal(is_mainnet), + name: name.to_string().try_into().unwrap(), + }) } pub fn get_deployer_principal(&self, is_mainnet: bool) -> StandardPrincipalData { @@ -60,19 +57,11 @@ pub struct SessionSettings { pub repl_settings: Settings, } -#[derive(Debug, Clone, Deserialize, Serialize)] +#[derive(Debug, Default, Clone, Deserialize, Serialize)] pub struct Settings { pub analysis: analysis::Settings, } -impl Default for Settings { - fn default() -> Self { - Self { - analysis: analysis::Settings::default(), - } - } -} - #[derive(Debug, Default, Clone, Deserialize, Serialize)] pub struct SettingsFile { pub analysis: Option, diff --git a/components/clarity-repl/src/repl/tracer.rs b/components/clarity-repl/src/repl/tracer.rs index c64de6e25..edf7e4437 100644 --- a/components/clarity-repl/src/repl/tracer.rs +++ b/components/clarity-repl/src/repl/tracer.rs @@ -16,7 +16,7 @@ pub struct Tracer { stack: Vec, pending_call_string: Vec, pending_args: Vec>, - emitted_events: usize, + nb_of_emitted_events: usize, } impl Tracer { @@ -27,7 +27,7 @@ impl Tracer { stack: vec![u64::MAX], pending_call_string: Vec::new(), pending_args: Vec::new(), - emitted_events: 0, + nb_of_emitted_events: 0, } } } @@ -81,7 +81,7 @@ impl EvalHook for Tracer { )); } - if args.len() > 0 { + if !args.is_empty() { lines.push(format!( "{}│ {}", "│ " @@ -124,7 +124,7 @@ impl EvalHook for Tracer { ) .as_str(), ); - if args.len() > 0 { + if !args.is_empty() { self.pending_call_string.push(call); self.pending_args .push(args.iter().map(|arg| arg.id).collect()); @@ -155,9 +155,8 @@ impl EvalHook for Tracer { .iter() .flat_map(|b| &b.events) .collect::>(); - if emitted_events.len() > self.emitted_events { - for i in self.emitted_events..emitted_events.len() { - let event = emitted_events[i]; + if emitted_events.len() > self.nb_of_emitted_events { + for event in emitted_events.iter().skip(self.nb_of_emitted_events) { println!( "{}│ {}", "│ ".repeat(self.stack.len() - self.pending_call_string.len() - 1), @@ -167,7 +166,7 @@ impl EvalHook for Tracer { )), ) } - self.emitted_events = emitted_events.len(); + self.nb_of_emitted_events = emitted_events.len(); } if let Some(last) = self.stack.last() { diff --git a/components/clarity-repl/src/utils.rs b/components/clarity-repl/src/utils.rs index ce125cd1d..721e97523 100644 --- a/components/clarity-repl/src/utils.rs +++ b/components/clarity-repl/src/utils.rs @@ -86,12 +86,12 @@ pub fn value_to_string(value: &Value) -> String { out } Value::Optional(opt_data) => match opt_data.data { - Some(ref x) => format!("(some {})", value_to_string(&**x)), + Some(ref x) => format!("(some {})", value_to_string(x)), None => "none".to_string(), }, Value::Response(res_data) => match res_data.committed { - true => format!("(ok {})", value_to_string(&*res_data.data)), - false => format!("(err {})", value_to_string(&*res_data.data)), + true => format!("(ok {})", value_to_string(&res_data.data)), + false => format!("(err {})", value_to_string(&res_data.data)), }, Value::Sequence(SequenceData::String(CharType::ASCII(data))) => { format!("\"{}\"", String::from_utf8(data.data.clone()).unwrap()) diff --git a/components/clarity-vscode/web-extension.code-workspace 
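The settings.rs hunk above removes a hand-written `Default` impl that only called `Default::default()` on each field; clippy::derivable_impls points out that `#[derive(Default)]` does exactly that. A minimal sketch with stand-in types (not the crate's real `analysis::Settings`):

```rust
// Stand-in for the nested analysis settings type.
#[derive(Debug, Default, Clone)]
struct AnalysisSettings {
    passes: Vec<String>,
}

// clippy::derivable_impls: when every field just defers to its own Default,
// derive it instead of writing `impl Default` by hand.
#[derive(Debug, Default, Clone)]
struct Settings {
    analysis: AnalysisSettings,
}

fn main() {
    let settings = Settings::default();
    println!("{:?}", settings.analysis.passes);
}
```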
b/components/clarity-vscode/web-extension.code-workspace index 5c9cd5042..f9b7f7c34 100644 --- a/components/clarity-vscode/web-extension.code-workspace +++ b/components/clarity-vscode/web-extension.code-workspace @@ -12,7 +12,7 @@ "rust-analyzer.cargo.features": ["clarity-lsp/wasm"], "rust-analyzer.check.overrideCommand": [ "cargo", - "check", + "clippy", "--no-default-features", "--package=clarity-lsp", "--features=wasm", diff --git a/components/hiro-system-kit/src/tokio_helpers.rs b/components/hiro-system-kit/src/tokio_helpers.rs index 8d0dc9f08..e7917b9de 100644 --- a/components/hiro-system-kit/src/tokio_helpers.rs +++ b/components/hiro-system-kit/src/tokio_helpers.rs @@ -1,5 +1,4 @@ use std::future::Future; -use tokio; pub fn create_basic_runtime() -> tokio::runtime::Runtime { tokio::runtime::Builder::new_current_thread() @@ -18,8 +17,7 @@ pub fn nestable_block_on(future: F) -> F::Output { (rt.handle().clone(), Some(rt)) } }; - let response = handle.block_on(async { future.await }); - response + handle.block_on(future) } // pub fn spawn_async_thread_named(name: &str, f: F) -> io::Result> { diff --git a/components/stacks-network/src/chainhooks.rs b/components/stacks-network/src/chainhooks.rs index 2c55a2a14..7a0591337 100644 --- a/components/stacks-network/src/chainhooks.rs +++ b/components/stacks-network/src/chainhooks.rs @@ -21,7 +21,7 @@ pub fn parse_chainhook_full_specification( let mut hook_spec_file_reader = BufReader::new(path); let specification: ChainhookFullSpecification = serde_json::from_reader(&mut hook_spec_file_reader) - .map_err(|e| format!("unable to parse chainhook spec: {}", e.to_string()))?; + .map_err(|e| format!("unable to parse chainhook spec: {}", e))?; Ok(specification) } @@ -91,7 +91,7 @@ fn get_chainhooks_files( let is_extension_valid = file .extension() .and_then(|ext| ext.to_str()) - .and_then(|ext| Some(ext == "json")); + .map(|ext| ext == "json"); if let Some(true) = is_extension_valid { let relative_path = file.clone(); diff --git a/components/stacks-network/src/chains_coordinator.rs b/components/stacks-network/src/chains_coordinator.rs index 72801cb63..995e2149b 100644 --- a/components/stacks-network/src/chains_coordinator.rs +++ b/components/stacks-network/src/chains_coordinator.rs @@ -32,6 +32,7 @@ use clarity_repl::clarity::vm::types::{BuffData, SequenceData, TupleData}; use clarity_repl::clarity::vm::ClarityName; use clarity_repl::clarity::vm::Value as ClarityValue; use clarity_repl::codec; +use hiro_system_kit::yellow; use stacks_rpc_client::{PoxInfo, StacksRpc}; use std::convert::TryFrom; @@ -221,7 +222,7 @@ pub async fn start_chains_coordinator( let _ = hiro_system_kit::thread_named("Bitcoin mining").spawn(move || { let future = handle_bitcoin_mining(mining_command_rx, &devnet_config, &devnet_event_tx_moved); - let _ = hiro_system_kit::nestable_block_on(future); + hiro_system_kit::nestable_block_on(future); }); // Loop over events being received from Bitcoin and Stacks, @@ -349,7 +350,7 @@ pub async fn start_chains_coordinator( let _ = devnet_event_tx.send(DevnetEvent::ServiceStatus(ServiceStatusData { order: 1, status: Status::Green, - name: format!("stacks-node 2.1",), + name: "stacks-node 2.1".to_string(), comment: format!( "mining blocks (chaintip = #{})", known_tip.block.block_identifier.index @@ -412,8 +413,8 @@ pub async fn start_chains_coordinator( let res = mine_bitcoin_block( &config.services_map_hosts.bitcoin_node_host, config.devnet_config.bitcoin_node_username.as_str(), - &config.devnet_config.bitcoin_node_password.as_str(), - 
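In the hiro-system-kit hunk above, `handle.block_on(async { future.await })` with a temporary binding becomes `handle.block_on(future)`: wrapping a future in another `async` block just to await it adds nothing, and returning a freshly bound variable is what clippy::let_and_return flags. A simplified sketch (it always builds a fresh current-thread runtime instead of reusing an existing handle, and assumes a tokio dependency with the `rt` feature):

```rust
use std::future::Future;

fn create_basic_runtime() -> tokio::runtime::Runtime {
    tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .expect("unable to build tokio runtime")
}

fn nestable_block_on<F: Future>(future: F) -> F::Output {
    let rt = create_basic_runtime();
    // Before: let response = rt.block_on(async { future.await }); response
    // After: the future is passed straight through, with no extra async block
    // and no intermediate binding.
    rt.block_on(future)
}

fn main() {
    let value = nestable_block_on(async { 21 * 2 });
    println!("{value}");
}
```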
&config.devnet_config.miner_btc_address.as_str(), + config.devnet_config.bitcoin_node_password.as_str(), + config.devnet_config.miner_btc_address.as_str(), ) .await; if let Err(e) = res { @@ -446,7 +447,7 @@ pub async fn start_chains_coordinator( order: 5, status: Status::Green, name: "subnet-node".into(), - comment: format!("⚡️"), + comment: "⚡️".to_string(), }, )); subnet_initialized = true; @@ -530,12 +531,12 @@ pub fn relay_devnet_protocol_deployment( pub async fn publish_stacking_orders( devnet_config: &DevnetConfig, - accounts: &Vec, + accounts: &[AccountConfig], services_map_hosts: &ServicesMapHosts, fee_rate: u64, bitcoin_block_height: u32, ) -> Option { - if devnet_config.pox_stacking_orders.len() == 0 { + if devnet_config.pox_stacking_orders.is_empty() { return None; } @@ -552,8 +553,8 @@ pub async fn publish_stacking_orders( for pox_stacking_order in devnet_config.pox_stacking_orders.iter() { if pox_stacking_order.start_at_cycle - 1 == pox_info.reward_cycle_id { let mut account = None; - let mut accounts_iter = accounts.iter(); - while let Some(e) = accounts_iter.next() { + let accounts_iter = accounts.iter(); + for e in accounts_iter { if e.label == pox_stacking_order.wallet { account = Some(e.clone()); break; @@ -669,7 +670,7 @@ pub async fn mine_bitcoin_block( .map_err(|e| format!("unable to send request ({})", e))? .json::() .await - .map_err(|e| format!("unable to generate bitcoin block: ({})", e.to_string()))?; + .map_err(|e| format!("unable to generate bitcoin block: ({})", e))?; Ok(()) } @@ -682,8 +683,8 @@ async fn handle_bitcoin_mining( loop { let command = match mining_command_rx.recv() { Ok(cmd) => cmd, - Err(_e) => { - // TODO(lgalabru): cascade termination + Err(e) => { + print!("{} {}", yellow!("unexpected error:"), e); break; } }; @@ -723,8 +724,8 @@ async fn handle_bitcoin_mining( let res = mine_bitcoin_block( &config.services_map_hosts.bitcoin_node_host, config.devnet_config.bitcoin_node_username.as_str(), - &config.devnet_config.bitcoin_node_password.as_str(), - &config.devnet_config.miner_btc_address.as_str(), + config.devnet_config.bitcoin_node_password.as_str(), + config.devnet_config.miner_btc_address.as_str(), ) .await; if let Err(e) = res { @@ -734,8 +735,8 @@ async fn handle_bitcoin_mining( BitcoinMiningCommand::InvalidateChainTip => { invalidate_bitcoin_chain_tip( &config.services_map_hosts.bitcoin_node_host, - &config.devnet_config.bitcoin_node_username.as_str(), - &config.devnet_config.bitcoin_node_password.as_str(), + config.devnet_config.bitcoin_node_username.as_str(), + config.devnet_config.bitcoin_node_password.as_str(), ); } } diff --git a/components/stacks-network/src/lib.rs b/components/stacks-network/src/lib.rs index f109be664..a68f9ee4b 100644 --- a/components/stacks-network/src/lib.rs +++ b/components/stacks-network/src/lib.rs @@ -19,7 +19,6 @@ pub use event::DevnetEvent; pub use log::{LogData, LogLevel}; pub use orchestrator::DevnetOrchestrator; use orchestrator::ServicesMapHosts; - use std::{ sync::{ atomic::{AtomicBool, Ordering}, @@ -33,9 +32,7 @@ use std::{ use chainhook_sdk::chainhooks::types::ChainhookConfig; use chains_coordinator::start_chains_coordinator; use clarinet_deployments::types::DeploymentSpecification; -use hiro_system_kit; use hiro_system_kit::slog; -use tracing_appender; use self::chains_coordinator::DevnetEventObserverConfig; #[allow(dead_code)] @@ -178,7 +175,7 @@ async fn do_run_devnet( if display_dashboard { ctx.try_log(|logger| slog::info!(logger, "Starting Devnet")); let moved_chains_coordinator_commands_tx = 
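The stacking-orders hunk above replaces `while let Some(e) = iter.next()` with a `for` loop (clippy::while_let_on_iterator): the iterator no longer needs a mutable binding and the intent reads directly. A sketch with a hypothetical account type:

```rust
#[derive(Clone, Debug)]
struct AccountConfig {
    label: String,
}

fn find_account(accounts: &[AccountConfig], wallet: &str) -> Option<AccountConfig> {
    let mut found = None;
    // clippy::while_let_on_iterator: a `for` loop over the iterator says the
    // same thing without manually advancing it.
    for account in accounts.iter() {
        if account.label == wallet {
            found = Some(account.clone());
            break;
        }
    }
    found
}

fn main() {
    let accounts = vec![
        AccountConfig { label: "deployer".to_string() },
        AccountConfig { label: "wallet_1".to_string() },
    ];
    println!("{:?}", find_account(&accounts, "wallet_1"));
}
```

(`accounts.iter().find(|a| a.label == wallet).cloned()` would shorten this further, but the diff keeps the explicit loop.)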
chains_coordinator_commands_tx.clone(); - let _ = ui::start_ui( + ui::start_ui( devnet_events_tx, devnet_events_rx, moved_chains_coordinator_commands_tx, diff --git a/components/stacks-network/src/orchestrator.rs b/components/stacks-network/src/orchestrator.rs index ef67eecc3..895d18059 100644 --- a/components/stacks-network/src/orchestrator.rs +++ b/components/stacks-network/src/orchestrator.rs @@ -89,14 +89,14 @@ impl DevnetOrchestrator { working_dir } else { let mut cwd = std::env::current_dir() - .map_err(|e| format!("unable to retrieve current dir ({})", e.to_string()))?; + .map_err(|e| format!("unable to retrieve current dir ({})", e))?; cwd.push(&working_dir); let _ = fs::create_dir(&cwd); cwd.canonicalize().map_err(|e| { format!( "unable to canonicalize working_dir {} ({})", working_dir.display(), - e.to_string() + e ) })? }; @@ -130,7 +130,7 @@ impl DevnetOrchestrator { user_space_docker_socket.push("run"); user_space_docker_socket.push("docker.sock"); Docker::connect_with_socket( - &user_space_docker_socket.to_str().unwrap(), + user_space_docker_socket.to_str().unwrap(), 120, bollard::API_DEFAULT_VERSION, ) @@ -148,7 +148,7 @@ impl DevnetOrchestrator { network_name, manifest, network_config: Some(network_config), - docker_client: docker_client, + docker_client, can_exit: true, termination_success_tx: None, stacks_node_container_id: None, @@ -189,9 +189,9 @@ impl DevnetOrchestrator { let (docker, devnet_config) = match (&self.docker_client, &self.network_config) { (Some(ref docker), Some(ref network_config)) => match network_config.devnet { Some(ref devnet_config) => (docker, devnet_config), - _ => return Err(format!("unable to get devnet config")), + _ => return Err("unable to get devnet config".to_string()), }, - _ => return Err(format!("unable to get devnet config")), + _ => return Err("unable to get devnet config".to_string()), }; // First, let's make sure that we pruned staled resources correctly @@ -201,18 +201,12 @@ impl DevnetOrchestrator { labels.insert("project", self.network_name.as_str()); let mut options = HashMap::new(); - options.insert("enable_ip_masquerade".into(), "true".into()); - options.insert("enable_icc".into(), "true".into()); - options.insert("host_binding_ipv4".into(), "0.0.0.0".into()); - options.insert("com.docker.network.bridge.enable_icc".into(), "true".into()); - options.insert( - "com.docker.network.bridge.enable_ip_masquerade".into(), - "true".into(), - ); - options.insert( - "com.docker.network.bridge.host_binding_ipv4".into(), - "0.0.0.0".into(), - ); + options.insert("enable_ip_masquerade", "true"); + options.insert("enable_icc", "true"); + options.insert("host_binding_ipv4", "0.0.0.0"); + options.insert("com.docker.network.bridge.enable_icc", "true"); + options.insert("com.docker.network.bridge.enable_ip_masquerade", "true"); + options.insert("com.docker.network.bridge.host_binding_ipv4", "0.0.0.0"); let network_id = docker .create_network::<&str>(CreateNetworkOptions { @@ -222,14 +216,14 @@ impl DevnetOrchestrator { ..Default::default() }, labels, - options: options, + options, ..Default::default() }) .await .map_err(|e| { format!( "clarinet was unable to create network. Is docker running locally? (error: {})", - e.to_string() + e ) })? 
.id @@ -238,7 +232,7 @@ impl DevnetOrchestrator { let res = docker .inspect_network::<&str>(&network_id, None) .await - .map_err(|e| format!("unable to retrieve network: {}", e.to_string()))?; + .map_err(|e| format!("unable to retrieve network: {}", e))?; let gateway = res .ipam @@ -289,9 +283,9 @@ impl DevnetOrchestrator { let (_docker, devnet_config) = match (&self.docker_client, &self.network_config) { (Some(ref docker), Some(ref network_config)) => match network_config.devnet { Some(ref devnet_config) => (docker, devnet_config), - _ => return Err(format!("unable to get devnet config")), + _ => return Err("unable to get devnet config".to_string()), }, - _ => return Err(format!("unable to get devnet config")), + _ => return Err("unable to get devnet config".to_string()), }; let mut boot_index = 1; @@ -309,7 +303,7 @@ impl DevnetOrchestrator { let enable_subnet_node = devnet_config.enable_subnet_node; let disable_subnet_api = devnet_config.disable_subnet_api; - let _ = fs::create_dir(format!("{}", devnet_config.working_dir)); + let _ = fs::create_dir(&devnet_config.working_dir); let _ = fs::create_dir(format!("{}/conf", devnet_config.working_dir)); let _ = fs::create_dir(format!("{}/data", devnet_config.working_dir)); @@ -391,14 +385,14 @@ impl DevnetOrchestrator { ))); // Start bitcoind - let _ = event_tx.send(DevnetEvent::info(format!("Starting bitcoin-node"))); + let _ = event_tx.send(DevnetEvent::info("Starting bitcoin-node".to_string())); let _ = event_tx.send(DevnetEvent::ServiceStatus(ServiceStatusData { order: 0, status: Status::Yellow, name: "bitcoin-node".into(), comment: "preparing container".into(), })); - match self.prepare_bitcoin_node_container(&ctx).await { + match self.prepare_bitcoin_node_container(ctx).await { Ok(_) => {} Err(message) => { let _ = event_tx.send(DevnetEvent::FatalError(message.clone())); @@ -432,7 +426,7 @@ impl DevnetOrchestrator { name: "stacks-api".into(), comment: "preparing postgres container".into(), })); - let _ = event_tx.send(DevnetEvent::info(format!("Starting postgres"))); + let _ = event_tx.send(DevnetEvent::info("Starting postgres".to_string())); match self.prepare_postgres_container(ctx).await { Ok(_) => {} Err(message) => { @@ -456,7 +450,7 @@ impl DevnetOrchestrator { comment: "preparing container".into(), })); - let _ = event_tx.send(DevnetEvent::info(format!("Starting stacks-api"))); + let _ = event_tx.send(DevnetEvent::info("Starting stacks-api".to_string())); match self.prepare_stacks_api_container(ctx).await { Ok(_) => {} Err(message) => { @@ -483,7 +477,7 @@ impl DevnetOrchestrator { // Start subnet node if enable_subnet_node { - let _ = event_tx.send(DevnetEvent::info(format!("Starting subnet-node"))); + let _ = event_tx.send(DevnetEvent::info("Starting subnet-node".to_string())); match self.prepare_subnet_node_container(boot_index, ctx).await { Ok(_) => {} Err(message) => { @@ -508,7 +502,7 @@ impl DevnetOrchestrator { }; if !disable_subnet_api { - let _ = event_tx.send(DevnetEvent::info(format!("Starting subnet-api"))); + let _ = event_tx.send(DevnetEvent::info("Starting subnet-api".to_string())); match self.prepare_subnet_api_container(ctx).await { Ok(_) => {} Err(message) => { @@ -535,7 +529,7 @@ impl DevnetOrchestrator { } // Start stacks-blockchain - let _ = event_tx.send(DevnetEvent::info(format!("Starting stacks-node"))); + let _ = event_tx.send(DevnetEvent::info("Starting stacks-node".to_string())); let _ = event_tx.send(DevnetEvent::ServiceStatus(ServiceStatusData { order: 1, status: Status::Yellow, @@ -581,7 +575,7 @@ 
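Two formatting clean-ups dominate the orchestrator hunks around this point: a `format!` call with no placeholders is just `.to_string()` on the literal (clippy::useless_format), and calling `.to_string()` on an argument that is already being formatted with `{}` is redundant (clippy::to_string_in_format_args). A sketch with a hypothetical error type:

```rust
use std::fmt;

#[derive(Debug)]
struct DockerError(String);

impl fmt::Display for DockerError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

fn main() {
    // clippy::useless_format: no placeholders, so format! is pure overhead.
    let msg = "Starting bitcoin-node".to_string();
    println!("{msg}");

    // clippy::to_string_in_format_args: `{}` already goes through Display,
    // so `e.to_string()` would allocate a String only to format it again.
    let e = DockerError("socket not found".to_string());
    let report = format!("unable to communicate with Docker: {}", e);
    println!("{report}");
}
```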
impl DevnetOrchestrator { return Err(message); } }; - let _ = event_tx.send(DevnetEvent::info(format!("Starting stacks-explorer"))); + let _ = event_tx.send(DevnetEvent::info("Starting stacks-explorer".to_string())); match self.boot_stacks_explorer_container(ctx).await { Ok(_) => {} Err(message) => { @@ -614,7 +608,7 @@ impl DevnetOrchestrator { return Err(message); } }; - let _ = event_tx.send(DevnetEvent::info(format!("Starting bitcoin-explorer"))); + let _ = event_tx.send(DevnetEvent::info("Starting bitcoin-explorer".to_string())); match self.boot_bitcoin_explorer_container(ctx).await { Ok(_) => {} Err(message) => { @@ -728,10 +722,10 @@ rpcport={bitcoin_node_rpc_port} let mut bitcoind_conf_path = PathBuf::from(&devnet_config.working_dir); bitcoind_conf_path.push("conf"); fs::create_dir_all(&bitcoind_conf_path) - .map_err(|e| format!("unable to create bitcoin conf directory: {}", e.to_string()))?; + .map_err(|e| format!("unable to create bitcoin conf directory: {}", e))?; bitcoind_conf_path.push("bitcoin.conf"); let mut file = File::create(bitcoind_conf_path) - .map_err(|e| format!("unable to create bitcoin.conf: {}", e.to_string()))?; + .map_err(|e| format!("unable to create bitcoin.conf: {}", e))?; file.write_all(bitcoind_conf.as_bytes()) .map_err(|e| format!("unable to write bitcoin.conf: {:?}", e))?; @@ -871,7 +865,7 @@ rpcport={bitcoin_node_rpc_port} let containers = match res { Ok(containers) => containers, Err(e) => { - let err = format!("unable to communicate with Docker: {}\nvisit https://docs.hiro.so/clarinet/troubleshooting#i-am-unable-to-start-devnet-though-my-docker-is-running to resolve this issue.", e.to_string()); + let err = format!("unable to communicate with Docker: {}\nvisit https://docs.hiro.so/clarinet/troubleshooting#i-am-unable-to-start-devnet-though-my-docker-is-running to resolve this issue.", e); return Err(err); } }; @@ -883,11 +877,11 @@ rpcport={bitcoin_node_rpc_port} None => continue, }; let _ = docker - .kill_container(&container_id, Some(options.clone())) + .kill_container(container_id, Some(options.clone())) .await; let _ = docker - .wait_container(&container_id, None::>) + .wait_container(container_id, None::>) .try_collect::>() .await; } @@ -898,7 +892,7 @@ rpcport={bitcoin_node_rpc_port} pub async fn boot_bitcoin_node_container(&mut self) -> Result<(), String> { let container = match &self.bitcoin_node_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -1005,15 +999,12 @@ events_keys = ["*"] r#" # Add stacks-api as an event observer [[events_observer]] -endpoint = "{}" +endpoint = stacks-api.{}:{} retry_count = 255 include_data_events = false events_keys = ["*"] "#, - format!( - "stacks-api.{}:{}", - self.network_name, devnet_config.stacks_api_events_port - ), + self.network_name, devnet_config.stacks_api_events_port )); } @@ -1022,14 +1013,11 @@ events_keys = ["*"] r#" # Add subnet-node as an event observer [[events_observer]] -endpoint = "{}" +endpoint = subnet-node.{}:{} retry_count = 255 events_keys = ["*"] "#, - format!( - "subnet-node.{}:{}", - self.network_name, devnet_config.subnet_events_ingestion_port - ), + self.network_name, devnet_config.subnet_events_ingestion_port )); } @@ -1226,7 +1214,7 @@ start_height = {epoch_2_4} pub async fn boot_stacks_node_container(&mut self) -> Result<(), String> { let container = match &self.stacks_node_container_id { Some(container) => 
container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -1343,15 +1331,12 @@ events_keys = ["*"] r#" # Add subnet-api as an event observer [[events_observer]] -endpoint = "{}" +endpoint = subnet-api.{}:{} retry_count = 255 include_data_events = false events_keys = ["*"] "#, - format!( - "subnet-api.{}:{}", - self.network_name, devnet_config.subnet_api_events_port - ), + self.network_name, devnet_config.subnet_api_events_port )); } @@ -1484,7 +1469,7 @@ events_keys = ["*"] pub async fn boot_subnet_node_container(&self) -> Result<(), String> { let container = match &self.subnet_node_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -1607,7 +1592,7 @@ events_keys = ["*"] pub async fn boot_stacks_api_container(&self, _ctx: &Context) -> Result<(), String> { let container = match &self.stacks_api_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -1739,7 +1724,7 @@ events_keys = ["*"] let postgres_container = match &self.postgres_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let psql_command = format!( @@ -1770,7 +1755,7 @@ events_keys = ["*"] let container = match &self.subnet_api_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -1811,7 +1796,7 @@ events_keys = ["*"] let mut port_bindings = HashMap::new(); port_bindings.insert( - format!("5432/tcp"), + "5432/tcp".to_string(), Some(vec![PortBinding { host_ip: Some(String::from("0.0.0.0")), host_port: Some(format!("{}/tcp", devnet_config.postgres_port)), @@ -1862,7 +1847,7 @@ events_keys = ["*"] pub async fn boot_postgres_container(&self, _ctx: &Context) -> Result<(), String> { let container = match &self.postgres_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -1972,7 +1957,7 @@ events_keys = ["*"] pub async fn boot_stacks_explorer_container(&self, _ctx: &Context) -> Result<(), String> { let container = match &self.stacks_explorer_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -2091,7 +2076,7 @@ events_keys = ["*"] pub async fn boot_bitcoin_explorer_container(&self, _ctx: &Context) -> Result<(), String> { let container = match &self.bitcoin_explorer_container_id { Some(container) => container.clone(), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let docker = match &self.docker_client { @@ -2119,7 +2104,7 @@ events_keys = ["*"] (Some(c1), Some(c2), Some(c3), Some(c4), Some(c5), Some(c6)) => { (c1, c2, c3, c4, c5, c6) } - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot 
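One detail worth double-checking in the config-template hunks above: folding the nested `format!` into the outer template also drops the quotes around the endpoint value (`endpoint = "{}"` becomes `endpoint = stacks-api.{}:{}`). If the generated observer entry is parsed as TOML, a bare `host:port` value would normally still need to be quoted. A hedged sketch of the quoted form, with placeholder values (behaviour not verified against the node's config parser):

```rust
fn main() {
    let network_name = "devnet";
    let stacks_api_events_port = 3700;

    // Keeping the quotes inside the raw template preserves a valid TOML string
    // value while still avoiding the nested format! call the diff removes.
    let observer_entry = format!(
        r#"
[[events_observer]]
endpoint = "stacks-api.{}:{}"
retry_count = 255
include_data_events = false
events_keys = ["*"]
"#,
        network_name, stacks_api_events_port
    );
    println!("{observer_entry}");
}
```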
container".to_string()), }; let ( stacks_node_c_id, @@ -2177,7 +2162,7 @@ events_keys = ["*"] &self.postgres_container_id, ) { (Some(c1), Some(c2), Some(c3), Some(c4)) => (c1, c2, c3, c4), - _ => return Err(format!("unable to boot container")), + _ => return Err("unable to boot container".to_string()), }; let (stacks_api_c_id, stacks_explorer_c_id, bitcoin_explorer_c_id, postgres_c_id) = containers_ids; @@ -2266,7 +2251,9 @@ events_keys = ["*"] .kill_container(bitcoin_explorer_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating bitcoin-explorer")); - let _ = docker.remove_container(bitcoin_explorer_container_id, None); + let _ = docker + .remove_container(bitcoin_explorer_container_id, None) + .await; } if let Some(ref stacks_explorer_container_id) = self.stacks_explorer_container_id { @@ -2274,7 +2261,9 @@ events_keys = ["*"] .kill_container(stacks_explorer_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating stacks-explorer")); - let _ = docker.remove_container(stacks_explorer_container_id, None); + let _ = docker + .remove_container(stacks_explorer_container_id, None) + .await; } if let Some(ref bitcoin_node_container_id) = self.bitcoin_node_container_id { @@ -2282,7 +2271,9 @@ events_keys = ["*"] .kill_container(bitcoin_node_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating bitcoin-node")); - let _ = docker.remove_container(bitcoin_node_container_id, None); + let _ = docker + .remove_container(bitcoin_node_container_id, None) + .await; } if let Some(ref stacks_api_container_id) = self.stacks_api_container_id { @@ -2290,7 +2281,7 @@ events_keys = ["*"] .kill_container(stacks_api_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating stacks-api")); - let _ = docker.remove_container(stacks_api_container_id, None); + let _ = docker.remove_container(stacks_api_container_id, None).await; } if let Some(ref postgres_container_id) = self.postgres_container_id { @@ -2298,7 +2289,7 @@ events_keys = ["*"] .kill_container(postgres_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating postgres")); - let _ = docker.remove_container(postgres_container_id, None); + let _ = docker.remove_container(postgres_container_id, None).await; } if let Some(ref stacks_node_container_id) = self.stacks_node_container_id { @@ -2306,7 +2297,9 @@ events_keys = ["*"] .kill_container(stacks_node_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating stacks-node")); - let _ = docker.remove_container(stacks_node_container_id, None); + let _ = docker + .remove_container(stacks_node_container_id, None) + .await; } if let Some(ref subnet_node_container_id) = self.subnet_node_container_id { @@ -2314,7 +2307,9 @@ events_keys = ["*"] .kill_container(subnet_node_container_id, options.clone()) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating subnet-node")); - let _ = docker.remove_container(subnet_node_container_id, None); + let _ = docker + .remove_container(subnet_node_container_id, None) + .await; } if let Some(ref subnet_api_container_id) = self.subnet_api_container_id { @@ -2322,7 +2317,7 @@ events_keys = ["*"] .kill_container(subnet_api_container_id, options) .await; ctx.try_log(|logger| slog::info!(logger, "Terminating subnet-api")); - let _ = docker.remove_container(subnet_api_container_id, None); + let _ = docker.remove_container(subnet_api_container_id, None).await; } // 
Delete network @@ -2384,9 +2379,9 @@ events_keys = ["*"] let (devnet_config, accounts) = match &self.network_config { Some(ref network_config) => match network_config.devnet { Some(ref devnet_config) => (devnet_config, &network_config.accounts), - _ => return Err(format!("unable to initialize bitcoin node")), + _ => return Err("unable to initialize bitcoin node".to_string()), }, - _ => return Err(format!("unable to initialize bitcoin node")), + _ => return Err("unable to initialize bitcoin node".to_string()), }; let miner_address = Address::from_str(&devnet_config.miner_btc_address) @@ -2408,12 +2403,12 @@ events_keys = ["*"] http_client .post(node_url) .timeout(Duration::from_secs(3)) - .basic_auth(&username, Some(&password)) + .basic_auth(username, Some(&password)) .header("Content-Type", "application/json") .header("Host", &node_url[7..]) } - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Configuring bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Configuring bitcoin-node".to_string())); let max_errors = 30; @@ -2446,7 +2441,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } let mut error_count = 0; @@ -2478,7 +2473,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } let mut error_count = 0; @@ -2510,7 +2505,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } let mut error_count = 0; @@ -2542,7 +2537,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } let mut error_count = 0; @@ -2582,7 +2577,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } let mut error_count = 0; @@ -2610,15 +2605,15 @@ events_keys = ["*"] let checksum = rpc_result .as_object() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .get("result") - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .as_object() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .get("checksum") - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .as_str() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? 
.to_string(); let _ = devnet_event_tx.send(DevnetEvent::info(format!( @@ -2659,7 +2654,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } let mut error_count = 0; @@ -2687,15 +2682,15 @@ events_keys = ["*"] let checksum = rpc_result .as_object() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .get("result") - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .as_object() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .get("checksum") - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .as_str() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .to_string(); let _ = devnet_event_tx.send(DevnetEvent::info(format!( @@ -2736,7 +2731,7 @@ events_keys = ["*"] } } std::thread::sleep(std::time::Duration::from_secs(1)); - let _ = devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + let _ = devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } // Index devnet's wallets by default for (_, account) in accounts.iter() { @@ -2768,15 +2763,15 @@ events_keys = ["*"] let checksum = rpc_result .as_object() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .get("result") - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .as_object() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .get("checksum") - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? .as_str() - .ok_or(format!("unable to parse 'getdescriptorinfo'"))? + .ok_or("unable to parse 'getdescriptorinfo'".to_string())? 
.to_string(); let _ = devnet_event_tx.send(DevnetEvent::info(format!( @@ -2818,7 +2813,7 @@ events_keys = ["*"] } std::thread::sleep(std::time::Duration::from_secs(1)); let _ = - devnet_event_tx.send(DevnetEvent::info(format!("Waiting for bitcoin-node",))); + devnet_event_tx.send(DevnetEvent::info("Waiting for bitcoin-node".to_string())); } } Ok(()) diff --git a/components/stacks-network/src/ui/app.rs b/components/stacks-network/src/ui/app.rs index 228829587..421d2ce2b 100644 --- a/components/stacks-network/src/ui/app.rs +++ b/components/stacks-network/src/ui/app.rs @@ -58,11 +58,8 @@ impl<'a> App<'a> { } pub fn on_key(&mut self, c: char) { - match c { - 'q' => { - self.should_quit = true; - } - _ => {} + if c == 'q' { + self.should_quit = true; } } @@ -136,7 +133,7 @@ impl<'a> App<'a> { pub fn display_microblock(&mut self, block: StacksMicroblockData) { self.tabs.titles.push_front(Spans::from(Span::styled( - format!("[·]"), + "[·]".to_string(), Style::default().fg(Color::White), ))); self.blocks.push(BlockData::Microblock(block)); diff --git a/components/stacks-network/src/ui/mod.rs b/components/stacks-network/src/ui/mod.rs index 8d69a6ab0..23a2742a0 100644 --- a/components/stacks-network/src/ui/mod.rs +++ b/components/stacks-network/src/ui/mod.rs @@ -1,6 +1,6 @@ #[allow(dead_code)] mod app; -#[allow(dead_code)] +#[allow(clippy::module_inception)] mod ui; #[allow(dead_code)] mod util; @@ -59,16 +59,16 @@ pub fn do_start_ui( automining_enabled: bool, ctx: &Context, ) -> Result<(), String> { - enable_raw_mode().map_err(|e| format!("unable to start terminal ui: {}", e.to_string()))?; + enable_raw_mode().map_err(|e| format!("unable to start terminal ui: {}", e))?; let mut stdout = stdout(); execute!(stdout, EnterAlternateScreen) - .map_err(|e| format!("unable to start terminal ui: {}", e.to_string()))?; + .map_err(|e| format!("unable to start terminal ui: {}", e))?; let backend = CrosstermBackend::new(stdout); - let mut terminal = Terminal::new(backend) - .map_err(|e| format!("unable to start terminal ui: {}", e.to_string()))?; + let mut terminal = + Terminal::new(backend).map_err(|e| format!("unable to start terminal ui: {}", e))?; // Setup input handling let tick_rate = Duration::from_millis(500); @@ -85,7 +85,7 @@ pub fn do_start_ui( } } if last_tick.elapsed() >= tick_rate { - if let Err(_) = devnet_events_tx.send(DevnetEvent::Tick) { + if devnet_events_tx.send(DevnetEvent::Tick).is_err() { break; } last_tick = Instant::now(); @@ -97,14 +97,14 @@ pub fn do_start_ui( terminal .clear() - .map_err(|e| format!("unable to start terminal ui: {}", e.to_string()))?; + .map_err(|e| format!("unable to start terminal ui: {}", e))?; let mut mining_command_tx: Option> = None; loop { terminal .draw(|f| ui::draw(f, &mut app)) - .map_err(|e| format!("unable to update ui: {}", e.to_string()))?; + .map_err(|e| format!("unable to update ui: {}", e))?; let event = match devnet_events_rx.recv() { Ok(event) => event, Err(_e) => { @@ -130,9 +130,9 @@ pub fn do_start_ui( (KeyModifiers::NONE, KeyCode::Char('n')) => { if let Some(ref tx) = mining_command_tx { let _ = tx.send(BitcoinMiningCommand::Mine); - app.display_log(DevnetEvent::log_success(format!("Bitcoin block mining triggered manually")), ctx); + app.display_log(DevnetEvent::log_success("Bitcoin block mining triggered manually".to_string()), ctx); } else { - app.display_log(DevnetEvent::log_error(format!("Manual block mining not ready")), ctx); + app.display_log(DevnetEvent::log_error("Manual block mining not ready".to_string()), ctx); } } 
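Two small UI-loop clean-ups appear in the hunks above: a `match` on a `char` with one interesting arm becomes a plain equality check, and `if let Err(_) = tx.send(...)` becomes `tx.send(...).is_err()` (clippy::redundant_pattern_matching). A sketch with a std mpsc channel standing in for the devnet event channel:

```rust
use std::sync::mpsc;

struct App {
    should_quit: bool,
}

impl App {
    fn on_key(&mut self, c: char) {
        // A single-arm match over a char is just an equality test.
        if c == 'q' {
            self.should_quit = true;
        }
    }
}

fn main() {
    let mut app = App { should_quit: false };
    app.on_key('q');
    println!("should_quit = {}", app.should_quit);

    // clippy::redundant_pattern_matching: `.is_err()` reads better than
    // destructuring an error value we never look at.
    let (tx, rx) = mpsc::channel::<&str>();
    drop(rx); // make the next send fail, as when the UI loop has shut down
    if tx.send("tick").is_err() {
        println!("receiver gone, stopping ticker");
    }
}
```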
(KeyModifiers::NONE, KeyCode::Left) => app.on_left(), diff --git a/components/stacks-network/src/ui/ui.rs b/components/stacks-network/src/ui/ui.rs index e1c68b2c0..2b7f61a01 100644 --- a/components/stacks-network/src/ui/ui.rs +++ b/components/stacks-network/src/ui/ui.rs @@ -155,7 +155,7 @@ where .constraints([Constraint::Length(1), Constraint::Min(1)].as_ref()) .split(area); - let titles = app.tabs.titles.iter().map(|s| s.clone()).collect(); + let titles = app.tabs.titles.iter().cloned().collect(); let blocks = Tabs::new(titles) .divider("") .style(Style::default().fg(Color::White)) @@ -177,15 +177,15 @@ where } let transactions = match &app.blocks[(app.tabs.titles.len() - 1) - app.tabs.index] { BlockData::Block(selected_block) => { - draw_block_details(f, block_details_components[0], &selected_block); + draw_block_details(f, block_details_components[0], selected_block); &selected_block.transactions } BlockData::Microblock(selected_microblock) => { - draw_microblock_details(f, block_details_components[0], &selected_microblock); + draw_microblock_details(f, block_details_components[0], selected_microblock); &selected_microblock.transactions } }; - draw_transactions(f, block_details_components[1], &transactions); + draw_transactions(f, block_details_components[1], transactions); } fn draw_block_details(f: &mut Frame, area: Rect, block: &StacksBlockData) @@ -238,7 +238,7 @@ where .block(Block::default().borders(Borders::NONE)); f.render_widget(paragraph, labels[3]); - let value = format!("{}", block.block_identifier.hash); + let value = block.block_identifier.hash.to_string(); let paragraph = Paragraph::new(value) .style(Style::default().fg(Color::White)) .block(Block::default().borders(Borders::NONE)); @@ -262,7 +262,11 @@ where .block(Block::default().borders(Borders::NONE)); f.render_widget(paragraph, labels[7]); - let value = format!("{}", block.metadata.bitcoin_anchor_block_identifier.hash); + let value = block + .metadata + .bitcoin_anchor_block_identifier + .hash + .to_string(); let paragraph = Paragraph::new(value) .style(Style::default().fg(Color::White)) .block(Block::default().borders(Borders::NONE)); @@ -334,7 +338,7 @@ where .block(Block::default().borders(Borders::NONE)); f.render_widget(paragraph, labels[3]); - let value = format!("{}", microblock.block_identifier.hash); + let value = microblock.block_identifier.hash.to_string(); let paragraph = Paragraph::new(value) .style(Style::default().fg(Color::White)) .block(Block::default().borders(Borders::NONE)); @@ -358,14 +362,14 @@ where .block(Block::default().borders(Borders::NONE)); f.render_widget(paragraph, labels[7]); - let value = format!("{}", microblock.metadata.anchor_block_identifier.hash); + let value = microblock.metadata.anchor_block_identifier.hash.to_string(); let paragraph = Paragraph::new(value) .style(Style::default().fg(Color::White)) .block(Block::default().borders(Borders::NONE)); f.render_widget(paragraph, labels[8]); } -fn draw_transactions(f: &mut Frame, area: Rect, transactions: &Vec) +fn draw_transactions(f: &mut Frame, area: Rect, transactions: &[StacksTransactionData]) where B: Backend, { @@ -405,7 +409,7 @@ where .add_modifier(Modifier::BOLD), ) .highlight_symbol("* "); - let mut inner_area = area.clone(); + let mut inner_area = area; inner_area.height = inner_area.height.saturating_sub(1); f.render_widget(list, inner_area); } diff --git a/components/stacks-network/src/ui/util/mod.rs b/components/stacks-network/src/ui/util/mod.rs index 02d7ba45b..68a553978 100644 --- 
a/components/stacks-network/src/ui/util/mod.rs +++ b/components/stacks-network/src/ui/util/mod.rs @@ -18,13 +18,13 @@ impl<'a> TabsState<'a> { } } pub fn next(&mut self) { - if self.titles.len() > 0 { + if !self.titles.is_empty() { self.index = (self.index + 1) % self.titles.len(); } } pub fn previous(&mut self) { - if self.titles.len() > 0 { + if !self.titles.is_empty() { if self.index > 0 { self.index -= 1; } else { @@ -57,9 +57,7 @@ impl StatefulList { pub fn next(&mut self) { let i = match self.state.selected() { Some(i) => { - if self.items.len() == 0 { - 0 - } else if i >= self.items.len() - 1 { + if self.items.is_empty() || i >= self.items.len() - 1 { 0 } else { i + 1 @@ -73,7 +71,7 @@ impl StatefulList { pub fn previous(&mut self) { let i = match self.state.selected() { Some(i) => { - if self.items.len() == 0 { + if self.items.is_empty() { 0 } else if i == 0 { self.items.len() - 1 diff --git a/components/stacks-rpc-client/src/crypto.rs b/components/stacks-rpc-client/src/crypto.rs index a3250dbdf..f3cdeff00 100644 --- a/components/stacks-rpc-client/src/crypto.rs +++ b/components/stacks-rpc-client/src/crypto.rs @@ -1,3 +1,5 @@ +use std::str::FromStr; + use crate::clarity::codec::*; use crate::clarity::stacks_common::codec::StacksMessageCodec; use crate::clarity::stacks_common::types::chainstate::StacksAddress; @@ -24,7 +26,7 @@ pub struct Wallet { impl Wallet { pub fn compute_stacks_address(&self) -> StacksAddress { - let keypair = compute_keypair(&self); + let keypair = compute_keypair(self); compute_stacks_address(&keypair.public_key, self.mainnet) } } @@ -38,7 +40,7 @@ pub fn compute_stacks_address(public_key: &PublicKey, mainnet: bool) -> StacksAd let wrapped_public_key = Secp256k1PublicKey::from_slice(&public_key.serialize_compressed()).unwrap(); - let signer_addr = StacksAddress::from_public_keys( + StacksAddress::from_public_keys( match mainnet { true => C32_ADDRESS_VERSION_MAINNET_SINGLESIG, false => C32_ADDRESS_VERSION_TESTNET_SINGLESIG, @@ -47,9 +49,7 @@ pub fn compute_stacks_address(public_key: &PublicKey, mainnet: bool) -> StacksAd 1, &vec![wrapped_public_key], ) - .unwrap(); - - signer_addr + .unwrap() } pub fn compute_keypair(wallet: &Wallet) -> Keypair { @@ -78,9 +78,9 @@ pub fn sign_transaction_payload( let signer_addr = compute_stacks_address(&keypair.public_key, wallet.mainnet); let spending_condition = TransactionSpendingCondition::Singlesig(SinglesigSpendingCondition { - signer: signer_addr.bytes.clone(), - nonce: nonce, - tx_fee: tx_fee, + signer: signer_addr.bytes, + nonce, + tx_fee, hash_mode: SinglesigHashMode::P2PKH, key_encoding: TransactionPublicKeyEncoding::Compressed, signature: MessageSignature::empty(), @@ -96,11 +96,11 @@ pub fn sign_transaction_payload( true => 0x00000001, false => 0x80000000, }, - auth: auth, - anchor_mode: anchor_mode, + auth, + anchor_mode, post_condition_mode: TransactionPostConditionMode::Allow, post_conditions: vec![], - payload: payload, + payload, }; let mut unsigned_tx_bytes = vec![]; diff --git a/components/stacks-rpc-client/src/rpc_client.rs b/components/stacks-rpc-client/src/rpc_client.rs index c55a26b3b..434495e65 100644 --- a/components/stacks-rpc-client/src/rpc_client.rs +++ b/components/stacks-rpc-client/src/rpc_client.rs @@ -14,12 +14,12 @@ pub enum RpcError { Message(String), } -impl RpcError { - pub fn to_string(&self) -> String { +impl std::fmt::Display for RpcError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match &self { - RpcError::Message(e) => e.clone(), - RpcError::StatusCode(e) 
=> format!("error status code {}", e), - RpcError::Generic => "unknown error".into(), + RpcError::Message(e) => write!(f, "{}", e), + RpcError::StatusCode(e) => write!(f, "error status code {}", e), + RpcError::Generic => write!(f, "unknown error"), } } } @@ -161,7 +161,7 @@ impl StacksRpc { let path = format!("{}/v2/fees/transaction", self.url); let res: FeeEstimationReport = self .client - .post(&path) + .post(path) .json(&payload) .send() .map_err(|e| RpcError::Message(e.to_string()))? @@ -179,7 +179,7 @@ impl StacksRpc { let path = format!("{}/v2/transactions", self.url); let res = self .client - .post(&path) + .post(path) .header("Content-Type", "application/octet-stream") .body(tx) .send() @@ -203,7 +203,7 @@ impl StacksRpc { let res: Balance = self .client - .get(&request_url) + .get(request_url) .send() .map_err(|e| RpcError::Message(e.to_string()))? .json() @@ -217,7 +217,7 @@ impl StacksRpc { let res: PoxInfo = self .client - .get(&request_url) + .get(request_url) .send() .map_err(|e| RpcError::Message(e.to_string()))? .json() @@ -230,7 +230,7 @@ impl StacksRpc { let res: NodeInfo = self .client - .get(&request_url) + .get(request_url) .send() .map_err(|e| RpcError::Message(e.to_string()))? .json() @@ -248,14 +248,14 @@ impl StacksRpc { self.url, principal, contract_name ); - let res = self.client.get(&request_url).send(); + let res = self.client.get(request_url).send(); match res { Ok(response) => match response.json() { Ok(value) => Ok(value), - Err(e) => Err(RpcError::Message(format!("{}", e.to_string()))), + Err(e) => Err(RpcError::Message(e.to_string())), }, - Err(e) => Err(RpcError::Message(format!("{}", e.to_string()))), + Err(e) => Err(RpcError::Message(e.to_string())), } } @@ -278,7 +278,7 @@ impl StacksRpc { .collect::>(); let res = self .client - .post(&path) + .post(path) .json(&json!({ "sender": sender, "arguments": arguments, @@ -307,7 +307,7 @@ impl StacksRpc { Some(raw_value) => raw_value, _ => panic!(), }; - let bytes = hex_bytes(&raw_value).unwrap(); + let bytes = hex_bytes(raw_value).unwrap(); let mut cursor = Cursor::new(&bytes); let value = Value::consensus_deserialize(&mut cursor).unwrap(); Ok(value)
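Finally, the rpc_client.rs hunk replaces an inherent `to_string` method on the error enum with a `std::fmt::Display` impl, which is what clippy::inherent_to_string asks for: the blanket `impl<T: Display + ?Sized> ToString for T` then provides `to_string` for free, and the error can be used directly in `format!` / `println!`. A standalone sketch mirroring the shape of that change (same variant names, payload types assumed):

```rust
use std::fmt;

#[derive(Debug)]
enum RpcError {
    Generic,
    StatusCode(u16),
    Message(String),
}

impl fmt::Display for RpcError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            RpcError::Message(e) => write!(f, "{}", e),
            RpcError::StatusCode(e) => write!(f, "error status code {}", e),
            RpcError::Generic => write!(f, "unknown error"),
        }
    }
}

fn main() {
    let err = RpcError::StatusCode(502);
    // `{}` formatting works directly, and to_string now comes from the
    // blanket ToString impl instead of a hand-rolled inherent method.
    println!("{}", err);
    println!("{}", RpcError::Message("timeout".to_string()));
    assert_eq!(RpcError::Generic.to_string(), "unknown error");
}
```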