From cd95f278058b0ecea2c8ea2d77ea35cabc1fe898 Mon Sep 17 00:00:00 2001
From: "Mr. Walker"
Date: Sat, 20 Apr 2024 20:19:09 +0100
Subject: [PATCH] Prover service trait and SHARP client implementation

---
 .github/.DS_Store | Bin 6148 -> 0 bytes
 Cargo.lock | 5293 ++++++++++++++---
 Cargo.toml | 54 +-
 .../da_clients/da-client-interface/Cargo.toml | 2 +-
 .../da_clients/da-client-interface/src/lib.rs | 3 +-
 crates/da_clients/ethereum/Cargo.toml | 3 +-
 crates/da_clients/ethereum/src/lib.rs | 13 +-
 crates/orchestrator/Cargo.toml | 5 +
 crates/orchestrator/src/config.rs | 53 +-
 .../src/controllers/jobs_controller.rs | 8 +-
 crates/orchestrator/src/database/mod.rs | 6 +-
 .../src/database/mongodb/config.rs | 3 +-
 .../orchestrator/src/database/mongodb/mod.rs | 29 +-
 crates/orchestrator/src/jobs/da_job/mod.rs | 19 +-
 crates/orchestrator/src/jobs/mod.rs | 33 +-
 .../jobs/prover_job/artifacts/fibonacci.zip | Bin 0 -> 1514 bytes
 .../orchestrator/src/jobs/prover_job/mod.rs | 64 +
 .../src/jobs/register_proof_job/mod.rs | 10 +-
 crates/orchestrator/src/jobs/snos_job/mod.rs | 10 +-
 .../src/jobs/state_update_job/mod.rs | 10 +-
 crates/orchestrator/src/jobs/types.rs | 8 +-
 crates/orchestrator/src/lib.rs | 2 -
 crates/orchestrator/src/main.rs | 2 +-
 crates/orchestrator/src/queue/job_queue.rs | 10 +-
 crates/orchestrator/src/queue/mod.rs | 6 +-
 crates/orchestrator/src/queue/sqs/mod.rs | 6 +-
 crates/orchestrator/src/routes.rs | 3 +-
 crates/orchestrator/src/tests/common/mod.rs | 25 +-
 .../orchestrator/src/tests/jobs/da_job/mod.rs | 19 +-
 crates/orchestrator/src/tests/server/mod.rs | 13 +-
 crates/orchestrator/src/utils/env_utils.rs | 13 -
 crates/orchestrator/src/utils/mod.rs | 1 -
 .../src/workers/proof_registration.rs | 3 +-
 crates/orchestrator/src/workers/proving.rs | 3 +-
 crates/orchestrator/src/workers/snos.rs | 3 +-
 .../orchestrator/src/workers/update_state.rs | 3 +-
 .../gps_fact_checker/Cargo.toml | 22 +
 .../src/artifacts/FactRegistry.json | 1 +
 .../gps_fact_checker/src/artifacts/README.md | 30 +
 .../src/artifacts/fibonacci.zip | Bin 0 -> 1514 bytes
 .../src/artifacts/get_fact.py | 14 +
 .../gps_fact_checker/src/error.rs | 53 +
 .../gps_fact_checker/src/fact_info.rs | 97 +
 .../gps_fact_checker/src/fact_node.rs | 117 +
 .../gps_fact_checker/src/fact_topology.rs | 102 +
 .../gps_fact_checker/src/lib.rs | 40 +
 .../prover_services/prover_service/Cargo.toml | 13 +
 .../prover_services/prover_service/src/lib.rs | 45 +
 .../prover_services/sharp_service/Cargo.toml | 24 +
 .../sharp_service/src/artifacts/fibonacci.zip | Bin 0 -> 1514 bytes
 .../sharp_service/src/artifacts/print.zip | Bin 0 -> 1303 bytes
 .../sharp_service/src/client.rs | 49 +
 .../sharp_service/src/config.rs | 27 +
 .../sharp_service/src/error.rs | 30 +
 .../prover_services/sharp_service/src/lib.rs | 136 +
 .../prover_services/stone_service/Cargo.toml | 14 +
 .../stone_service/src/error.rs | 10 +
 .../prover_services/stone_service/src/gps.rs | 29 +
 .../stone_service/src/integrity.rs | 23 +
 .../prover_services/stone_service/src/lib.rs | 45 +
 .../stone_service/src/registry.rs | 15 +
 .../stone_service/src/sovereign.rs | 24 +
 .../settlement-client-interface/src/lib.rs | 3 +-
 crates/utils/Cargo.toml | 2 +
 crates/utils/src/lib.rs | 13 +
 crates/utils/src/settings/default.rs | 13 +
 crates/utils/src/settings/mod.rs | 13 +
 e2e-tests/Cargo.toml | 16 +
 e2e-tests/src/lib.rs | 107 +
 e2e-tests/test_prover_jobs.rs | 20 +
 migrations/.DS_Store | Bin 6148 -> 0 bytes
 71 files changed, 6029 insertions(+), 856 deletions(-)
 delete mode 100644 .github/.DS_Store
 create mode 100644
crates/orchestrator/src/jobs/prover_job/artifacts/fibonacci.zip create mode 100644 crates/orchestrator/src/jobs/prover_job/mod.rs delete mode 100644 crates/orchestrator/src/utils/env_utils.rs delete mode 100644 crates/orchestrator/src/utils/mod.rs create mode 100644 crates/prover_services/gps_fact_checker/Cargo.toml create mode 100644 crates/prover_services/gps_fact_checker/src/artifacts/FactRegistry.json create mode 100644 crates/prover_services/gps_fact_checker/src/artifacts/README.md create mode 100644 crates/prover_services/gps_fact_checker/src/artifacts/fibonacci.zip create mode 100644 crates/prover_services/gps_fact_checker/src/artifacts/get_fact.py create mode 100644 crates/prover_services/gps_fact_checker/src/error.rs create mode 100644 crates/prover_services/gps_fact_checker/src/fact_info.rs create mode 100644 crates/prover_services/gps_fact_checker/src/fact_node.rs create mode 100644 crates/prover_services/gps_fact_checker/src/fact_topology.rs create mode 100644 crates/prover_services/gps_fact_checker/src/lib.rs create mode 100644 crates/prover_services/prover_service/Cargo.toml create mode 100644 crates/prover_services/prover_service/src/lib.rs create mode 100644 crates/prover_services/sharp_service/Cargo.toml create mode 100644 crates/prover_services/sharp_service/src/artifacts/fibonacci.zip create mode 100644 crates/prover_services/sharp_service/src/artifacts/print.zip create mode 100644 crates/prover_services/sharp_service/src/client.rs create mode 100644 crates/prover_services/sharp_service/src/config.rs create mode 100644 crates/prover_services/sharp_service/src/error.rs create mode 100644 crates/prover_services/sharp_service/src/lib.rs create mode 100644 crates/prover_services/stone_service/Cargo.toml create mode 100644 crates/prover_services/stone_service/src/error.rs create mode 100644 crates/prover_services/stone_service/src/gps.rs create mode 100644 crates/prover_services/stone_service/src/integrity.rs create mode 100644 crates/prover_services/stone_service/src/lib.rs create mode 100644 crates/prover_services/stone_service/src/registry.rs create mode 100644 crates/prover_services/stone_service/src/sovereign.rs create mode 100644 crates/utils/src/settings/default.rs create mode 100644 crates/utils/src/settings/mod.rs create mode 100644 e2e-tests/Cargo.toml create mode 100644 e2e-tests/src/lib.rs create mode 100644 e2e-tests/test_prover_jobs.rs delete mode 100644 migrations/.DS_Store diff --git a/.github/.DS_Store b/.github/.DS_Store deleted file mode 100644 index daf430c92957832b27a3a56edcd4965cb3f86351..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeHKOG*SW5UtW#w78k2%Uoe@5Qp{za{(C#73?8{gSgGYLwNz0F2ucSkKwD2GK~>j zh=^1{@+zrM(hoXG5fRUCRx_e05p`&SEJ{OUx@p>T=LwKC$2kpD=$5v_MI+H)oRYmC zV8?~7>56vtKUqzudn*Jw6hM3J6ohXG>kqN2nK?I4+i9XNN9rDu^Q^pflf;Rpgf~hpi3"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [workspace.dependencies] -async-trait = { version = "0.1.77" } +alloy = { git = "https://github.com/alloy-rs/alloy", rev = "7373f6db761d5a19888e3a0c527e8a3ca31e7a1e" } +alloy-primitives = "0.7.4" +async-trait = "0.1.77" axum = { version = "0.7.4" } -axum-macros = { version = "0.4.1" } -color-eyre = { version = "0.6.2" } -dotenvy = { version = "0.15.7" } -futures = { version = "0.3.30" } +axum-macros = "0.4.1" +color-eyre = "0.6.2" +dotenvy = "0.15.7" +futures = "0.3.30" mongodb = { version = "2.8.1" } omniqueue = { version = "0.2.0" } -rstest = { version = "0.18.2" } +reqwest = { 
version = "0.11.24" } +rstest = "0.18.2" serde = { version = "1.0.197" } -serde_json = { version = "1.0.114" } -starknet = { version = "0.9.0" } -thiserror = { version = "1.0.57" } -tokio = { version = "1.36.0" } -tracing = { version = "0.1.40" } +serde_json = "1.0.114" +starknet = "0.9.0" +tempfile = "3.8.1" +thiserror = "1.0.57" +tokio = { version = "1.37.0" } +tracing = "0.1.40" tracing-subscriber = { version = "0.3.18" } -url = { version = "2.5.0" } -uuid = { version = "1.7.0" } +url = { version = "2.5.0", features = ["serde"] } +uuid = { version = "1.7.0", features = ["v4", "serde"] } httpmock = { version = "0.7.0" } +stark_evm_adapter = "0.1.1" +hex = "0.4" +itertools = "0.13.0" +mockall = "0.12.1" + +# Cairo VM (same version as in SNOS) +cairo-vm = { git = "https://github.com/lambdaclass/cairo-vm", rev = "f87be4d9cfad2100d4a5c085cf2aabc9caced40f", features = ["extensive_hints", "cairo-1-hints"] } + +# Sharp (Starkware) +snos = { git = "https://github.com/unstark/snos", branch = "bump-cairo-lang" } + +# Madara prover API +madara-prover-common = { git = "https://github.com/Moonsong-Labs/madara-prover-api", branch = "od/use-latest-cairo-vm" } +madara-prover-rpc-client = { git = "https://github.com/Moonsong-Labs/madara-prover-api", branch = "od/use-latest-cairo-vm" } + +# Project da-client-interface = { path = "crates/da_clients/da-client-interface" } ethereum-da-client = { path = "crates/da_clients/ethereum" } utils = { path = "crates/utils" } +prover-service = { path = "crates/prover_services/prover_service" } +gps-fact-checker = { path = "crates/prover_services/gps_fact_checker" } +sharp-service = { path = "crates/prover_services/sharp_service" } +stone-service = { path = "crates/prover_services/stone_service" } diff --git a/crates/da_clients/da-client-interface/Cargo.toml b/crates/da_clients/da-client-interface/Cargo.toml index f9dee4ca..d2887a27 100644 --- a/crates/da_clients/da-client-interface/Cargo.toml +++ b/crates/da_clients/da-client-interface/Cargo.toml @@ -9,5 +9,5 @@ edition.workspace = true async-trait = { workspace = true } axum = { workspace = true } color-eyre = { workspace = true } -mockall = "0.12.1" +mockall = { workspace = true } starknet = { workspace = true } diff --git a/crates/da_clients/da-client-interface/src/lib.rs b/crates/da_clients/da-client-interface/src/lib.rs index 62c5edb2..391e97a9 100644 --- a/crates/da_clients/da-client-interface/src/lib.rs +++ b/crates/da_clients/da-client-interface/src/lib.rs @@ -1,6 +1,7 @@ use async_trait::async_trait; use color_eyre::Result; -use mockall::{automock, predicate::*}; +use mockall::automock; +use mockall::predicate::*; use starknet::core::types::FieldElement; #[derive(Debug, Copy, Clone, PartialEq, Eq)] diff --git a/crates/da_clients/ethereum/Cargo.toml b/crates/da_clients/ethereum/Cargo.toml index e5ab4758..ec6d49f7 100644 --- a/crates/da_clients/ethereum/Cargo.toml +++ b/crates/da_clients/ethereum/Cargo.toml @@ -4,10 +4,11 @@ version.workspace = true edition.workspace = true [dependencies] -alloy = { git = "https://github.com/alloy-rs/alloy", rev = "86027c9bb984f3a12a30ffd2a3c5f2f06595f1d6", features = [ +alloy = { workspace = true, features = [ "providers", "rpc-client", "transport-http", + "reqwest", ] } async-trait = { workspace = true } color-eyre = { workspace = true } diff --git a/crates/da_clients/ethereum/src/lib.rs b/crates/da_clients/ethereum/src/lib.rs index 985c0a04..ae96f942 100644 --- a/crates/da_clients/ethereum/src/lib.rs +++ b/crates/da_clients/ethereum/src/lib.rs @@ -1,16 +1,15 @@ 
#![allow(missing_docs)] #![allow(clippy::missing_docs_in_private_items)] +use std::str::FromStr; + use alloy::rpc::client::RpcClient; -use alloy::transports::http::Http; +use alloy::transports::http::{Client, Http}; use async_trait::async_trait; use color_eyre::Result; -use reqwest::Client; -use starknet::core::types::FieldElement; -use std::str::FromStr; -use url::Url; - use config::EthereumDaConfig; use da_client_interface::{DaClient, DaVerificationStatus}; +use starknet::core::types::FieldElement; +use url::Url; pub mod config; pub struct EthereumDaClient { @@ -32,7 +31,7 @@ impl DaClient for EthereumDaClient { impl From for EthereumDaClient { fn from(config: EthereumDaConfig) -> Self { let provider = RpcClient::builder() - .reqwest_http(Url::from_str(config.rpc_url.as_str()).expect("Failed to parse ETHEREUM_RPC_URL")); + .http(Url::from_str(config.rpc_url.as_str()).expect("Failed to parse ETHEREUM_RPC_URL")); EthereumDaClient { provider } } } diff --git a/crates/orchestrator/Cargo.toml b/crates/orchestrator/Cargo.toml index 28401584..f0c33a6a 100644 --- a/crates/orchestrator/Cargo.toml +++ b/crates/orchestrator/Cargo.toml @@ -33,6 +33,11 @@ tracing = { workspace = true } tracing-subscriber = { workspace = true, features = ["env-filter"] } url = { workspace = true } uuid = { workspace = true, features = ["v4", "serde"] } +sharp-service = { workspace = true } +stone-service = { workspace = true } +prover-service = { workspace = true } +utils = { workspace = true } +cairo-vm = { workspace = true } [features] default = ["ethereum", "with_mongodb", "with_sqs"] diff --git a/crates/orchestrator/src/config.rs b/crates/orchestrator/src/config.rs index 1e3d81f0..e006c84f 100644 --- a/crates/orchestrator/src/config.rs +++ b/crates/orchestrator/src/config.rs @@ -1,18 +1,27 @@ -use crate::database::mongodb::config::MongoDbConfig; -use crate::database::mongodb::MongoDb; -use crate::database::{Database, DatabaseConfig}; -use crate::queue::sqs::SqsQueue; -use crate::queue::QueueProvider; -use crate::utils::env_utils::get_env_var_or_panic; -use da_client_interface::DaClient; -use da_client_interface::DaConfig; +use std::sync::Arc; + +use da_client_interface::{DaClient, DaConfig}; use dotenvy::dotenv; use ethereum_da_client::config::EthereumDaConfig; use ethereum_da_client::EthereumDaClient; +use prover_service::ProverService; +use sharp_service::SharpProverService; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::{JsonRpcClient, Url}; -use std::sync::Arc; +use stone_service::gps::GpsFactRegistry; +use stone_service::integrity::IntegrityFactRegistry; +use stone_service::sovereign::SovereignProofRegistry; +use stone_service::StoneProverService; use tokio::sync::OnceCell; +use utils::env_utils::get_env_var_or_panic; +use utils::settings::default::DefaultSettingsProvider; +use utils::settings::SettingsProvider; + +use crate::database::mongodb::config::MongoDbConfig; +use crate::database::mongodb::MongoDb; +use crate::database::{Database, DatabaseConfig}; +use crate::queue::sqs::SqsQueue; +use crate::queue::QueueProvider; /// The app config. It can be accessed from anywhere inside the service /// by calling `config` function. 
@@ -21,6 +30,8 @@ pub struct Config { starknet_client: Arc>, /// The DA client to interact with the DA layer da_client: Box, + /// The service that produces proof and registers it onchain + prover: Box, /// The database client database: Box, /// The queue provider @@ -42,7 +53,10 @@ pub async fn init_config() -> Config { // init the queue let queue = Box::new(SqsQueue {}); - Config { starknet_client: Arc::new(provider), da_client: build_da_client(), database, queue } + let settings_provider = DefaultSettingsProvider {}; + let prover = create_prover_service(&settings_provider); + + Config { starknet_client: Arc::new(provider), da_client: build_da_client(), prover, database, queue } } impl Config { @@ -50,10 +64,11 @@ impl Config { pub fn new( starknet_client: Arc>, da_client: Box, + prover: Box, database: Box, queue: Box, ) -> Self { - Self { starknet_client, da_client, database, queue } + Self { starknet_client, da_client, prover, database, queue } } /// Returns the starknet client @@ -66,6 +81,11 @@ impl Config { self.da_client.as_ref() } + /// Returns the proving service + pub fn prover(&self) -> &dyn ProverService { + self.prover.as_ref() + } + /// Returns the database client pub fn database(&self) -> &dyn Database { self.database.as_ref() @@ -96,3 +116,14 @@ fn build_da_client() -> Box { _ => panic!("Unsupported DA layer"), } } + +/// Creates prover service based on the environment variable PROVER_SERVICE +fn create_prover_service(settings_provider: &impl SettingsProvider) -> Box { + match get_env_var_or_panic("PROVER_SERVICE").as_str() { + "sharp" => Box::new(SharpProverService::with_settings(settings_provider)), + "stone_gps" => Box::new(StoneProverService::::with_settings(settings_provider)), + "stone_integrity" => Box::new(StoneProverService::::with_settings(settings_provider)), + "stone_sovereign" => Box::new(StoneProverService::::with_settings(settings_provider)), + _ => panic!("Unsupported prover service"), + } +} diff --git a/crates/orchestrator/src/controllers/jobs_controller.rs b/crates/orchestrator/src/controllers/jobs_controller.rs index 4ac8c388..43a67652 100644 --- a/crates/orchestrator/src/controllers/jobs_controller.rs +++ b/crates/orchestrator/src/controllers/jobs_controller.rs @@ -1,8 +1,10 @@ -use crate::controllers::errors::AppError; -use crate::jobs::types::JobType; +use std::collections::HashMap; + use axum::extract::Json; use serde::Deserialize; -use std::collections::HashMap; + +use crate::controllers::errors::AppError; +use crate::jobs::types::JobType; /// Client request to create a job #[derive(Debug, Deserialize)] diff --git a/crates/orchestrator/src/database/mod.rs b/crates/orchestrator/src/database/mod.rs index e9639cf1..aecf8c0f 100644 --- a/crates/orchestrator/src/database/mod.rs +++ b/crates/orchestrator/src/database/mod.rs @@ -1,10 +1,12 @@ -use crate::jobs::types::{JobItem, JobStatus, JobType}; +use std::collections::HashMap; + use async_trait::async_trait; use color_eyre::Result; use mockall::automock; -use std::collections::HashMap; use uuid::Uuid; +use crate::jobs::types::{JobItem, JobStatus, JobType}; + /// MongoDB pub mod mongodb; diff --git a/crates/orchestrator/src/database/mongodb/config.rs b/crates/orchestrator/src/database/mongodb/config.rs index aea02a43..6ec561da 100644 --- a/crates/orchestrator/src/database/mongodb/config.rs +++ b/crates/orchestrator/src/database/mongodb/config.rs @@ -1,5 +1,6 @@ +use utils::env_utils::get_env_var_or_panic; + use crate::database::DatabaseConfig; -use crate::utils::env_utils::get_env_var_or_panic; pub 
struct MongoDbConfig { pub url: String, diff --git a/crates/orchestrator/src/database/mongodb/mod.rs b/crates/orchestrator/src/database/mongodb/mod.rs index b86d4bde..77b8f4e1 100644 --- a/crates/orchestrator/src/database/mongodb/mod.rs +++ b/crates/orchestrator/src/database/mongodb/mod.rs @@ -1,19 +1,17 @@ -use crate::database::mongodb::config::MongoDbConfig; -use crate::database::Database; -use crate::jobs::types::{JobItem, JobStatus, JobType}; +use std::collections::HashMap; + use async_trait::async_trait; use color_eyre::eyre::eyre; use color_eyre::Result; -use mongodb::bson::Document; -use mongodb::options::UpdateOptions; -use mongodb::{ - bson::doc, - options::{ClientOptions, ServerApi, ServerApiVersion}, - Client, Collection, -}; -use std::collections::HashMap; +use mongodb::bson::{doc, Document}; +use mongodb::options::{ClientOptions, ServerApi, ServerApiVersion, UpdateOptions}; +use mongodb::{Client, Collection}; use uuid::Uuid; +use crate::database::mongodb::config::MongoDbConfig; +use crate::database::Database; +use crate::jobs::types::{JobItem, JobStatus, JobType}; + pub mod config; pub struct MongoDb { @@ -23,7 +21,8 @@ pub struct MongoDb { impl MongoDb { pub async fn new(config: MongoDbConfig) -> Self { let mut client_options = ClientOptions::parse(config.url).await.expect("Failed to parse MongoDB Url"); - // Set the server_api field of the client_options object to set the version of the Stable API on the client + // Set the server_api field of the client_options object to set the version of the Stable API on the + // client let server_api = ServerApi::builder().version(ServerApiVersion::V1).build(); client_options.server_api = Some(server_api); // Get a handle to the cluster @@ -39,9 +38,9 @@ impl MongoDb { self.client.database("orchestrator").collection("jobs") } - /// Updates the job in the database optimistically. This means that the job is updated only if the - /// version of the job in the database is the same as the version of the job passed in. If the version - /// is different, the update fails. + /// Updates the job in the database optimistically. This means that the job is updated only if + /// the version of the job in the database is the same as the version of the job passed in. + /// If the version is different, the update fails. async fn update_job_optimistically(&self, current_job: &JobItem, update: Document) -> Result<()> { let filter = doc! 
{ "id": current_job.id, diff --git a/crates/orchestrator/src/jobs/da_job/mod.rs b/crates/orchestrator/src/jobs/da_job/mod.rs index b5108898..0fcf9a47 100644 --- a/crates/orchestrator/src/jobs/da_job/mod.rs +++ b/crates/orchestrator/src/jobs/da_job/mod.rs @@ -1,15 +1,17 @@ -use crate::config::Config; -use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; -use crate::jobs::Job; +use std::collections::HashMap; + use async_trait::async_trait; use color_eyre::eyre::eyre; use color_eyre::Result; use starknet::core::types::{BlockId, FieldElement, MaybePendingStateUpdate, StateUpdate, StorageEntry}; use starknet::providers::Provider; -use std::collections::HashMap; use tracing::log; use uuid::Uuid; +use crate::config::Config; +use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; +use crate::jobs::Job; + pub struct DaJob; #[async_trait] @@ -165,9 +167,10 @@ fn da_word(class_flag: bool, nonce_change: Option, num_changes: u6 #[cfg(test)] mod tests { - use super::*; use rstest::rstest; + use super::*; + #[rstest] #[case(false, 1, 1, "18446744073709551617")] #[case(false, 1, 0, "18446744073709551616")] @@ -186,11 +189,13 @@ mod tests { } mod test_state_update_to_blob_data { - use super::*; - use serde_json::Error; use std::fs::{read_to_string, File}; use std::io::{self, BufRead}; + use serde_json::Error; + + use super::*; + #[test] #[ignore] fn state_update_to_blob_data_works() { diff --git a/crates/orchestrator/src/jobs/mod.rs b/crates/orchestrator/src/jobs/mod.rs index f7011180..b05048c7 100644 --- a/crates/orchestrator/src/jobs/mod.rs +++ b/crates/orchestrator/src/jobs/mod.rs @@ -1,20 +1,23 @@ -use crate::config::{config, Config}; -use crate::jobs::constants::{JOB_PROCESS_ATTEMPT_METADATA_KEY, JOB_VERIFICATION_ATTEMPT_METADATA_KEY}; -use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; -use crate::queue::job_queue::{add_job_to_process_queue, add_job_to_verification_queue}; +use std::collections::HashMap; +use std::time::Duration; + use async_trait::async_trait; use color_eyre::eyre::eyre; use color_eyre::Result; -use std::collections::HashMap; -use std::time::Duration; use tracing::log; use uuid::Uuid; +use crate::config::{config, Config}; +use crate::jobs::constants::{JOB_PROCESS_ATTEMPT_METADATA_KEY, JOB_VERIFICATION_ATTEMPT_METADATA_KEY}; +use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; +use crate::queue::job_queue::{add_job_to_process_queue, add_job_to_verification_queue}; + mod constants; pub mod da_job; -mod register_proof_job; +pub mod prover_job; +pub mod register_proof_job; pub mod snos_job; -mod state_update_job; +pub mod state_update_job; pub mod types; /// The Job trait is used to define the methods that a job @@ -68,8 +71,8 @@ pub async fn create_job(job_type: JobType, internal_id: String, metadata: HashMa Ok(()) } -/// Processes the job, increments the process attempt count and updates the status of the job in the DB. -/// It then adds the job to the verification queue. +/// Processes the job, increments the process attempt count and updates the status of the job in the +/// DB. It then adds the job to the verification queue. pub async fn process_job(id: Uuid) -> Result<()> { let config = config().await; let job = get_job(id).await?; @@ -86,7 +89,8 @@ pub async fn process_job(id: Uuid) -> Result<()> { } } // this updates the version of the job. 
this ensures that if another thread was about to process - // the same job, it would fail to update the job in the database because the version would be outdated + // the same job, it would fail to update the job in the database because the version would be + // outdated config.database().update_job_status(&job, JobStatus::LockedForProcessing).await?; let job_handler = get_job_handler(&job.job_type); @@ -104,9 +108,10 @@ pub async fn process_job(id: Uuid) -> Result<()> { Ok(()) } -/// Verifies the job and updates the status of the job in the DB. If the verification fails, it retries -/// processing the job if the max attempts have not been exceeded. If the max attempts have been exceeded, -/// it marks the job as timedout. If the verification is still pending, it pushes the job back to the queue. +/// Verifies the job and updates the status of the job in the DB. If the verification fails, it +/// retries processing the job if the max attempts have not been exceeded. If the max attempts have +/// been exceeded, it marks the job as timedout. If the verification is still pending, it pushes the +/// job back to the queue. pub async fn verify_job(id: Uuid) -> Result<()> { let config = config().await; let job = get_job(id).await?; diff --git a/crates/orchestrator/src/jobs/prover_job/artifacts/fibonacci.zip b/crates/orchestrator/src/jobs/prover_job/artifacts/fibonacci.zip new file mode 100644 index 0000000000000000000000000000000000000000..b5943536870ae352374926593e2580178826de0d GIT binary patch literal 1514 zcmWIWW@Zs#U|`^2uyWrNvG>CD@1j7SFc9+qaan3nab|v=URH5_-s&(fzcV_g`t?uv zgl-Blx?&u%ij^S%Mf<`V3to9KGBB7hGcfQ1wdbamB&H;mB!cy?opjpoumg|l|I}-n zKir=8kIPBJ!C_)X)Yg?P4u8w%T`{xGL08rSF+QgPP)+2+`;*38rP{7YmklO^pyyywh`0lJ&@>o@;k3G!ISS zn0M0NW#PJ;kFF+Zm~GB_d8uJ`=G2?oN&(*zq$V~s#LuZKcJj3hx#StjLF=Y(x%l{rD+)Y~`DJ8dglW#hl-OGP_U(UbtF%Nd?UH?+>CVu;0 zi?iXTJ?H=aUizf)$BC{dC%?Y`dpT38>@im-cjlBQ3+KzlebbiEmxx*U_cE92_A8|- z++}lizRsu<*`o2G#?VT3m-NBAe}8hL272cmEx{+iKo{U+VBi7=dTxGErCw5IUhlP& zLAL`0S|4gZ<`g!THkir5siOGV`gA8IxI9Q~Tm74zX6rnDmK20P-BLC8UB$ZF zSLTM5?#{a#w;|8I;$F>%J&qstu-;L4`gw(=etm8Gj-6A^EBU>x#`r;nOq<{XP4aF1+O1o z`M#-t@jmg{$90$GysB#aCsvT8|F!t=iuor6+-vnq|4)@(o|B__6XD8I>7w$eJ`{2s= zx3WMZ-`M({{it~3zWbep1@jAD{roNobkR4~<+BiLlV^fAtqRlf@bTAWGs-HRBINDWWi;8#Tj9^G zD74(YtY~JC575v+AeI2))QZ&PQjqEKMXAO4rA5i9#o&ytd&-dOfC7)h#oZ}o8M{UH z_++RCI0_q{3^=m;`ohQm_b~QkT@iUy*&;f1)faJj%`NxD9(;?@F-{BHZ6T;uX!c5g zy(7B#@O+0m&;I~jz{sS>jJtpWIs*)rG=eDP;tE|Sdf5cg$H1_p(H_V|DXGx4qUSJ# z)-Aw%mzxU9Z|M5aV*{b@0I-0>7B?6spanU?1X&i`5fI?b$_7%&3WTOW7tRN&WncgR DephfO literal 0 HcmV?d00001 diff --git a/crates/orchestrator/src/jobs/prover_job/mod.rs b/crates/orchestrator/src/jobs/prover_job/mod.rs new file mode 100644 index 00000000..af2d20ee --- /dev/null +++ b/crates/orchestrator/src/jobs/prover_job/mod.rs @@ -0,0 +1,64 @@ +use std::collections::HashMap; +use std::path::PathBuf; + +use async_trait::async_trait; +use cairo_vm::vm::runners::cairo_pie::CairoPie; +use color_eyre::Result; +use prover_service::{Task, TaskStatus}; +use uuid::Uuid; + +use super::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; +use super::Job; +use crate::config::Config; + +pub struct ProverJob; + +#[async_trait] +impl Job for ProverJob { + async fn create_job( + &self, + _config: &Config, + internal_id: String, + metadata: HashMap, + ) -> Result { + Ok(JobItem { + id: Uuid::new_v4(), + internal_id, + job_type: JobType::ProofRegistration, + status: JobStatus::Created, + external_id: String::new().into(), + metadata, + version: 0, + }) + } + + async fn process_job(&self, 
config: &Config, _job: &JobItem) -> Result { + // TODO: load cairo PIE from database + let cairo_pie_path: PathBuf = + [env!("CARGO_MANIFEST_DIR"), "src", "artifacts", "fibonacci.zip"].iter().collect(); + let cairo_pie = CairoPie::read_zip_file(&cairo_pie_path).unwrap(); + let external_id = config.prover().submit_task(Task::CairoPie(cairo_pie)).await?; + Ok(external_id) + } + + async fn verify_job(&self, config: &Config, job: &JobItem) -> Result { + let task_id: String = job.external_id.unwrap_string()?.into(); + match config.prover().get_task_status(&task_id).await? { + TaskStatus::Processing => Ok(JobVerificationStatus::Pending), + TaskStatus::Succeeded => Ok(JobVerificationStatus::Verified), + TaskStatus::Failed(_) => Ok(JobVerificationStatus::Rejected), + } + } + + fn max_process_attempts(&self) -> u64 { + 1 + } + + fn max_verification_attempts(&self) -> u64 { + 1 + } + + fn verification_polling_delay_seconds(&self) -> u64 { + 1 + } +} diff --git a/crates/orchestrator/src/jobs/register_proof_job/mod.rs b/crates/orchestrator/src/jobs/register_proof_job/mod.rs index 5f7a36d4..2e9482f7 100644 --- a/crates/orchestrator/src/jobs/register_proof_job/mod.rs +++ b/crates/orchestrator/src/jobs/register_proof_job/mod.rs @@ -1,11 +1,13 @@ -use crate::config::Config; -use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; -use crate::jobs::Job; +use std::collections::HashMap; + use async_trait::async_trait; use color_eyre::Result; -use std::collections::HashMap; use uuid::Uuid; +use crate::config::Config; +use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; +use crate::jobs::Job; + pub struct RegisterProofJob; #[async_trait] diff --git a/crates/orchestrator/src/jobs/snos_job/mod.rs b/crates/orchestrator/src/jobs/snos_job/mod.rs index 50fe0013..13600d4b 100644 --- a/crates/orchestrator/src/jobs/snos_job/mod.rs +++ b/crates/orchestrator/src/jobs/snos_job/mod.rs @@ -1,11 +1,13 @@ -use crate::config::Config; -use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; -use crate::jobs::Job; +use std::collections::HashMap; + use async_trait::async_trait; use color_eyre::Result; -use std::collections::HashMap; use uuid::Uuid; +use crate::config::Config; +use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; +use crate::jobs::Job; + pub struct SnosJob; #[async_trait] diff --git a/crates/orchestrator/src/jobs/state_update_job/mod.rs b/crates/orchestrator/src/jobs/state_update_job/mod.rs index 384eaf71..6eb14bc8 100644 --- a/crates/orchestrator/src/jobs/state_update_job/mod.rs +++ b/crates/orchestrator/src/jobs/state_update_job/mod.rs @@ -1,11 +1,13 @@ -use crate::config::Config; -use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; -use crate::jobs::Job; +use std::collections::HashMap; + use async_trait::async_trait; use color_eyre::Result; -use std::collections::HashMap; use uuid::Uuid; +use crate::config::Config; +use crate::jobs::types::{JobItem, JobStatus, JobType, JobVerificationStatus}; +use crate::jobs::Job; + pub struct StateUpdateJob; #[async_trait] diff --git a/crates/orchestrator/src/jobs/types.rs b/crates/orchestrator/src/jobs/types.rs index bcd8556b..e389a701 100644 --- a/crates/orchestrator/src/jobs/types.rs +++ b/crates/orchestrator/src/jobs/types.rs @@ -1,9 +1,11 @@ -use color_eyre::{eyre::eyre, Result}; +use std::collections::HashMap; + +use color_eyre::eyre::eyre; +use color_eyre::Result; use da_client_interface::DaVerificationStatus; // TODO: job types shouldn't depend on 
mongodb use mongodb::bson::serde_helpers::uuid_1_as_binary; use serde::{Deserialize, Serialize}; -use std::collections::HashMap; use uuid::Uuid; /// An external id. @@ -77,7 +79,7 @@ pub enum JobType { /// Verifying the proof on the base layer ProofRegistration, /// Updaing the state root on the base layer - StateUpdation, + StateTransition, } #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, PartialOrd)] diff --git a/crates/orchestrator/src/lib.rs b/crates/orchestrator/src/lib.rs index 618c8481..4a19222d 100644 --- a/crates/orchestrator/src/lib.rs +++ b/crates/orchestrator/src/lib.rs @@ -12,8 +12,6 @@ pub mod jobs; pub mod queue; /// Contains the routes for the service pub mod routes; -/// Contains the utils -pub mod utils; /// Contains workers which act like cron jobs pub mod workers; diff --git a/crates/orchestrator/src/main.rs b/crates/orchestrator/src/main.rs index b6502683..f0c21064 100644 --- a/crates/orchestrator/src/main.rs +++ b/crates/orchestrator/src/main.rs @@ -2,12 +2,12 @@ use dotenvy::dotenv; use orchestrator::config::config; use orchestrator::queue::init_consumers; use orchestrator::routes::app_router; -use orchestrator::utils::env_utils::get_env_var_or_default; use orchestrator::workers::proof_registration::ProofRegistrationWorker; use orchestrator::workers::proving::ProvingWorker; use orchestrator::workers::snos::SnosWorker; use orchestrator::workers::update_state::UpdateStateWorker; use orchestrator::workers::*; +use utils::env_utils::get_env_var_or_default; /// Start the server #[tokio::main] diff --git a/crates/orchestrator/src/queue/job_queue.rs b/crates/orchestrator/src/queue/job_queue.rs index 78fbd284..0db6a65f 100644 --- a/crates/orchestrator/src/queue/job_queue.rs +++ b/crates/orchestrator/src/queue/job_queue.rs @@ -1,15 +1,17 @@ -use crate::config::config; -use crate::jobs::{process_job, verify_job}; +use std::future::Future; +use std::time::Duration; + use color_eyre::eyre::eyre; use color_eyre::Result; use omniqueue::QueueError; use serde::{Deserialize, Serialize}; -use std::future::Future; -use std::time::Duration; use tokio::time::sleep; use tracing::log; use uuid::Uuid; +use crate::config::config; +use crate::jobs::{process_job, verify_job}; + const JOB_PROCESSING_QUEUE: &str = "madara_orchestrator_job_processing_queue"; const JOB_VERIFICATION_QUEUE: &str = "madara_orchestrator_job_verification_queue"; diff --git a/crates/orchestrator/src/queue/mod.rs b/crates/orchestrator/src/queue/mod.rs index 74a9808a..01829892 100644 --- a/crates/orchestrator/src/queue/mod.rs +++ b/crates/orchestrator/src/queue/mod.rs @@ -1,12 +1,12 @@ pub mod job_queue; pub mod sqs; +use std::time::Duration; + use async_trait::async_trait; use color_eyre::Result; -use omniqueue::{Delivery, QueueError}; - use mockall::automock; -use std::time::Duration; +use omniqueue::{Delivery, QueueError}; /// The QueueProvider trait is used to define the methods that a queue /// should implement to be used as a queue for the orchestrator. 
The diff --git a/crates/orchestrator/src/queue/sqs/mod.rs b/crates/orchestrator/src/queue/sqs/mod.rs index 3f9d183a..0ba901fd 100644 --- a/crates/orchestrator/src/queue/sqs/mod.rs +++ b/crates/orchestrator/src/queue/sqs/mod.rs @@ -1,9 +1,11 @@ -use crate::queue::QueueProvider; +use std::time::Duration; + use async_trait::async_trait; use color_eyre::Result; use omniqueue::backends::{SqsBackend, SqsConfig, SqsConsumer, SqsProducer}; use omniqueue::{Delivery, QueueError}; -use std::time::Duration; + +use crate::queue::QueueProvider; pub struct SqsQueue; #[async_trait] diff --git a/crates/orchestrator/src/routes.rs b/crates/orchestrator/src/routes.rs index 41e1803a..39d8f3d4 100644 --- a/crates/orchestrator/src/routes.rs +++ b/crates/orchestrator/src/routes.rs @@ -1,9 +1,10 @@ -use crate::controllers::jobs_controller; use axum::http::StatusCode; use axum::response::IntoResponse; use axum::routing::{get, post}; use axum::Router; +use crate::controllers::jobs_controller; + pub fn app_router() -> Router { Router::new() .route("/health", get(root)) diff --git a/crates/orchestrator/src/tests/common/mod.rs b/crates/orchestrator/src/tests/common/mod.rs index a074eb48..ee77c7bd 100644 --- a/crates/orchestrator/src/tests/common/mod.rs +++ b/crates/orchestrator/src/tests/common/mod.rs @@ -1,30 +1,30 @@ pub mod constants; -use constants::*; -use rstest::*; - use std::collections::HashMap; use std::sync::Arc; -use crate::{ - config::Config, - jobs::types::{ExternalId, JobItem, JobStatus::Created, JobType::DataSubmission}, -}; - -use crate::database::MockDatabase; -use crate::queue::MockQueueProvider; use ::uuid::Uuid; +use constants::*; use da_client_interface::MockDaClient; +use prover_service::MockProverService; +use rstest::*; use starknet::providers::jsonrpc::HttpTransport; use starknet::providers::JsonRpcClient; - use url::Url; +use crate::config::Config; +use crate::database::MockDatabase; +use crate::jobs::types::JobStatus::Created; +use crate::jobs::types::JobType::DataSubmission; +use crate::jobs::types::{ExternalId, JobItem}; +use crate::queue::MockQueueProvider; + pub async fn init_config( rpc_url: Option, database: Option, queue: Option, da_client: Option, + prover: Option, ) -> Config { let _ = tracing_subscriber::fmt().with_max_level(tracing::Level::INFO).with_target(false).try_init(); @@ -32,11 +32,12 @@ pub async fn init_config( let database = database.unwrap_or_default(); let queue = queue.unwrap_or_default(); let da_client = da_client.unwrap_or_default(); + let prover = prover.unwrap_or_default(); // init starknet client let provider = JsonRpcClient::new(HttpTransport::new(Url::parse(rpc_url.as_str()).expect("Failed to parse URL"))); - Config::new(Arc::new(provider), Box::new(da_client), Box::new(database), Box::new(queue)) + Config::new(Arc::new(provider), Box::new(da_client), Box::new(prover), Box::new(database), Box::new(queue)) } #[fixture] diff --git a/crates/orchestrator/src/tests/jobs/da_job/mod.rs b/crates/orchestrator/src/tests/jobs/da_job/mod.rs index 81166995..f2726bae 100644 --- a/crates/orchestrator/src/tests/jobs/da_job/mod.rs +++ b/crates/orchestrator/src/tests/jobs/da_job/mod.rs @@ -1,22 +1,17 @@ -use rstest::*; -use starknet::core::types::StateUpdate; - use std::collections::HashMap; +use da_client_interface::{DaVerificationStatus, MockDaClient}; use httpmock::prelude::*; +use rstest::*; use serde_json::json; - -use super::super::common::{default_job_item, init_config}; +use starknet::core::types::StateUpdate; use starknet_core::types::{FieldElement, 
MaybePendingStateUpdate, StateDiff}; use uuid::Uuid; -use crate::jobs::types::ExternalId; -use crate::jobs::{ - da_job::DaJob, - types::{JobItem, JobStatus, JobType}, - Job, -}; -use da_client_interface::{DaVerificationStatus, MockDaClient}; +use super::super::common::{default_job_item, init_config}; +use crate::jobs::da_job::DaJob; +use crate::jobs::types::{ExternalId, JobItem, JobStatus, JobType}; +use crate::jobs::Job; #[rstest] #[tokio::test] diff --git a/crates/orchestrator/src/tests/server/mod.rs b/crates/orchestrator/src/tests/server/mod.rs index 79b427e6..8b6451cf 100644 --- a/crates/orchestrator/src/tests/server/mod.rs +++ b/crates/orchestrator/src/tests/server/mod.rs @@ -1,14 +1,15 @@ -use std::{io::Read, net::SocketAddr}; +use std::io::Read; +use std::net::SocketAddr; use axum::http::StatusCode; - -use hyper::{body::Buf, Body, Request}; - +use hyper::body::Buf; +use hyper::{Body, Request}; use rstest::*; - -use crate::{queue::init_consumers, routes::app_router, utils::env_utils::get_env_var_or_default}; +use utils::env_utils::get_env_var_or_default; use super::common::init_config; +use crate::queue::init_consumers; +use crate::routes::app_router; #[fixture] pub async fn setup_server() -> SocketAddr { diff --git a/crates/orchestrator/src/utils/env_utils.rs b/crates/orchestrator/src/utils/env_utils.rs deleted file mode 100644 index 78e11609..00000000 --- a/crates/orchestrator/src/utils/env_utils.rs +++ /dev/null @@ -1,13 +0,0 @@ -use color_eyre::Result; - -pub fn get_env_var(key: &str) -> Result { - std::env::var(key).map_err(|e| e.into()) -} - -pub fn get_env_var_or_panic(key: &str) -> String { - get_env_var(key).unwrap_or_else(|e| panic!("Failed to get env var {}: {}", key, e)) -} - -pub fn get_env_var_or_default(key: &str, default: &str) -> String { - get_env_var(key).unwrap_or(default.to_string()) -} diff --git a/crates/orchestrator/src/utils/mod.rs b/crates/orchestrator/src/utils/mod.rs deleted file mode 100644 index 6a65fdce..00000000 --- a/crates/orchestrator/src/utils/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod env_utils; diff --git a/crates/orchestrator/src/workers/proof_registration.rs b/crates/orchestrator/src/workers/proof_registration.rs index 5ad5bc2d..e02c6cc7 100644 --- a/crates/orchestrator/src/workers/proof_registration.rs +++ b/crates/orchestrator/src/workers/proof_registration.rs @@ -1,6 +1,7 @@ -use crate::workers::Worker; use async_trait::async_trait; +use crate::workers::Worker; + pub struct ProofRegistrationWorker; #[async_trait] diff --git a/crates/orchestrator/src/workers/proving.rs b/crates/orchestrator/src/workers/proving.rs index 9476ea71..6b75dd48 100644 --- a/crates/orchestrator/src/workers/proving.rs +++ b/crates/orchestrator/src/workers/proving.rs @@ -1,6 +1,7 @@ -use crate::workers::Worker; use async_trait::async_trait; +use crate::workers::Worker; + pub struct ProvingWorker; #[async_trait] diff --git a/crates/orchestrator/src/workers/snos.rs b/crates/orchestrator/src/workers/snos.rs index bdc04169..918396fa 100644 --- a/crates/orchestrator/src/workers/snos.rs +++ b/crates/orchestrator/src/workers/snos.rs @@ -1,6 +1,7 @@ -use crate::workers::Worker; use async_trait::async_trait; +use crate::workers::Worker; + pub struct SnosWorker; #[async_trait] diff --git a/crates/orchestrator/src/workers/update_state.rs b/crates/orchestrator/src/workers/update_state.rs index c359e99e..a6325ec8 100644 --- a/crates/orchestrator/src/workers/update_state.rs +++ b/crates/orchestrator/src/workers/update_state.rs @@ -1,6 +1,7 @@ -use crate::workers::Worker; use 
async_trait::async_trait; +use crate::workers::Worker; + pub struct UpdateStateWorker; #[async_trait] diff --git a/crates/prover_services/gps_fact_checker/Cargo.toml b/crates/prover_services/gps_fact_checker/Cargo.toml new file mode 100644 index 00000000..e9a434bc --- /dev/null +++ b/crates/prover_services/gps_fact_checker/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "gps-fact-checker" +version.workspace = true +edition.workspace = true + +[dependencies] +alloy = { workspace = true, features = [ + "sol-types", + "json", + "contract", + "providers", + "rpc-client", + "transport-http", + "reqwest", +] } +async-trait.workspace = true +url.workspace = true +itertools.workspace = true +starknet.workspace = true +cairo-vm.workspace = true +thiserror.workspace = true +utils.workspace = true diff --git a/crates/prover_services/gps_fact_checker/src/artifacts/FactRegistry.json b/crates/prover_services/gps_fact_checker/src/artifacts/FactRegistry.json new file mode 100644 index 00000000..8e5efd5d --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/artifacts/FactRegistry.json @@ -0,0 +1 @@ +{"abi":[{"type":"function","name":"hasRegisteredFact","inputs":[],"outputs":[{"name":"","type":"bool","internalType":"bool"}],"stateMutability":"view"},{"type":"function","name":"isValid","inputs":[{"name":"fact","type":"bytes32","internalType":"bytes32"}],"outputs":[{"name":"","type":"bool","internalType":"bool"}],"stateMutability":"view"}],"bytecode":{"object":"0x608060405234801561001057600080fd5b5060ce8061001f6000396000f3fe6080604052348015600f57600080fd5b506004361060325760003560e01c80636a938567146037578063d6354e15146065575b600080fd5b605160048036036020811015604b57600080fd5b5035606b565b604080519115158252519081900360200190f35b6051607a565b60006074826083565b92915050565b60015460ff1690565b60009081526020819052604090205460ff169056fea2646970667358221220553e722d7d055d1334a20223ec1ae1a12bf73d8488850f4be28de564102b902764736f6c634300060c0033","sourceMap":"118:1279:8:-:0;;;;;;;;;;;;;;;;;;;","linkReferences":{}},"deployedBytecode":{"object":"0x6080604052348015600f57600080fd5b506004361060325760003560e01c80636a938567146037578063d6354e15146065575b600080fd5b605160048036036020811015604b57600080fd5b5035606b565b604080519115158252519081900360200190f35b6051607a565b60006074826083565b92915050565b60015460ff1690565b60009081526020819052604090205460ff169056fea2646970667358221220553e722d7d055d1334a20223ec1ae1a12bf73d8488850f4be28de564102b902764736f6c634300060c0033","sourceMap":"118:1279:8:-:0;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;421:109;;;;;;;;;;;;;;;;-1:-1:-1;421:109:8;;:::i;:::-;;;;;;;;;;;;;;;;;;1287:108;;;:::i;421:109::-;484:4;507:16;518:4;507:10;:16::i;:::-;500:23;421:109;-1:-1:-1;;421:109:8:o;1287:108::-;1371:17;;;;1287:108;:::o;826:105::-;883:4;906:18;;;;;;;;;;;;;;826:105::o","linkReferences":{}},"methodIdentifiers":{"hasRegisteredFact()":"d6354e15","isValid(bytes32)":"6a938567"},"rawMetadata":"{\"compiler\":{\"version\":\"0.6.12+commit.27d51765\"},\"language\":\"Solidity\",\"output\":{\"abi\":[{\"inputs\":[],\"name\":\"hasRegisteredFact\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"},{\"inputs\":[{\"internalType\":\"bytes32\",\"name\":\"fact\",\"type\":\"bytes32\"}],\"name\":\"isValid\",\"outputs\":[{\"internalType\":\"bool\",\"name\":\"\",\"type\":\"bool\"}],\"stateMutability\":\"view\",\"type\":\"function\"}],\"devdoc\":{\"kind\":\"dev\",\"methods\":{},\"version\":1},\"userdoc\":{\"kind\":\"user\",\"methods\":{},\"version\":1}},\"settings\":{\"co
mpilationTarget\":{\"contracts/src/components/FactRegistry.sol\":\"FactRegistry\"},\"evmVersion\":\"istanbul\",\"libraries\":{},\"metadata\":{\"bytecodeHash\":\"ipfs\"},\"optimizer\":{\"enabled\":true,\"runs\":200},\"remappings\":[]},\"sources\":{\"contracts/src/components/FactRegistry.sol\":{\"keccak256\":\"0xf1dde737bfeb616fad002bb7ad229c73fec98f1e539420566fa89805c5bb120d\",\"license\":\"Apache-2.0.\",\"urls\":[\"bzz-raw://1952c89d683c9f58ce06cf222f42772131113b3dd2442c6dc9150a2bde6d4d34\",\"dweb:/ipfs/QmT8u7c1gAYRVF4kCdW7v9QiE65TwwdVu76pdvsFzXnZWg\"]},\"contracts/src/interfaces/IFactRegistry.sol\":{\"keccak256\":\"0xab04b296b506dfb0b4a8828f3dc463072fd50449a5ad8327d1baf01438b0fb35\",\"license\":\"Apache-2.0.\",\"urls\":[\"bzz-raw://e451abe007f98081d1adfd759cc4168f81982992d8c0554650b94d37bc009e64\",\"dweb:/ipfs/QmVBNUWFhNX8PqSMcqRkHVaTbcm7KNpgSg91Sj6MepFG6u\"]},\"contracts/src/interfaces/IQueryableFactRegistry.sol\":{\"keccak256\":\"0x9689f96215bae9da993a5f1b16a7c1460b1abd478569d969d5b901fa4520b4b6\",\"license\":\"Apache-2.0.\",\"urls\":[\"bzz-raw://cfe2f9ca69bffdfaad8cdea188f0e6e385fbd2f5b1ee2194f989f25c76a30250\",\"dweb:/ipfs/QmSffz94BEf9MuqrQ41LuAcBL6FnsxqY4pxfxM8b4s3iSi\"]}},\"version\":1}","metadata":{"compiler":{"version":"0.6.12+commit.27d51765"},"language":"Solidity","output":{"abi":[{"inputs":[],"stateMutability":"view","type":"function","name":"hasRegisteredFact","outputs":[{"internalType":"bool","name":"","type":"bool"}]},{"inputs":[{"internalType":"bytes32","name":"fact","type":"bytes32"}],"stateMutability":"view","type":"function","name":"isValid","outputs":[{"internalType":"bool","name":"","type":"bool"}]}],"devdoc":{"kind":"dev","methods":{},"version":1},"userdoc":{"kind":"user","methods":{},"version":1}},"settings":{"remappings":[],"optimizer":{"enabled":true,"runs":200},"metadata":{"bytecodeHash":"ipfs"},"compilationTarget":{"contracts/src/components/FactRegistry.sol":"FactRegistry"},"libraries":{}},"sources":{"contracts/src/components/FactRegistry.sol":{"keccak256":"0xf1dde737bfeb616fad002bb7ad229c73fec98f1e539420566fa89805c5bb120d","urls":["bzz-raw://1952c89d683c9f58ce06cf222f42772131113b3dd2442c6dc9150a2bde6d4d34","dweb:/ipfs/QmT8u7c1gAYRVF4kCdW7v9QiE65TwwdVu76pdvsFzXnZWg"],"license":"Apache-2.0."},"contracts/src/interfaces/IFactRegistry.sol":{"keccak256":"0xab04b296b506dfb0b4a8828f3dc463072fd50449a5ad8327d1baf01438b0fb35","urls":["bzz-raw://e451abe007f98081d1adfd759cc4168f81982992d8c0554650b94d37bc009e64","dweb:/ipfs/QmVBNUWFhNX8PqSMcqRkHVaTbcm7KNpgSg91Sj6MepFG6u"],"license":"Apache-2.0."},"contracts/src/interfaces/IQueryableFactRegistry.sol":{"keccak256":"0x9689f96215bae9da993a5f1b16a7c1460b1abd478569d969d5b901fa4520b4b6","urls":["bzz-raw://cfe2f9ca69bffdfaad8cdea188f0e6e385fbd2f5b1ee2194f989f25c76a30250","dweb:/ipfs/QmSffz94BEf9MuqrQ41LuAcBL6FnsxqY4pxfxM8b4s3iSi"],"license":"Apache-2.0."}},"version":1},"ast":{"absolutePath":"contracts/src/components/FactRegistry.sol","id":1175,"exportedSymbols":{"FactRegistry":[1174]},"nodeType":"SourceUnit","src":"40:1358:8","nodes":[{"id":1110,"nodeType":"PragmaDirective","src":"40:24:8","nodes":[],"literals":["solidity","^","0.6",".12"]},{"id":1111,"nodeType":"ImportDirective","src":"66:50:8","nodes":[],"absolutePath":"contracts/src/interfaces/IQueryableFactRegistry.sol","file":"../interfaces/IQueryableFactRegistry.sol","scope":1175,"sourceUnit":6348,"symbolAliases":[],"unitAlias":""},{"id":1174,"nodeType":"ContractDefinition","src":"118:1279:8","nodes":[{"id":1117,"nodeType":"VariableDeclaration","src":"207:45:8","nodes":[],"const
ant":false,"mutability":"mutable","name":"verifiedFact","overrides":null,"scope":1174,"stateVariable":true,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_mapping$_t_bytes32_$_t_bool_$","typeString":"mapping(bytes32 => bool)"},"typeName":{"id":1116,"keyType":{"id":1114,"name":"bytes32","nodeType":"ElementaryTypeName","src":"215:7:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}},"nodeType":"Mapping","src":"207:24:8","typeDescriptions":{"typeIdentifier":"t_mapping$_t_bytes32_$_t_bool_$","typeString":"mapping(bytes32 => bool)"},"valueType":{"id":1115,"name":"bool","nodeType":"ElementaryTypeName","src":"226:4:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}}},"value":null,"visibility":"private"},{"id":1119,"nodeType":"VariableDeclaration","src":"336:22:8","nodes":[],"constant":false,"mutability":"mutable","name":"anyFactRegistered","overrides":null,"scope":1174,"stateVariable":true,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"},"typeName":{"id":1118,"name":"bool","nodeType":"ElementaryTypeName","src":"336:4:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"value":null,"visibility":"internal"},{"id":1132,"nodeType":"FunctionDefinition","src":"421:109:8","nodes":[],"body":{"id":1131,"nodeType":"Block","src":"490:40:8","nodes":[],"statements":[{"expression":{"argumentTypes":null,"arguments":[{"argumentTypes":null,"id":1128,"name":"fact","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1121,"src":"518:4:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}}],"expression":{"argumentTypes":[{"typeIdentifier":"t_bytes32","typeString":"bytes32"}],"id":1127,"name":"_factCheck","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1144,"src":"507:10:8","typeDescriptions":{"typeIdentifier":"t_function_internal_view$_t_bytes32_$returns$_t_bool_$","typeString":"function (bytes32) view returns 
(bool)"}},"id":1129,"isConstant":false,"isLValue":false,"isPure":false,"kind":"functionCall","lValueRequested":false,"names":[],"nodeType":"FunctionCall","src":"507:16:8","tryCall":false,"typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"functionReturnParameters":1126,"id":1130,"nodeType":"Return","src":"500:23:8"}]},"baseFunctions":[6327],"documentation":null,"functionSelector":"6a938567","implemented":true,"kind":"function","modifiers":[],"name":"isValid","overrides":{"id":1123,"nodeType":"OverrideSpecifier","overrides":[],"src":"466:8:8"},"parameters":{"id":1122,"nodeType":"ParameterList","parameters":[{"constant":false,"id":1121,"mutability":"mutable","name":"fact","nodeType":"VariableDeclaration","overrides":null,"scope":1132,"src":"438:12:8","stateVariable":false,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"},"typeName":{"id":1120,"name":"bytes32","nodeType":"ElementaryTypeName","src":"438:7:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}},"value":null,"visibility":"internal"}],"src":"437:14:8"},"returnParameters":{"id":1126,"nodeType":"ParameterList","parameters":[{"constant":false,"id":1125,"mutability":"mutable","name":"","nodeType":"VariableDeclaration","overrides":null,"scope":1132,"src":"484:4:8","stateVariable":false,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"},"typeName":{"id":1124,"name":"bool","nodeType":"ElementaryTypeName","src":"484:4:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"value":null,"visibility":"internal"}],"src":"483:6:8"},"scope":1174,"stateMutability":"view","virtual":false,"visibility":"external"},{"id":1144,"nodeType":"FunctionDefinition","src":"826:105:8","nodes":[],"body":{"id":1143,"nodeType":"Block","src":"889:42:8","nodes":[],"statements":[{"expression":{"argumentTypes":null,"baseExpression":{"argumentTypes":null,"id":1139,"name":"verifiedFact","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1117,"src":"906:12:8","typeDescriptions":{"typeIdentifier":"t_mapping$_t_bytes32_$_t_bool_$","typeString":"mapping(bytes32 => 
bool)"}},"id":1141,"indexExpression":{"argumentTypes":null,"id":1140,"name":"fact","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1134,"src":"919:4:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}},"isConstant":false,"isLValue":true,"isPure":false,"lValueRequested":false,"nodeType":"IndexAccess","src":"906:18:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"functionReturnParameters":1138,"id":1142,"nodeType":"Return","src":"899:25:8"}]},"documentation":null,"implemented":true,"kind":"function","modifiers":[],"name":"_factCheck","overrides":null,"parameters":{"id":1135,"nodeType":"ParameterList","parameters":[{"constant":false,"id":1134,"mutability":"mutable","name":"fact","nodeType":"VariableDeclaration","overrides":null,"scope":1144,"src":"846:12:8","stateVariable":false,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"},"typeName":{"id":1133,"name":"bytes32","nodeType":"ElementaryTypeName","src":"846:7:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}},"value":null,"visibility":"internal"}],"src":"845:14:8"},"returnParameters":{"id":1138,"nodeType":"ParameterList","parameters":[{"constant":false,"id":1137,"mutability":"mutable","name":"","nodeType":"VariableDeclaration","overrides":null,"scope":1144,"src":"883:4:8","stateVariable":false,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"},"typeName":{"id":1136,"name":"bool","nodeType":"ElementaryTypeName","src":"883:4:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"value":null,"visibility":"internal"}],"src":"882:6:8"},"scope":1174,"stateMutability":"view","virtual":false,"visibility":"internal"},{"id":1164,"nodeType":"FunctionDefinition","src":"937:272:8","nodes":[],"body":{"id":1163,"nodeType":"Block","src":"986:223:8","nodes":[],"statements":[{"expression":{"argumentTypes":null,"id":1153,"isConstant":false,"isLValue":false,"isPure":false,"lValueRequested":false,"leftHandSide":{"argumentTypes":null,"baseExpression":{"argumentTypes":null,"id":1149,"name":"verifiedFact","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1117,"src":"1058:12:8","typeDescriptions":{"typeIdentifier":"t_mapping$_t_bytes32_$_t_bool_$","typeString":"mapping(bytes32 => 
bool)"}},"id":1151,"indexExpression":{"argumentTypes":null,"id":1150,"name":"factHash","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1146,"src":"1071:8:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}},"isConstant":false,"isLValue":true,"isPure":false,"lValueRequested":true,"nodeType":"IndexAccess","src":"1058:22:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"nodeType":"Assignment","operator":"=","rightHandSide":{"argumentTypes":null,"hexValue":"74727565","id":1152,"isConstant":false,"isLValue":false,"isPure":true,"kind":"bool","lValueRequested":false,"nodeType":"Literal","src":"1083:4:8","subdenomination":null,"typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"},"value":"true"},"src":"1058:29:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"id":1154,"nodeType":"ExpressionStatement","src":"1058:29:8"},{"condition":{"argumentTypes":null,"id":1156,"isConstant":false,"isLValue":false,"isPure":false,"lValueRequested":false,"nodeType":"UnaryOperation","operator":"!","prefix":true,"src":"1134:18:8","subExpression":{"argumentTypes":null,"id":1155,"name":"anyFactRegistered","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1119,"src":"1135:17:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"falseBody":null,"id":1162,"nodeType":"IfStatement","src":"1130:73:8","trueBody":{"id":1161,"nodeType":"Block","src":"1154:49:8","statements":[{"expression":{"argumentTypes":null,"id":1159,"isConstant":false,"isLValue":false,"isPure":false,"lValueRequested":false,"leftHandSide":{"argumentTypes":null,"id":1157,"name":"anyFactRegistered","nodeType":"Identifier","overloadedDeclarations":[],"referencedDeclaration":1119,"src":"1168:17:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"nodeType":"Assignment","operator":"=","rightHandSide":{"argumentTypes":null,"hexValue":"74727565","id":1158,"isConstant":false,"isLValue":false,"isPure":true,"kind":"bool","lValueRequested":false,"nodeType":"Literal","src":"1188:4:8","subdenomination":null,"typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"},"value":"true"},"src":"1168:24:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"id":1160,"nodeType":"ExpressionStatement","src":"1168:24:8"}]}}]},"documentation":null,"implemented":true,"kind":"function","modifiers":[],"name":"registerFact","overrides":null,"parameters":{"id":1147,"nodeType":"ParameterList","parameters":[{"constant":false,"id":1146,"mutability":"mutable","name":"factHash","nodeType":"VariableDeclaration","overrides":null,"scope":1164,"src":"959:16:8","stateVariable":false,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"},"typeName":{"id":1145,"name":"bytes32","nodeType":"ElementaryTypeName","src":"959:7:8","typeDescriptions":{"typeIdentifier":"t_bytes32","typeString":"bytes32"}},"value":null,"visibility":"internal"}],"src":"958:18:8"},"returnParameters":{"id":1148,"nodeType":"ParameterList","parameters":[],"src":"986:0:8"},"scope":1174,"stateMutability":"nonpayable","virtual":false,"visibility":"internal"},{"id":1173,"nodeType":"FunctionDefinition","src":"1287:108:8","nodes":[],"body":{"id":1172,"nodeType":"Block","src":"1354:41:8","nodes":[],"statements":[{"expression":{"argumentTypes":null,"id":1170,"name":"anyFactRegistered","nodeType":"Identifier","overloadedDeclarations":[]
,"referencedDeclaration":1119,"src":"1371:17:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"functionReturnParameters":1169,"id":1171,"nodeType":"Return","src":"1364:24:8"}]},"baseFunctions":[6346],"documentation":null,"functionSelector":"d6354e15","implemented":true,"kind":"function","modifiers":[],"name":"hasRegisteredFact","overrides":{"id":1166,"nodeType":"OverrideSpecifier","overrides":[],"src":"1330:8:8"},"parameters":{"id":1165,"nodeType":"ParameterList","parameters":[],"src":"1313:2:8"},"returnParameters":{"id":1169,"nodeType":"ParameterList","parameters":[{"constant":false,"id":1168,"mutability":"mutable","name":"","nodeType":"VariableDeclaration","overrides":null,"scope":1173,"src":"1348:4:8","stateVariable":false,"storageLocation":"default","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"},"typeName":{"id":1167,"name":"bool","nodeType":"ElementaryTypeName","src":"1348:4:8","typeDescriptions":{"typeIdentifier":"t_bool","typeString":"bool"}},"value":null,"visibility":"internal"}],"src":"1347:6:8"},"scope":1174,"stateMutability":"view","virtual":false,"visibility":"external"}],"abstract":false,"baseContracts":[{"arguments":null,"baseName":{"contractScope":null,"id":1112,"name":"IQueryableFactRegistry","nodeType":"UserDefinedTypeName","referencedDeclaration":6347,"src":"143:22:8","typeDescriptions":{"typeIdentifier":"t_contract$_IQueryableFactRegistry_$6347","typeString":"contract IQueryableFactRegistry"}},"id":1113,"nodeType":"InheritanceSpecifier","src":"143:22:8"}],"contractDependencies":[6328,6347],"contractKind":"contract","documentation":null,"fullyImplemented":true,"linearizedBaseContracts":[1174,6347,6328],"name":"FactRegistry","scope":1175}],"license":"Apache-2.0."},"id":8} \ No newline at end of file diff --git a/crates/prover_services/gps_fact_checker/src/artifacts/README.md b/crates/prover_services/gps_fact_checker/src/artifacts/README.md new file mode 100644 index 00000000..140a37ee --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/artifacts/README.md @@ -0,0 +1,30 @@ +# How to generate artifacts + +## Solidity output for GPS verifier + +Clone https://github.com/starkware-libs/starkex-contracts and build the repo using Foundry. + +## Cairo PIEs + +In order to generate zip compressed Cairo PIEs follow the instructions at https://github.com/lambdaclass/cairo-vm/blob/main/cairo1-run/README.md + +Few things to note: +- Use `--cairo_pie_output` flag to specify the output path for the zipped PIE file +- Use `--append_return_values` flag to write program output to the related builtin segment +- Use the according layout (that includes `output` builtin at the very least, so by default `small`) depending on what particular program uses + +Example: +``` +cargo run ../cairo_programs/cairo-1-programs/fibonacci.cairo --append_return_values --cairo_pie_output fibonacci.zip --layout small +``` + +### Generate facts + +To create test vectors for SHARP facts you would need to install the Cairo0 toolchain as described here: https://docs.cairo-lang.org/quickstart.html#installation + +Then use the `get_fact.py` script to get the fact of the according zipped PIE. 
+ +Example: +``` +python3 get_fact.py fibonacci.zip +``` diff --git a/crates/prover_services/gps_fact_checker/src/artifacts/fibonacci.zip b/crates/prover_services/gps_fact_checker/src/artifacts/fibonacci.zip new file mode 100644 index 0000000000000000000000000000000000000000..b5943536870ae352374926593e2580178826de0d GIT binary patch literal 1514 zcmWIWW@Zs#U|`^2uyWrNvG>CD@1j7SFc9+qaan3nab|v=URH5_-s&(fzcV_g`t?uv zgl-Blx?&u%ij^S%Mf<`V3to9KGBB7hGcfQ1wdbamB&H;mB!cy?opjpoumg|l|I}-n zKir=8kIPBJ!C_)X)Yg?P4u8w%T`{xGL08rSF+QgPP)+2+`;*38rP{7YmklO^pyyywh`0lJ&@>o@;k3G!ISS zn0M0NW#PJ;kFF+Zm~GB_d8uJ`=G2?oN&(*zq$V~s#LuZKcJj3hx#StjLF=Y(x%l{rD+)Y~`DJ8dglW#hl-OGP_U(UbtF%Nd?UH?+>CVu;0 zi?iXTJ?H=aUizf)$BC{dC%?Y`dpT38>@im-cjlBQ3+KzlebbiEmxx*U_cE92_A8|- z++}lizRsu<*`o2G#?VT3m-NBAe}8hL272cmEx{+iKo{U+VBi7=dTxGErCw5IUhlP& zLAL`0S|4gZ<`g!THkir5siOGV`gA8IxI9Q~Tm74zX6rnDmK20P-BLC8UB$ZF zSLTM5?#{a#w;|8I;$F>%J&qstu-;L4`gw(=etm8Gj-6A^EBU>x#`r;nOq<{XP4aF1+O1o z`M#-t@jmg{$90$GysB#aCsvT8|F!t=iuor6+-vnq|4)@(o|B__6XD8I>7w$eJ`{2s= zx3WMZ-`M({{it~3zWbep1@jAD{roNobkR4~<+BiLlV^fAtqRlf@bTAWGs-HRBINDWWi;8#Tj9^G zD74(YtY~JC575v+AeI2))QZ&PQjqEKMXAO4rA5i9#o&ytd&-dOfC7)h#oZ}o8M{UH z_++RCI0_q{3^=m;`ohQm_b~QkT@iUy*&;f1)faJj%`NxD9(;?@F-{BHZ6T;uX!c5g zy(7B#@O+0m&;I~jz{sS>jJtpWIs*)rG=eDP;tE|Sdf5cg$H1_p(H_V|DXGx4qUSJ# z)-Aw%mzxU9Z|M5aV*{b@0I-0>7B?6spanU?1X&i`5fI?b$_7%&3WTOW7tRN&WncgR DephfO literal 0 HcmV?d00001 diff --git a/crates/prover_services/gps_fact_checker/src/artifacts/get_fact.py b/crates/prover_services/gps_fact_checker/src/artifacts/get_fact.py new file mode 100644 index 00000000..3f4e9206 --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/artifacts/get_fact.py @@ -0,0 +1,14 @@ +#!/usr/bin/python3 + +import sys +from starkware.cairo.lang.vm.cairo_pie import CairoPie +from starkware.cairo.bootloaders.generate_fact import get_cairo_pie_fact_info +from starkware.cairo.bootloaders.hash_program import compute_program_hash_chain + +cairo_pie = CairoPie.from_file(sys.argv[1]) + +program_hash = compute_program_hash_chain(program=cairo_pie.program, use_poseidon=False) +print("Program hash: ", program_hash) + +fact_info = get_cairo_pie_fact_info(cairo_pie, program_hash) +print("Fact: ", fact_info.fact) diff --git a/crates/prover_services/gps_fact_checker/src/error.rs b/crates/prover_services/gps_fact_checker/src/error.rs new file mode 100644 index 00000000..166011be --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/error.rs @@ -0,0 +1,53 @@ +use cairo_vm::program_hash::ProgramHashError; + +#[derive(Debug, thiserror::Error)] +pub enum FactCheckerError { + #[error("Fact registry call failed: {0}")] + FactRegistry(#[source] alloy::contract::Error), + #[error("Failed to compute program hash: {0}")] + ProgramHashCompute(#[from] ProgramHashError), + #[error("There is no additional data for the output builtin in Cairo PIE")] + OutputBuiltinNoAdditionalData, + #[error("There is no segment info for the output builtin in Cairo PIE")] + OutputBuiltinNoSegmentInfo, + #[error("Tree structure length is not even")] + TreeStructureLenOdd, + #[error("Tree structure is empty")] + TreeStructureEmpty, + #[error("Tree structure is too large")] + TreeStructureTooLarge, + #[error("Tree structure contains invalid values")] + TreeStructureInvalid, + #[error("Output pages length is unexpected")] + OutputPagesLenUnexpected, + #[error("Output page {0} has invalid start {1} (expected 0 < x < {2})")] + OutputPagesInvalidStart(usize, usize, usize), + #[error("Output page {0} has expected start {1} (expected{2})")] + 
OutputPagesUnexpectedStart(usize, usize, usize), + #[error("Output page {0} has invalid size {1} (expected 0 < x < {2})")] + OutputPagesInvalidSize(usize, usize, usize), + #[error("Output page {0} has unexpected id (expected {1})")] + OutputPagesUnexpectedId(usize, usize), + #[error("Output pages cover only {0} out of {1} output elements")] + OutputPagesUncoveredOutput(usize, usize), + #[error("Output segment is not found in the memory")] + OutputSegmentNotFound, + #[error("Output segment does not fit into the memory")] + OutputSegmentInvalidRange, + #[error("Output segment contains inconsistent offset {0} (expected {1})")] + OutputSegmentInconsistentOffset(usize, usize), + #[error("Output segment contains unexpected relocatable at position {0}")] + OutputSegmentUnexpectedRelocatable(usize), + #[error("Tree structure: pages count {0} is in invalid range (expected <= {1})")] + TreeStructurePagesCountOutOfRange(usize, usize), + #[error("Tree structure: nodes count {0} is in invalid range (expected <= {1})")] + TreeStructureNodesCountOutOfRange(usize, usize), + #[error("Tree structure: node stack contains more than one node")] + TreeStructureRootInvalid, + #[error("Tree structure: {0} pages were not processed")] + TreeStructurePagesNotProcessed(usize), + #[error("Tree structure: end offset {0} does not match the output length {1}")] + TreeStructureEndOffsetInvalid(usize, usize), + #[error("Tree structure: root offset {0} does not match the output length {1}")] + TreeStructureRootOffsetInvalid(usize, usize), +} diff --git a/crates/prover_services/gps_fact_checker/src/fact_info.rs b/crates/prover_services/gps_fact_checker/src/fact_info.rs new file mode 100644 index 00000000..156cf03f --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/fact_info.rs @@ -0,0 +1,97 @@ +//! Fact info structure and helpers. +//! +//! Port of https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/generate_fact.py + +use alloy::primitives::{keccak256, B256}; +use cairo_vm::program_hash::compute_program_hash_chain; +use cairo_vm::types::builtin_name::BuiltinName; +use cairo_vm::types::relocatable::MaybeRelocatable; +use cairo_vm::vm::runners::cairo_pie::CairoPie; +use cairo_vm::Felt252; +use starknet::core::types::FieldElement; +use utils::ensure; + +use super::error::FactCheckerError; +use super::fact_node::generate_merkle_root; +use super::fact_topology::{get_fact_topology, FactTopology}; + +/// Default bootloader program version. 
+/// +/// https://github.com/starkware-libs/cairo-lang/blob/efa9648f57568aad8f8a13fbf027d2de7c63c2c0/src/starkware/cairo/bootloaders/hash_program.py#L11 +pub const BOOTLOADER_VERSION: usize = 0; + +pub struct FactInfo { + pub program_output: Vec<Felt252>, + pub fact_topology: FactTopology, + pub fact: B256, +} + +pub fn get_fact_info(cairo_pie: &CairoPie, program_hash: Option<FieldElement>) -> Result<FactInfo, FactCheckerError> { + let program_output = get_program_output(cairo_pie)?; + let fact_topology = get_fact_topology(cairo_pie, program_output.len())?; + let program_hash = match program_hash { + Some(hash) => hash, + None => compute_program_hash_chain(&cairo_pie.metadata.program, BOOTLOADER_VERSION)?, + }; + let output_root = generate_merkle_root(&program_output, &fact_topology)?; + let fact = keccak256([program_hash.to_bytes_be(), *output_root.node_hash].concat()); + Ok(FactInfo { program_output, fact_topology, fact }) +} + +pub fn get_program_output(cairo_pie: &CairoPie) -> Result<Vec<Felt252>, FactCheckerError> { + let segment_info = cairo_pie + .metadata + .builtin_segments + .get(&BuiltinName::output) + .ok_or(FactCheckerError::OutputBuiltinNoSegmentInfo)?; + + let segment_start = cairo_pie + .memory + .0 + .iter() + .enumerate() + .find_map(|(ptr, ((index, _), _))| if *index == segment_info.index as usize { Some(ptr) } else { None }) + .ok_or(FactCheckerError::OutputSegmentNotFound)?; + + let mut output = Vec::with_capacity(segment_info.size); + let mut expected_offset = 0; + + for i in segment_start..segment_start + segment_info.size { + let ((_, offset), value) = cairo_pie.memory.0.get(i).ok_or(FactCheckerError::OutputSegmentInvalidRange)?; + + ensure!( + *offset == expected_offset, + FactCheckerError::OutputSegmentInconsistentOffset(*offset, expected_offset) + ); + match value { + MaybeRelocatable::Int(felt) => output.push(felt.clone()), + MaybeRelocatable::RelocatableValue(_) => { + return Err(FactCheckerError::OutputSegmentUnexpectedRelocatable(*offset)); + } + } + + expected_offset += 1; + } + + Ok(output) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + + use cairo_vm::vm::runners::cairo_pie::CairoPie; + + use super::get_fact_info; + + #[test] + fn test_fact_info() { + // Generated using the get_fact.py script + let expected_fact = "0xca15503f02f8406b599cb220879e842394f5cf2cef753f3ee430647b5981b782"; + let cairo_pie_path: PathBuf = + [env!("CARGO_MANIFEST_DIR"), "src", "artifacts", "fibonacci.zip"].iter().collect(); + let cairo_pie = CairoPie::read_zip_file(&cairo_pie_path).unwrap(); + let fact_info = get_fact_info(&cairo_pie, None).unwrap(); + assert_eq!(expected_fact, fact_info.fact.to_string()); + } +} diff --git a/crates/prover_services/gps_fact_checker/src/fact_node.rs b/crates/prover_services/gps_fact_checker/src/fact_node.rs new file mode 100644 index 00000000..93f98fa1 --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/fact_node.rs @@ -0,0 +1,117 @@ +//! Fact node structure and helpers. +//! +//! The fact of each task is stored as a (non-binary) Merkle tree. +//! Leaf nodes are labeled with the hash of their data. +//! Each non-leaf node is labeled as 1 + the hash of (node0, end0, node1, end1, ...) +//! where node* is a label of a child and end* is the total number of data words up to +//! and including that node and its children (including the previous sibling nodes). +//! We add 1 to the result of the hash to prevent an attacker from using a preimage of a leaf node +//! as a preimage of a non-leaf hash and vice versa. +//! +//! 
The structure of the tree is passed as a list of pairs (n_pages, n_nodes), and the tree is +//! constructed using a stack of nodes (initialized to an empty stack) by repeating for each pair: +//! 1. Add #n_pages leaf nodes to the stack. +//! 2. Pop the top #n_nodes, construct a parent node for them, and push it back to the stack. +//! After applying the steps above, the stack must contain exactly one node, which will +//! constitute the root of the Merkle tree. +//! +//! For example, [(2, 2)] will create a Merkle tree with a root and two direct children, while +//! [(3, 2), (0, 2)] will create a Merkle tree with a root whose left child is a leaf and +//! right child has two leaf children. +//! +//! Port of https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/compute_fact.py + +use alloy::primitives::{keccak256, B256}; +use cairo_vm::Felt252; +use itertools::Itertools; +use utils::ensure; + +use super::error::FactCheckerError; +use super::fact_topology::FactTopology; + +/// Node of the fact tree +#[derive(Debug, Clone)] +pub struct FactNode { + /// Page hash (leaf) or 1 + keccak{children} (non-leaf) + pub node_hash: B256, + /// Total number of data words up to that node (including it and its children) + pub end_offset: usize, + /// Page size + pub page_size: usize, + /// Child nodes + pub children: Vec<FactNode>, +} + +/// Generates the root of the output Merkle tree for the program fact computation. +/// +/// It transforms the flat fact topology into a non-binary Merkle tree and then computes +/// its root, enriching the nodes with metadata such as page sizes and hashes. +pub fn generate_merkle_root( + program_output: &[Felt252], + fact_topology: &FactTopology, +) -> Result<FactNode, FactCheckerError> { + let FactTopology { tree_structure, mut page_sizes } = fact_topology.clone(); + + let mut end_offset: usize = 0; + let mut node_stack: Vec<FactNode> = Vec::with_capacity(page_sizes.len()); + let mut output_iter = program_output.iter(); + + for (n_pages, n_nodes) in tree_structure.into_iter().tuples() { + ensure!( + n_pages <= page_sizes.len(), + FactCheckerError::TreeStructurePagesCountOutOfRange(n_pages, page_sizes.len()) + ); + + // Push n_pages (leaves) to the stack + for _ in 0..n_pages { + let page_size = page_sizes.remove(0); + // Page size is already validated upon retrieving the topology + let page = output_iter.by_ref().take(page_size).map(|felt| felt.to_bytes_be().to_vec()).concat(); + let node_hash = keccak256(&page); + end_offset += page_size; + // Add leaf node (no children) + node_stack.push(FactNode { node_hash, end_offset, page_size, children: vec![] }) + } + + ensure!( + n_nodes <= node_stack.len(), + FactCheckerError::TreeStructureNodesCountOutOfRange(n_nodes, node_stack.len()) + ); + + if n_nodes > 0 { + // Create a parent node to the last n_nodes in the head of the stack.
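+ // Each child contributes 64 bytes to the parent's hash preimage: its 32-byte node hash followed by its page size encoded as a 32-byte big-endian integer (zero-padded on the left).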
+ let children: Vec<FactNode> = node_stack.drain(node_stack.len() - n_nodes..).collect(); + let mut node_data = Vec::with_capacity(2 * 32 * children.len()); + let mut total_page_size = 0; + let mut child_end_offset = 0; + + for node in children.iter() { + node_data.extend_from_slice(node.node_hash.as_slice()); + node_data.extend_from_slice(&[0; 32 - (usize::BITS / 8) as usize]); // pad usize to 32 bytes + node_data.extend_from_slice(&node.page_size.to_be_bytes()); + total_page_size += node.page_size; + child_end_offset = node.end_offset; + } + + node_stack.push(FactNode { + node_hash: keccak256(&node_data), + end_offset: child_end_offset, + page_size: total_page_size, + children, + }) + } + } + + ensure!(node_stack.len() == 1, FactCheckerError::TreeStructureRootInvalid); + ensure!(page_sizes.is_empty(), FactCheckerError::TreeStructurePagesNotProcessed(page_sizes.len())); + ensure!( + end_offset == program_output.len(), + FactCheckerError::TreeStructureEndOffsetInvalid(end_offset, program_output.len()) + ); + ensure!( + node_stack[0].end_offset == program_output.len(), + FactCheckerError::TreeStructureRootOffsetInvalid(node_stack[0].end_offset, program_output.len()) + ); + + Ok(node_stack.remove(0)) +} diff --git a/crates/prover_services/gps_fact_checker/src/fact_topology.rs b/crates/prover_services/gps_fact_checker/src/fact_topology.rs new file mode 100644 index 00000000..e2d7127c --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/fact_topology.rs @@ -0,0 +1,102 @@ +//! Fact topology type and helpers. +//! +//! Ported from https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/fact_topology.py + +use std::collections::HashMap; + +use cairo_vm::types::builtin_name::BuiltinName; +use cairo_vm::vm::runners::cairo_pie::{BuiltinAdditionalData, CairoPie, PublicMemoryPage}; +use utils::ensure; + +use super::error::FactCheckerError; + +pub const GPS_FACT_TOPOLOGY: &str = "gps_fact_topology"; + +/// Flattened fact tree +#[derive(Debug, Clone)] +pub struct FactTopology { + /// List of pairs (n_pages, n_nodes) + pub tree_structure: Vec<usize>, + /// Page sizes (pages are leaf nodes) + pub page_sizes: Vec<usize>, +} + +/// Returns the fact topology from the additional data of the output builtin. +pub fn get_fact_topology(cairo_pie: &CairoPie, output_size: usize) -> Result<FactTopology, FactCheckerError> { + if let Some(BuiltinAdditionalData::Output(additional_data)) = cairo_pie.additional_data.0.get(&BuiltinName::output) + { + let tree_structure = match additional_data.attributes.get(GPS_FACT_TOPOLOGY) { + Some(tree_structure) => { + ensure!(!tree_structure.is_empty(), FactCheckerError::TreeStructureEmpty); + ensure!(tree_structure.len() % 2 == 0, FactCheckerError::TreeStructureLenOdd); + ensure!(tree_structure.len() <= 10, FactCheckerError::TreeStructureTooLarge); + ensure!(tree_structure.iter().all(|&x| x < 2 << 30), FactCheckerError::TreeStructureInvalid); + tree_structure.clone() + } + None => { + ensure!(additional_data.pages.is_empty(), FactCheckerError::OutputPagesLenUnexpected); + vec![1, 0] + } + }; + let page_sizes = get_page_sizes(&additional_data.pages, output_size)?; + Ok(FactTopology { tree_structure, page_sizes }) + } else { + Err(FactCheckerError::OutputBuiltinNoAdditionalData) + } +} + +/// Returns the sizes of the program output pages, given the pages dictionary that appears +/// in the additional attributes of the output builtin.
+pub fn get_page_sizes( + pages: &HashMap, + output_size: usize, +) -> Result, FactCheckerError> { + let mut pages_list: Vec<(usize, usize, usize)> = + pages.iter().map(|(&id, page)| (id, page.start, page.size)).collect(); + pages_list.sort(); + + // The first page id is expected to be 1. + let mut expected_page_id = 1; + // We don't expect anything on its start value. + let mut expected_page_start = None; + + let mut page_sizes = Vec::with_capacity(pages_list.len() + 1); + // The size of page 0 is output_size if there are no other pages, or the start of page 1 otherwise. + page_sizes.push(output_size); + + for (page_id, page_start, page_size) in pages_list { + ensure!(page_id == expected_page_id, FactCheckerError::OutputPagesUnexpectedId(page_id, expected_page_id)); + + if page_id == 1 { + ensure!( + page_start > 0 && page_start < output_size, + FactCheckerError::OutputPagesInvalidStart(page_id, page_start, output_size) + ); + page_sizes[0] = page_start; + } else { + ensure!( + Some(page_start) == expected_page_start, + FactCheckerError::OutputPagesUnexpectedStart( + page_id, + page_start, + expected_page_start.unwrap_or_default(), + ) + ); + } + + ensure!( + page_size > 0 && page_size < output_size, + FactCheckerError::OutputPagesInvalidSize(page_id, page_size, output_size) + ); + expected_page_start = Some(page_start + page_size); + expected_page_id += 1; + + page_sizes.push(page_size); + } + + ensure!( + pages.is_empty() || expected_page_start == Some(output_size), + FactCheckerError::OutputPagesUncoveredOutput(expected_page_start.unwrap_or_default(), output_size) + ); + Ok(page_sizes) +} diff --git a/crates/prover_services/gps_fact_checker/src/lib.rs b/crates/prover_services/gps_fact_checker/src/lib.rs new file mode 100644 index 00000000..0d1d2a0f --- /dev/null +++ b/crates/prover_services/gps_fact_checker/src/lib.rs @@ -0,0 +1,40 @@ +pub mod error; +pub mod fact_info; +pub mod fact_node; +pub mod fact_topology; + +use alloy::primitives::{Address, B256}; +use alloy::providers::{ProviderBuilder, RootProvider}; +use alloy::sol; +use alloy::transports::http::{Client, Http}; +use url::Url; + +use self::error::FactCheckerError; + +sol!( + #[allow(missing_docs)] + #[sol(rpc)] + FactRegistry, + "src/artifacts/FactRegistry.json" +); + +pub struct FactChecker { + fact_registry: FactRegistry::FactRegistryInstance, +} + +type TransportT = Http; +type ProviderT = RootProvider; + +impl FactChecker { + pub fn new(rpc_node_url: Url, verifier_address: Address) -> Self { + let provider = ProviderBuilder::new().on_http(rpc_node_url); + let fact_registry = FactRegistry::new(verifier_address, provider); + Self { fact_registry } + } + + pub async fn is_valid(&self, fact: &B256) -> Result { + let FactRegistry::isValidReturn { _0 } = + self.fact_registry.isValid(fact.clone()).call().await.map_err(FactCheckerError::FactRegistry)?; + Ok(_0) + } +} diff --git a/crates/prover_services/prover_service/Cargo.toml b/crates/prover_services/prover_service/Cargo.toml new file mode 100644 index 00000000..e1e3be99 --- /dev/null +++ b/crates/prover_services/prover_service/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "prover-service" +version.workspace = true +edition.workspace = true + +[dependencies] +async-trait.workspace = true +cairo-vm.workspace = true +thiserror.workspace = true +utils.workspace = true +gps-fact-checker.workspace = true +snos.workspace = true +mockall.workspace = true diff --git a/crates/prover_services/prover_service/src/lib.rs b/crates/prover_services/prover_service/src/lib.rs new file mode 
100644 index 00000000..1ba1e844 --- /dev/null +++ b/crates/prover_services/prover_service/src/lib.rs @@ -0,0 +1,45 @@ +use async_trait::async_trait; +use cairo_vm::vm::runners::cairo_pie::CairoPie; +use mockall::automock; + +/// Prover service provides an abstraction over different proving services that do the following: +/// - Accept a task containing Cairo intermediate execution artifacts (in PIE format) +/// - Aggregate multiple tasks and prove the execution (of the bootloader program where PIEs are +/// inputs) +/// - Register the proof onchain (individiual proof facts available for each task) +/// +/// A common Madara workflow would be single task per block (SNOS execution result) or per block +/// span (SNAR). +#[automock] +#[async_trait] +pub trait ProverService: Send + Sync { + async fn submit_task(&self, task: Task) -> Result; + async fn get_task_status(&self, task_id: &TaskId) -> Result; +} + +pub enum Task { + CairoPie(CairoPie), +} + +pub type TaskId = String; + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum TaskStatus { + Processing, + Succeeded, + Failed(String), +} + +#[derive(Debug, thiserror::Error)] +pub enum ProverServiceError { + #[error("Internal prover error: {0}")] + Internal(#[source] Box), + #[error("Stone prover failed: {0}")] + SettingsProvider(#[from] utils::settings::SettingsProviderError), + #[error("Task is invalid: {0}")] + TaskInvalid(TaskId), + #[error("Fact checker error: {0}")] + FactChecker(#[from] gps_fact_checker::error::FactCheckerError), + #[error("Failed to encode Cairo PIE: {0}")] + PieEncoding(#[source] snos::error::SnOsError), +} diff --git a/crates/prover_services/sharp_service/Cargo.toml b/crates/prover_services/sharp_service/Cargo.toml new file mode 100644 index 00000000..ba195719 --- /dev/null +++ b/crates/prover_services/sharp_service/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "sharp-service" +version.workspace = true +edition.workspace = true + +[dependencies] +alloy.workspace = true +alloy-primitives.workspace = true +async-trait.workspace = true +uuid.workspace = true +serde.workspace = true +serde_json.workspace = true +reqwest.workspace = true +cairo-vm.workspace = true +snos.workspace = true +gps-fact-checker.workspace = true +prover-service.workspace = true +utils.workspace = true +thiserror.workspace = true +hex.workspace = true +url.workspace = true + +[dev-dependencies] +tokio.workspace = true diff --git a/crates/prover_services/sharp_service/src/artifacts/fibonacci.zip b/crates/prover_services/sharp_service/src/artifacts/fibonacci.zip new file mode 100644 index 0000000000000000000000000000000000000000..b5943536870ae352374926593e2580178826de0d GIT binary patch literal 1514 zcmWIWW@Zs#U|`^2uyWrNvG>CD@1j7SFc9+qaan3nab|v=URH5_-s&(fzcV_g`t?uv zgl-Blx?&u%ij^S%Mf<`V3to9KGBB7hGcfQ1wdbamB&H;mB!cy?opjpoumg|l|I}-n zKir=8kIPBJ!C_)X)Yg?P4u8w%T`{xGL08rSF+QgPP)+2+`;*38rP{7YmklO^pyyywh`0lJ&@>o@;k3G!ISS zn0M0NW#PJ;kFF+Zm~GB_d8uJ`=G2?oN&(*zq$V~s#LuZKcJj3hx#StjLF=Y(x%l{rD+)Y~`DJ8dglW#hl-OGP_U(UbtF%Nd?UH?+>CVu;0 zi?iXTJ?H=aUizf)$BC{dC%?Y`dpT38>@im-cjlBQ3+KzlebbiEmxx*U_cE92_A8|- z++}lizRsu<*`o2G#?VT3m-NBAe}8hL272cmEx{+iKo{U+VBi7=dTxGErCw5IUhlP& zLAL`0S|4gZ<`g!THkir5siOGV`gA8IxI9Q~Tm74zX6rnDmK20P-BLC8UB$ZF zSLTM5?#{a#w;|8I;$F>%J&qstu-;L4`gw(=etm8Gj-6A^EBU>x#`r;nOq<{XP4aF1+O1o z`M#-t@jmg{$90$GysB#aCsvT8|F!t=iuor6+-vnq|4)@(o|B__6XD8I>7w$eJ`{2s= zx3WMZ-`M({{it~3zWbep1@jAD{roNobkR4~<+BiLlV^fAtqRlf@bTAWGs-HRBINDWWi;8#Tj9^G zD74(YtY~JC575v+AeI2))QZ&PQjqEKMXAO4rA5i9#o&ytd&-dOfC7)h#oZ}o8M{UH 
z_++RCI0_q{3^=m;`ohQm_b~QkT@iUy*&;f1)faJj%`NxD9(;?@F-{BHZ6T;uX!c5g zy(7B#@O+0m&;I~jz{sS>jJtpWIs*)rG=eDP;tE|Sdf5cg$H1_p(H_V|DXGx4qUSJ# z)-Aw%mzxU9Z|M5aV*{b@0I-0>7B?6spanU?1X&i`5fI?b$_7%&3WTOW7tRN&WncgR DephfO literal 0 HcmV?d00001 diff --git a/crates/prover_services/sharp_service/src/artifacts/print.zip b/crates/prover_services/sharp_service/src/artifacts/print.zip new file mode 100644 index 0000000000000000000000000000000000000000..799d5c50a52c0ba0dabcf743996d6bffe08ca93d GIT binary patch literal 1303 zcmWIWW@Zs#U|`^2=$o@CV(*3P-$j8uVIbxK;`R!y{zK=ywzb|erI$}_3NMT z3EdQAbj3Ji6)QskiuPV(y=gv-3=GoD3=F(L?YXHXi7AOCiD3O(C!O|Ub`)^^zx4Xv z73HDN!(CFY1hSn`Q4!{7`?GuM1aS*)^Su1|w@Y6aYP>%Cs`#?*=|=(gcCXa#R#!0+ zDplkRQgT@pBr2^W(79a3L`{&~%*2~tnlCvxZE zEc^v|NJf^37UYL9OZD86LA`OflF*E9Dww4Odt_`FVF zg?Y*6${m5u{qyqw_T2dEthcRSA2r;TJ$=Wg4Gb@RAO_`u#FUiG5@2#q%!!Am+SOsF z^mR2gbUXdE&Ysacc_w(%sxU1NAAemoqpZ>?Lf&3oMw7j~75?0cLdy#`iaPgt0}TxX zVhJEltw>ES1(_aSlv2Y9mrvke0SD-fCjU3dnlmVp5PmYC`k literal 0 HcmV?d00001 diff --git a/crates/prover_services/sharp_service/src/client.rs b/crates/prover_services/sharp_service/src/client.rs new file mode 100644 index 00000000..e1aa8c14 --- /dev/null +++ b/crates/prover_services/sharp_service/src/client.rs @@ -0,0 +1,49 @@ +use serde_json::json; +use snos::sharp::{CairoJobResponse, CairoStatusResponse}; +use url::Url; +use uuid::Uuid; + +use crate::error::SharpError; + +/// SHARP endpoint for Sepolia testnet +pub const DEFAULT_SHARP_URL: &str = "https://testnet.provingservice.io"; + +/// SHARP API async wrapper +pub struct SharpClient { + base_url: Url, + client: reqwest::Client, +} + +impl SharpClient { + pub fn new(url: Url) -> Self { + Self { base_url: url, client: reqwest::Client::new() } + } + + pub async fn add_job(&self, encoded_pie: &str) -> Result { + let data = json!({ "action": "add_job", "request": { "cairo_pie": encoded_pie } }); + let url = self.base_url.join("add_job").unwrap(); + let res = self.client.post(url).json(&data).send().await.map_err(SharpError::AddJobFailure)?; + + match res.status() { + reqwest::StatusCode::OK => res.json().await.map_err(SharpError::AddJobFailure), + code => Err(SharpError::SharpService(code)), + } + } + + pub async fn get_job_status(&self, job_key: &Uuid) -> Result { + let data = json!({ "action": "get_status", "request": { "cairo_job_key": job_key } }); + let url = self.base_url.join("get_status").unwrap(); + let res = self.client.post(url).json(&data).send().await.map_err(SharpError::GetJobStatusFailure)?; + + match res.status() { + reqwest::StatusCode::OK => res.json().await.map_err(SharpError::GetJobStatusFailure), + code => Err(SharpError::SharpService(code)), + } + } +} + +impl Default for SharpClient { + fn default() -> Self { + Self::new(DEFAULT_SHARP_URL.parse().unwrap()) + } +} diff --git a/crates/prover_services/sharp_service/src/config.rs b/crates/prover_services/sharp_service/src/config.rs new file mode 100644 index 00000000..6567448c --- /dev/null +++ b/crates/prover_services/sharp_service/src/config.rs @@ -0,0 +1,27 @@ +use alloy::primitives::Address; +use serde::{Deserialize, Serialize}; +use url::Url; + +use crate::client::DEFAULT_SHARP_URL; + +/// SHARP proving service configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct SharpConfig { + /// SHARP service url + pub service_url: Url, + /// EVM RPC node url + pub rpc_node_url: Url, + /// GPS verifier contract address (implements FactRegistry) + pub verifier_address: Address, +} + +impl Default for SharpConfig { + /// Default config for Sepolia testnet + fn 
default() -> Self { + Self { + service_url: DEFAULT_SHARP_URL.parse().unwrap(), + rpc_node_url: "https://sepolia.drpc.org".parse().unwrap(), + verifier_address: "0x07ec0D28e50322Eb0C159B9090ecF3aeA8346DFe".parse().unwrap(), + } + } +} diff --git a/crates/prover_services/sharp_service/src/error.rs b/crates/prover_services/sharp_service/src/error.rs new file mode 100644 index 00000000..beebe370 --- /dev/null +++ b/crates/prover_services/sharp_service/src/error.rs @@ -0,0 +1,30 @@ +use alloy_primitives::hex::FromHexError; +use gps_fact_checker::error::FactCheckerError; +use prover_service::ProverServiceError; +use reqwest::StatusCode; + +#[derive(Debug, thiserror::Error)] +pub enum SharpError { + #[error("Failed to add SHARP job: {0}")] + AddJobFailure(#[source] reqwest::Error), + #[error("Failed to get status of a SHARP job: {0}")] + GetJobStatusFailure(#[source] reqwest::Error), + #[error("Fact checker error: {0}")] + FactChecker(#[from] FactCheckerError), + #[error("SHARP service returned an error {0}")] + SharpService(StatusCode), + #[error("Failed to parse job key: {0}")] + JobKeyParse(uuid::Error), + #[error("Failed to parse fact: {0}")] + FactParse(FromHexError), + #[error("Failed to split task id into job key and fact")] + TaskIdSplit, + #[error("Failed to encode PIE")] + PieEncode(#[source] snos::error::SnOsError), +} + +impl From<SharpError> for ProverServiceError { + fn from(value: SharpError) -> Self { + Self::Internal(Box::new(value)) + } +} diff --git a/crates/prover_services/sharp_service/src/lib.rs b/crates/prover_services/sharp_service/src/lib.rs new file mode 100644 index 00000000..09459048 --- /dev/null +++ b/crates/prover_services/sharp_service/src/lib.rs @@ -0,0 +1,136 @@ +pub mod client; +pub mod config; +pub mod error; + +use std::str::FromStr; + +use alloy::primitives::B256; +use async_trait::async_trait; +use gps_fact_checker::fact_info::get_fact_info; +use gps_fact_checker::FactChecker; +use prover_service::{ProverService, ProverServiceError, Task, TaskId, TaskStatus}; +use snos::sharp::CairoJobStatus; +use utils::settings::SettingsProvider; +use uuid::Uuid; + +use crate::client::SharpClient; +use crate::config::SharpConfig; +use crate::error::SharpError; + +pub const SHARP_SETTINGS_NAME: &str = "sharp"; + +/// SHARP (aka GPS) is a shared proving service hosted by Starkware.
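+/// +/// Task IDs handed out by this service are "{job_key}:{fact}" strings (see `combine_task_id` below), so the expected proof fact can later be checked against the on-chain fact registry without recomputing it.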
+pub struct SharpProverService { + sharp_client: SharpClient, + fact_checker: FactChecker, +} + +#[async_trait] +impl ProverService for SharpProverService { + async fn submit_task(&self, task: Task) -> Result<TaskId, ProverServiceError> { + match task { + Task::CairoPie(cairo_pie) => { + let fact_info = get_fact_info(&cairo_pie, None)?; + let encoded_pie = + snos::sharp::pie::encode_pie_mem(cairo_pie).map_err(ProverServiceError::PieEncoding)?; + let res = self.sharp_client.add_job(&encoded_pie).await?; + if let Some(job_key) = res.cairo_job_key { + Ok(combine_task_id(&job_key, &fact_info.fact)) + } else { + Err(ProverServiceError::TaskInvalid(res.error_message.unwrap_or_default()).into()) + } + } + } + } + + async fn get_task_status(&self, task_id: &TaskId) -> Result<TaskStatus, ProverServiceError> { + let (job_key, fact) = split_task_id(task_id)?; + let res = self.sharp_client.get_job_status(&job_key).await?; + match res.status { + CairoJobStatus::FAILED => Ok(TaskStatus::Failed(res.error_log.unwrap_or_default())), + CairoJobStatus::INVALID => { + Ok(TaskStatus::Failed(format!("Task is invalid: {:?}", res.invalid_reason.unwrap_or_default()))) + } + CairoJobStatus::UNKNOWN => Ok(TaskStatus::Failed(format!("Task not found: {}", task_id))), + CairoJobStatus::IN_PROGRESS | CairoJobStatus::NOT_CREATED | CairoJobStatus::PROCESSED => { + Ok(TaskStatus::Processing) + } + CairoJobStatus::ONCHAIN => { + if self.fact_checker.is_valid(&fact).await? { + Ok(TaskStatus::Succeeded) + } else { + Ok(TaskStatus::Failed(format!("Fact {} is not valid or not registered", hex::encode(&fact)))) + } + } + } + } +} + +impl SharpProverService { + pub fn new(sharp_client: SharpClient, fact_checker: FactChecker) -> Self { + Self { sharp_client, fact_checker } + } + + pub fn with_settings(settings: &impl SettingsProvider) -> Self { + let sharp_cfg: SharpConfig = settings.get_settings(SHARP_SETTINGS_NAME).unwrap(); + let sharp_client = SharpClient::new(sharp_cfg.service_url); + let fact_checker = FactChecker::new(sharp_cfg.rpc_node_url, sharp_cfg.verifier_address); + Self::new(sharp_client, fact_checker) + } +} + +/// Construct SHARP specific task ID from job key and proof fact +pub fn combine_task_id(job_key: &Uuid, fact: &B256) -> TaskId { + format!("{}:{}", job_key, fact) +} + +/// Split task ID into SHARP job key and proof fact +pub fn split_task_id(task_id: &TaskId) -> Result<(Uuid, B256), SharpError> { + let (job_key_str, fact_str) = task_id.split_once(':').ok_or(SharpError::TaskIdSplit)?; + let job_key = Uuid::from_str(job_key_str).map_err(SharpError::JobKeyParse)?; + let fact = B256::from_str(fact_str).map_err(SharpError::FactParse)?; + Ok((job_key, fact)) +} + +#[cfg(test)] +mod tests { + use std::path::PathBuf; + use std::time::Duration; + + use cairo_vm::vm::runners::cairo_pie::CairoPie; + use prover_service::{ProverService, Task, TaskStatus}; + use utils::settings::default::DefaultSettingsProvider; + + use crate::SharpProverService; + + /// DO NOT RUN THIS TEST IN CI PIPELINE + #[ignore] + #[tokio::test] + async fn sharp_smoke_test() { + let sharp_service = SharpProverService::with_settings(&DefaultSettingsProvider {}); + let cairo_pie_path: PathBuf = + [env!("CARGO_MANIFEST_DIR"), "src", "artifacts", "fibonacci.zip"].iter().collect(); + let cairo_pie = CairoPie::read_zip_file(&cairo_pie_path).unwrap(); + // Submit task to the testnet prover + let task_id = sharp_service.submit_task(Task::CairoPie(cairo_pie)).await.unwrap(); + println!("SHARP: task {} submitted", task_id); + for attempt in 0..10 { + tokio::time::sleep(Duration::from_millis((attempt + 1) * 
1000)).await; + match sharp_service.get_task_status(&task_id).await.unwrap() { + TaskStatus::Failed(err) => { + println!("SHARP: task failed with {}", err); + panic!("{:#?}", err); + } + TaskStatus::Processing => { + println!("SHARP: task is processing (attempt {})", attempt); + continue; + } + TaskStatus::Succeeded => { + println!("SHARP: task is completed"); + return; + } + } + } + panic!("SHARP: timeout"); + } +} diff --git a/crates/prover_services/stone_service/Cargo.toml b/crates/prover_services/stone_service/Cargo.toml new file mode 100644 index 00000000..0dafe985 --- /dev/null +++ b/crates/prover_services/stone_service/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "stone-service" +version.workspace = true +edition.workspace = true + +[dependencies] +alloy.workspace = true +async-trait.workspace = true +cairo-vm.workspace = true +gps-fact-checker.workspace = true +prover-service.workspace = true +utils.workspace = true +madara-prover-common.workspace = true +thiserror.workspace = true diff --git a/crates/prover_services/stone_service/src/error.rs b/crates/prover_services/stone_service/src/error.rs new file mode 100644 index 00000000..40af849a --- /dev/null +++ b/crates/prover_services/stone_service/src/error.rs @@ -0,0 +1,10 @@ +use prover_service::ProverServiceError; + +#[derive(Debug, thiserror::Error)] +pub enum StoneProverError {} + +impl From for ProverServiceError { + fn from(value: StoneProverError) -> Self { + Self::Internal(Box::new(value)) + } +} diff --git a/crates/prover_services/stone_service/src/gps.rs b/crates/prover_services/stone_service/src/gps.rs new file mode 100644 index 00000000..6d8741e5 --- /dev/null +++ b/crates/prover_services/stone_service/src/gps.rs @@ -0,0 +1,29 @@ +use async_trait::async_trait; +use madara_prover_common::models::Proof; +use utils::settings::SettingsProvider; + +use crate::error::StoneProverError; +use crate::registry::StoneProofRegistry; + +/// GPS fact registry encapsulates a set of EVM contracts (https://github.com/starkware-libs/starkex-contracts) +/// used to verify proof components and register the fact. +/// +/// This is what Starknet and StarkEx appchains are using in production. +pub struct GpsFactRegistry { + // TODO: use code from https://github.com/zksecurity/stark-evm-adapter/blob/main/examples/verify_stone_proof.rs + // to submit the proof component by component. + // TODO: should contract interfaces/clients live in zaun? +} + +pub type Fact = [u8; 32]; + +#[async_trait] +impl StoneProofRegistry for GpsFactRegistry { + fn with_settings(settings: &impl SettingsProvider) -> Self { + todo!() + } + + async fn register_proof(&self, proof: Proof) -> Result<(), StoneProverError> { + todo!() + } +} diff --git a/crates/prover_services/stone_service/src/integrity.rs b/crates/prover_services/stone_service/src/integrity.rs new file mode 100644 index 00000000..0b97dd08 --- /dev/null +++ b/crates/prover_services/stone_service/src/integrity.rs @@ -0,0 +1,23 @@ +use async_trait::async_trait; +use madara_prover_common::models::Proof; +use utils::settings::SettingsProvider; + +use crate::error::StoneProverError; +use crate::registry::StoneProofRegistry; + +/// Integrity is the Stark verifier written in Cairo, aimed to be used by Starknet L3s (but not +/// only). 
+pub struct IntegrityFactRegistry { + // TODO: use https://github.com/cartridge-gg/cairo-proof-parser to convert the split proof and submit onchain +} + +#[async_trait] +impl StoneProofRegistry for IntegrityFactRegistry { + fn with_settings(settings: &impl SettingsProvider) -> Self { + todo!() + } + + async fn register_proof(&self, proof: Proof) -> Result<(), StoneProverError> { + todo!() + } +} diff --git a/crates/prover_services/stone_service/src/lib.rs b/crates/prover_services/stone_service/src/lib.rs new file mode 100644 index 00000000..4ea41650 --- /dev/null +++ b/crates/prover_services/stone_service/src/lib.rs @@ -0,0 +1,45 @@ +pub mod error; +pub mod gps; +pub mod integrity; +pub mod registry; +pub mod sovereign; + +use async_trait::async_trait; +use prover_service::{ProverService, ProverServiceError, Task, TaskId, TaskStatus}; +use utils::settings::SettingsProvider; + +use crate::registry::StoneProofRegistry; + +/// Stone prover service combines the Madara prover API and any proof registry compatible with the +/// Stone proof. +#[derive(Debug)] +pub struct StoneProverService<R: StoneProofRegistry> { + registry: R, + // TODO: use https://github.com/Moonsong-Labs/madara-prover-api/blob/od/use-latest-cairo-vm/madara-prover-rpc-client/src/services/starknet_prover.rs + // to submit PIEs + + // What is missing: + // - Madara prover API does not provide an "async job" interface at the moment; long-running RPC calls might be unreliable + // - Check if the resulting proof can be converted both to the Integrity format and for submission to the GPS statement + // verifier + // - Madara prover API/SDK depend on a custom cairo-vm branch; we need to make sure we can switch to upstream without + // breaking things + // - Can it handle a large number of PIEs? Maybe it makes sense to submit them one by one +} + +#[async_trait] +impl<R: StoneProofRegistry> ProverService for StoneProverService<R> { + async fn submit_task(&self, task: Task) -> Result<TaskId, ProverServiceError> { + todo!() + } + + async fn get_task_status(&self, task_id: &TaskId) -> Result<TaskStatus, ProverServiceError> { + todo!() + } +} + +impl<R: StoneProofRegistry> StoneProverService<R> { + pub fn with_settings(settings: &impl SettingsProvider) -> Self { + Self { registry: R::with_settings(settings) } + } +} diff --git a/crates/prover_services/stone_service/src/registry.rs b/crates/prover_services/stone_service/src/registry.rs new file mode 100644 index 00000000..e42e1dbe --- /dev/null +++ b/crates/prover_services/stone_service/src/registry.rs @@ -0,0 +1,15 @@ +use async_trait::async_trait; +use madara_prover_common::models::Proof; +use utils::settings::SettingsProvider; + +use crate::error::StoneProverError; + +/// A proof registry is one or multiple smart contracts that verify proofs and register the corresponding +/// facts. +/// +/// A Stone proof registry additionally requires that the backend supports the Madara prover proof format. +#[async_trait] +pub trait StoneProofRegistry: Send + Sync + Sized { + fn with_settings(settings: &impl SettingsProvider) -> Self; + async fn register_proof(&self, proof: Proof) -> Result<(), StoneProverError>; +} diff --git a/crates/prover_services/stone_service/src/sovereign.rs b/crates/prover_services/stone_service/src/sovereign.rs new file mode 100644 index 00000000..ca8e5e64 --- /dev/null +++ b/crates/prover_services/stone_service/src/sovereign.rs @@ -0,0 +1,24 @@ +use async_trait::async_trait; +use madara_prover_common::models::Proof; +use utils::settings::SettingsProvider; + +use crate::error::StoneProverError; +use crate::registry::StoneProofRegistry; + +/// Sovereign rollups publish state diffs and execution proof without onchain verification.
+/// +/// Read more https://medium.com/@chainway_xyz/a-sovereign-zk-rollup-on-bitcoin-full-bitcoin-security-without-a-soft-fork-ca0389a0b658 +pub struct SovereignProofRegistry { + // TODO: use DA client specified in the settings to publish the proof +} + +#[async_trait] +impl StoneProofRegistry for SovereignProofRegistry { + fn with_settings(settings: &impl SettingsProvider) -> Self { + todo!() + } + + async fn register_proof(&self, proof: Proof) -> Result<(), StoneProverError> { + todo!() + } +} diff --git a/crates/settlement_clients/settlement-client-interface/src/lib.rs b/crates/settlement_clients/settlement-client-interface/src/lib.rs index da5a471c..7fc51a65 100644 --- a/crates/settlement_clients/settlement-client-interface/src/lib.rs +++ b/crates/settlement_clients/settlement-client-interface/src/lib.rs @@ -1,6 +1,7 @@ use async_trait::async_trait; use color_eyre::Result; -use mockall::{automock, predicate::*}; +use mockall::automock; +use mockall::predicate::*; use starknet::core::types::FieldElement; #[derive(Debug, Copy, Clone, PartialEq, Eq)] diff --git a/crates/utils/Cargo.toml b/crates/utils/Cargo.toml index 8fea4db4..1061637d 100644 --- a/crates/utils/Cargo.toml +++ b/crates/utils/Cargo.toml @@ -7,3 +7,5 @@ edition.workspace = true [dependencies] color-eyre = { workspace = true } +serde.workspace = true +thiserror.workspace = true diff --git a/crates/utils/src/lib.rs b/crates/utils/src/lib.rs index 6a65fdce..277ce5ab 100644 --- a/crates/utils/src/lib.rs +++ b/crates/utils/src/lib.rs @@ -1 +1,14 @@ pub mod env_utils; +pub mod settings; + +/// Evaluate `$x:expr` and if not true return `Err($y:expr)`. +/// +/// Used as `ensure!(expression_to_ensure, expression_to_return_on_false)`. +#[macro_export] +macro_rules! ensure { + ($x:expr, $y:expr $(,)?) 
=> {{ + if !$x { + return Err($y); + } + }}; +} diff --git a/crates/utils/src/settings/default.rs b/crates/utils/src/settings/default.rs new file mode 100644 index 00000000..3166ab6e --- /dev/null +++ b/crates/utils/src/settings/default.rs @@ -0,0 +1,13 @@ +use super::SettingsProvider; + +#[derive(Debug, Clone, Default)] +pub struct DefaultSettingsProvider {} + +impl SettingsProvider for DefaultSettingsProvider { + fn get_settings( + &self, + _section: &'static str, + ) -> Result { + Ok(T::default()) + } +} diff --git a/crates/utils/src/settings/mod.rs b/crates/utils/src/settings/mod.rs new file mode 100644 index 00000000..cbf87c43 --- /dev/null +++ b/crates/utils/src/settings/mod.rs @@ -0,0 +1,13 @@ +pub mod default; + +use serde::de::DeserializeOwned; + +#[derive(Debug, thiserror::Error)] +pub enum SettingsProviderError { + #[error("Internal settings error: {0}")] + Internal(#[source] Box), +} + +pub trait SettingsProvider { + fn get_settings(&self, name: &'static str) -> Result; +} diff --git a/e2e-tests/Cargo.toml b/e2e-tests/Cargo.toml new file mode 100644 index 00000000..7d56d999 --- /dev/null +++ b/e2e-tests/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "e2e-tests" +version = "0.1.0" +edition = "2021" + +[dependencies] +tokio.workspace = true +tokio-util.workspace = true +tokio-stream.workspace = true +url.workspace = true +reqwest.workspace = true +serde_json.workspace = true + +[[test]] +name = "test_prover_jobs" +path = "test_prover_jobs.rs" diff --git a/e2e-tests/src/lib.rs b/e2e-tests/src/lib.rs new file mode 100644 index 00000000..12987b17 --- /dev/null +++ b/e2e-tests/src/lib.rs @@ -0,0 +1,107 @@ +use std::env; +use std::fs::{create_dir_all, File}; +use std::net::TcpListener; +use std::path::{Path, PathBuf}; +use std::process::{Child, Command, ExitStatus, Stdio}; +use std::time::Duration; + +use tokio::net::TcpStream; +use url::Url; + +const MIN_PORT: u16 = 49_152; +const MAX_PORT: u16 = 65_535; +const CONNECTION_ATTEMPTS: usize = 360; +const CONNECTION_ATTEMPT_DELAY_MS: u64 = 500; + +#[derive(Debug)] +pub struct Orchestrator { + process: Child, + address: String, +} + +impl Drop for Orchestrator { + fn drop(&mut self) { + let mut kill = + Command::new("kill").args(["-s", "TERM", &self.process.id().to_string()]).spawn().expect("Failed to kill"); + kill.wait().expect("Failed to kill the process"); + } +} + +fn get_free_port() -> u16 { + for port in MIN_PORT..=MAX_PORT { + if let Ok(listener) = TcpListener::bind(("127.0.0.1", port)) { + return listener.local_addr().expect("No local addr").port(); + } + // otherwise port is occupied + } + panic!("No free ports available"); +} + +fn get_repository_root() -> PathBuf { + let manifest_path = Path::new(&env!("CARGO_MANIFEST_DIR")); + let repository_root = manifest_path.parent().expect("Failed to get parent directory of CARGO_MANIFEST_DIR"); + repository_root.to_path_buf() +} + +impl Orchestrator { + fn cargo_run(root_dir: &Path, binary: &str, args: Vec<&str>, envs: Vec<(&str, &str)>) -> Child { + let arguments = [vec!["run", "--bin", binary, "--release", "--"], args].concat(); + + let logs_dir = Path::join(root_dir, Path::new("target/logs")); + create_dir_all(logs_dir.clone()).expect("Failed to create logs dir"); + + let stdout = Stdio::from(File::create(logs_dir.join(format!("{}-stdout.txt", binary))).unwrap()); + let stderr = Stdio::from(File::create(logs_dir.join(format!("{}-stderr.txt", binary))).unwrap()); + + Command::new("cargo") + .stdout(stdout) + .stderr(stderr) + .envs(envs) + .args(arguments) + .spawn() + .expect("Could 
not run DSN node") + } + + pub fn run(binary: &str, args: Vec<&str>) -> Self { + let port = get_free_port(); + let address = format!("127.0.0.1:{}", port); + let repository_root = &get_repository_root(); + + std::env::set_current_dir(repository_root).expect("Failed to change working directory"); + + let args = [args, vec!["--rpc-address", address.as_str()]].concat(); + let port_str = format!("{}", port); + + let process = Self::cargo_run(repository_root.as_path(), binary, args, vec![("PORT", port_str.as_str())]); + + Self { process, address } + } + + pub fn endpoint(&self) -> Url { + Url::parse(&format!("http://{}", self.address)).unwrap() + } + + pub fn has_exited(&mut self) -> Option { + self.process.try_wait().expect("Failed to get DSN node exit status") + } + + pub async fn wait_till_started(&mut self) { + let mut attempts = CONNECTION_ATTEMPTS; + loop { + match TcpStream::connect(&self.address).await { + Ok(_) => return, + Err(err) => { + if let Some(status) = self.has_exited() { + panic!("DSN node exited early with {}", status); + } + if attempts == 0 { + panic!("Failed to connect to {}: {}", self.address, err); + } + } + }; + + attempts -= 1; + tokio::time::sleep(Duration::from_millis(CONNECTION_ATTEMPT_DELAY_MS)).await; + } + } +} diff --git a/e2e-tests/test_prover_jobs.rs b/e2e-tests/test_prover_jobs.rs new file mode 100644 index 00000000..28cef186 --- /dev/null +++ b/e2e-tests/test_prover_jobs.rs @@ -0,0 +1,20 @@ +use e2e_tests::Orchestrator; +use serde_json::json; + +extern crate e2e_tests; + +#[tokio::test] +async fn test_sharp_job_completes() { + let mut orchestrator = Orchestrator::run("orchestrator", vec![]); + orchestrator.wait_till_started().await; + + let client = reqwest::Client::new(); + + let create_job = json!({ "job_type": "ProofRegistration", "internal_id": "12345" }); + + client.get(orchestrator.endpoint().join("/create_job").unwrap()) + .json(&create_job) + .send() + .await + .unwrap(); +} diff --git a/migrations/.DS_Store b/migrations/.DS_Store deleted file mode 100644 index 60e51e56ddd77652cc36abb4b383072dd075bf78..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 6148 zcmeH~y-EW?5XWcrgrH3t!OjI=z=L40JHz<|0UL`mF~JWmUcjWXx!$L;u(Ghy$~W*O z{LjwlB_tM+_<_tEv;X~?x$F=2HcLdVHSWbkbt0<4S)+9nbBxE?XKY1#c+iC$yHq_h z)2+h@7=b@VfcNgs#ICLO?|Uty9$iqMuHiXls9(WH@J?uR|A@vk%+KQXX)zee3fJzI zmw8a6X*Vyr$XI(kzIl0ixIb8C_TD#UAD&NA@%Y)aNij9Se4qC4G^v4h17t6$pe{IP z;MqYfr~WikceGgaB)8k`?Dbi=E%5$9QL*`2hB@nJ24=rc0B1H^Z9~*bBVYuKzy|^T zK6p55qFRcEPX~%{1%O8A4u(44B{;^bCaR?uelhezG z(>FW4p}6pNXxF~I>Ei6gT)|tie tS?l2^a5kP-iiQvrdK`xeAH`enU>NgUfhMY@D0*P_M<8IZ!U+5*fj8WRff4`!