Skip to content

Commit

Permalink
Prover service trait and SHARP client implementation
Browse files Browse the repository at this point in the history
  • Loading branch information
unstark committed May 17, 2024
1 parent b420e1d commit 82fb16c
Show file tree
Hide file tree
Showing 41 changed files with 5,564 additions and 631 deletions.
5,056 changes: 4,492 additions & 564 deletions Cargo.lock

Large diffs are not rendered by default.

30 changes: 27 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,21 @@ authors = ["Apoorv Sadana <@apoorvsadana>"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
alloy = { git = "https://github.com/alloy-rs/alloy", rev = "86027c9bb984f3a12a30ffd2a3c5f2f06595f1d6", features = [
alloy = { git = "https://github.com/alloy-rs/alloy", rev = "7373f6db761d5a19888e3a0c527e8a3ca31e7a1e", features = [
"sol-types",
"json",
"contract",
"providers",
"rpc-client",
"transport-http",
"reqwest",
], optional = true }
async-trait = "0.1.77"
axum = { version = "0.7.4", features = ["macros"] }
axum-macros = "0.4.1"
bincode = { version = "2.0.0-rc.3", default-features = false, features = [
"serde",
] }
color-eyre = "0.6.2"
dotenvy = "0.15.7"
futures = "0.3.30"
Expand All @@ -25,12 +32,29 @@ rstest = "0.18.2"
serde = { version = "1.0.197" }
serde_json = "1.0.114"
starknet = "0.9.0"
tempfile = "3.8.1"
thiserror = "1.0.57"
tokio = { version = "1.36.0", features = ["sync", "macros", "rt-multi-thread"] }
tokio = { version = "1.37.0", features = ["sync", "macros", "rt-multi-thread"] }
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
url = "2.5.0"
url = { version = "2.5.0", features = ["serde"] }
uuid = { version = "1.7.0", features = ["v4", "serde"] }
stark_evm_adapter = "0.1.1"
hex = "0.4"
itertools = "0.13.0"

# Cairo VM
cairo-vm = { git = "https://github.com/lambdaclass/cairo-vm" }

# Sharp (Starkware)
snos = { git = "https://github.com/unstark/snos", branch = "bump-cairo-lang" }

# Sharp P2P
# cairo-proof-parser = { git = "https://github.com/cartridge-gg/cairo-proof-parser", rev = "1cd7af307609d0f6a602a59d124d5044e56cc7b4" }

# Madara prover API
madara-prover-common = { git = "https://github.com/Moonsong-Labs/madara-prover-api", branch = "od/use-latest-cairo-vm" }
madara-prover-rpc-client = { git = "https://github.com/Moonsong-Labs/madara-prover-api", branch = "od/use-latest-cairo-vm" }

[features]
default = ["ethereum", "with_mongdb", "with_sqs"]
Expand Down
50 changes: 44 additions & 6 deletions src/config.rs
Original file line number Diff line number Diff line change
@@ -1,17 +1,28 @@
use std::sync::Arc;

use dotenvy::dotenv;
use starknet::providers::jsonrpc::HttpTransport;
use starknet::providers::{JsonRpcClient, Url};
use tokio::sync::OnceCell;

use crate::da_clients::ethereum::config::EthereumDaConfig;
use crate::da_clients::ethereum::EthereumDaClient;
use crate::da_clients::{DaClient, DaConfig};
use crate::database::mongodb::config::MongoDbConfig;
use crate::database::mongodb::MongoDb;
use crate::database::{Database, DatabaseConfig};
use crate::provers::iosis::IosisProverService;
use crate::provers::sharp::SharpProverService;
use crate::provers::stone::gps::GpsFactRegistry;
use crate::provers::stone::integrity::IntegrityFactRegistry;
use crate::provers::stone::sovereign::SovereignProofRegistry;
use crate::provers::stone::StoneProverService;
use crate::provers::ProverService;
use crate::queue::sqs::SqsQueue;
use crate::queue::QueueProvider;
use crate::settings::default::DefaultSettingsProvider;
use crate::settings::SettingsProvider;
use crate::utils::env_utils::get_env_var_or_panic;
use dotenvy::dotenv;
use starknet::providers::jsonrpc::HttpTransport;
use starknet::providers::{JsonRpcClient, Url};
use std::sync::Arc;
use tokio::sync::OnceCell;

/// The app config. It can be accessed from anywhere inside the service
/// by calling `config` function.
Expand All @@ -20,6 +31,8 @@ pub struct Config {
starknet_client: Arc<JsonRpcClient<HttpTransport>>,
/// The DA client to interact with the DA layer
da_client: Box<dyn DaClient>,
/// The service that produces proof and registers it onchain
prover: Box<dyn ProverService>,
/// The database client
database: Box<dyn Database>,
/// The queue provider
Expand All @@ -37,6 +50,11 @@ impl Config {
self.da_client.as_ref()
}

/// Returns the proving service
pub fn prover(&self) -> &dyn ProverService {
self.prover.as_ref()
}

/// Returns the database client
pub fn database(&self) -> &dyn Database {
self.database.as_ref()
Expand Down Expand Up @@ -67,7 +85,15 @@ async fn init_config() -> Config {
// init the queue
let queue = Box::new(SqsQueue {});

Config { starknet_client: Arc::new(provider), da_client: build_da_client(), database, queue }
let settings_provider = DefaultSettingsProvider {};

Config {
starknet_client: Arc::new(provider),
da_client: build_da_client(),
prover: create_prover_service(&settings_provider),
database,
queue,
}
}

/// Returns the app config. Initializes if not already done.
Expand All @@ -85,3 +111,15 @@ fn build_da_client() -> Box<dyn DaClient + Send + Sync> {
_ => panic!("Unsupported DA layer"),
}
}

/// Creates prover service based on the environment variable PROVER_SERVICE
/// Creates the prover service selected by the `PROVER_SERVICE` environment variable.
///
/// Supported values: `"sharp"`, `"iosis"`, `"stone_gps"`, `"stone_integrity"`,
/// `"stone_sovereign"`. The Stone variants differ only in the fact/proof
/// registry used as the type parameter.
///
/// # Panics
/// Panics if `PROVER_SERVICE` is unset (via `get_env_var_or_panic`) or set to
/// an unsupported value; the offending value is included in the message to aid
/// debugging misconfigured deployments.
fn create_prover_service(settings_provider: &impl SettingsProvider) -> Box<dyn ProverService> {
    let service = get_env_var_or_panic("PROVER_SERVICE");
    match service.as_str() {
        "sharp" => Box::new(SharpProverService::with_settings(settings_provider)),
        "iosis" => Box::new(IosisProverService::with_settings(settings_provider)),
        "stone_gps" => Box::new(StoneProverService::<GpsFactRegistry>::with_settings(settings_provider)),
        "stone_integrity" => Box::new(StoneProverService::<IntegrityFactRegistry>::with_settings(settings_provider)),
        "stone_sovereign" => Box::new(StoneProverService::<SovereignProofRegistry>::with_settings(settings_provider)),
        _ => panic!("Unsupported prover service: {}", service),
    }
}
1 change: 1 addition & 0 deletions src/contracts/artifacts/FactRegistry.json

Large diffs are not rendered by default.

8 changes: 8 additions & 0 deletions src/contracts/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
use alloy::sol;

// Generates Rust bindings for the on-chain `FactRegistry` contract from its
// compiled JSON artifact; `#[sol(rpc)]` additionally emits RPC-capable
// contract call helpers.
sol!(
    #[allow(missing_docs)]
    #[sol(rpc)]
    FactRegistry,
    "src/contracts/artifacts/FactRegistry.json"
);
5 changes: 3 additions & 2 deletions src/controllers/jobs_controller.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
use crate::controllers::errors::AppError;
use crate::jobs::types::JobType;
use axum::extract::Json;
use serde::Deserialize;

use crate::controllers::errors::AppError;
use crate::jobs::types::JobType;

/// Client request to create a job
#[derive(Debug, Deserialize)]
pub struct CreateJobRequest {
Expand Down
8 changes: 4 additions & 4 deletions src/da_clients/ethereum/mod.rs
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
#![allow(missing_docs)]
#![allow(clippy::missing_docs_in_private_items)]
use std::str::FromStr;

use alloy::rpc::client::RpcClient;
use alloy::transports::http::Http;
use alloy::transports::http::{Client, Http};
use async_trait::async_trait;
use color_eyre::Result;
use reqwest::Client;
use starknet::core::types::FieldElement;
use std::str::FromStr;
use url::Url;

use crate::da_clients::ethereum::config::EthereumDaConfig;
Expand All @@ -33,7 +33,7 @@ impl DaClient for EthereumDaClient {
impl From<EthereumDaConfig> for EthereumDaClient {
fn from(config: EthereumDaConfig) -> Self {
let provider = RpcClient::builder()
.reqwest_http(Url::from_str(config.rpc_url.as_str()).expect("Failed to parse ETHEREUM_RPC_URL"));
.http(Url::from_str(config.rpc_url.as_str()).expect("Failed to parse ETHEREUM_RPC_URL"));
EthereumDaClient { provider }
}
}
3 changes: 2 additions & 1 deletion src/da_clients/mod.rs
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
use crate::jobs::types::JobVerificationStatus;
use axum::async_trait;
use color_eyre::Result;
use starknet::core::types::FieldElement;

use crate::jobs::types::JobVerificationStatus;

/// Ethereum client
pub mod ethereum;

Expand Down
6 changes: 4 additions & 2 deletions src/database/mod.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
use crate::jobs::types::{JobItem, JobStatus, JobType};
use std::collections::HashMap;

use async_trait::async_trait;
use color_eyre::Result;
use std::collections::HashMap;
use uuid::Uuid;

use crate::jobs::types::{JobItem, JobStatus, JobType};

/// MongoDB
pub mod mongodb;

Expand Down
29 changes: 14 additions & 15 deletions src/database/mongodb/mod.rs
Original file line number Diff line number Diff line change
@@ -1,19 +1,17 @@
use crate::database::mongodb::config::MongoDbConfig;
use crate::database::Database;
use crate::jobs::types::{JobItem, JobStatus, JobType};
use std::collections::HashMap;

use async_trait::async_trait;
use color_eyre::eyre::eyre;
use color_eyre::Result;
use mongodb::bson::Document;
use mongodb::options::UpdateOptions;
use mongodb::{
bson::doc,
options::{ClientOptions, ServerApi, ServerApiVersion},
Client, Collection,
};
use std::collections::HashMap;
use mongodb::bson::{doc, Document};
use mongodb::options::{ClientOptions, ServerApi, ServerApiVersion, UpdateOptions};
use mongodb::{Client, Collection};
use uuid::Uuid;

use crate::database::mongodb::config::MongoDbConfig;
use crate::database::Database;
use crate::jobs::types::{JobItem, JobStatus, JobType};

pub mod config;

pub struct MongoDb {
Expand All @@ -23,7 +21,8 @@ pub struct MongoDb {
impl MongoDb {
pub async fn new(config: MongoDbConfig) -> Self {
let mut client_options = ClientOptions::parse(config.url).await.expect("Failed to parse MongoDB Url");
// Set the server_api field of the client_options object to set the version of the Stable API on the client
// Set the server_api field of the client_options object to set the version of the Stable API on the
// client
let server_api = ServerApi::builder().version(ServerApiVersion::V1).build();
client_options.server_api = Some(server_api);
// Get a handle to the cluster
Expand All @@ -39,9 +38,9 @@ impl MongoDb {
self.client.database("orchestrator").collection("jobs")
}

/// Updates the job in the database optimistically. This means that the job is updated only if the
/// version of the job in the database is the same as the version of the job passed in. If the version
/// is different, the update fails.
/// Updates the job in the database optimistically. This means that the job is updated only if
/// the version of the job in the database is the same as the version of the job passed in.
/// If the version is different, the update fails.
async fn update_job_optimistically(&self, current_job: &JobItem, update: Document) -> Result<()> {
let filter = doc! {
"id": current_job.id,
Expand Down
49 changes: 49 additions & 0 deletions src/fact_checker/error.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
use cairo_vm::program_hash::ProgramHashError;

#[derive(Debug, thiserror::Error)]
pub enum FactCheckerError {
#[error("Fact registry call failed: {0}")]
FactRegistry(#[source] alloy::contract::Error),
#[error("Failed to compute program hash: {0}")]
ProgramHashCompute(#[from] ProgramHashError),
#[error("Tree structure length is not even")]
TreeStructureLenOdd,
#[error("Tree structure is empty")]
TreeStructureEmpty,
#[error("Tree structure is too large")]
TreeStructureTooLarge,
#[error("Tree structure contains invalid values")]
TreeStructureInvalid,
#[error("Output pages length is unexpected")]
OutputPagesLenUnexpected,
#[error("Output page {0} has invalid start {1} (expected 0 < x < {2})")]
OutputPagesInvalidStart(usize, usize, usize),
#[error("Output page {0} has expected start {1} (expected{2})")]
OutputPagesUnexpectedStart(usize, usize, usize),
#[error("Output page {0} has invalid size {1} (expected 0 < x < {2})")]
OutputPagesInvalidSize(usize, usize, usize),
#[error("Output page {0} has unexpected id (expected {1})")]
OutputPagesUnexpectedId(usize, usize),
#[error("Output pages cover only {0} out of {1} output elements")]
OutputPagesUncoveredOutput(usize, usize),
#[error("Output segment is not found in the memory")]
OutputSegmentNotFound,
#[error("Output segment does not fit into the memory")]
OutputSegmentInvalidRange,
#[error("Output segment contains inconsistent offset {0} (expected {1})")]
OutputSegmentInconsistentOffset(usize, usize),
#[error("Output segment contains unexpected relocatable at position {0}")]
OutputSegmentUnexpectedRelocatable(usize),
#[error("Tree structure: pages count {0} is in invalid range (expected <= {1})")]
TreeStructurePagesCountOutOfRange(usize, usize),
#[error("Tree structure: nodes count {0} is in invalid range (expected <= {1})")]
TreeStructureNodesCountOutOfRange(usize, usize),
#[error("Tree structure: node stack contains more than one node")]
TreeStructureRootInvalid,
#[error("Tree structure: {0} pages were not processed")]
TreeStructurePagesNotProcessed(usize),
#[error("Tree structure: end offset {0} does not match the output length {1}")]
TreeStructureEndOffsetInvalid(usize, usize),
#[error("Tree structure: root offset {0} does not match the output length {1}")]
TreeStructureRootOffsetInvalid(usize, usize),
}
72 changes: 72 additions & 0 deletions src/fact_checker/fact_info.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
//! Fact info structure and helpers.
//!
//! Port of https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/generate_fact.py
use alloy::primitives::{keccak256, B256};
use cairo_vm::{
program_hash::compute_program_hash_chain,
types::{builtin_name::BuiltinName, relocatable::MaybeRelocatable},
vm::runners::cairo_pie::CairoPie,
Felt252,
};
use starknet::core::types::FieldElement;

use super::{
error::FactCheckerError,
fact_node::generate_merkle_root,
fact_topology::{get_fact_topology, FactTopology},
};

// Bootloader version passed as the second argument to
// `compute_program_hash_chain` when the program hash is not supplied.
pub const BOOTLOADER_VERSION: usize = 1;

/// Result of computing the fact for a Cairo PIE (see `get_fact_info`).
pub struct FactInfo {
    // Felt values of the PIE's output builtin segment, as extracted by
    // `get_program_output`.
    pub program_output: Vec<Felt252>,
    // Page/node layout of the output, used to build the output Merkle tree.
    pub fact_topology: FactTopology,
    // The fact itself: keccak256(program_hash || output Merkle root hash).
    pub fact: B256,
}

/// Computes the fact for a Cairo PIE.
///
/// The fact is `keccak256(program_hash_be_bytes || output_merkle_root_hash)`,
/// following Starkware's `generate_fact.py` (see module docs). If
/// `program_hash` is `None`, it is derived from the PIE's program via
/// `compute_program_hash_chain` with `BOOTLOADER_VERSION`.
///
/// # Errors
/// Propagates failures from output extraction, fact-topology parsing,
/// program hashing, and Merkle-root generation as `FactCheckerError`.
pub fn get_fact_info(cairo_pie: &CairoPie, program_hash: Option<FieldElement>) -> Result<FactInfo, FactCheckerError> {
    let program_output = get_program_output(cairo_pie)?;
    // Page/node layout of the output — required to build the Merkle tree below.
    let fact_topology = get_fact_topology(cairo_pie, program_output.len())?;
    let program_hash = match program_hash {
        Some(hash) => hash,
        None => compute_program_hash_chain(&cairo_pie.metadata.program, BOOTLOADER_VERSION)?,
    };
    let output_root = generate_merkle_root(&program_output, &fact_topology)?;
    // Concatenate the big-endian program hash bytes with the root node hash,
    // then hash the pair to obtain the fact.
    let fact = keccak256([program_hash.to_bytes_be(), *output_root.node_hash].concat());
    Ok(FactInfo { program_output, fact_topology, fact })
}

/// Extracts the output builtin segment values from a Cairo PIE's memory.
///
/// The segment is located by matching its segment index in the flat PIE
/// memory, then read sequentially: offsets must be contiguous starting at 0,
/// and every cell must hold a felt (relocatable values are rejected).
///
/// # Errors
/// - `OutputSegmentNotFound` if the PIE has no output builtin segment or its
///   start cannot be located in memory.
/// - `OutputSegmentInvalidRange` if the segment runs past the memory.
/// - `OutputSegmentInconsistentOffset` / `OutputSegmentUnexpectedRelocatable`
///   on malformed segment contents.
pub fn get_program_output(cairo_pie: &CairoPie) -> Result<Vec<Felt252>, FactCheckerError> {
    // A PIE without an output builtin is an error, not a panic: previously
    // this was `unwrap()` and would crash on such input.
    let segment_info = cairo_pie
        .metadata
        .builtin_segments
        .get(&BuiltinName::output)
        .ok_or(FactCheckerError::OutputSegmentNotFound)?;

    let segment_start = cairo_pie
        .memory
        .0
        .iter()
        .enumerate()
        .find_map(|(ptr, ((index, _), _))| if *index == segment_info.index as usize { Some(ptr) } else { None })
        .ok_or(FactCheckerError::OutputSegmentNotFound)?;

    let mut output = Vec::with_capacity(segment_info.size);
    let mut expected_offset = 0;

    for i in segment_start..segment_start + segment_info.size {
        let ((_, offset), value) = cairo_pie.memory.0.get(i).ok_or(FactCheckerError::OutputSegmentInvalidRange)?;

        // Offsets must form the contiguous run 0, 1, 2, … within the segment.
        if *offset != expected_offset {
            return Err(FactCheckerError::OutputSegmentInconsistentOffset(*offset, expected_offset));
        }

        match value {
            MaybeRelocatable::Int(felt) => output.push(felt.clone()),
            MaybeRelocatable::RelocatableValue(_) => {
                return Err(FactCheckerError::OutputSegmentUnexpectedRelocatable(*offset))
            }
        }

        expected_offset += 1;
    }

    Ok(output)
}
Loading

0 comments on commit 82fb16c

Please sign in to comment.