Skip to content

Commit

Permalink
Fact checker and fact calculation
Browse files Browse the repository at this point in the history
  • Loading branch information
unstark committed May 16, 2024
1 parent 9ef6e02 commit 7fd2b50
Show file tree
Hide file tree
Showing 12 changed files with 402 additions and 24 deletions.
12 changes: 11 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ url = { version = "2.5.0", features = ["serde"] }
uuid = { version = "1.7.0", features = ["v4", "serde"] }
stark_evm_adapter = "0.1.1"
hex = "0.4"
itertools = "0.13.0"

# Cairo VM
cairo-vm = { git = "https://github.com/lambdaclass/cairo-vm" }
Expand Down
49 changes: 49 additions & 0 deletions src/fact_checker/error.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
use cairo_vm::program_hash::ProgramHashError;

#[derive(Debug, thiserror::Error)]
pub enum FactCheckerError {
#[error("Fact registry call failed: {0}")]
FactRegistry(#[source] alloy::contract::Error),
#[error("Failed to compute program hash: {0}")]
ProgramHashCompute(#[from] ProgramHashError),
#[error("Tree structure length is not even")]
TreeStructureLenOdd,
#[error("Tree structure is empty")]
TreeStructureEmpty,
#[error("Tree structure is too large")]
TreeStructureTooLarge,
#[error("Tree structure contains invalid values")]
TreeStructureInvalid,
#[error("Output pages length is unexpected")]
OutputPagesLenUnexpected,
#[error("Output page {0} has invalid start {1} (expected 0 < x < {2})")]
OutputPagesInvalidStart(usize, usize, usize),
#[error("Output page {0} has expected start {1} (expected{2})")]
OutputPagesUnexpectedStart(usize, usize, usize),
#[error("Output page {0} has invalid size {1} (expected 0 < x < {2})")]
OutputPagesInvalidSize(usize, usize, usize),
#[error("Output page {0} has unexpected id (expected {1})")]
OutputPagesUnexpectedId(usize, usize),
#[error("Output pages cover only {0} out of {1} output elements")]
OutputPagesUncoveredOutput(usize, usize),
#[error("Output segment is not found in the memory")]
OutputSegmentNotFound,
#[error("Output segment does not fit into the memory")]
OutputSegmentInvalidRange,
#[error("Output segment contains inconsistent offset {0} (expected {1})")]
OutputSegmentInconsistentOffset(usize, usize),
#[error("Output segment contains unexpected relocatable at position {0}")]
OutputSegmentUnexpectedRelocatable(usize),
#[error("Tree structure: pages count {0} is in invalid range (expected <= {1})")]
TreeStructurePagesCountOutOfRange(usize, usize),
#[error("Tree structure: nodes count {0} is in invalid range (expected <= {1})")]
TreeStructureNodesCountOutOfRange(usize, usize),
#[error("Tree structure: node stack contains more than one node")]
TreeStructureRootInvalid,
#[error("Tree structure: {0} pages were not processed")]
TreeStructurePagesNotProcessed(usize),
#[error("Tree structure: end offset {0} does not match the output length {1}")]
TreeStructureEndOffsetInvalid(usize, usize),
#[error("Tree structure: root offset {0} does not match the output length {1}")]
TreeStructureRootOffsetInvalid(usize, usize),
}
72 changes: 72 additions & 0 deletions src/fact_checker/fact_info.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
//! Fact info structure and helpers.
//!
//! Port of https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/generate_fact.py
use alloy::primitives::{keccak256, B256};
use cairo_vm::{
program_hash::compute_program_hash_chain,
types::{builtin_name::BuiltinName, relocatable::MaybeRelocatable},
vm::runners::cairo_pie::CairoPie,
Felt252,
};
use starknet::core::types::FieldElement;

use super::{
error::FactCheckerError,
fact_node::generate_merkle_root,
fact_topology::{get_fact_topology, FactTopology},
};

pub const BOOTLOADER_VERSION: usize = 1;

/// Result of the fact computation for a single Cairo program run.
pub struct FactInfo {
    /// Values read from the output builtin segment of the Cairo PIE.
    pub program_output: Vec<Felt252>,
    /// Flattened Merkle tree description (tree structure + page sizes) of the output.
    pub fact_topology: FactTopology,
    /// The program fact: keccak256(program_hash || output Merkle root hash).
    pub fact: B256,
}

/// Computes the fact for a Cairo program run, given its PIE.
///
/// The fact is keccak256 of the program hash concatenated with the root hash of
/// the output Merkle tree. When `program_hash` is not supplied it is derived
/// from the program in the PIE metadata.
pub fn get_fact_info(cairo_pie: &CairoPie, program_hash: Option<FieldElement>) -> Result<FactInfo, FactCheckerError> {
    let program_output = get_program_output(cairo_pie)?;
    let fact_topology = get_fact_topology(cairo_pie, program_output.len())?;

    // Fall back to hashing the program itself when no hash was provided.
    let program_hash = if let Some(hash) = program_hash {
        hash
    } else {
        compute_program_hash_chain(&cairo_pie.metadata.program, BOOTLOADER_VERSION)?
    };

    let merkle_root = generate_merkle_root(&program_output, &fact_topology)?;

    // fact = keccak256(program_hash || merkle_root)
    let mut preimage = Vec::with_capacity(64);
    preimage.extend_from_slice(&program_hash.to_bytes_be());
    preimage.extend_from_slice(merkle_root.node_hash.as_slice());
    let fact = keccak256(&preimage);

    Ok(FactInfo { program_output, fact_topology, fact })
}

/// Extracts the program output values from the Cairo PIE memory.
///
/// Locates the output builtin segment in the flat memory listing and reads
/// `segment_info.size` consecutive cells from it, checking that offsets are
/// contiguous (0, 1, 2, ...) and that no relocatable values leaked into the output.
pub fn get_program_output(cairo_pie: &CairoPie) -> Result<Vec<Felt252>, FactCheckerError> {
    // BUGFIX: was `.unwrap()`, which panicked on PIEs without an output builtin;
    // surface it as an error instead.
    let segment_info = cairo_pie
        .metadata
        .builtin_segments
        .get(&BuiltinName::output)
        .ok_or(FactCheckerError::OutputSegmentNotFound)?;

    // Position of the first output-segment cell in the flat memory dump.
    let segment_start = cairo_pie
        .memory
        .0
        .iter()
        .enumerate()
        .find_map(|(ptr, ((index, _), _))| if *index == segment_info.index as usize { Some(ptr) } else { None })
        .ok_or(FactCheckerError::OutputSegmentNotFound)?;

    let mut output = Vec::with_capacity(segment_info.size);
    let mut expected_offset = 0;

    for i in segment_start..segment_start + segment_info.size {
        let ((_, offset), value) = cairo_pie.memory.0.get(i).ok_or(FactCheckerError::OutputSegmentInvalidRange)?;

        // Output cells must be contiguous within the segment.
        if *offset != expected_offset {
            return Err(FactCheckerError::OutputSegmentInconsistentOffset(*offset, expected_offset));
        }

        match value {
            MaybeRelocatable::Int(felt) => output.push(felt.clone()),
            // A relocatable in the output would make the fact ill-defined.
            MaybeRelocatable::RelocatableValue(_) => {
                return Err(FactCheckerError::OutputSegmentUnexpectedRelocatable(*offset))
            }
        }

        expected_offset += 1;
    }

    Ok(output)
}
119 changes: 119 additions & 0 deletions src/fact_checker/fact_node.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
//! Fact node structure and helpers.
//!
//! The fact of each task is stored as a (non-binary) Merkle tree.
//! Leaf nodes are labeled with the hash of their data.
//! Each non-leaf node is labeled as 1 + the hash of (node0, end0, node1, end1, ...)
//! where node* is the label of a child node and end* is the total number of data words up to
//! and including that node and its children (including the previous sibling nodes).
//! We add 1 to the result of the hash to prevent an attacker from using a preimage of a leaf node
//! as a preimage of a non-leaf hash and vice versa.
//!
//! The structure of the tree is passed as a list of pairs (n_pages, n_nodes), and the tree is
//! constructed using a stack of nodes (initialized to an empty stack) by repeating for each pair:
//! 1. Add #n_pages leaf nodes to the stack.
//! 2. Pop the top #n_nodes, construct a parent node for them, and push it back to the stack.
//! After applying the steps above, the stack must contain exactly one node, which will
//! constitute the root of the Merkle tree.
//!
//! For example, [(2, 2)] will create a Merkle tree with a root and two direct children, while
//! [(3, 2), (0, 2)] will create a Merkle tree with a root whose left child is a leaf and
//! right child has two leaf children.
//!
//! Port of https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/compute_fact.py
use alloy::primitives::{keccak256, B256};
use cairo_vm::Felt252;
use itertools::Itertools;

use super::{error::FactCheckerError, fact_topology::FactTopology};

/// Node of the fact tree.
///
/// Leaves carry the keccak hash of their page data; per the module docs,
/// internal nodes are labeled `1 + keccak(node0, end0, node1, end1, ...)`.
#[derive(Debug, Clone)]
pub struct FactNode {
    /// Page hash (leaf) or 1 + keccak{children} (non-leaf)
    pub node_hash: B256,
    /// Total number of data words up to that node (including it and its children)
    pub end_offset: usize,
    /// Page size (leaf), or the sum of the children's page sizes (non-leaf)
    pub page_size: usize,
    /// Child nodes (empty for leaves)
    pub children: Vec<FactNode>,
}

/// Generates the root of the output Merkle tree for the program fact computation.
///
/// Transforms the flat fact topology into a (non-binary) Merkle tree and computes its
/// root, enriching the nodes with metadata such as page sizes, end offsets and hashes.
///
/// # Errors
/// Returns a `FactCheckerError` if the topology is inconsistent with the output
/// (page/node counts out of range, unconsumed pages, or offset mismatches).
pub fn generate_merkle_root(
    program_output: &[Felt252],
    fact_topology: &FactTopology,
) -> Result<FactNode, FactCheckerError> {
    let FactTopology { tree_structure, mut page_sizes } = fact_topology.clone();

    let mut end_offset: usize = 0;
    let mut node_stack: Vec<FactNode> = Vec::with_capacity(page_sizes.len());
    let mut output_iter = program_output.iter();

    for (n_pages, n_nodes) in tree_structure.into_iter().tuples() {
        if n_pages > page_sizes.len() {
            return Err(FactCheckerError::TreeStructurePagesCountOutOfRange(n_pages, page_sizes.len()));
        }

        // Push n_pages leaf nodes onto the stack; a leaf hash is the keccak of its page data.
        // (`drain` also replaces the previous O(n^2) repeated `remove(0)`.)
        for page_size in page_sizes.drain(..n_pages) {
            // Page size is already validated upon retrieving the topology.
            let page = output_iter.by_ref().take(page_size).map(|felt| felt.to_bytes_be().to_vec()).concat();
            end_offset += page_size;
            node_stack.push(FactNode { node_hash: keccak256(&page), end_offset, page_size, children: vec![] });
        }

        if n_nodes > node_stack.len() {
            return Err(FactCheckerError::TreeStructureNodesCountOutOfRange(n_nodes, node_stack.len()));
        }

        if n_nodes > 0 {
            // Create a parent node for the last n_nodes on the stack.
            let children: Vec<FactNode> = node_stack.drain(node_stack.len() - n_nodes..).collect();

            // Parent preimage: (node_hash, end_offset) pairs, each padded to 32 bytes
            // (see module docs).
            let mut node_data = Vec::with_capacity(2 * 32 * children.len());
            let mut total_page_size = 0;
            let mut child_end_offset = 0;

            for node in children.iter() {
                // BUGFIX: was `copy_from_slice`, which panics on length mismatch
                // (node_data starts empty) — we must append, not overwrite.
                node_data.extend_from_slice(node.node_hash.as_slice());
                node_data.extend_from_slice(&[0; 32 - (usize::BITS / 8) as usize]); // pad usize to 32 bytes
                // BUGFIX: per the module docs the hashed pair is (node, end), i.e. the
                // cumulative end offset — not the page size.
                node_data.extend_from_slice(&node.end_offset.to_be_bytes());
                total_page_size += node.page_size;
                child_end_offset = node.end_offset;
            }

            // BUGFIX: the module docs require non-leaf labels to be 1 + keccak(...) to
            // domain-separate leaves from internal nodes; the +1 was missing.
            // Add 1 with big-endian carry propagation over the 32-byte hash.
            let mut node_hash = keccak256(&node_data);
            for byte in node_hash.0.iter_mut().rev() {
                let (incremented, carried) = byte.overflowing_add(1);
                *byte = incremented;
                if !carried {
                    break;
                }
            }

            node_stack.push(FactNode {
                node_hash,
                end_offset: child_end_offset,
                page_size: total_page_size,
                children,
            })
        }
    }

    // BUGFIX: these invariants hold only after the whole tree structure has been
    // consumed; they were previously checked inside the loop, which rejected any
    // multi-pair topology (e.g. the module docs' own [(3, 2), (0, 2)] example).
    if node_stack.len() != 1 {
        return Err(FactCheckerError::TreeStructureRootInvalid);
    }
    if !page_sizes.is_empty() {
        return Err(FactCheckerError::TreeStructurePagesNotProcessed(page_sizes.len()));
    }
    if end_offset != program_output.len() {
        return Err(FactCheckerError::TreeStructureEndOffsetInvalid(end_offset, program_output.len()));
    }
    if node_stack[0].end_offset != program_output.len() {
        return Err(FactCheckerError::TreeStructureRootOffsetInvalid(
            node_stack[0].end_offset,
            program_output.len(),
        ));
    }

    Ok(node_stack.remove(0))
}
113 changes: 113 additions & 0 deletions src/fact_checker/fact_topology.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
//! Fact topology type and helpers.
//!
//! Ported from https://github.com/starkware-libs/cairo-lang/blob/master/src/starkware/cairo/bootloaders/fact_topology.py
use std::collections::HashMap;

use cairo_vm::{
types::builtin_name::BuiltinName,
vm::runners::cairo_pie::{BuiltinAdditionalData, CairoPie, PublicMemoryPage},
};

use super::error::FactCheckerError;

pub const GPS_FACT_TOPOLOGY: &str = "gps_fact_topology";

/// Flattened fact tree
#[derive(Debug, Clone)]
pub struct FactTopology {
/// List of pairs (n_pages, n_nodes)
pub tree_structure: Vec<usize>,
/// Page sizes (pages are leaf nodes)
pub page_sizes: Vec<usize>,
}

/// Returns the fact topology from the additional data of the output builtin.
///
/// When the `gps_fact_topology` attribute is present it is validated and used;
/// otherwise (and only when there are no extra output pages) the trivial
/// single-page topology `[1, 0]` is assumed.
pub fn get_fact_topology(cairo_pie: &CairoPie, output_size: usize) -> Result<FactTopology, FactCheckerError> {
    if let Some(BuiltinAdditionalData::Output(additional_data)) = cairo_pie.additional_data.0.get(&BuiltinName::output)
    {
        let tree_structure = match additional_data.attributes.get(GPS_FACT_TOPOLOGY) {
            Some(tree_structure) => {
                if tree_structure.is_empty() {
                    return Err(FactCheckerError::TreeStructureEmpty);
                }
                // Entries come in (n_pages, n_nodes) pairs, so the length must be even.
                if tree_structure.len() % 2 != 0 {
                    return Err(FactCheckerError::TreeStructureLenOdd);
                }
                // BUGFIX: the condition was inverted (`<= 10`), which rejected every
                // valid topology and accepted oversized ones.
                if tree_structure.len() > 10 {
                    return Err(FactCheckerError::TreeStructureTooLarge);
                }
                // NOTE(review): `2 << 30` is 2^31; confirm against the upstream
                // cairo-lang bound for tree-structure values before tightening.
                if tree_structure.iter().any(|&x| x >= 2 << 30) {
                    return Err(FactCheckerError::TreeStructureInvalid);
                }
                tree_structure.clone()
            }
            None => {
                // Without an explicit topology, extra pages cannot be interpreted.
                if !additional_data.pages.is_empty() {
                    return Err(FactCheckerError::OutputPagesLenUnexpected);
                }
                vec![1, 0]
            }
        };
        let page_sizes = get_page_sizes(&additional_data.pages, output_size)?;
        Ok(FactTopology { tree_structure, page_sizes })
    } else {
        // The output builtin must be present in a bootloader PIE; its absence means
        // the PIE is malformed, so fail loudly with context instead of a bare panic.
        panic!("Output builtin additional data is missing from the Cairo PIE")
    }
}

/// Returns the sizes of the program output pages, given the pages dictionary that appears
/// in the additional attributes of the output builtin.
///
/// The returned vector has the size of page 0 at index 0, followed by the sizes of
/// pages 1..n in id order.
pub fn get_page_sizes(
    pages: &HashMap<usize, PublicMemoryPage>,
    output_size: usize,
) -> Result<Vec<usize>, FactCheckerError> {
    // Flatten the map into (id, start, size) triples and order them by page id.
    let mut sorted_pages: Vec<(usize, usize, usize)> =
        pages.iter().map(|(&id, page)| (id, page.start, page.size)).collect();
    sorted_pages.sort();

    // Page ids must form the contiguous sequence 1, 2, 3, ...
    let mut next_page_id = 1;
    // The start of page 1 is unconstrained; each later page must begin exactly
    // where the previous one ended.
    let mut next_page_start: Option<usize> = None;

    // Slot 0 holds the size of page 0: the whole output when there are no other
    // pages, otherwise everything before page 1 begins.
    let mut page_sizes = Vec::with_capacity(sorted_pages.len() + 1);
    page_sizes.push(output_size);

    for (id, start, size) in sorted_pages {
        if id != next_page_id {
            return Err(FactCheckerError::OutputPagesUnexpectedId(id, next_page_id));
        }

        if id == 1 {
            if start == 0 || start >= output_size {
                return Err(FactCheckerError::OutputPagesInvalidStart(id, start, output_size));
            }
            page_sizes[0] = start;
        } else if Some(start) != next_page_start {
            return Err(FactCheckerError::OutputPagesUnexpectedStart(
                id,
                start,
                next_page_start.unwrap_or_default(),
            ));
        }

        if size == 0 || size >= output_size {
            return Err(FactCheckerError::OutputPagesInvalidSize(id, size, output_size));
        }

        next_page_start = Some(start + size);
        next_page_id += 1;
        page_sizes.push(size);
    }

    // Together the pages (beyond page 0) must cover the output exactly.
    if !pages.is_empty() && next_page_start != Some(output_size) {
        return Err(FactCheckerError::OutputPagesUncoveredOutput(next_page_start.unwrap_or_default(), output_size));
    }

    Ok(page_sizes)
}
Loading

0 comments on commit 7fd2b50

Please sign in to comment.