Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add max recursion depth param to avoid hydration stack overflow #74

Merged
merged 1 commit into from
Dec 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 12 additions & 1 deletion crates/parser/src/abi/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -377,7 +377,7 @@ impl AbiParser {
tokens_filtered
.into_iter()
.fold(HashMap::new(), |mut acc, (name, token)| {
acc.insert(name, Token::hydrate(token, &filtered));
acc.insert(name, Token::hydrate(token, &filtered, 10, 0));
acc
})
}
Expand Down Expand Up @@ -1141,4 +1141,15 @@ Composite {
}
filtered.iter().for_each(|(_, t)| check_token_inners(t));
}

#[test]
fn test_collect_tokens() {
Comment on lines +1145 to +1146
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would you mind moving this test out of this file, since it's becoming huge? In this case, having a parser_test.rs for now should suffice.

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Or use include_str! and put the Sierra file inside a /test_data folder.

let sierra_abi = include_str!("../../test_data/cairo_ls_abi.json");
let sierra = serde_json::from_str::<SierraClass>(sierra_abi).unwrap();
let tokens = AbiParser::collect_tokens(&sierra.abi, &HashMap::new()).unwrap();
assert_ne!(tokens.enums.len(), 0);
assert_ne!(tokens.functions.len(), 0);
assert_ne!(tokens.interfaces.len(), 0);
assert_ne!(tokens.structs.len(), 0);
}
}
77 changes: 68 additions & 9 deletions crates/parser/src/tokens/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,27 +136,49 @@ impl Token {
///
/// * `token` - The token to hydrate.
/// * `filtered` - A map of type path to token that have already been hydrated.
/// * `recursion_max_depth` - Maximum recursion depth allowed when hydrating the token.
/// * `iteration_count` - Current iteration count.
///
pub fn hydrate(token: Self, filtered: &HashMap<String, Token>) -> Self {
pub fn hydrate(
token: Self,
filtered: &HashMap<String, Token>,
recursion_max_depth: usize,
iteration_count: usize,
) -> Self {
if recursion_max_depth < iteration_count {
return token;
}
match token {
Token::CoreBasic(_) | Token::GenericArg(_) => token,
Token::Array(arr) => Token::Array(Array {
inner: Box::new(Self::hydrate(*arr.inner, filtered)),
inner: Box::new(Self::hydrate(
*arr.inner,
filtered,
recursion_max_depth,
iteration_count + 1,
)),
type_path: arr.type_path,
is_legacy: arr.is_legacy,
}),
Token::Tuple(tup) => Token::Tuple(Tuple {
inners: tup
.inners
.into_iter()
.map(|inner| Self::hydrate(inner, filtered))
.map(|inner| {
Self::hydrate(inner, filtered, recursion_max_depth, iteration_count + 1)
})
.collect(),
type_path: tup.type_path,
}),
Token::Composite(comp) => {
if comp.r#type == CompositeType::Unknown && !comp.is_builtin() {
if let Some(hydrated) = filtered.get(&comp.type_path) {
return Token::hydrate(hydrated.clone(), filtered);
return Token::hydrate(
hydrated.clone(),
filtered,
recursion_max_depth,
iteration_count + 1,
);
} else {
panic!("Composite {} not found in filtered tokens", comp.type_path);
}
Expand All @@ -170,13 +192,28 @@ impl Token {
index: i.index,
name: i.name,
kind: i.kind,
token: Self::hydrate(i.token, filtered),
token: Self::hydrate(
i.token,
filtered,
recursion_max_depth,
iteration_count + 1,
),
})
.collect(),
generic_args: comp
.generic_args
.into_iter()
.map(|(name, token)| (name, Self::hydrate(token, filtered)))
.map(|(name, token)| {
(
name,
Self::hydrate(
token,
filtered,
recursion_max_depth,
iteration_count + 1,
),
)
})
.collect(),
r#type: comp.r#type,
is_event: comp.is_event,
Expand All @@ -188,17 +225,39 @@ impl Token {
inputs: func
.inputs
.into_iter()
.map(|(name, token)| (name, Self::hydrate(token, filtered)))
.map(|(name, token)| {
(
name,
Self::hydrate(
token,
filtered,
recursion_max_depth,
iteration_count + 1,
),
)
})
.collect(),
outputs: func
.outputs
.into_iter()
.map(|token| Self::hydrate(token, filtered))
.map(|token| {
Self::hydrate(token, filtered, recursion_max_depth, iteration_count + 1)
})
.collect(),
named_outputs: func
.named_outputs
.into_iter()
.map(|(name, token)| (name, Self::hydrate(token, filtered)))
.map(|(name, token)| {
(
name,
Self::hydrate(
token,
filtered,
recursion_max_depth,
iteration_count + 1,
),
)
})
.collect(),
state_mutability: func.state_mutability,
}),
Expand Down
6 changes: 6 additions & 0 deletions crates/parser/test_data/cairo_ls_abi.json

Large diffs are not rendered by default.

Loading