fix: response larger than 10 megabytes throwing errors
Ebedthan committed Aug 27, 2024
1 parent e1f097f commit 631d9f6
Showing 1 changed file with 13 additions and 2 deletions.
15 changes: 13 additions & 2 deletions src/cmd/search.rs
@@ -1,10 +1,13 @@
-use anyhow::{ensure, Result};
+use anyhow::{anyhow, ensure, Result};
 use serde::{Deserialize, Serialize};
+use std::io::Read;
 
 use crate::api::search::SearchAPI;
 use crate::cli;
 use crate::utils::{self, is_taxonomy_field, OutputFormat, SearchField};
 
+const INTO_STRING_LIMIT: usize = 20 * 1_024 * 1_024;
+
 #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Default)]
 #[serde(rename_all = "camelCase")]
 /// API search result struct
@@ -316,7 +319,15 @@ fn handle_xsv_response(
     needle: &str,
     args: &cli::search::SearchArgs,
 ) -> Result<String> {
-    let result = response.into_string()?;
+    let mut buf: Vec<u8> = vec![];
+    response
+        .into_reader()
+        .take((INTO_STRING_LIMIT + 1) as u64)
+        .read_to_end(&mut buf)?;
+    if buf.len() > INTO_STRING_LIMIT {
+        return Err(anyhow!("GTDB response is too big (> 20 MB) to convert to string. Please use JSON output format (-O json)"));
+    }
+    let result = String::from_utf8_lossy(&buf).to_string();
     if args.is_whole_words_matching() {
         filter_xsv(
             result.clone(),
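
The diff sidesteps the size cap hit by response.into_string() (the "larger than 10 megabytes" errors named in the commit title) by pulling the body through a take()-capped reader and converting the bytes manually. Below is a minimal standalone sketch of that pattern, assuming only that the body is available as a std::io::Read (which response.into_reader() yields here); the read_capped name and its error message are illustrative, not part of the repository:

use std::io::Read;

use anyhow::{anyhow, Result};

/// Read at most `limit` bytes into a String, erroring out if the body is larger.
fn read_capped(reader: impl Read, limit: usize) -> Result<String> {
    let mut buf: Vec<u8> = Vec::new();
    // Take one byte more than the limit so a body that exactly fills the
    // limit can be told apart from one that overflows it.
    reader.take((limit + 1) as u64).read_to_end(&mut buf)?;
    if buf.len() > limit {
        return Err(anyhow!("response is too big (> {} bytes)", limit));
    }
    // Lossy conversion mirrors the diff: invalid UTF-8 becomes U+FFFD
    // instead of failing the whole request.
    Ok(String::from_utf8_lossy(&buf).to_string())
}

Called as read_capped(response.into_reader(), INTO_STRING_LIMIT), this reproduces the behaviour the commit adds to handle_xsv_response.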
