Skip to content

Commit

Permalink
Cache cache_routes using worker cache api
Browse files Browse the repository at this point in the history
  • Loading branch information
ostenbom committed May 23, 2023
1 parent dbb5e60 commit 6db07ed
Show file tree
Hide file tree
Showing 6 changed files with 64 additions and 35 deletions.
13 changes: 7 additions & 6 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion linkup-cli/src/local_server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,7 +114,7 @@ async fn linkup_request_handler(
};

let extra_resp_headers =
additional_response_headers(req.path().to_string(), config.cache_routes);
additional_response_headers();

convert_reqwest_response(response, extra_resp_headers)
.await
Expand Down
21 changes: 1 addition & 20 deletions linkup/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
use async_trait::async_trait;
use rand::Rng;
use regex::Regex;
use std::collections::HashMap;
use thiserror::Error;

Expand Down Expand Up @@ -88,10 +87,7 @@ pub fn get_additional_headers(
additional_headers
}

pub fn additional_response_headers(
path: String,
cache_routes: Option<Vec<Regex>>,
) -> HashMap<String, String> {
pub fn additional_response_headers() -> HashMap<String, String> {
let mut headers = HashMap::new();

headers.insert(
Expand All @@ -102,21 +98,6 @@ pub fn additional_response_headers(
headers.insert("Access-Control-Allow-Headers".to_string(), "*".to_string());
headers.insert("Access-Control-Max-Age".to_string(), "86400".to_string());

// only insert the cache-control header if the path does not match any of the cache routes
if let Some(routes) = cache_routes {
if !routes.iter().any(|route| route.is_match(&path)) {
headers.insert(
"Cache-Control".to_string(),
"no-store".to_string(),
);
}
} else {
headers.insert(
"Cache-Control".to_string(),
"no-store".to_string(),
);
}

headers
}

Expand Down
2 changes: 1 addition & 1 deletion linkup/src/memory_session_store.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
use std::{
collections::HashMap,
sync::{Mutex, RwLock},
sync::{RwLock},
};

use async_trait::async_trait;
Expand Down
1 change: 1 addition & 0 deletions worker/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@ futures = "0.3"
console_error_panic_hook = { version = "0.1.1", optional = true }
http = "0.2.9"
reqwest = "0.11.17"
regex = "1.8.1"

# [profile.release]
# Tell `rustc` to optimize for small code size.
Expand Down
60 changes: 53 additions & 7 deletions worker/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use regex::Regex;
use std::{collections::HashMap, sync::Arc};

use kv_store::CfWorkerStringStore;
Expand Down Expand Up @@ -54,12 +55,45 @@ async fn linkup_session_handler(mut req: Request, sessions: SessionAllocator) ->
}
}

async fn linkup_request_handler(mut req: Request, sessions: SessionAllocator) -> Result<Response> {
let body_bytes = match req.bytes().await {
Ok(bytes) => bytes,
Err(_) => return plaintext_error("Bad or missing request body", 400),
};
/// Look up a previously cached response for `req` in the worker cache.
///
/// Only paths that match one of the session's configured `cache_routes`
/// are eligible; for all other paths (or when no routes are configured)
/// this returns `Ok(None)` without touching the cache. The cache is keyed
/// by the full request URL.
async fn get_cached_req(
    req: &Request,
    cache_routes: &Option<Vec<Regex>>,
) -> Result<Option<Response>> {
    let path = req.path();

    // Guard clauses: bail out early unless caching applies to this path.
    let Some(routes) = cache_routes else {
        return Ok(None);
    };
    if !routes.iter().any(|route| route.is_match(&path)) {
        return Ok(None);
    }

    let url = req.url()?;
    Cache::default().get(url.to_string(), false).await
}

/// Store a copy of `resp` in the worker cache when the request path matches
/// one of the configured `cache_routes`, then return the response to the
/// caller either way.
///
/// The entry is keyed by the full request URL. The response is cloned
/// before insertion because `Cache::put` consumes the response it is given,
/// while the original must still be returned to the client.
async fn set_cached_req(
    req: &Request,
    mut resp: Response,
    cache_routes: Option<Vec<Regex>>,
) -> Result<Response> {
    let path = req.path();

    let should_cache = cache_routes
        .map_or(false, |routes| routes.iter().any(|route| route.is_match(&path)));

    if should_cache {
        let url = req.url()?;
        let cache_copy = resp.cloned()?;
        Cache::default().put(url.to_string(), cache_copy).await?;
    }

    Ok(resp)
}

async fn linkup_request_handler(mut req: Request, sessions: SessionAllocator) -> Result<Response> {
let url = match req.url() {
Ok(url) => url.to_string(),
Err(_) => return plaintext_error("Bad or missing request url", 400),
Expand All @@ -77,6 +111,15 @@ async fn linkup_request_handler(mut req: Request, sessions: SessionAllocator) ->
Err(_) => return plaintext_error("Could not find a linkup session for this request. Use a linkup subdomain or context headers like Referer/tracestate", 422),
};

if let Some(cached_response) = get_cached_req(&req, &config.cache_routes).await? {
return Ok(cached_response);
}

let body_bytes = match req.bytes().await {
Ok(bytes) => bytes,
Err(_) => return plaintext_error("Bad or missing request body", 400),
};

let destination_url = match get_target_url(url.clone(), headers.clone(), &config, &session_name)
{
Some(result) => result,
Expand Down Expand Up @@ -104,9 +147,12 @@ async fn linkup_request_handler(mut req: Request, sessions: SessionAllocator) ->
Err(e) => return plaintext_error(format!("Failed to proxy request: {}", e), 502),
};

let extra_resp_headers = additional_response_headers(req.path(), config.cache_routes);

convert_reqwest_response_to_cf(response, extra_resp_headers).await
let mut cf_resp = convert_reqwest_response_to_cf(response, additional_response_headers()).await?;

cf_resp = set_cached_req(&req, cf_resp, config.cache_routes).await?;

Ok(cf_resp)
}

async fn linkup_ws_handler(req: Request, sessions: SessionAllocator) -> Result<Response> {
Expand Down

0 comments on commit 6db07ed

Please sign in to comment.