Reduce panics (#3)
Signed-off-by: lloydmeta <[email protected]>
lloydmeta authored Oct 30, 2024
1 parent 09402b0 commit a119b06
Showing 5 changed files with 70 additions and 57 deletions.
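This change set replaces panicking calls (`expect`, `panic!`) with `anyhow`-based error propagation, so a failed serialization or S3 call returns an error to the caller instead of aborting the whole Lambda invocation. A rough illustration of the pattern being applied throughout (the `Request` type here is hypothetical, standing in for the real request structs):

```rust
use anyhow::Context;

// Hypothetical stand-in for the real request structs.
#[derive(serde::Serialize)]
struct Request {
    url: String,
}

// Before: a serialization failure panicked the whole invocation.
fn cache_key_before(req: &Request) -> String {
    serde_json::to_string(req).expect("Could not JSON-ify, oddly")
}

// After: the failure is wrapped with context and bubbled up with `?`,
// letting the caller decide how to respond (e.g. a 500 instead of a crash).
fn cache_key_after(req: &Request) -> anyhow::Result<String> {
    serde_json::to_string(req).context("Could not JSON-ify to cache key.")
}
```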
8 changes: 4 additions & 4 deletions README.md
@@ -1,7 +1,7 @@
# Miniaturs
# miniaturs
[![Continuous integration](https://github.com/lloydmeta/miniaturs/actions/workflows/ci.yaml/badge.svg)](https://github.com/lloydmeta/miniaturs/actions/workflows/ci.yaml)

HTTP image resizer
Tiny HTTP image resizer.

## Goals

@@ -21,10 +21,10 @@ To fulfil the above:

* Runs in a Lambda
* Rust ⚡️
* Caching in layers: CDN with S3 for images
* Caching in layers: CDN to protect the app, with S3 for storing images
* Serverless, but built on HTTP framework ([cargo-lambda](https://www.cargo-lambda.info) on top of [axum](https://github.com/tokio-rs/axum))

An example Terraform config in `terraform/prod` is provided to show how to deploy at a subdomain using Cloudflare as our (free!) CDN + WAF.
An example Terraform config in `terraform/prod` is provided to show how to deploy at a subdomain using Cloudflare as our ([free!](https://www.cloudflare.com/en-gb/plans/free/)) CDN + WAF with AWS Lambda and S3 ([also free!](https://aws.amazon.com/free/))

## Usage:

12 changes: 7 additions & 5 deletions server/src/api/routing/handlers.rs
@@ -72,7 +72,7 @@ async fn resize(
let maybe_cached_resized_image = app_components
.processed_images_cacher
.get(&processed_image_request)
.await;
.await?;

if let Some(cached_resized_image) = maybe_cached_resized_image {
let mut response_headers = HeaderMap::new();
@@ -91,7 +91,7 @@ async fn resize(
let maybe_cached_fetched_image = app_components
.unprocessed_images_cacher
.get(&unprocessed_cache_retrieve_req)
.await;
.await?;

let (response_status_code, bytes, maybe_content_type_string) =
if let Some(cached_fetched) = maybe_cached_fetched_image {
@@ -117,7 +117,7 @@ async fn resize(
app_components
.unprocessed_images_cacher
.set(&bytes, &cache_fetched_req)
.await;
.await?;

let response_status_code = StatusCode::from_u16(status_code.as_u16())?;
(response_status_code, bytes, maybe_content_type_string)
@@ -161,7 +161,7 @@ async fn resize(
app_components
.processed_images_cacher
.set(&written_bytes, &cache_image_req)
.await;
.await?;

let mut response_headers = HeaderMap::new();
let maybe_content_type_header = maybe_content_type_string
@@ -305,7 +305,9 @@ fn handle_panic(err: Box<dyn Any + Send + 'static>) -> Response<Full<Bytes>> {

let error = Standard { message: details };

let body = serde_json::to_string(&error).expect("Could not marshal error message");
let body = serde_json::to_string(&error).unwrap_or_else(|e| {
format!("{{\"message\": \"Could not serialise error message [{e}]\"}}")
});

Response::builder()
.status(StatusCode::INTERNAL_SERVER_ERROR)
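The `handle_panic` change above applies the same idea inside the panic handler itself: if serialising the error body fails, fall back to a hand-built JSON string rather than panicking again. A minimal sketch (the `Standard` type is simplified from the handler's error shape):

```rust
#[derive(serde::Serialize)]
struct Standard {
    message: String,
}

fn panic_body(details: String) -> String {
    let error = Standard { message: details };
    // Never panic while building the panic response: if serialization fails,
    // emit a hand-written JSON object describing that failure instead.
    serde_json::to_string(&error).unwrap_or_else(|e| {
        format!("{{\"message\": \"Could not serialise error message [{e}]\"}}")
    })
}
```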
2 changes: 1 addition & 1 deletion server/src/infra/config.rs
@@ -33,7 +33,7 @@ pub struct AwsSettings {
}

impl Config {
pub async fn load_env() -> Result<Config, anyhow::Error> {
pub async fn load_env() -> anyhow::Result<Config> {
let shared_secret = env::var(SHARED_SECRET_ENV_KEY)
.context("Expected {SHARED_SECRET_ENV_KEY} to be defined")?;

103 changes: 56 additions & 47 deletions server/src/infra/image_caching.rs
@@ -1,5 +1,6 @@
use std::collections::HashMap;

use anyhow::Context;
use aws_sdk_s3::{
error::DisplayErrorContext,
primitives::{ByteStream, SdkBody},
@@ -67,17 +68,17 @@
GetRequest: CacheGettable<Cached = SetRequest>,
SetRequest: CacheSettable<Retrieve = GetRequest>,
{
async fn get(&self, req: &GetRequest) -> Option<Retrieved<SetRequest>>;
async fn set(&self, bytes: &[u8], req: &SetRequest);
async fn get(&self, req: &GetRequest) -> anyhow::Result<Option<Retrieved<SetRequest>>>;
async fn set(&self, bytes: &[u8], req: &SetRequest) -> anyhow::Result<()>;
}

pub trait CacheGettable {
type Cached;
fn cache_key(&self) -> CacheKey;
fn cache_key(&self) -> anyhow::Result<CacheKey>;
}
pub trait CacheSettable: CacheGettable {
type Retrieve;
fn metadata(&self) -> Metadata;
fn metadata(&self) -> anyhow::Result<Metadata>;
}

#[derive(Clone)]
@@ -100,8 +101,8 @@
GetReq: CacheGettable<Cached = SetReq>,
SetReq: CacheSettable<Retrieve = GetReq> + DeserializeOwned,
{
async fn get(&self, req: &GetReq) -> Option<Retrieved<SetReq>> {
let cache_key = req.cache_key();
async fn get(&self, req: &GetReq) -> anyhow::Result<Option<Retrieved<SetReq>>> {
let cache_key = req.cache_key()?;

let cache_retrieve_attempt = self
.client
@@ -113,19 +114,24 @@
match cache_retrieve_attempt {
Ok(retreived) => {
// If we can't retrieve metadata, it's dead to us !
let s3_metadata = retreived.metadata()?.get(METADATA_JSON_KEY)?;
let as_original_requested = serde_json::from_str(s3_metadata).ok()?;

let bytes: Vec<u8> = retreived
.body
.collect()
.await
.expect("Failure to retrieve from S3")
.to_vec();

Some(Retrieved {
bytes,
requested: as_original_requested,
let maybe_s3_metadata = retreived.metadata().and_then(|m| m.get(METADATA_JSON_KEY));
let maybe_as_original_requested =
maybe_s3_metadata.and_then(|m| serde_json::from_str(m).ok());

Ok(match maybe_as_original_requested {
Some(as_original_requested) => {
let bytes: Vec<u8> = retreived
.body
.collect()
.await
.map_err(|e| anyhow::anyhow!("Failure to retrieve from S3 [{e}]"))?
.to_vec();
Some(Retrieved {
bytes,
requested: as_original_requested,
})
}
None => None,
})
}
Err(sdk_err)
@@ -134,18 +140,18 @@
.filter(|e| e.is_no_such_key())
.is_some() =>
{
None
Ok(None)
}
// Anything else is fucked.
Err(other) => panic!("AWS S3 SDK error: [{}]", DisplayErrorContext(other)),
Err(other) => anyhow::bail!("AWS S3 SDK error: [{}]", DisplayErrorContext(other)),
}
}

async fn set(&self, bytes: &[u8], req: &SetReq) {
async fn set(&self, bytes: &[u8], req: &SetReq) -> anyhow::Result<()> {
let body_stream = ByteStream::new(SdkBody::from(bytes));

let metadata = req.metadata();
let cache_key = req.cache_key();
let metadata = req.metadata()?;
let cache_key = req.cache_key()?;

self.client
.put_object()
@@ -155,7 +161,8 @@ where
.body(body_stream)
.send()
.await
.expect("Writing to S3 failed.");
.map_err(|e| anyhow::anyhow!("Writing to S3 failed [{e}]"))?;
Ok(())
}
}

@@ -166,49 +173,51 @@ pub struct CacheKey(String);
static METADATA_JSON_KEY: &str = "_metadata_json";
impl CacheGettable for ImageResizeRequest {
type Cached = ImageResizedCacheRequest;
fn cache_key(&self) -> CacheKey {
let as_json = serde_json::to_string(self).expect("Could not JSON-ify, oddly");
fn cache_key(&self) -> anyhow::Result<CacheKey> {
let as_json = serde_json::to_string(self).context("Could not JSON-ify to cache key.")?;
let sha256ed = sha256::digest(as_json);
CacheKey(sha256ed)
Ok(CacheKey(sha256ed))
}
}
impl CacheGettable for ImageResizedCacheRequest {
type Cached = Self;
fn cache_key(&self) -> CacheKey {
fn cache_key(&self) -> anyhow::Result<CacheKey> {
self.request.cache_key()
}
}
impl CacheSettable for ImageResizedCacheRequest {
type Retrieve = ImageResizeRequest;
fn metadata(&self) -> Metadata {
let as_json_string = serde_json::to_string(self).expect("Could not JSON-ify, oddly");
fn metadata(&self) -> anyhow::Result<Metadata> {
let as_json_string =
serde_json::to_string(self).context("Could not JSON-ify to metadata.")?;
let mut map = HashMap::new();
map.insert(METADATA_JSON_KEY.to_string(), as_json_string);
Metadata(map)
Ok(Metadata(map))
}
}

impl CacheGettable for ImageFetchRequest {
type Cached = ImageFetchedCacheRequest;
fn cache_key(&self) -> CacheKey {
let as_json = serde_json::to_string(self).expect("Could not JSON-ify, oddly");
fn cache_key(&self) -> anyhow::Result<CacheKey> {
let as_json = serde_json::to_string(self).context("Could not JSON-ify to cache key.")?;
let sha256ed = sha256::digest(as_json);
CacheKey(sha256ed)
Ok(CacheKey(sha256ed))
}
}
impl CacheGettable for ImageFetchedCacheRequest {
type Cached = Self;
fn cache_key(&self) -> CacheKey {
fn cache_key(&self) -> anyhow::Result<CacheKey> {
self.request.cache_key()
}
}
impl CacheSettable for ImageFetchedCacheRequest {
type Retrieve = ImageFetchRequest;
fn metadata(&self) -> Metadata {
let as_json_string = serde_json::to_string(self).expect("Could not JSON-ify, oddly");
fn metadata(&self) -> anyhow::Result<Metadata> {
let as_json_string =
serde_json::to_string(self).context("Could not JSON-ify to metadata.")?;
let mut map = HashMap::new();
map.insert(METADATA_JSON_KEY.to_string(), as_json_string);
Metadata(map)
Ok(Metadata(map))
}
}

@@ -281,7 +290,7 @@ mod tests {
target_height: 100,
})),
};
let key = req.cache_key();
let key = req.cache_key()?;
assert!(key.0.len() < 1024);
Ok(())
}
@@ -298,7 +307,7 @@
},
content_type: "image/png".to_string(),
};
let metadata = req.metadata();
let metadata = req.metadata()?;

let mut expected = HashMap::new();
let metadata_as_json = serde_json::to_string(&req).unwrap();
@@ -325,7 +334,7 @@
})),
};
let retrieved = s3_image_cacher.get(&req).await;
assert!(retrieved.is_none());
assert!(retrieved?.is_none());
Ok(())
}

@@ -350,10 +359,10 @@
request: req.clone(),
content_type: "image/png".to_string(),
};
s3_image_cacher.set(content, &image_set_req).await;
s3_image_cacher.set(content, &image_set_req).await?;
let retrieved = s3_image_cacher
.get(&req)
.await
.await?
.expect("Cached image should be retrievable");
assert_eq!(content, retrieved.bytes.as_slice());
assert_eq!(image_set_req, retrieved.requested);
Expand All @@ -380,10 +389,10 @@ mod tests {
request: req.clone(),
content_type: "image/png".to_string(),
};
s3_image_cacher.set(content, &image_set_req).await;
s3_image_cacher.set(content, &image_set_req).await?;

let cache_key = req.cache_key();
let metadata_from_req = image_set_req.metadata();
let cache_key = req.cache_key()?;
let metadata_from_req = image_set_req.metadata()?;

let retrieved = s3_client()
.await
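For context on the `image_caching.rs` changes: cache keys are the request serialised to JSON and SHA-256 hashed (the tests assert they stay under 1024 bytes, S3's object-key limit), and the same JSON is stored as S3 object metadata under `_metadata_json` so the original request can be reconstructed on retrieval; both steps are now fallible instead of panicking. A simplified, self-contained sketch (the `ImageRequest` fields are hypothetical):

```rust
use std::collections::HashMap;

use anyhow::Context;

#[derive(serde::Serialize)]
struct ImageRequest {
    source_url: String,
    target_width: u32,
    target_height: u32,
}

// Cache key: JSON-encode the request, then hash it so the key stays short.
fn cache_key(req: &ImageRequest) -> anyhow::Result<String> {
    let as_json = serde_json::to_string(req).context("Could not JSON-ify to cache key.")?;
    Ok(sha256::digest(as_json))
}

// Metadata: store the same JSON under a well-known key on the S3 object.
fn metadata(req: &ImageRequest) -> anyhow::Result<HashMap<String, String>> {
    let as_json = serde_json::to_string(req).context("Could not JSON-ify to metadata.")?;
    Ok(HashMap::from([("_metadata_json".to_string(), as_json)]))
}
```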
2 changes: 2 additions & 0 deletions server/src/main.rs
@@ -315,6 +315,7 @@ mod tests {
.unprocessed_images_cacher
.get(&unprocessed_cache_retrieve_req)
.await
.unwrap()
}

async fn retrieve_processed_cached(
@@ -331,6 +332,7 @@
.processed_images_cacher
.get(&processed_cache_retrieve_req)
.await
.unwrap()
}

fn signed_resize_path(
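The test changes above just add `.unwrap()` at the call sites, since the cacher's `get` now returns a `Result`. For reference, a rough sketch of the cacher trait's shape after this commit (names and generics simplified here, not the crate's exact definition):

```rust
// Simplified: both cache operations are now fallible, so callers choose
// between `?` in handlers and `.unwrap()` in tests.
#[allow(async_fn_in_trait)]
pub trait SimpleCacher<Req, Cached> {
    async fn get(&self, req: &Req) -> anyhow::Result<Option<Cached>>;
    async fn set(&self, bytes: &[u8], req: &Req) -> anyhow::Result<()>;
}
```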
