use actix_form_data::{Field, Form, Value};
use actix_fs::file;
use actix_web::{
    client::Client,
    guard,
    http::{
        header::{
            CacheControl, CacheDirective, ContentRange, ContentRangeSpec, Header, LastModified,
            ACCEPT_RANGES, CONTENT_LENGTH,
        },
        HeaderValue,
    },
    middleware::{Compress, Logger},
    web, App, HttpRequest, HttpResponse, HttpServer,
};
use bytes::Bytes;
use futures::{
    stream::{Stream, TryStreamExt},
    StreamExt,
};
use once_cell::sync::Lazy;
use std::{
    collections::HashSet, convert::TryInto, io, path::PathBuf, sync::Once, time::SystemTime,
};
use structopt::StructOpt;
use tracing::{debug, error, info, instrument, Span};
use tracing_subscriber::EnvFilter;

mod config;
mod error;
mod middleware;
mod migrate;
mod processor;
mod upload_manager;
mod validate;

use self::{
    config::{Config, Format},
    error::UploadError,
    middleware::{Internal, Tracing},
    processor::process_image,
    upload_manager::{Details, UploadManager},
    validate::{image_webp, video_mp4},
};

const MEGABYTES: usize = 1024 * 1024;
const MINUTES: u32 = 60;
const HOURS: u32 = 60 * MINUTES;
const DAYS: u32 = 24 * HOURS;

static TMP_DIR: Lazy<PathBuf> = Lazy::new(|| {
    use rand::{
        distributions::{Alphanumeric, Distribution},
        thread_rng,
    };

    let mut rng = thread_rng();
    let tmp_nonce = Alphanumeric
        .sample_iter(&mut rng)
        .take(7)
        .collect::<String>();

    let mut path = std::env::temp_dir();
    path.push(format!("pict-rs-{}", tmp_nonce));
    path
});
static CONFIG: Lazy<Config> = Lazy::new(Config::from_args);
static MAGICK_INIT: Once = Once::new();

// Try moving a file
#[instrument]
async fn safe_move_file(from: PathBuf, to: PathBuf) -> Result<(), UploadError> {
    if let Some(path) = to.parent() {
        debug!("Creating directory {:?}", path);
        actix_fs::create_dir_all(path.to_owned()).await?;
    }

    debug!("Checking if {:?} already exists", to);
    if let Err(e) = actix_fs::metadata(to.clone()).await {
        if e.kind() != Some(std::io::ErrorKind::NotFound) {
            return Err(e.into());
        }
    } else {
        return Err(UploadError::FileExists);
    }

    debug!("Moving {:?} to {:?}", from, to);
    actix_fs::copy(from.clone(), to).await?;
    actix_fs::remove_file(from).await?;
    Ok(())
}

async fn safe_create_parent(path: PathBuf) -> Result<(), UploadError> {
    if let Some(path) = path.parent() {
        debug!("Creating directory {:?}", path);
        actix_fs::create_dir_all(path.to_owned()).await?;
    }

    Ok(())
}

// Try writing to a file
#[instrument(skip(bytes))]
async fn safe_save_file(path: PathBuf, bytes: bytes::Bytes) -> Result<(), UploadError> {
    if let Some(path) = path.parent() {
        // create the directory for the file
        debug!("Creating directory {:?}", path);
        actix_fs::create_dir_all(path.to_owned()).await?;
    }

    // Only write the file if it doesn't already exist
    debug!("Checking if {:?} already exists", path);
    if let Err(e) = actix_fs::metadata(path.clone()).await {
        if e.kind() != Some(std::io::ErrorKind::NotFound) {
            return Err(e.into());
        }
    } else {
        return Ok(());
    }

    // Open the file for writing
    debug!("Creating {:?}", path);
    let file = actix_fs::file::create(path.clone()).await?;

    // try writing
    debug!("Writing to {:?}", path);
    if let Err(e) = actix_fs::file::write(file, bytes).await {
        error!("Error writing {:?}, {}", path, e);
        // remove file if writing failed before completion
        actix_fs::remove_file(path).await?;
        return Err(e.into());
    }
    debug!("{:?} written", path);

    Ok(())
}

pub(crate) fn tmp_file() -> PathBuf {
    use rand::distributions::{Alphanumeric, Distribution};
    let limit: usize = 10;
    let rng = rand::thread_rng();

    let s: String = Alphanumeric.sample_iter(rng).take(limit).collect();

    let name = format!("{}.tmp", s);

    let mut path = TMP_DIR.clone();
    path.push(&name);

    path
}

fn to_ext(mime: mime::Mime) -> Result<&'static str, UploadError> {
    if mime == mime::IMAGE_PNG {
        Ok(".png")
    } else if mime == mime::IMAGE_JPEG {
        Ok(".jpg")
    } else if mime == video_mp4() {
        Ok(".mp4")
    } else if mime == image_webp() {
        Ok(".webp")
    } else {
        Err(UploadError::UnsupportedFormat)
    }
}

/// Handle responding to successful uploads
#[instrument(skip(value, manager))]
async fn upload(
    value: Value,
    manager: web::Data<UploadManager>,
) -> Result<HttpResponse, UploadError> {
    let images = value
        .map()
        .and_then(|mut m| m.remove("images"))
        .and_then(|images| images.array())
        .ok_or(UploadError::NoFiles)?;

    let mut files = Vec::new();
    for image in images.into_iter().filter_map(|i| i.file()) {
        if let Some(alias) = image
            .saved_as
            .as_ref()
            .and_then(|s| s.file_name())
            .and_then(|s| s.to_str())
        {
            info!("Uploaded {} as {:?}", image.filename, alias);
            let delete_token = manager.delete_token(alias.to_owned()).await?;
            let name = manager.from_alias(alias.to_owned()).await?;
            let mut path = manager.image_dir();
            path.push(name.clone());

            let details = manager.variant_details(path.clone(), name.clone()).await?;

            let details = if let Some(details) = details {
                details
            } else {
                let new_details = Details::from_path(path.clone()).await?;
                manager
                    .store_variant_details(path, name, &new_details)
                    .await?;
                new_details
            };

            files.push(serde_json::json!({
                "file": alias,
                "delete_token": delete_token,
                "details": details,
            }));
        }
    }

    Ok(HttpResponse::Created().json(serde_json::json!({
        "msg": "ok",
        "files": files
    })))
}

#[derive(Debug, serde::Deserialize)]
struct UrlQuery {
    url: String,
}

/// Download an image from a URL
#[instrument(skip(client, manager))]
async fn download(
    client: web::Data<Client>,
    manager: web::Data<UploadManager>,
    query: web::Query<UrlQuery>,
) -> Result<HttpResponse, UploadError> {
    let mut res = client.get(&query.url).send().await?;

    if !res.status().is_success() {
        return Err(UploadError::Download(res.status()));
    }

    let fut = res.body().limit(CONFIG.max_file_size() * MEGABYTES);

    let stream = Box::pin(futures::stream::once(fut));

    let alias = manager.upload(stream).await?;
    let delete_token = manager.delete_token(alias.clone()).await?;

    let name = manager.from_alias(alias.to_owned()).await?;
    let mut path = manager.image_dir();
    path.push(name.clone());

    let details = manager.variant_details(path.clone(), name.clone()).await?;

    let details = if let Some(details) = details {
        details
    } else {
        let new_details = Details::from_path(path.clone()).await?;
        manager
            .store_variant_details(path, name, &new_details)
            .await?;
        new_details
    };

    Ok(HttpResponse::Created().json(serde_json::json!({
        "msg": "ok",
        "files": [{
            "file": alias,
            "delete_token": delete_token,
            "details": details,
        }]
    })))
}

/// Delete aliases and files
#[instrument(skip(manager))]
async fn delete(
    manager: web::Data<UploadManager>,
    path_entries: web::Path<(String, String)>,
) -> Result<HttpResponse, UploadError> {
    let (alias, token) = path_entries.into_inner();

    manager.delete(token, alias).await?;

    Ok(HttpResponse::NoContent().finish())
}

type ProcessQuery = Vec<(String, String)>;

/// Parse a process query into a processing chain, output format, source name, and thumbnail path
async fn prepare_process(
    query: web::Query<ProcessQuery>,
    ext: &str,
    manager: &UploadManager,
    whitelist: &Option<HashSet<String>>,
) -> Result<(processor::ProcessChain, Format, String, PathBuf), UploadError> {
    let (alias, operations) =
        query
            .into_inner()
            .into_iter()
            .fold((String::new(), Vec::new()), |(s, mut acc), (k, v)| {
                if k == "src" {
                    (v, acc)
                } else {
                    acc.push((k, v));
                    (s, acc)
                }
            });

    if alias == "" {
        return Err(UploadError::MissingFilename);
    }

    let name = manager.from_alias(alias).await?;

    let operations = if let Some(whitelist) = whitelist.as_ref() {
        operations
            .into_iter()
            .filter(|(k, _)| whitelist.contains(&k.to_lowercase()))
            .collect()
    } else {
        operations
    };

    let chain = self::processor::build_chain(&operations);

    let format = ext
        .parse::<Format>()
        .map_err(|_| UploadError::UnsupportedFormat)?;
    let processed_name = format!("{}.{}", name, ext);
    let base = manager.image_dir();
    let thumbnail_path = self::processor::build_path(base, &chain, processed_name);

    Ok((chain, format, name, thumbnail_path))
}

async fn process_details(
    query: web::Query<ProcessQuery>,
    ext: web::Path<String>,
    manager: web::Data<UploadManager>,
    whitelist: web::Data<Option<HashSet<String>>>,
) -> Result<HttpResponse, UploadError> {
    let (_, _, name, thumbnail_path) =
        prepare_process(query, ext.as_str(), &manager, &whitelist).await?;

    let details = manager.variant_details(thumbnail_path, name).await?;

    let details = details.ok_or(UploadError::NoFiles)?;

    Ok(HttpResponse::Ok().json(details))
}

/// Process files
#[instrument(skip(manager, whitelist))]
async fn process(
    req: HttpRequest,
    query: web::Query<ProcessQuery>,
    ext: web::Path<String>,
    manager: web::Data<UploadManager>,
    whitelist: web::Data<Option<HashSet<String>>>,
) -> Result<HttpResponse, UploadError> {
    let (chain, format, name, thumbnail_path) =
        prepare_process(query, ext.as_str(), &manager, &whitelist).await?;

    // If the thumbnail doesn't exist, we need to create it
    let thumbnail_exists = if let Err(e) = actix_fs::metadata(thumbnail_path.clone()).await {
        if e.kind() != Some(std::io::ErrorKind::NotFound) {
            error!("Error looking up processed image, {}", e);
            return Err(e.into());
        }
        false
    } else {
        true
    };

    let details = manager
        .variant_details(thumbnail_path.clone(), name.clone())
        .await?;

    if !thumbnail_exists || details.is_none() {
        let mut original_path = manager.image_dir();
        original_path.push(name.clone());

        // Create and save a JPG for motion images (gif, mp4)
        if let Some((updated_path, exists)) =
            self::processor::prepare_image(original_path.clone()).await?
        {
            original_path = updated_path.clone();

            if exists.is_new() {
                // Save the transcoded file in another task
                debug!("Spawning storage task");
                let span = Span::current();
                let manager2 = manager.clone();
                let name = name.clone();
                actix_rt::spawn(async move {
                    let entered = span.enter();
                    if let Err(e) = manager2.store_variant(updated_path, name).await {
                        error!("Error storing variant, {}", e);
                        return;
                    }
                    drop(entered);
                });
            }
        }

        // apply chain to the provided image
        let img_bytes = process_image(original_path.clone(), chain, format).await?;

        let path2 = thumbnail_path.clone();
        let img_bytes2 = img_bytes.clone();

        let store_details = details.is_none();

        let details = if let Some(details) = details {
            details
        } else {
            let details = Details::from_bytes(&img_bytes)?;
            manager
                .store_variant_details(path2.clone(), name.clone(), &details)
                .await?;
            details
        };

        // Save the file in another task, we want to return the thumbnail now
        debug!("Spawning storage task");
        let span = Span::current();
        let details2 = details.clone();
        actix_rt::spawn(async move {
            let entered = span.enter();
            if store_details {
                debug!("Storing details");
                if let Err(e) = manager
                    .store_variant_details(path2.clone(), name.clone(), &details2)
                    .await
                {
                    error!("Error storing details, {}", e);
                    return;
                }
            }

            if let Err(e) = manager.store_variant(path2.clone(), name).await {
                error!("Error storing variant, {}", e);
                return;
            }

            if let Err(e) = safe_save_file(path2, img_bytes2).await {
                error!("Error saving file, {}", e);
            }
            drop(entered);
        });

        match req.headers().get("Range") {
            Some(range_head) => {
                let range = parse_range_header(range_head)?;

                let resp_bytes = img_bytes.slice(range[0] as usize..range[1] as usize);
                let stream = Box::pin(futures::stream::once(async move {
                    Ok(resp_bytes) as Result<_, UploadError>
                }));

                return Ok(srv_ranged_response(
                    stream,
                    details.content_type(),
                    7 * DAYS,
                    details.system_time(),
                    Some((range[0], range[1])),
                    Some(img_bytes.len() as u64),
                ));
            }
            None => {
                return Ok(srv_response(
                    Box::pin(futures::stream::once(async {
                        Ok(img_bytes) as Result<_, UploadError>
                    })),
                    details.content_type(),
                    7 * DAYS,
                    details.system_time(),
                ));
            }
        };
    }

    let details = if let Some(details) = details {
        details
    } else {
        let details = Details::from_path(thumbnail_path.clone()).await?;
        manager
            .store_variant_details(thumbnail_path.clone(), name, &details)
            .await?;
        details
    };

    ranged_file_resp(thumbnail_path, req, details).await
}

/// Fetch file details
async fn details(
    alias: web::Path<String>,
    manager: web::Data<UploadManager>,
) -> Result<HttpResponse, UploadError> {
    let name = manager.from_alias(alias.into_inner()).await?;
    let mut path = manager.image_dir();
    path.push(name.clone());

    let details = manager.variant_details(path.clone(), name.clone()).await?;

    let details = if let Some(details) = details {
        details
    } else {
        let new_details = Details::from_path(path.clone()).await?;
        manager
            .store_variant_details(path.clone(), name, &new_details)
            .await?;
        new_details
    };

    Ok(HttpResponse::Ok().json(details))
}

/// Serve files
#[instrument(skip(manager))]
async fn serve(
    req: web::HttpRequest,
    alias: web::Path<String>,
    manager: web::Data<UploadManager>,
) -> Result<HttpResponse, UploadError> {
    let name = manager.from_alias(alias.into_inner()).await?;
    let mut path = manager.image_dir();
    path.push(name.clone());

    let details = manager.variant_details(path.clone(), name.clone()).await?;

    let details = if let Some(details) = details {
        details
    } else {
        let details = Details::from_path(path.clone()).await?;
        manager
            .store_variant_details(path.clone(), name, &details)
            .await?;
        details
    };

    ranged_file_resp(path, req, details).await
}

/// Parse a simple "bytes=start-end" Range header into a [start, end] pair
fn parse_range_header(range_head: &HeaderValue) -> Result<Vec<u64>, UploadError> {
    let range_head_str = range_head.to_str().map_err(|_| {
        UploadError::ParseReq("Range header contains non-utf8 characters".to_string())
    })?;

    let range_dashed = range_head_str
        .split('=')
        .skip(1)
        .next()
        .ok_or(UploadError::ParseReq("Malformed Range header".to_string()))?;

    let range: Vec<u64> = range_dashed
        .split('-')
        .map(|s| s.parse::<u64>())
        .collect::<Result<Vec<u64>, _>>()
        .map_err(|_| {
            UploadError::ParseReq("Cannot parse byte locations in range header".to_string())
        })?;

    if range.len() != 2 {
        // Guard against malformed ranges like "bytes=5" so the indexing below cannot panic
        return Err(UploadError::ParseReq("Malformed Range header".to_string()));
    }

    if range[0] > range[1] {
        return Err(UploadError::Range);
    }

    Ok(range)
}

async fn ranged_file_resp(
    path: PathBuf,
    req: HttpRequest,
    details: Details,
) -> Result<HttpResponse, UploadError> {
    match req.headers().get("Range") {
        // Range header exists - return as ranged
        Some(range_head) => {
            let range = parse_range_header(range_head)?;

            let (out_file, _) = file::seek(
                file::open(path).await?,
                io::SeekFrom::Current(range[0].try_into().map_err(|_| {
                    UploadError::ParseReq("Byte locations too high in range header".to_string())
                })?),
            )
            .await?;

            let (out_file, meta) = file::metadata(out_file)
                .await
                .map_err(|_| UploadError::Upload("Error reading metadata".to_string()))?;

            if meta.len() < range[0] {
                return Err(UploadError::Range);
            }

            // file::read_to_stream() creates a stream in 65,356 byte chunks.
            let whole_to = ((range[1] - range[0]) as f64 / 65_356.0).floor() as usize;
            let partial_len = ((range[1] - range[0]) % 65_356) as usize;
            //debug!("Range of {}. Returning {} whole chunks, and {} bytes of the partial chunk", range[1]-range[0], whole_to, partial_len);

            let stream = file::read_to_stream(out_file)
                .await?
                .take(whole_to + 1)
                .enumerate()
                .map(move |bytes_res| match bytes_res.1 {
                    Ok(mut bytes) => {
                        if bytes_res.0 == whole_to && partial_len <= bytes.len() {
                            return Ok(bytes.split_to(partial_len));
                        }
                        return Ok(bytes);
                    }
                    Err(e) => Err(e),
                });

            return Ok(srv_ranged_response(
                stream,
                details.content_type(),
                7 * DAYS,
                details.system_time(),
                Some((range[0], range[1])),
                Some(meta.len()),
            ));
        }
        // No Range header in the request - return the entire document
        None => {
            let stream = actix_fs::read_to_stream(path).await?;
            return Ok(srv_response(
                stream,
                details.content_type(),
                7 * DAYS,
                details.system_time(),
            ));
        }
    };
}

// A helper method to produce responses with proper cache headers
fn srv_response<S, E>(
    stream: S,
    ext: mime::Mime,
    expires: u32,
    modified: SystemTime,
) -> HttpResponse
where
    S: Stream<Item = Result<Bytes, E>> + Unpin + 'static,
    E: Into<UploadError>,
{
    HttpResponse::Ok()
        .set(LastModified(modified.into()))
        .set(CacheControl(vec![
            CacheDirective::Public,
            CacheDirective::MaxAge(expires),
            CacheDirective::Extension("immutable".to_owned(), None),
        ]))
        .set_header(ACCEPT_RANGES, "bytes")
        .content_type(ext.to_string())
        .streaming(stream.err_into())
}

fn srv_ranged_response<S, E>(
    stream: S,
    ext: mime::Mime,
    expires: u32,
    modified: SystemTime,
    range: Option<(u64, u64)>,
    instance_length: Option<u64>,
) -> HttpResponse
where
    S: Stream<Item = Result<Bytes, E>> + Unpin + 'static,
    E: Into<UploadError>,
{
    HttpResponse::PartialContent()
        .set(LastModified(modified.into()))
        .set(CacheControl(vec![
            CacheDirective::Public,
            CacheDirective::MaxAge(expires),
            CacheDirective::Extension("immutable".to_owned(), None),
        ]))
        .set(ContentRange(ContentRangeSpec::Bytes {
            range,
            instance_length,
        }))
        .set_header(ACCEPT_RANGES, "bytes")
        .content_type(ext.to_string())
        .streaming(stream.err_into())
}

#[derive(Debug, serde::Deserialize)]
#[serde(untagged)]
enum FileOrAlias {
    File { file: String },
    Alias { alias: String },
}

async fn purge(
    query: web::Query<FileOrAlias>,
    upload_manager: web::Data<UploadManager>,
) -> Result<HttpResponse, UploadError> {
    let aliases = match query.into_inner() {
        FileOrAlias::File { file } => upload_manager.aliases_by_filename(file).await?,
        FileOrAlias::Alias { alias } => upload_manager.aliases_by_alias(alias).await?,
    };

    for alias in aliases.iter() {
        upload_manager
            .delete_without_token(alias.to_owned())
            .await?;
    }

    Ok(HttpResponse::Ok().json(serde_json::json!({
        "msg": "ok",
        "aliases": aliases
    })))
}

async fn aliases(
    query: web::Query<FileOrAlias>,
    upload_manager: web::Data<UploadManager>,
) -> Result<HttpResponse, UploadError> {
    let aliases = match query.into_inner() {
        FileOrAlias::File { file } => upload_manager.aliases_by_filename(file).await?,
        FileOrAlias::Alias { alias } => upload_manager.aliases_by_alias(alias).await?,
    };

    Ok(HttpResponse::Ok().json(serde_json::json!({
        "msg": "ok",
        "aliases": aliases,
    })))
}

#[derive(Debug, serde::Deserialize)]
struct ByAlias {
    alias: String,
}

async fn filename_by_alias(
    query: web::Query<ByAlias>,
    upload_manager: web::Data<UploadManager>,
) -> Result<HttpResponse, UploadError> {
    let filename = upload_manager.from_alias(query.into_inner().alias).await?;

    Ok(HttpResponse::Ok().json(serde_json::json!({
        "msg": "ok",
        "filename": filename,
    })))
}

#[actix_rt::main]
async fn main() -> Result<(), anyhow::Error> {
    MAGICK_INIT.call_once(|| {
        magick_rust::magick_wand_genesis();
    });

    if std::env::var("RUST_LOG").is_err() {
        std::env::set_var("RUST_LOG", "info");
    }

    tracing_subscriber::fmt()
        .with_env_filter(EnvFilter::from_default_env())
        .init();

    let manager = UploadManager::new(CONFIG.data_dir(), CONFIG.format()).await?;

    // Create a new Multipart Form validator
    //
    // This form is expecting a single array field, 'images' with at most 10 files in it
    let manager2 = manager.clone();
    let form = Form::new()
        .max_files(10)
        .max_file_size(CONFIG.max_file_size() * MEGABYTES)
        .transform_error(|e| UploadError::from(e).into())
        .field(
            "images",
            Field::array(Field::file(move |filename, _, stream| {
                let manager = manager2.clone();

                async move {
                    let span = tracing::info_span!("file-upload", ?filename);
                    let entered = span.enter();

                    let res = manager.upload(stream).await.map(|alias| {
                        let mut path = PathBuf::new();
                        path.push(alias);
                        Some(path)
                    });

                    drop(entered);
                    res
                }
            })),
        );

    // Create a new Multipart Form validator for internal imports
    //
    // This form is expecting a single array field, 'images' with at most 10 files in it
    let validate_imports = CONFIG.validate_imports();
    let manager2 = manager.clone();
    let import_form = Form::new()
        .max_files(10)
        .max_file_size(CONFIG.max_file_size() * MEGABYTES)
        .transform_error(|e| UploadError::from(e).into())
        .field(
            "images",
            Field::array(Field::file(move |filename, content_type, stream| {
                let manager = manager2.clone();

                async move {
                    let span = tracing::info_span!("file-import", ?filename);
                    let entered = span.enter();

                    let res = manager
                        .import(filename, content_type, validate_imports, stream)
                        .await
                        .map(|alias| {
                            let mut path = PathBuf::new();
                            path.push(alias);
                            Some(path)
                        });

                    drop(entered);
                    res
                }
            })),
        );

    HttpServer::new(move || {
        let client = Client::builder()
            .header("User-Agent", "pict-rs v0.1.0-master")
            .finish();

        App::new()
            .wrap(Compress::default())
            .wrap(Logger::default())
            .wrap(Tracing)
            .data(manager.clone())
            .data(client)
            .data(CONFIG.filter_whitelist())
            .service(
                web::scope("/image")
                    .service(
                        web::resource("")
                            .guard(guard::Post())
                            .wrap(form.clone())
                            .route(web::post().to(upload)),
                    )
                    .service(web::resource("/download").route(web::get().to(download)))
                    .service(
                        web::resource("/delete/{delete_token}/{filename}")
                            .route(web::delete().to(delete))
                            .route(web::get().to(delete)),
                    )
                    .service(web::resource("/original/{filename}").route(web::get().to(serve)))
                    .service(web::resource("/process.{ext}").route(web::get().to(process)))
                    .service(
                        web::scope("/details")
                            .service(
                                web::resource("/original/{filename}")
                                    .route(web::get().to(details)),
                            )
                            .service(
                                web::resource("/process.{ext}")
                                    .route(web::get().to(process_details)),
                            ),
                    ),
            )
            .service(
                web::scope("/internal")
                    .wrap(Internal(CONFIG.api_key().map(|s| s.to_owned())))
                    .service(
                        web::resource("/import")
                            .wrap(import_form.clone())
                            .route(web::post().to(upload)),
                    )
                    .service(web::resource("/purge").route(web::post().to(purge)))
                    .service(web::resource("/aliases").route(web::get().to(aliases)))
                    .service(web::resource("/filename").route(web::get().to(filename_by_alias))),
            )
    })
    .bind(CONFIG.bind_address())?
    .run()
    .await?;

    if actix_fs::metadata(&*TMP_DIR).await.is_ok() {
        actix_fs::remove_dir_all(&*TMP_DIR).await?;
    }

    Ok(())
}
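
// NOTE: illustrative addition, not part of the original file. A minimal sketch of unit
// tests for `parse_range_header` above; it exercises only the plain "bytes=start-end"
// form that the parser handles, and avoids assuming any trait bounds on `UploadError`.
#[cfg(test)]
mod tests {
    use super::parse_range_header;
    use actix_web::http::HeaderValue;

    #[test]
    fn parses_simple_byte_range() {
        let header = HeaderValue::from_static("bytes=0-1023");
        let range = match parse_range_header(&header) {
            Ok(range) => range,
            Err(_) => panic!("valid range should parse"),
        };
        assert_eq!(range, vec![0, 1023]);
    }

    #[test]
    fn rejects_reversed_range() {
        // start > end must be rejected rather than producing a bogus slice
        let header = HeaderValue::from_static("bytes=10-5");
        assert!(parse_range_header(&header).is_err());
    }
}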