diff --git a/Cargo.lock b/Cargo.lock index 439b555..9c8ac0d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -250,7 +250,7 @@ dependencies = [ [[package]] name = "buckets-core" -version = "1.0.1" +version = "1.0.2" dependencies = [ "oiseau", "pathbufd", diff --git a/crates/buckets-core/Cargo.toml b/crates/buckets-core/Cargo.toml index 0b7b7d3..306bc07 100644 --- a/crates/buckets-core/Cargo.toml +++ b/crates/buckets-core/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "buckets-core" description = "Buckets media upload types" -version = "1.0.1" +version = "1.0.2" edition = "2024" readme = "../../README.md" authors.workspace = true @@ -16,4 +16,7 @@ pathbufd = "0.1.4" serde = { version = "1.0.219", features = ["derive"] } serde_json = "1.0.142" toml = "0.9.4" -oiseau = { version = "0.1.2", default-features = false, features = ["postgres", "redis",] } +oiseau = { version = "0.1.2", default-features = false, features = [ + "postgres", + "redis", +] } diff --git a/crates/buckets-core/src/config.rs b/crates/buckets-core/src/config.rs index 382259a..3591a2f 100644 --- a/crates/buckets-core/src/config.rs +++ b/crates/buckets-core/src/config.rs @@ -1,3 +1,5 @@ +use std::collections::HashMap; + use oiseau::config::{Configuration, DatabaseConfig}; use pathbufd::PathBufD; use serde::{Deserialize, Serialize}; @@ -7,6 +9,9 @@ pub struct Config { /// The directory files are stored in (relative to cwd). #[serde(default = "default_directory")] pub directory: String, + /// The path to the default image to be served for the given buckets. + #[serde(default = "default_bucket_defaults")] + pub bucket_defaults: HashMap<String, (String, String)>, /// Database configuration. 
#[serde(default = "default_database")] pub database: DatabaseConfig, @@ -16,6 +21,10 @@ fn default_directory() -> String { "buckets".to_string() } +fn default_bucket_defaults() -> HashMap<String, (String, String)> { + HashMap::new() +} + fn default_database() -> DatabaseConfig { DatabaseConfig::default() } @@ -30,6 +39,7 @@ impl Default for Config { fn default() -> Self { Self { directory: default_directory(), + bucket_defaults: default_bucket_defaults(), database: default_database(), } } diff --git a/crates/buckets-core/src/database/sql/create_uploads.sql b/crates/buckets-core/src/database/sql/create_uploads.sql index 91f0928..9ef2764 100644 --- a/crates/buckets-core/src/database/sql/create_uploads.sql +++ b/crates/buckets-core/src/database/sql/create_uploads.sql @@ -1,7 +1,8 @@ CREATE TABLE IF NOT EXISTS uploads ( - id BIGINT NOT NULL PRIMARY KEY, + id BIGINT NOT NULL, created BIGINT NOT NULL, owner BIGINT NOT NULL, bucket TEXT NOT NULL, - metadata TEXT NOT NULL + metadata TEXT NOT NULL, + PRIMARY KEY (id, bucket) ) diff --git a/crates/buckets-core/src/database/uploads.rs b/crates/buckets-core/src/database/uploads.rs index f89b004..7158618 100644 --- a/crates/buckets-core/src/database/uploads.rs +++ b/crates/buckets-core/src/database/uploads.rs @@ -2,7 +2,7 @@ use crate::{ DataManager, model::{MediaUpload, UploadMetadata}, }; -use oiseau::{PostgresRow, cache::Cache, execute, get, params, query_rows}; +use oiseau::{PostgresRow, cache::Cache, execute, get, params, query_row, query_rows}; use tetratto_core::auto_method; use tetratto_core::model::{Error, Result}; @@ -20,6 +20,27 @@ impl DataManager { auto_method!(get_upload_by_id(usize as i64)@get_upload_from_row -> "SELECT * FROM uploads WHERE id = $1" --name="upload" --returns=MediaUpload --cache-key-tmpl="atto.upload:{}"); + /// Get an upload by its ID and bucket. 
+ pub async fn get_upload_by_id_bucket(&self, id: usize, bucket: String) -> Result<MediaUpload> { + let conn = match self.0.connect().await { + Ok(c) => c, + Err(e) => return Err(Error::DatabaseConnection(e.to_string())), + }; + + let res = query_row!( + &conn, + "SELECT * FROM uploads WHERE id = $1 AND bucket = $2", + &[&(id as i64), &bucket], + |x| { Ok(Self::get_upload_from_row(x)) } + ); + + if res.is_err() { + return Err(Error::GeneralNotFound("upload".to_string())); + } + + Ok(res.unwrap()) + } + /// Get all uploads (paginated). /// /// # Arguments @@ -102,6 +123,9 @@ impl DataManager { /// Create a new upload in the database. /// + /// Please note that the file must be manually written afterwards. You can use + /// the returned [`MediaUpload`] to retrieve the correct path to write the file. + /// /// # Arguments /// * `data` - a mock [`MediaUpload`] object to insert pub async fn create_upload(&self, data: MediaUpload) -> Result<MediaUpload> { @@ -132,6 +156,11 @@ impl DataManager { Ok(data) } + /// Delete an upload given its `id`. + /// + /// # Warning + /// It's recommended that you use [`Self::delete_upload_with_bucket`] instead, + /// as the table's primary key is on `(id, bucket)`, not `id`. pub async fn delete_upload(&self, id: usize) -> Result<()> { // if !user.permissions.check(FinePermission::MANAGE_UPLOADS) { // return Err(Error::NotAllowed); // } // delete file // it's most important that the file gets off the file system first, even // if there's an issue in the database // + // the actual file takes up much more space than the database entry. 
+ let upload = self.get_upload_by_id_bucket(id, bucket.clone()).await?; + upload.remove(&self.0.0.directory)?; + + // delete from database + let conn = match self.0.connect().await { + Ok(c) => c, + Err(e) => return Err(Error::DatabaseConnection(e.to_string())), + }; + + let res = execute!( + &conn, + "DELETE FROM uploads WHERE id = $1 AND bucket = $2", + &[&(id as i64), &bucket] + ); + + if let Err(e) = res { + return Err(Error::DatabaseError(e.to_string())); + } + + self.0.1.remove(format!("atto.upload:{}", id)).await; + + // return + Ok(()) + } + auto_method!(update_upload_metadata(UploadMetadata) -> "UPDATE uploads SET metadata = $1 WHERE id = $2" --serde --cache-key-tmpl="atto.upload:{}"); } diff --git a/crates/buckets/src/routes.rs b/crates/buckets/src/routes.rs index 7350880..1e0ab52 100644 --- a/crates/buckets/src/routes.rs +++ b/crates/buckets/src/routes.rs @@ -36,7 +36,14 @@ pub async fn get_request( let upload = match data.get_upload_by_id(id).await { Ok(u) => u, Err(e) => { - return Err(Json(e.into())); + if let Some(default) = data.0.0.bucket_defaults.get(&bucket) { + return Ok(( + [("Content-Type", default.1.to_string())], + Body::from(std::fs::read(&default.0).expect("failed to read default file")), + )); + } else { + return Err(Json(e.into())); + } } };