generated from t/malachite
add: use bucket methods everywhere
parent 4e4f26ea14
commit 6583c34243
5 changed files with 51 additions and 21 deletions
Cargo.lock (generated, 2 changes)

@@ -250,7 +250,7 @@ dependencies = [

 [[package]]
 name = "buckets-core"
-version = "1.0.3"
+version = "1.0.4"
 dependencies = [
  "oiseau",
  "pathbufd",

@@ -1,7 +1,7 @@
 [package]
 name = "buckets-core"
 description = "Buckets media upload types"
-version = "1.0.3"
+version = "1.0.4"
 edition = "2024"
 readme = "../../README.md"
 authors.workspace = true

@@ -210,7 +210,7 @@ impl DataManager {
         // if there's an issue in the database
         //
         // the actual file takes up much more space than the database entry.
-        let upload = self.get_upload_by_id(id).await?;
+        let upload = self.get_upload_by_id_bucket(id, bucket).await?;
         upload.remove(&self.0.0.directory)?;

         // delete from database

@@ -90,14 +90,44 @@ impl MediaUpload {
         }
     }

-    /// Get the path to the fs file for this upload.
-    pub fn path(&self, directory: &str) -> PathBufD {
+    /// Get the path to the fs file for this upload (without bucket).
+    pub(crate) fn legacy_path(&self, directory: &str) -> PathBufD {
         PathBufD::current().extend(&[
             directory,
             &format!("{}.{}", self.id, self.metadata.what.extension()),
         ])
     }

+    /// Get the path to the fs file for this upload (with bucket).
+    pub fn full_path(&self, directory: &str) -> PathBufD {
+        PathBufD::current().extend(&[
+            directory,
+            &format!(
+                "{}{}.{}",
+                if self.bucket != "" {
+                    format!("{}.", self.bucket)
+                } else {
+                    String::new()
+                },
+                self.id,
+                self.metadata.what.extension()
+            ),
+        ])
+    }
+
+    /// Get the path to the fs file for this upload.
+    ///
+    /// Uses path with bucket unless legacy path exists.
+    pub fn path(&self, directory: &str) -> PathBufD {
+        let legacy = self.legacy_path(directory);
+
+        if std::fs::exists(&legacy).unwrap() {
+            return legacy;
+        }
+
+        self.full_path(directory)
+    }
+
     /// Write to this upload in the file system.
     pub fn write(&self, directory: &str, bytes: &[u8]) -> Result<()> {
         match write(self.path(directory), bytes) {
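
For reference, a minimal standalone sketch (not part of the diff) of the file-name scheme the new full_path produces and the legacy name that path() still honors. The helper name and sample values are invented for illustration; the real methods build a PathBufD under the upload directory.

// Sketch only: mirrors the naming used by MediaUpload::full_path and the
// legacy layout kept by MediaUpload::path. Names and values are examples.
fn file_name(bucket: &str, id: &str, ext: &str) -> String {
    if bucket.is_empty() {
        // legacy layout: "<id>.<ext>"
        format!("{id}.{ext}")
    } else {
        // bucketed layout: "<bucket>.<id>.<ext>"
        format!("{bucket}.{id}.{ext}")
    }
}

fn main() {
    assert_eq!(file_name("", "abc123", "png"), "abc123.png");
    assert_eq!(file_name("avatars", "abc123", "png"), "avatars.abc123.png");
}
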

@@ -33,7 +33,7 @@ pub async fn get_request(
 ) -> impl IntoResponse {
     let data = &(data.read().await);

-    let upload = match data.get_upload_by_id(id).await {
+    let upload = match data.get_upload_by_id_bucket(id, &bucket).await {
         Ok(u) => u,
         Err(e) => {
             if let Some(default) = data.0.0.bucket_defaults.get(&bucket) {
@@ -42,19 +42,20 @@ pub async fn get_request(
                     Body::from(std::fs::read(&default.0).expect("failed to read default file")),
                 ));
             } else {
-                return Err(Json(e.into()));
+                match data.get_upload_by_id(id).await {
+                    Ok(x) => x,
+                    Err(_) => {
+                        return Err(Json(ApiReturn {
+                            ok: false,
+                            message: e.to_string(),
+                            payload: (),
+                        }));
+                    }
+                }
             }
         }
     };

+    if !upload.bucket.is_empty() && upload.bucket != bucket {
+        return Err(Json(ApiReturn {
+            ok: false,
+            message: Error::MiscError("Bucket mismatch".to_string()).to_string(),
+            payload: (),
+        }));
+    }
+
     // ...
     let path = upload.path(&data.0.0.directory);

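
Both GET handlers follow roughly the same lookup order after this change. A rough standalone sketch of that order, with invented types in place of the crate's real DataManager methods (get_upload_by_id_bucket / get_upload_by_id):

// Sketch only: prefer the bucket-scoped row, fall back to the id-only row,
// then reject rows whose stored bucket disagrees with the requested one.
#[derive(Debug)]
struct Upload {
    bucket: String,
}

fn resolve(
    scoped: Option<Upload>,  // result of the bucket-scoped lookup
    legacy: Option<Upload>,  // result of the id-only (legacy) lookup
    requested_bucket: &str,
) -> Result<Upload, String> {
    let upload = scoped.or(legacy).ok_or_else(|| "not found".to_string())?;

    if !upload.bucket.is_empty() && upload.bucket != requested_bucket {
        return Err("Bucket mismatch".to_string());
    }

    Ok(upload)
}

fn main() {
    let hit = resolve(None, Some(Upload { bucket: String::new() }), "avatars");
    assert!(hit.is_ok());

    let miss = resolve(None, Some(Upload { bucket: "icons".into() }), "avatars");
    assert_eq!(miss.unwrap_err(), "Bucket mismatch");
}
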

@@ -84,15 +85,14 @@ pub async fn get_json_request(
 ) -> impl IntoResponse {
     let data = &(data.read().await);

-    let upload = match data.get_upload_by_id(id).await {
+    let upload = match data.get_upload_by_id_bucket(id, &bucket).await {
         Ok(u) => u,
-        Err(e) => return Json(e.into()),
+        Err(_) => match data.get_upload_by_id(id).await {
+            Ok(x) => x,
+            Err(e) => return Json(e.into()),
+        },
     };

+    if !upload.bucket.is_empty() && upload.bucket != bucket {
+        return Json(Error::MiscError("Bucket mismatch".to_string()).into());
+    }
+
     Json(ApiReturn {
         ok: true,
         message: "Success".to_string(),