add: move upload server to buckets
parent 8116307ba0
commit 75fe720f21
83 changed files with 351 additions and 458 deletions
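Only one of the 83 changed files is shown below: the upload API route handlers. The raw-file handlers get_request and get_json_request are removed from this module, and delete_request and update_alt_request are rewritten to go through the store exposed as data.2 (presumably the new bucket-backed upload storage, per the commit title) instead of the previous data.delete_upload_checked and data.update_upload_alt calls.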
@@ -1,74 +1,7 @@
-use std::fs::exists;
-use axum::{body::Body, extract::Path, response::IntoResponse, Extension, Json};
+use axum::{extract::Path, response::IntoResponse, Extension, Json};
 use crate::cookie::CookieJar;
-use pathbufd::PathBufD;
 use crate::{get_user_from_token, routes::api::v1::UpdateUploadAlt, State};
-use super::auth::images::read_image;
-use tetratto_core::model::{carp::CarpGraph, oauth, uploads::MediaType, ApiReturn, Error};
-
-pub async fn get_request(
-    Path(id): Path<usize>,
-    Extension(data): Extension<State>,
-) -> impl IntoResponse {
-    let data = &(data.read().await).0;
-
-    let upload = match data.get_upload_by_id(id).await {
-        Ok(u) => u,
-        Err(_) => {
-            return Err((
-                [("Content-Type", "image/svg+xml")],
-                Body::from(read_image(PathBufD::current().extend(&[
-                    data.0.0.dirs.media.as_str(),
-                    "images",
-                    "default-banner.svg",
-                ]))),
-            ));
-        }
-    };
-
-    let path = upload.path(&data.0.0);
-
-    if !exists(&path).unwrap() {
-        return Err((
-            [("Content-Type", "image/svg+xml")],
-            Body::from(read_image(PathBufD::current().extend(&[
-                data.0.0.dirs.media.as_str(),
-                "images",
-                "default-banner.svg",
-            ]))),
-        ));
-    }
-
-    let bytes = read_image(path);
-
-    if upload.what == MediaType::Carpgraph {
-        // conver to svg and return
-        return Ok((
-            [("Content-Type", "image/svg+xml".to_string())],
-            Body::from(CarpGraph::from_bytes(bytes).to_svg()),
-        ));
-    }
-
-    Ok(([("Content-Type", upload.what.mime())], Body::from(bytes)))
-}
-
-pub async fn get_json_request(
-    Path(id): Path<usize>,
-    Extension(data): Extension<State>,
-) -> impl IntoResponse {
-    let data = &(data.read().await).0;
-
-    let upload = match data.get_upload_by_id(id).await {
-        Ok(u) => u,
-        Err(e) => return Json(e.into()),
-    };
-
-    Json(ApiReturn {
-        ok: true,
-        message: "Success".to_string(),
-        payload: Some(upload),
-    })
-}
+use tetratto_core::model::{oauth, ApiReturn, Error};
 
 pub async fn delete_request(
     jar: CookieJar,
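The removed get_request served uploads straight from the on-disk media directory (falling back to default-banner.svg and converting Carpgraph uploads to SVG via CarpGraph::from_bytes), and get_json_request returned the upload record as JSON. Their replacements are not part of this excerpt; per the commit title, serving presumably moves to the bucket storage elsewhere in the 83 changed files.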
@@ -81,13 +14,22 @@ pub async fn delete_request(
         None => return Json(Error::NotAllowed.into()),
     };
 
-    match data.delete_upload_checked(id, &user).await {
+    let upload = match data.2.get_upload_by_id(id).await {
+        Ok(x) => x,
+        Err(e) => return Json(Error::MiscError(e.to_string()).into()),
+    };
+
+    if user.id != upload.owner {
+        return Json(Error::NotAllowed.into());
+    }
+
+    match data.2.delete_upload(id).await {
         Ok(_) => Json(ApiReturn {
             ok: true,
             message: "Upload deleted".to_string(),
             payload: (),
         }),
-        Err(e) => Json(e.into()),
+        Err(e) => Json(Error::MiscError(e.to_string()).into()),
     }
 }
 
@@ -103,12 +45,22 @@ pub async fn update_alt_request(
         None => return Json(Error::NotAllowed.into()),
     };
 
-    match data.update_upload_alt(id, &user, &props.alt).await {
+    let mut upload = match data.2.get_upload_by_id(id).await {
+        Ok(x) => x,
+        Err(e) => return Json(Error::MiscError(e.to_string()).into()),
+    };
+
+    if user.id != upload.owner {
+        return Json(Error::NotAllowed.into());
+    }
+
+    upload.metadata.alt = props.alt;
+    match data.2.update_upload_metadata(id, upload.metadata).await {
         Ok(_) => Json(ApiReturn {
             ok: true,
             message: "Upload updated".to_string(),
             payload: (),
         }),
-        Err(e) => Json(e.into()),
+        Err(e) => Json(Error::MiscError(e.to_string()).into()),
     }
 }
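Both rewritten handlers follow the same shape: fetch the upload through data.2 (what appears to be the bucket-backed store), reject the request when user.id != upload.owner, and only then delete the upload or write back its updated metadata. The sketch below restates that flow with stand-in types so it compiles on its own; BucketStore, delete_checked, and the error strings are illustrative stand-ins, not tetratto_core APIs.

use std::collections::HashMap;

// Stand-in upload record: only the field the ownership check needs.
struct Upload {
    owner: usize,
}

// Stand-in for the bucket-backed store the handlers reach through data.2.
struct BucketStore {
    uploads: HashMap<usize, Upload>,
}

impl BucketStore {
    // mirrors data.2.get_upload_by_id(id) in the diff above
    fn get_upload_by_id(&self, id: usize) -> Result<&Upload, String> {
        self.uploads.get(&id).ok_or_else(|| format!("upload {id} not found"))
    }

    // mirrors data.2.delete_upload(id)
    fn delete_upload(&mut self, id: usize) -> Result<(), String> {
        self.uploads
            .remove(&id)
            .map(|_| ())
            .ok_or_else(|| format!("upload {id} not found"))
    }
}

// The handler-level flow both rewritten handlers share:
// fetch the upload, reject a non-owner, then perform the write.
fn delete_checked(store: &mut BucketStore, id: usize, user_id: usize) -> Result<(), String> {
    let upload = store.get_upload_by_id(id)?;
    if upload.owner != user_id {
        return Err("not allowed".to_string());
    }
    store.delete_upload(id)
}

fn main() {
    let mut store = BucketStore {
        uploads: HashMap::from([(1, Upload { owner: 7 })]),
    };
    // a non-owner is rejected before anything is deleted
    assert!(delete_checked(&mut store, 1, 8).is_err());
    // the owner's request goes through
    assert!(delete_checked(&mut store, 1, 7).is_ok());
}

Compared with the removed delete_upload_checked and update_upload_alt calls, the ownership check now happens inline in each handler, with storage errors surfaced as Error::MiscError.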