
I am still alive in case you were wondering

Honbra 2024-04-14 22:02:46 +02:00
parent 1e0c7f5041
commit 4de6254f08
Signed by: honbra
GPG key ID: B61CC9ADABE2D952
18 changed files with 628 additions and 345 deletions

.gitignore vendored

@@ -3,3 +3,4 @@
/config.toml
/temp
/files
*.env


@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO file (hash, mime) VALUES ($1, $2) ON CONFLICT DO NOTHING",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Bytea",
"Text"
]
},
"nullable": []
},
"hash": "9019613c29507ab3aacc861edc4acd1ec5b4a60f4cae5599557c9b54b19960ea"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "SELECT file_hash, mime FROM file_key JOIN file ON file_hash = hash WHERE id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "file_hash",
"type_info": "Bytea"
},
{
"ordinal": 1,
"name": "mime",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true
]
},
"hash": "d2a03886009405f5abe777c6f3b387df796d340a2119ede3b74bdeccf42c4f51"
}


@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO file_key (id, file_hash) VALUES ($1, $2)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Bytea"
]
},
"nullable": []
},
"hash": "e3ba3d043ee6f16689304d82ec02a1444fddb6e43323769ccd0d42ea5d9570c0"
}
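These .sqlx JSON files are sqlx's offline query cache, written by cargo sqlx prepare; they let the query! macros type-check at build time without a live DATABASE_URL. As a minimal sketch, the lookup backed by the second cache entry above could look like this (pool setup assumed; the "nullable" metadata is why mime comes back as an Option):

use sqlx::{query, PgPool};
use uuid::Uuid;

// Hypothetical helper; compiles against the cached metadata or a live database.
async fn lookup_file(db: &PgPool, key: Uuid) -> sqlx::Result<Option<(Vec<u8>, Option<String>)>> {
    let row = query!(
        "SELECT file_hash, mime FROM file_key JOIN file ON file_hash = hash WHERE id = $1",
        key,
    )
    .fetch_optional(db)
    .await?;
    // file_hash is NOT NULL (Vec<u8>), mime is nullable (Option<String>).
    Ok(row.map(|r| (r.file_hash, r.mime)))
}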

Cargo.lock generated

File diff suppressed because it is too large.


@@ -4,22 +4,26 @@ version = "0.1.0"
edition = "2021"
[dependencies]
axum = { version = "0.6.20", default-features = false, features = ["http1", "json", "macros", "matched-path", "tokio", "tower-log", "tracing"] }
eyre = "0.6.8"
figment = { version = "0.10.11", features = ["env", "toml"] }
axum = { version = "0.7.5", default-features = false, features = ["http1", "json", "macros", "matched-path", "tokio", "tower-log", "tracing"] }
axum-extra = { version = "0.9.3", features = ["async-read-body"] }
bytes = "1.6.0"
eyre = "0.6.12"
figment = { version = "0.10.15", features = ["env", "toml"] }
futures-util = { version = "0.3.30", default-features = false }
hex = "0.4.3"
http = "0.2.9"
serde = { version = "1.0.189", features = ["derive"] }
http = "1.1.0"
http-body-util = "0.1.1"
mime = "0.3.17"
serde = { version = "1.0.197", features = ["derive"] }
sha2 = "0.10.8"
sqlx = { version = "0.7.3", features = ["runtime-tokio", "postgres", "uuid"] }
thiserror = "1.0.51"
tokio = { version = "1.33.0", features = ["rt-multi-thread", "macros", "fs", "io-std"] }
sqlx = { version = "0.7.4", features = ["runtime-tokio", "postgres", "uuid"] }
thiserror = "1.0.58"
tokio = { version = "1.37.0", features = ["rt-multi-thread", "macros", "fs", "io-std"] }
tokio-util = { version = "0.7.10", features = ["io"] }
tower-http = { version = "0.4.4", features = ["trace"] }
tracing = "0.1.37"
tracing-subscriber = "0.3.17"
ulid = { version = "1.1.0", features = ["uuid", "serde"] }
tower-http = { version = "0.5.2", features = ["trace", "fs"] }
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
ulid = { version = "1.1.2", features = ["uuid", "serde"] }
url = { version = "2.5.0", features = ["serde"] }
uuid = "1.7.0"


@@ -8,11 +8,11 @@
"rust-analyzer-src": "rust-analyzer-src"
},
"locked": {
"lastModified": 1706595721,
"narHash": "sha256-nf5/lPawM20WO1gHAlsUwDEJ4v+InC3BYYV1EBsBJZk=",
"lastModified": 1712384501,
"narHash": "sha256-AZmYmEnc1ZkSlxUJVUtGh9VFAqWPr+xtNIiBqD2eKfc=",
"owner": "nix-community",
"repo": "fenix",
"rev": "46a368edf5f1cc16573157797e5acead834d5b2c",
"rev": "99c6241db5ca5363c05c8f4acbdf3a4e8fc42844",
"type": "github"
},
"original": {
@@ -26,11 +26,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1705309234,
"narHash": "sha256-uNRRNRKmJyCRC/8y1RqBkqWBLM034y4qN7EprSdmgyA=",
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "1ef2e671c3b0c19053962c07dbda38332dcebf26",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
@@ -41,11 +41,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1706371002,
"narHash": "sha256-dwuorKimqSYgyu8Cw6ncKhyQjUDOyuXoxDTVmAXq88s=",
"lastModified": 1712439257,
"narHash": "sha256-aSpiNepFOMk9932HOax0XwNxbA38GOUVOiXfUVPOrck=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "c002c6aa977ad22c60398daaa9be52f2203d0006",
"rev": "ff0dbd94265ac470dda06a657d5fe49de93b4599",
"type": "github"
},
"original": {
@@ -65,11 +65,11 @@
"rust-analyzer-src": {
"flake": false,
"locked": {
"lastModified": 1706540258,
"narHash": "sha256-6RTztJE21l0hfWHp0qMWBurWnoFmUxYEDCvaGTnQYcA=",
"lastModified": 1712156296,
"narHash": "sha256-St7ZQrkrr5lmQX9wC1ZJAFxL8W7alswnyZk9d1se3Us=",
"owner": "rust-lang",
"repo": "rust-analyzer",
"rev": "d13951f25c4cb880bff09a3c02a796ecc51f69ac",
"rev": "8e581ac348e223488622f4d3003cb2bd412bf27e",
"type": "github"
},
"original": {


@@ -45,10 +45,6 @@
sqlfluff
sqlx-cli
];
# LD_LIBRARY_PATH = "${lib.makeLibraryPath buildInputs}";
# ssh -NL /home/honbra/.s.PGSQL.5432:/var/run/postgresql/.s.PGSQL.5432 <user>@<host>
# good luck setting up /home/honbra on your machine
DATABASE_URL = "postgresql:///ncpn?host=/home/honbra&user=honbra";
};
}
);


@@ -0,0 +1,2 @@
DROP TABLE IF EXISTS file_key;
DROP TABLE IF EXISTS file;


@@ -0,0 +1,10 @@
CREATE TABLE IF NOT EXISTS file (
hash BYTEA PRIMARY KEY,
mime TEXT
);
CREATE TABLE IF NOT EXISTS file_key (
id UUID PRIMARY KEY,
file_hash BYTEA REFERENCES file (hash) NOT NULL,
expires_at TIMESTAMP
);
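The schema above stores each blob once, keyed by its SHA-256 hash, and hands out per-upload keys that reference it; uploading the same bytes twice reuses the file row and only adds a file_key. A hedged sketch of that flow using sqlx's runtime API (table and column names come from this migration, everything else is assumed):

use sqlx::PgPool;
use ulid::Ulid;
use uuid::Uuid;

// Hypothetical helper: register an upload and return its public key.
async fn register_upload(db: &PgPool, hash: &[u8], mime: &str) -> sqlx::Result<Ulid> {
    // Re-uploading identical content is a no-op here thanks to ON CONFLICT DO NOTHING.
    sqlx::query("INSERT INTO file (hash, mime) VALUES ($1, $2) ON CONFLICT DO NOTHING")
        .bind(hash)
        .bind(mime)
        .execute(db)
        .await?;

    // Each upload still gets its own key row pointing at the shared blob.
    let key = Ulid::new();
    sqlx::query("INSERT INTO file_key (id, file_hash) VALUES ($1, $2)")
        .bind(Uuid::from(key))
        .bind(hash)
        .execute(db)
        .await?;
    Ok(key)
}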


@@ -1,14 +1,10 @@
use std::path::PathBuf;
use std::{path::PathBuf, sync::Arc};
use axum::{
extract::{BodyStream, State},
routing::post,
Json, Router,
};
use axum::{body::Body, extract::State, routing::post, Json, Router};
use futures_util::TryStreamExt;
use serde::Serialize;
use sha2::{Digest, Sha256};
use sqlx::PgPool;
use sqlx::{query, PgPool};
use tokio::{
fs::{self, File},
io,
@@ -16,23 +12,32 @@ use tokio::{
use tokio_util::io::StreamReader;
use tracing::{error, field, info, instrument};
use ulid::Ulid;
use uuid::Uuid;
use crate::error::AppError;
use crate::{config::Config, error::AppError};
pub fn router(db: PgPool) -> Router {
Router::new().route("/", post(upload_file)).with_state(db)
#[derive(Clone)]
struct SharedState {
db: PgPool,
config: Arc<Config>,
}
pub fn router(db: PgPool, config: Arc<Config>) -> Router {
Router::new()
.route("/", post(upload_file))
.with_state(SharedState { db, config })
}
#[derive(Debug, Serialize)]
struct UploadedFile {
id: Ulid,
key: Ulid,
hash: String,
}
#[instrument(skip(_db, body))]
#[instrument(skip(db, body))]
async fn upload_file(
State(_db): State<PgPool>,
body: BodyStream,
State(SharedState { db, config }): State<SharedState>,
body: Body,
) -> Result<Json<UploadedFile>, AppError> {
let id_temp = Ulid::new();
let file_path_temp = PathBuf::from("temp").join(id_temp.to_string());
@@ -42,6 +47,7 @@ async fn upload_file(
let mut file_temp = File::create(&file_path_temp).await?;
let better_body = body
.into_data_stream()
.inspect_ok(|b| hasher.update(b))
.map_err(|err| io::Error::new(io::ErrorKind::Other, err));
let mut reader = StreamReader::new(better_body);
@@ -85,8 +91,27 @@ async fn upload_file(
return Err(err.into());
}
Ok(Json(UploadedFile {
id: id_temp,
let key = Ulid::new();
query!(
"INSERT INTO file (hash, mime) VALUES ($1, $2) ON CONFLICT DO NOTHING",
&hash[..],
"video/mp4", // I was testing with a video lol
)
.execute(&db)
.await?;
let result = query!(
"INSERT INTO file_key (id, file_hash) VALUES ($1, $2)",
Uuid::from(key),
&hash[..],
)
.execute(&db)
.await?;
match result.rows_affected() {
1 => Ok(Json(UploadedFile {
key,
hash: hash_hex,
}))
})),
rows => Err(AppError::ImpossibleAffectedRows(rows)),
}
}
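For reference, a standalone sketch of the content-addressed step this handler performs: bytes go into a temp file while a SHA-256 digest is updated, then the temp file is moved to a path named after the hex digest. In the handler this happens chunk by chunk via inspect_ok on the body stream; the directory names and helper below are illustrative only.

use sha2::{Digest, Sha256};
use std::path::Path;
use tokio::{fs, io::AsyncWriteExt};

// Hypothetical, non-streaming stand-in for the handler above.
async fn store_content_addressed(
    data: &[u8],
    temp_dir: &Path,
    store_dir: &Path,
) -> std::io::Result<String> {
    let temp_path = temp_dir.join("upload-in-progress");
    let mut file = fs::File::create(&temp_path).await?;
    let mut hasher = Sha256::new();

    hasher.update(data);
    file.write_all(data).await?;
    file.flush().await?;

    // The final path is the hex-encoded digest, so identical uploads collide on purpose.
    let hash_hex = hex::encode(hasher.finalize());
    fs::rename(&temp_path, store_dir.join(&hash_hex)).await?;
    Ok(hash_hex)
}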


@@ -1,8 +1,8 @@
use axum::{
extract::{Path, State},
routing::{get, post},
Json, Router,
};
use axum_extra::routing::Resource;
use http::StatusCode;
use serde::{Deserialize, Serialize};
use sqlx::{query, PgPool};
@@ -13,13 +13,13 @@ use uuid::Uuid;
use crate::error::AppError;
pub fn router(db: PgPool) -> Router {
Router::new()
.route("/", post(create_link))
.route(
"/:id",
get(get_link_info).put(update_link).delete(delete_link),
)
.with_state(db)
let links = Resource::named("links")
.create(create_link)
.show(get_link_info)
.update(update_link)
.destroy(delete_link);
Router::new().merge(links).with_state(db)
}
#[derive(Serialize)]
@@ -74,7 +74,7 @@ async fn create_link(
slug,
destination: destination.to_string(),
})),
0 => Err(AppError::LinkExists(id)),
0 => Err(AppError::ApiLinkExists(id)),
rows => Err(AppError::ImpossibleAffectedRows(rows)),
}
}
@@ -99,7 +99,7 @@ async fn update_link(
match result.rows_affected() {
1 => Ok(StatusCode::NO_CONTENT),
0 => Err(AppError::LinkNotFound(id)),
0 => Err(AppError::ApiLinkNotFound(id)),
rows => Err(AppError::ImpossibleAffectedRows(rows)),
}
}
@@ -114,7 +114,7 @@ async fn delete_link(
match result.rows_affected() {
1 => Ok(StatusCode::NO_CONTENT),
0 => Err(AppError::LinkNotFound(id)),
0 => Err(AppError::ApiLinkNotFound(id)),
rows => Err(AppError::ImpossibleAffectedRows(rows)),
}
}


@@ -1,11 +1,15 @@
mod files;
mod links;
use std::sync::Arc;
use axum::Router;
use sqlx::PgPool;
pub fn router(db: PgPool) -> Router {
use crate::config::Config;
pub fn router(db: PgPool, config: Arc<Config>) -> Router {
Router::new()
.nest("/files", files::router(db.clone()))
.nest("/files", files::router(db.clone(), config))
.nest("/links", links::router(db))
}


@@ -1,6 +1,8 @@
mod api;
mod root;
use std::sync::Arc;
use axum::{body::Body, Router};
use http::Request;
use sqlx::{postgres::PgConnectOptions, PgPool};
@@ -19,8 +21,10 @@ pub async fn build_app(config: Config) -> eyre::Result<Router> {
)
.await?;
Ok(root::router(db.clone(), config.default_destination)
.nest("/api", api::router(db))
let config = Arc::new(config);
Ok(root::router(db.clone(), config.clone())
.nest("/api", api::router(db, config))
.layer(
TraceLayer::new_for_http()
.make_span_with(|request: &Request<Body>| {


@@ -1,39 +1,39 @@
use std::sync::Arc;
use axum::{
body::Body,
extract::{Path, State},
response::Redirect,
routing::get,
Router,
BoxError, Router,
};
use bytes::Bytes;
use http::{Request, Response};
use http_body_util::{combinators::UnsyncBoxBody, BodyExt};
use mime::Mime;
use sqlx::{query, PgPool};
use tower_http::services::ServeFile;
use tracing::{error, field, instrument};
use ulid::Ulid;
use url::Url;
use uuid::Uuid;
use crate::error::AppError;
use crate::{config::Config, error::AppError};
#[derive(Clone)]
struct SharedState {
db: PgPool,
default_destination: Arc<Url>,
config: Arc<Config>,
}
pub fn router(db: PgPool, default_destination: Url) -> Router {
pub fn router(db: PgPool, config: Arc<Config>) -> Router {
Router::new()
.route("/:slug", get(redirect))
.with_state(SharedState {
db,
default_destination: Arc::new(default_destination),
})
.route("/:slug", get(redirect_link))
.route("/f/:key", get(redirect_file))
.with_state(SharedState { db, config })
}
async fn redirect(
State(SharedState {
db,
default_destination,
}): State<SharedState>,
async fn redirect_link(
State(SharedState { db, .. }): State<SharedState>,
Path(slug): Path<String>,
) -> Result<Redirect, AppError> {
let result = query!("SELECT id, destination FROM link WHERE slug = $1", slug)
@@ -41,13 +41,13 @@ async fn redirect(
.await?
.map(|r| (Ulid::from(r.id), r.destination));
Ok(match result {
match result {
Some((id, destination)) => {
tokio::spawn(increase_visit_count(id, db));
Redirect::temporary(&destination)
Ok(Redirect::temporary(&destination))
}
None => Err(AppError::LinkNotFound(slug)),
}
None => Redirect::temporary(default_destination.as_str()),
})
}
#[instrument(skip(db))]
@@ -67,3 +67,33 @@ async fn increase_visit_count(id: Ulid, db: PgPool) {
_ => {}
}
}
async fn redirect_file(
State(SharedState { db, config }): State<SharedState>,
Path(key): Path<Ulid>,
request: Request<Body>,
) -> Result<Response<UnsyncBoxBody<Bytes, BoxError>>, AppError> {
let result = query!(
"SELECT file_hash, mime FROM file_key JOIN file ON file_hash = hash WHERE id = $1",
Uuid::from(key)
)
.fetch_optional(&db)
.await?
.map(|r| (r.file_hash, r.mime));
match result {
Some((file_hash, mime)) => {
let mime: Option<Mime> = mime.map_or(None, |m| m.parse().ok());
let file_path = config.file_store_dir.join(hex::encode(file_hash));
let mut sf = match mime {
Some(mime) => ServeFile::new_with_mime(file_path, &mime),
None => ServeFile::new(file_path),
};
match sf.try_call(request).await {
Ok(response) => Ok(response.map(|body| body.map_err(Into::into).boxed_unsync())),
Err(err) => Err(AppError::Io(err)),
}
}
None => Err(AppError::FileKeyNotFound(key)),
}
}


@@ -17,6 +17,11 @@ pub struct Config {
#[serde(default = "default2_destination")]
pub default_destination: Url,
#[serde(default = "default_file_store_dir")]
pub file_store_dir: PathBuf,
#[serde(default = "default_file_temp_dir")]
pub file_temp_dir: PathBuf,
}
fn default_listen_addr() -> SocketAddr {
@@ -38,3 +43,11 @@ fn default_db_database() -> String {
fn default2_destination() -> Url {
"https://goob.cc/r".parse().expect("hardcoded URL is valid")
}
fn default_file_store_dir() -> PathBuf {
PathBuf::from("files")
}
fn default_file_temp_dir() -> PathBuf {
PathBuf::from("temp")
}
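A hedged sketch of how a Config like this is typically loaded with Figment: a TOML file merged with environment overrides. The actual loading happens alongside fn main() further down in this commit; the config.toml name matches the .gitignore entry, while the NCPN_ env prefix and the trimmed-down struct are assumptions.

use figment::{
    providers::{Env, Format, Toml},
    Figment,
};
use serde::Deserialize;
use std::path::PathBuf;

#[derive(Debug, Deserialize)]
struct Config {
    #[serde(default = "default_file_store_dir")]
    file_store_dir: PathBuf,
}

fn default_file_store_dir() -> PathBuf {
    PathBuf::from("files")
}

// Hypothetical loader: TOML file first, then environment variables win.
fn load_config() -> Result<Config, figment::Error> {
    Figment::new()
        .merge(Toml::file("config.toml"))
        .merge(Env::prefixed("NCPN_"))
        .extract()
}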


@@ -1,4 +1,4 @@
use axum::response::{IntoResponse, Response};
use axum::{body::Body, response::IntoResponse};
use http::StatusCode;
use tracing::{error, field};
use ulid::Ulid;
@@ -6,9 +6,13 @@ use ulid::Ulid;
#[derive(Debug, thiserror::Error)]
pub enum AppError {
#[error("link already exists ({0})")]
LinkExists(Ulid),
ApiLinkExists(Ulid),
#[error("link not found ({0})")]
LinkNotFound(Ulid),
ApiLinkNotFound(Ulid),
#[error("link not found ({0})")]
LinkNotFound(String),
#[error("file key not found ({0})")]
FileKeyNotFound(Ulid),
#[error("database returned an impossible number of affected rows ({0})")]
ImpossibleAffectedRows(u64),
#[error("database error")]
@@ -20,31 +24,31 @@ pub enum AppError {
}
impl IntoResponse for AppError {
fn into_response(self) -> Response {
fn into_response(self) -> axum::http::Response<Body> {
error!(err = field::display(&self));
match self {
Self::LinkExists(_) => (StatusCode::BAD_REQUEST, "Link already exists").into_response(),
Self::LinkNotFound(_) => (StatusCode::NOT_FOUND, "Link not found").into_response(),
Self::ApiLinkExists(_) => (StatusCode::BAD_REQUEST, "Link already exists"),
Self::ApiLinkNotFound(_) | Self::LinkNotFound(_) => {
(StatusCode::NOT_FOUND, "Link not found")
}
Self::FileKeyNotFound(_) => (StatusCode::NOT_FOUND, "File key not found"),
Self::ImpossibleAffectedRows(_) => (
StatusCode::INTERNAL_SERVER_ERROR,
"Database returned an impossible number of affected rows",
)
.into_response(),
),
Self::Database(_) => (
StatusCode::INTERNAL_SERVER_ERROR,
"A database error has occured",
)
.into_response(),
),
Self::Io(_) => (
StatusCode::INTERNAL_SERVER_ERROR,
"An I/O error has occured",
)
.into_response(),
Self::Other(err) => (
),
Self::Other(_) => (
StatusCode::INTERNAL_SERVER_ERROR,
format!("An error has occured:\n{err:?}"),
)
.into_response(),
"An unknown error has occured",
),
}
.into_response()
}
}


@@ -7,8 +7,8 @@ use figment::{
providers::{Env, Format, Toml},
Figment,
};
use tokio::runtime::Runtime;
use tracing::{debug, field, Level};
use tokio::{net::TcpListener, runtime::Runtime};
use tracing::Level;
use tracing_subscriber::{filter, layer::SubscriberExt, util::SubscriberInitExt};
use self::app::build_app;
@@ -41,16 +41,12 @@ fn main() -> eyre::Result<()> {
rt.block_on(async move {
let listen_addr = config.listen_addr;
let router = build_app(config)
let app = build_app(config).await.context("failed to build app")?;
let listener = TcpListener::bind(&listen_addr)
.await
.context("failed to build app")?
.into_make_service();
.context("failed to bind listener")?;
debug!(addr = field::display(&listen_addr), "binding");
axum::Server::try_bind(&listen_addr)
.context("unable to bind to server address")?
.serve(router)
axum::serve(listener, app)
.await
.context("server encountered a runtime error")?;
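For context, the hunk above migrates from the axum 0.6 axum::Server::try_bind pattern to the axum 0.7 one: bind a plain tokio TcpListener yourself and hand it to axum::serve. A minimal self-contained sketch (address and handler are placeholders, not from this repo):

use axum::{routing::get, Router};
use tokio::net::TcpListener;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let app = Router::new().route("/", get(|| async { "ok" }));
    // axum 0.7 drops axum::Server in favor of a TcpListener plus axum::serve.
    let listener = TcpListener::bind("127.0.0.1:8080").await?;
    axum::serve(listener, app).await?;
    Ok(())
}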