From ef73b39f4f91a99f30c4d00db6abc25f22257d37 Mon Sep 17 00:00:00 2001
From: shockrah
Date: Wed, 21 Apr 2021 16:08:23 -0700
Subject: [PATCH] + Adding db-lib::common::try_error_passthrough

This is more heavily used in Message::send as a way of discerning "real"
server errors from those that are caused by user input. The name itself
might not be super fitting either

* Moving code for inserting textual messages into its own function

This splits up the logic a bit for Message::send but this segment of
logic is also much simpler than that of file upload

* Flattening Message::send overall

Keeping this function as flat as can be is crucial as it is one of the
heavier+most important funcs in the whole JSON API codebase

Files are now also generated correctly and asynchronously
---
 json-api/db/src/channels.rs |   4 +-
 json-api/db/src/common.rs   |  19 +++
 json-api/db/src/messages.rs | 164 +++++++++++++++++++-----------------
 3 files changed, 105 insertions(+), 82 deletions(-)

diff --git a/json-api/db/src/channels.rs b/json-api/db/src/channels.rs
index b292aa1..d6b0116 100644
--- a/json-api/db/src/channels.rs
+++ b/json-api/db/src/channels.rs
@@ -36,10 +36,8 @@ impl Channel {
     }
 
     pub async fn add(p: &Pool, name: &str, description: &str, kind: Integer)
-        -> Result<Response<Channel>, SqlError> {
+        -> Result<Response<Channel>, SqlError> {
         //! @returns on success -> Response::Row
-        //! @returns on partial success -> Response::Empty
-        //! @returns on failure -> Response::Other
         //! @returns on user failure -> Response::RestrictedInput(msg)
 
         // bounds are literally [1, 2]
diff --git a/json-api/db/src/common.rs b/json-api/db/src/common.rs
index efdebf1..03c7b19 100644
--- a/json-api/db/src/common.rs
+++ b/json-api/db/src/common.rs
@@ -1,3 +1,6 @@
+use crate::Response;
+use mysql_async::Error;
+
 #[macro_export]
 macro_rules! no_conn {
     ($spec:literal) => {
@@ -24,3 +27,19 @@ macro_rules! sql_err_log {
     }
 }
 
+
+pub fn try_error_passthrough<T>(err: Error) -> Result<Response<T>, Error> {
+    // Some user input _will_ cause sql to complain about things like foreign key
+    // constraints
+    // In order to translate 500's into more precise 400's we use this function
+    // (sparingly)
+    match &err {
+        Error::Server(se) => {
+            if se.code == 1452 {
+                return Ok(Response::RestrictedInput(format!("Invalid key value given")))
+            }
+            return Err(err);
+        },
+        _ => Err(err)
+    }
+}
diff --git a/json-api/db/src/messages.rs b/json-api/db/src/messages.rs
index 0a9b11b..4b24bfb 100644
--- a/json-api/db/src/messages.rs
+++ b/json-api/db/src/messages.rs
@@ -11,6 +11,7 @@ use mysql_async::Error as SqlError;
 use crate::Response;
 use crate::{UBigInt, BigInt};
 use crate::{Message, UserMessage};
+use crate::common;
 
 use rand::RngCore;
 
@@ -31,92 +32,97 @@ impl Message {
     }
 
-    pub async fn send(p: &Pool, content: &str, content_type: &str, cid: UBigInt, uid: UBigInt) -> Result<Response<Message>, SqlError> {
-        //! @returns on_sucess -> empty
-        //! @returns on_failure Err(SqlErr)
+    async fn insert_text(p: &Pool, content: &str, cid: UBigInt, uid: UBigInt) -> Result<Response<Message>, SqlError> {
+        // TODO: make this check not compare against something so hardcoded and
+        if content.len() > 4_000 {
+            return Ok(Response::RestrictedInput("Text larger than 4000 bytes".into()))
+        } else {
+            let mut conn = p.get_conn().await?;
+            let q = "INSERT INTO messages (id, time, content, content_type, author_id, channel_id)
+                VALUES(:id, :time, :content, 'text/plain', :uid, :chan_id)";
 
-        let mut conn = p.get_conn().await?;
-        let q = "INSERT INTO messages
-            (id, time, content, content_type, author_id, channel_id)
-            VALUES (id, :time, :content, :ctype, :author, :channel)";
+            let id: u64 = rand::rngs::OsRng.next_u64();
+            let now: i64 = SystemTime::now()
+                .duration_since(UNIX_EPOCH)
+                .expect("System time `NOW` failed")
+                .as_millis() as i64;
+            let p = params!{
+                "id" => id,
+                "time" => now,
+                "content" => content,
+                "uid" => uid,
+                "chan_id" => cid
+            };
+            conn.exec_drop(q, p).await?;
+            let msg = Message::new(id, now, content, "text/plain", uid, cid);
+            return Ok(Response::Row(msg))
+        }
+    }
 
-        let id: u64 = rand::rngs::OsRng.next_u64();
-        let now: i64 = SystemTime::now()
-            .duration_since(UNIX_EPOCH)
-            .expect("System time `NOW` failed")
-            .as_millis() as i64;
-
-        match content_type {
-            "text/plain" => {
-                if content.len() > 4_000 {
-                    Ok(Response::RestrictedInput("Large text not allowed".into()))
+    async fn save_file(filename: String, data: &[u8], msg: Message) -> Response<Message> {
+        match fs::File::create(filename).await {
+            Ok(mut file) => {
+                if let Ok(_) = file.write_all(data).await {
+                    Response::Row(msg)
                 } else {
-                    let res = conn.exec_drop(q, params!{
-                        "id" => id,
-                        "time" => now,
-                        "content" => content,
-                        "ctype" => content_type,
-                        "author" => uid,
-                        "channel" => cid
-                    }).await;
-                    match Ok(res) {
-                        Ok(_) => {
-                            let msg = Message::new(id, now, content, content_type, uid, cid);
-                            Ok(Response::Row(msg))
-                        },
-                        Err(e) => Err(e)
-                    }
-                }
-            },
-            _ => {
-                /*
-                 * Amazing hardcoded limit on binary content_length
-                 * TODO: make this not hardcoded
-                 * This really should be configurable by someone somewhere
-                 * The best way of doing this honestly it probably through some kind of lazy static
-                 * we can set an env var from the cli frontend for that lz_static to read
-                 */
-                if content.len() > 10_000_000 {
-                    Ok(Response::RestrictedInput("Large data not allowed".into()))
-                } else {
-                    let extension = match content_type {
-                        "image/png" => "png",
-                        "image/jpeg" | "image/jpg" => "jpg",
-                        "application/webm" => "webm",
-                        "application/mp4" => "mp4",
-                        "application/mp3" => "mp3",
-                        _ => panic!("Bad file type sent to db layer {}", content_type)
-                    };
-                    let content_ref = format!("{cid}-{time}.{ext}", cid=cid, time=now, ext=extension);
-
-                    let res = conn.exec_drop(q, params!{
-                        "id" => id,
-                        "time" => now,
-                        "content" => &content_ref, // store a ref to a file instead of the actual payload
-                        "ctype" => content_type,
-                        "author" => uid,
-                        "channel" => cid
-                    }).await;
-                    if let Ok(_) = res {
-                        // now save the data to disk
-                        match fs::File::create(content_ref).await {
-                            Ok(mut file) => {
-                                file.write_all(content.as_bytes()).await.expect("Failed to write, but the ref is saved");
-                                let msg = Message::new(id, now, content, content_type, uid, cid);
-                                Ok(Response::Row(msg))
-                            },
-                            Err(_) => {
-                                Ok(Response::Other("Saved ref but couldn't save file data".into()))
-                            },
-                        }
-                    } else {
-                        Ok(Response::Success)
-                    }
-                }
-            }
-        }
-    }
+                    Response::Empty
+                }
+            }
+            Err(e) => {
+                let msg = format!("db::Message::save_file {}", e);
+                Response::Other(msg)
+            }
+        }
+    }
+
+    pub async fn send(p: &Pool, content: &str, content_type: &str, cid: UBigInt, uid: UBigInt) -> Result<Response<Message>, SqlError> {
+        //! @returns on_success -> Ok(Response::Row)
+        //! @returns on_failure Err(SqlErr)
+        if content_type == "text/plain" {
+            match Self::insert_text(p, content, cid, uid).await {
+                Ok(pass) => Ok(pass),
+                Err(e) => common::try_error_passthrough(e)
+            }
+        } else {
+            if content.len() > 10_000_000 {
+                return Ok(Response::RestrictedInput("Large data not allowed".into()))
+            }
+            let extension = match content_type {
+                "image/png" => "png",
+                "image/jpeg" | "image/jpg" => "jpg",
+                "application/webm" => "webm",
+                "application/mp4" => "mp4",
+                "application/mp3" => "mp3",
+                _ => panic!("Bad file type sent to db layer {}", content_type)
+            };
+
+            let q = "INSERT INTO messages (id, time, content, content_type, author_id, channel_id)
+                VALUES(:id, :time, :fname, :ctype, :author, :channel)";
+
+            let id: u64 = rand::rngs::OsRng.next_u64();
+            let filename = format!("{}.{}", id, extension);
+            let now: i64 = SystemTime::now()
+                .duration_since(UNIX_EPOCH)
+                .expect("System time `NOW` failed")
+                .as_millis() as i64;
+
+            let mut conn = p.get_conn().await?;
+            let insert_res = conn.exec_drop(q, params!{
+                "id" => id,
+                "time" => now,
+                "ctype" => content_type,
+                "fname" => &filename,
+                "author" => uid,
+                "channel" => cid
+            }).await;
+            match insert_res {
+                Ok(_) => {
+                    let msg = Message::new(id, now, &filename, content_type, uid, cid);
+                    Ok(Message::save_file(filename, content.as_bytes(), msg).await)
+                },
+                Err(e) => common::try_error_passthrough(e)
+            }
+        }
+    }
 
     pub async fn get_time_range(p: &Pool, channel_id: UBigInt, start: BigInt, end: BigInt, limit: Option) -> Result, SqlError> {
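
The stand-alone sketch below is not part of the patch; it is a hypothetical illustration of why try_error_passthrough matters to callers of Message::send. The Response enum here is only a stand-in with the variants visible in this diff, and the status_for helper and its variant-to-status mapping are assumptions for illustration, not code from this repository. The point is that translating MySQL error 1452 into Response::RestrictedInput lets an HTTP layer answer with a 400 instead of a 500, while real server trouble still surfaces as a 500.

    // Hypothetical sketch only; the real Response<T> lives in db-lib and may differ.
    #[allow(dead_code)]
    enum Response<T> {
        Row(T),
        Empty,
        Success,
        RestrictedInput(String),
        Other(String),
    }

    // Assumed mapping from Response variants to HTTP status codes in a JSON API route.
    fn status_for<T>(resp: &Response<T>) -> u16 {
        match resp {
            Response::Row(_) | Response::Success | Response::Empty => 200,
            // try_error_passthrough turns MySQL error 1452 (foreign key constraint)
            // into RestrictedInput, i.e. a problem with the caller's input.
            Response::RestrictedInput(_) => 400,
            // Other covers server-side failures such as a failed file write in save_file.
            Response::Other(_) => 500,
        }
    }

    fn main() {
        let resp: Response<()> = Response::RestrictedInput("Invalid key value given".into());
        assert_eq!(status_for(&resp), 400);
    }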