Compare commits

2 commits

| Author | SHA1 | Date |
| --- | --- | --- |
| | e9e27e6feb | |
| | a5ecf145a9 | |

4 changed files with 76 additions and 27 deletions
```diff
@@ -1,6 +1,3 @@
-use data_encoding::HEXUPPER;
-use rand::RngCore;
-
 pub mod csrf;
 pub mod profile_pic;
 mod session;
@@ -13,14 +10,6 @@ pub use user::MaybeUser;
 pub use user::User;
 pub use user::UserRole;
 
-fn gen_token(token_length: usize) -> String {
-    let mut token_bytes = vec![0u8; token_length];
-
-    rand::thread_rng().fill_bytes(&mut token_bytes);
-
-    HEXUPPER.encode(&token_bytes)
-}
-
 /// A macro to check if a user has admin privileges.
 ///
 /// This macro checks whether the provided user has admin privileges by calling the `is_admin` method on it.
```
```diff
@@ -2,9 +2,9 @@ use chrono::Utc;
 use serde::{Deserialize, Serialize};
 use sqlx::FromRow;
 
-use crate::get_pg;
+use crate::{gen_random, get_pg};
 
-use super::{User, UserRole, gen_token};
+use super::{User, UserRole};
 
 #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
 pub struct Session {
@@ -50,7 +50,7 @@ impl Sessions for User {
         sqlx::query_as(
             "INSERT INTO user_session (token, \"user\", kind, name) VALUES ($1, $2, $3, $4) RETURNING *",
         )
-        .bind(gen_token(64))
+        .bind(gen_random(64))
         .bind(&self.username)
         .bind(SessionKind::API)
         .bind(name)
@@ -108,7 +108,7 @@ impl Sessions for User {
         sqlx::query_as(
             "INSERT INTO user_session (token, \"user\", kind) VALUES ($1, $2, $3) RETURNING *",
         )
-        .bind(gen_token(64))
+        .bind(gen_random(64))
         .bind(&self.username)
         .bind(SessionKind::USER)
         .fetch_one(get_pg!())
```
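The change above is purely the helper rename; nothing about the inserted row changes. As a reading aid, here is the API-session insert from the hunk again as an annotated excerpt, with the positional parameter mapping spelled out (the comments are illustrative and not part of the commit):

```rust
// Bind order maps positionally onto the placeholders:
// $1 = token, $2 = "user", $3 = kind, $4 = name.
sqlx::query_as(
    "INSERT INTO user_session (token, \"user\", kind, name) VALUES ($1, $2, $3, $4) RETURNING *",
)
.bind(gen_random(64))    // $1: 64 random bytes, hex-encoded to 128 chars
.bind(&self.username)    // $2: owning user ("user" is quoted; it is a reserved word in Postgres)
.bind(SessionKind::API)  // $3: session kind discriminator
.bind(name)              // $4: caller-supplied session name
```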
src/lib.rs (12 changed lines)

```diff
@@ -1,3 +1,5 @@
+use data_encoding::HEXUPPER;
+use rand::RngCore;
 use tokio::sync::OnceCell;
 
 pub mod auth;
@@ -32,7 +34,7 @@ macro_rules! get_pg {
             client
         } else {
             let client = sqlx::postgres::PgPoolOptions::new()
-                .max_connections(5)
+                .max_connections(12)
                 .connect(&std::env::var("DATABASE_URL").unwrap())
                 .await
                 .unwrap();
```
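The only functional change in the hunk above is the pool ceiling, which goes from 5 to 12 connections. A minimal standalone sketch of the same builder chain, assuming the `DATABASE_URL` environment variable the macro already relies on (the `unwrap` error handling mirrors the diff):

```rust
use sqlx::postgres::PgPoolOptions;

// Builds a Postgres pool capped at 12 concurrent connections,
// matching the new value inside get_pg!.
async fn build_pool() -> sqlx::PgPool {
    PgPoolOptions::new()
        .max_connections(12)
        .connect(&std::env::var("DATABASE_URL").unwrap())
        .await
        .unwrap()
}
```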
```diff
@@ -41,3 +43,11 @@ macro_rules! get_pg {
         }
     };
 }
+
+pub fn gen_random(token_length: usize) -> String {
+    let mut token_bytes = vec![0u8; token_length];
+
+    rand::thread_rng().fill_bytes(&mut token_bytes);
+
+    HEXUPPER.encode(&token_bytes)
+}
```
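The relocated helper is small enough to restate together with its call sites' expectations as assertions. A self-contained sketch (the `main` harness is illustrative; the function body is taken from the hunk above, minus blank lines):

```rust
use data_encoding::HEXUPPER;
use rand::RngCore;

// Fill `token_length` bytes from the thread-local RNG and hex-encode
// them, yielding a string of 2 * token_length uppercase hex characters.
pub fn gen_random(token_length: usize) -> String {
    let mut token_bytes = vec![0u8; token_length];
    rand::thread_rng().fill_bytes(&mut token_bytes);
    HEXUPPER.encode(&token_bytes)
}

fn main() {
    assert_eq!(gen_random(64).len(), 128); // session tokens
    assert_eq!(gen_random(32).len(), 64);  // multipart boundaries (see below)
}
```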
```diff
@@ -5,6 +5,8 @@ use rocket::http::Status;
 use rocket::response::Responder;
 use std::io::Cursor;
 
+use crate::gen_random;
+
 // TODO: Implement file based response
 
 pub struct DataResponse {
@@ -29,20 +31,62 @@ impl<'r> Responder<'r, 'static> for DataResponse {
     fn respond_to(self, req: &'r Request<'_>) -> rocket::response::Result<'static> {
         // Handle Range requests
         if let Some(range) = req.headers().get_one("Range") {
-            if let Some((start, end)) = parse_range_header(range, self.data.len()) {
-                let sliced_data = &self.data[start..=end];
+            // TODO : Reject invalid ranges
+            // TODO : Multiple ranges?
+            let ranges = range.split(",").collect::<Vec<_>>();
+            if ranges.len() == 1 {
+                if let Some((start, end)) = parse_range_header(range, self.data.len()) {
+                    let sliced_data = &self.data[start..=end];
+                    return Ok(Response::build()
+                        .header(Header::new(
+                            "Content-Range",
+                            format!("bytes {}-{}/{}", start, end, self.data.len()),
+                        ))
+                        .header(Header::new("Accept-Ranges", "bytes"))
+                        .header(Header::new("Content-Type", self.content_type.clone()))
+                        .status(Status::PartialContent)
+                        .streamed_body(Cursor::new(sliced_data.to_vec()))
+                        .finalize());
+                }
+            } else {
+                let mut multipart_body: Vec<u8> = Vec::new();
+                let boundary = gen_random(32);
+
+                for range in ranges {
+                    if let Some((start, end)) = parse_range_header(range, self.data.len()) {
+                        let sliced_data = &self.data[start..=end];
+
+                        let mut body: Vec<u8> = Vec::new();
+
+                        body.extend_from_slice(format!("--{boundary}\r\n").as_bytes());
+                        body.extend_from_slice(
+                            format!(
+                                "Content-Range: bytes {}-{}/{}\r\n",
+                                start,
+                                end,
+                                self.data.len()
+                            )
+                            .as_bytes(),
+                        );
+                        body.extend_from_slice(
+                            format!("Content-Type: {}\r\n\r\n", self.content_type.clone())
+                                .as_bytes(),
+                        );
+                        body.extend_from_slice(sliced_data);
+                        body.extend_from_slice("\r\n".as_bytes());
+
+                        multipart_body.extend_from_slice(&body);
+                    }
+                }
+
+                multipart_body.extend_from_slice(format!("--{boundary}--\r\n").as_bytes());
+
                 return Ok(Response::build()
-                    .header(Header::new(
-                        "Content-Range",
-                        format!("bytes {}-{}/{}", start, end, self.data.len()),
-                    ))
                     .header(Header::new("Accept-Ranges", "bytes"))
-                    .header(Header::new("Content-Type", self.content_type.clone()))
+                    .header(Header::new(
+                        "Content-Type",
+                        format!("multipart/byteranges; boundary={boundary}"),
+                    ))
                     .status(Status::PartialContent)
-                    .streamed_body(Cursor::new(sliced_data.to_vec()))
+                    .streamed_body(Cursor::new(multipart_body.to_vec()))
                     .finalize());
             }
         }
```
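To make the assembled wire format concrete: for a request with `Range: bytes=0-3,8-11` against a 16-byte `text/plain` body, the loop above produces a payload shaped like the following sketch (the boundary is shortened to `B` for readability; the real one is 64 hex characters from `gen_random(32)`):

```rust
fn main() {
    // Illustrative multipart/byteranges body for b"abcdefghijklmnop";
    // each part carries its own Content-Range and Content-Type headers,
    // and the whole body ends with the closing "--B--" marker.
    let expected = "--B\r\n\
                    Content-Range: bytes 0-3/16\r\n\
                    Content-Type: text/plain\r\n\
                    \r\n\
                    abcd\r\n\
                    --B\r\n\
                    Content-Range: bytes 8-11/16\r\n\
                    Content-Type: text/plain\r\n\
                    \r\n\
                    ijkl\r\n\
                    --B--\r\n";
    println!("{expected}");
}
```

One caveat visible in the diff: after `range.split(",")`, only the first element still carries the `bytes=` prefix that `parse_range_header` strips with `&range[6..]`, so later elements such as `8-11` presumably fail to parse and are silently skipped, which the `// TODO : Reject invalid ranges` comment may be tracking.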
```diff
@@ -69,10 +113,16 @@ fn parse_range_header(range: &str, total_len: usize) -> Option<(usize, usize)> {
     }
 
     let range = &range[6..];
+
+    if range.starts_with('-') {
+        let neg: usize = range.trim_start_matches('-').parse().ok()?;
+        return Some((total_len - neg, total_len));
+    }
+
     let parts: Vec<&str> = range.split('-').collect();
 
     if parts.len() != 2 {
-        return None;
+        return Some((parts[0].parse().ok()?, total_len));
     }
 
     let start = parts[0].parse::<usize>().ok();
```
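A hedged, self-contained reading of the parser after this change. The prefix guard and the two-part `M-N` tail are not fully visible in the hunk, so this copy stubs them out; only the two branches the diff actually touches are asserted (the function is assembled from the hunks, not copied from the repository):

```rust
/// Standalone sketch of parse_range_header assembled from the hunk above.
/// The two-part "bytes=M-N" branch is elided because its tail is not
/// visible in this diff; this copy returns None for it.
fn parse_range_header(range: &str, total_len: usize) -> Option<(usize, usize)> {
    // Assumed prefix guard, implied by the `&range[6..]` slice below.
    if !range.starts_with("bytes=") {
        return None;
    }
    let range = &range[6..];

    // New: suffix form "bytes=-N" selects the last N bytes. Note the
    // inclusive end equal to total_len: slicing data[start..=end] upstream
    // would read one past the buffer, which the diff's "Reject invalid
    // ranges" TODO presumably anticipates.
    if range.starts_with('-') {
        let neg: usize = range.trim_start_matches('-').parse().ok()?;
        return Some((total_len - neg, total_len));
    }

    let parts: Vec<&str> = range.split('-').collect();

    // New: a one-part value like "bytes=100" (no dash) now becomes an
    // open-ended range to the end of the body instead of returning None.
    if parts.len() != 2 {
        return Some((parts[0].parse().ok()?, total_len));
    }

    None // "M-N" handling continues in the repository, not shown here
}

fn main() {
    assert_eq!(parse_range_header("bytes=-500", 1000), Some((500, 1000)));
    assert_eq!(parse_range_header("bytes=100", 1000), Some((100, 1000)));
}
```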