- removing dblib

This commit is contained in:
shockrah 2021-10-04 12:16:21 -07:00
parent 85b32d5e96
commit 87b373bde8
7 changed files with 0 additions and 336 deletions

View File

@ -1,11 +0,0 @@
[package]
name = "dblib"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# serde + serde_json back the JSON-on-disk key/video store (serde_json::{from_reader,to_writer}).
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0" }
# rocket 0.4 supplies the request-guard types (FromRequest, Outcome, Status) used by request.rs.
rocket = "0.4"

View File

@ -1,4 +0,0 @@
use std::error::Error;
/// Crate-wide result alias: all fallible APIs box their error behind `dyn Error`
/// so `?` can mix io, serde_json, and other error types freely.
pub type Result<T> = ::std::result::Result<T, Box<dyn Error>>;

View File

@ -1,68 +0,0 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use std::io::BufWriter;
use crate::{err, DB, dev_urandom, KeyStore};
use crate::{ApiKey, filename};
impl KeyStore for DB {
    /// Create a new API key entry in the data store named by `filename()`.
    /// The key value is generated from /dev/urandom (unix-only) and returned
    /// to the caller after the store has been written back to disk.
    fn new_entry(name: String) -> err::Result<String> {
        let file = File::open(filename())?;
        let reader = BufReader::new(file);
        let mut data: DB = serde_json::from_reader(reader)?;
        // Generate fresh key material; propagate I/O failure instead of panicking.
        let key = dev_urandom()?;
        data.keys.push(ApiKey { key: key.clone(), name });
        // BUG FIX: File::open yields a read-only handle, so the original write
        // could never succeed. File::create truncates and opens for writing.
        let file = File::create(filename())?;
        let writer = BufWriter::new(file);
        serde_json::to_writer(writer, &data)?;
        Ok(key)
    }
    /// Remove the API key entry owned by `name`.
    /// Returns Ok(true) when an entry was found and removed, Ok(false) otherwise.
    fn remove_entry(name: String) -> err::Result<bool> {
        let file = File::open(filename())?;
        let reader = BufReader::new(file);
        let mut data: DB = serde_json::from_reader(reader)?;
        match data.keys.iter().position(|item| item.name == name) {
            Some(idx) => {
                data.keys.remove(idx);
                // BUG FIX: the original mutated the in-memory copy but never
                // persisted it, so removals were silently lost on disk.
                let file = File::create(filename())?;
                let writer = BufWriter::new(file);
                serde_json::to_writer(writer, &data)?;
                Ok(true)
            }
            None => Ok(false),
        }
    }
    /// Look up the API key owned by `name`, if present.
    fn get_entry(name: String) -> err::Result<Option<String>> {
        let file = File::open(filename())?;
        let reader = BufReader::new(file);
        let data: DB = serde_json::from_reader(reader)?;
        Ok(data
            .keys
            .iter()
            .find(|item| item.name == name)
            .map(|item| item.key.clone()))
    }
    /// Generate a new empty data store with the given filename.
    /// NOTE: The filename must include the extension(programmatically)
    fn new_store(new_filename: String) -> err::Result<()> {
        let empty = DB { keys: vec![], videos: HashMap::new() };
        let writer = BufWriter::new(File::create(new_filename)?);
        serde_json::to_writer(writer, &empty)?;
        Ok(())
    }
}

View File

@ -1,80 +0,0 @@
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
use std::env;
use serde::{Serialize, Deserialize};
pub mod request;
pub mod keystore;
pub mod videostore;
pub mod err;
use request::ApiKey;
/// Resolve the path of the JSON data store: the CLIP_KEY_STORE environment
/// variable when set, otherwise the default "store.json".
fn filename() -> String {
    env::var("CLIP_KEY_STORE").unwrap_or_else(|_| "store.json".to_string())
}
/// Read 48 random bytes from /dev/urandom and return them hex-encoded
/// (96 uppercase hex characters). Unix-only: relies on the /dev/urandom
/// device existing; errors are surfaced as `std::io::Error`.
fn dev_urandom() -> std::io::Result<String> {
    let mut file = File::open("/dev/urandom")?;
    let mut buf: [u8; 48] = [0; 48];
    file.read_exact(&mut buf)?;
    // Preallocate: every byte expands to exactly two hex digits.
    let mut output = String::with_capacity(buf.len() * 2);
    for byte in buf.iter() {
        output.push_str(&format!("{:02X}", byte));
    }
    Ok(output)
}
/// Short 8-character identifier: the leading hex digits of a fresh
/// /dev/urandom read.
fn uid() -> std::io::Result<String> {
    dev_urandom().map(|hex| hex[..8].to_string())
}
/// Public view of a video record: the stored entry data together with its id.
#[derive(Serialize, Deserialize)]
pub struct VideoMeta {
    // Store key for this video (generated by uid(): 8 hex characters).
    pub id: String,
    pub name: String,
    // None when no description was supplied.
    pub desc: Option<String>,
    // File extension, kept separately from the display name.
    pub ext: String,
}
/// On-disk representation of a video row; its id lives as the map key
/// in `DB::videos`, so it is not duplicated here.
#[derive(Serialize, Deserialize)]
struct VideoMetaEntry {
    pub name: String,
    pub desc: Option<String>,
    pub ext: String
}
/// Root of the on-disk JSON data store (path resolved by `filename()`).
#[derive(Serialize, Deserialize)]
pub struct DB {
    // Registered API keys (see the KeyStore impl).
    keys: Vec<ApiKey>,
    // We map the video meta id to its actual body content
    videos: HashMap<String, VideoMetaEntry>
}
// TODO: add some proper lifetime management here and some docs
/// CRUD operations over the API-key portion of the JSON store.
pub trait KeyStore {
    /// Create and persist a new key for `name`, returning the key value.
    fn new_entry(name: String) -> err::Result<String>;
    /// Remove the entry owned by `name`; Ok(true) when one existed.
    fn remove_entry(name: String) -> err::Result<bool>;
    /// Look up the key owned by `name`, if any.
    fn get_entry(name: String) -> err::Result<Option<String>>;
    /// Create a fresh, empty store file at `new_filename`.
    fn new_store(new_filename: String) -> err::Result<()>;
}
/// CRUD operations over the video-metadata portion of the JSON store.
pub trait VideoStore {
    /// Insert a new video entry under a freshly generated id.
    fn new_video(name: &str, desc: &str, ext: &str) -> err::Result<()>;
    /// Delete the entry with this id (no-op when the id is absent).
    fn del_video(id: String) -> err::Result<()>;
    /// First entry whose name contains `name` as a substring.
    fn get_video_by_name(name: &str) -> err::Result<Option<VideoMeta>>;
    /// Exact lookup by store id.
    fn get_video_by_id(id: &str) -> err::Result<Option<VideoMeta>>;
    /// Replace the entry's name; Ok(false) when the id is unknown.
    fn rename_video(id: String, new: &str) -> err::Result<bool>;
    /// Replace the entry's description; Ok(false) when the id is unknown.
    fn redescribe_video(id: String, new: &str) -> err::Result<bool>;
}

View File

@ -1,51 +0,0 @@
use crate::{DB, KeyStore};
use rocket::request::{self, Request, FromRequest};
use rocket::http::Status;
use rocket::Outcome;
use serde::{Serialize, Deserialize};
/// A (name, key) credential pair as stored in the DB and attached to
/// authenticated requests by the FromRequest guard below.
#[derive(Clone, Serialize, Deserialize)]
pub struct ApiKey {
    // Owner of the key
    pub name: String,
    // The secret value of the key itself
    pub key: String,
}
/// Reasons the API-key request guard can reject a request.
#[derive(Debug)]
pub enum ApiKeyError {
    // NOTE(review): never constructed in the visible code — presumably
    // reserved for duplicate-header handling; confirm before removing.
    BadCount,
    // A required header (x-api-key / x-api-name) was absent.
    Missing,
    // The presented key did not match the stored one, or no entry exists.
    Invalid
}
/// Check the presented key/name pair against the persisted key store.
/// BUG FIX: the original `.unwrap()` panicked the request worker on any
/// store read/parse error; a storage failure now rejects the request with
/// 401 instead of crashing.
fn verify_api_key(key: &str, name: &str) -> request::Outcome<ApiKey, ApiKeyError> {
    match DB::get_entry(name.to_string()) {
        // Name exists and the secret matches: attach the credentials.
        Ok(Some(db_key)) if db_key == key => Outcome::Success(ApiKey {
            name: name.to_string(),
            key: key.to_string(),
        }),
        // Wrong key, unknown name, or a store failure all map to 401.
        Ok(_) | Err(_) => Outcome::Failure((Status::Unauthorized, ApiKeyError::Invalid)),
    }
}
// Rocket 0.4 request guard: authenticates a request from its
// x-api-key / x-api-name headers before the route handler runs.
impl<'a, 'r> FromRequest<'a, 'r> for ApiKey {
    type Error = ApiKeyError;
    fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {
        // get_one returns the first value of the header, if present.
        let key = request.headers().get_one("x-api-key");
        let name = request.headers().get_one("x-api-name");
        match (key, name) {
            // Both headers present: defer to the store lookup.
            (Some(key), Some(name)) => verify_api_key(key, name),
            // Either header missing -> 400 Bad Request.
            _ => Outcome::Failure((Status::BadRequest, ApiKeyError::Missing))
        }
    }
}

View File

@ -1,119 +0,0 @@
use std::fs::File;
use std::io::{BufReader, BufWriter};
use crate::{filename, err, DB, uid};
use crate::{VideoStore, VideoMeta, VideoMetaEntry};
impl VideoStore for DB {
fn new_video(name: &str, desc: &str, ext: &str) -> err::Result<()> {
let fname = filename();
let file = File::open(&fname)?;
let reader = BufReader::new(file);
// Messing with data we care about
let id = uid()?;
let mut data: DB = serde_json::from_reader(reader)?;
let entry = VideoMetaEntry {
name: name.to_string(),
ext: ext.to_string(),
desc: if desc.len() != 0 { None } else { Some(desc.to_string()) }
};
// update in mem-cache
data.videos.insert(id, entry);
// Update the meta storage
let file = File::open(&fname)?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &data)?;
// TODO: Insert a new file into the data file bucket
Ok(())
}
fn del_video(id: String) -> err::Result<()> {
let fname = filename();
let file = File::open(&fname)?;
let reader = BufReader::new(file);
let mut data: DB = serde_json::from_reader(reader)?;
data.videos.remove(&id);
let file = File::open(&fname)?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &data)?;
Ok(())
}
fn get_video_by_name(name: &str) -> err::Result<Option<VideoMeta>> {
let fname = filename();
let file = File::open(&fname)?;
let reader = BufReader::new(file);
let data: DB = serde_json::from_reader(reader)?;
// TODO: smarter searching using https://www.objc.io/blog/2020/08/18/fuzzy-search/
let row = data.videos.iter()
.find(|(_, entry)| entry.name.contains(name));
// >:|
match row {
Some((id, entry)) => {
Ok(Some(VideoMeta {
id: id.to_string(),
name: entry.name.clone(),
desc: entry.desc.clone(),
ext: entry.ext.clone(),
}))
},
None => Ok(None)
}
}
fn get_video_by_id(id: &str) -> err::Result<Option<VideoMeta>> {
let fname = filename();
let file = File::open(&fname)?;
let reader = BufReader::new(file);
let data: DB = serde_json::from_reader(reader)?;
match data.videos.get(id) {
Some(entry) => Ok(Some(VideoMeta {
id: id.to_string(),
name: entry.name.clone(),
ext: entry.ext.clone(),
desc: entry.desc.clone()
})),
None => Ok(None)
}
}
fn rename_video(id: String, new: &str) -> err::Result<bool> {
let fname = filename();
let file = File::open(&fname)?;
let reader = BufReader::new(file);
let mut data: DB = serde_json::from_reader(reader)?;
match data.videos.get_mut(&id) {
Some(mut entry) => {
entry.name = new.to_string();
let file = File::open(&fname)?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &data)?;
Ok(true)
},
None => Ok(false)
}
}
fn redescribe_video(id: String, new: &str) -> err::Result<bool> {
let fname = filename();
let file = File::open(&fname)?;
let reader = BufReader::new(file);
let mut data: DB = serde_json::from_reader(reader)?;
match data.videos.get_mut(&id) {
Some(mut entry) => {
entry.desc = Some(new.to_string());
let file = File::open(&fname)?;
let writer = BufWriter::new(file);
serde_json::to_writer(writer, &data)?;
Ok(true)
},
None => Ok(false)
}
}
}

View File

@ -1,3 +0,0 @@
Signature: 8a477f597d28d172789f06886806bc55
# This file is a cache directory tag created by cargo.
# For information about cache directory tags see https://bford.info/cachedir/