Repository: https://github.com/alvierahman90/rabbit.git
commit 088d3bc0a0 (parent c3ae3e91e4)
start creating server endpoints, database boilerplate

.gitignore (vendored, 1 line changed)
@@ -4,3 +4,4 @@ Cargo.lock
bin/
pkg/
wasm-pack.log
.env

Cargo.toml
@@ -6,6 +6,10 @@ edition = "2018"

[dependencies]
chrono = { version = "0.4.31", features = [ "clock", "serde" ] }
diesel = { version = "2.1.4", features = ["postgres", "chrono" ] }
dotenvy = "0.15"
serde = { version = "1.0", features = [ "derive" ] }
serde_json = { version = "1.0", features = [ "std" ] }
serde_with = { version = "3.4.0", features = [ "std", "chrono_0_4", "json" ] }

rocket = { version = "0.5", features = [ "json" ] }

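The new dependencies cover the whole stack: rocket for the HTTP server, diesel and dotenvy for Postgres access, and chrono/serde/serde_with for (de)serialising models. No main.rs appears in this diff, but a minimal Rocket 0.5 entry point tying rocket and dotenvy together might look like the following sketch (the route and names are illustrative, not taken from the repository):

// Hypothetical sketch only; not part of this commit.
use rocket::{get, launch, routes};

#[get("/health")]
fn health() -> &'static str {
    "ok"
}

#[launch]
fn rocket() -> _ {
    dotenvy::dotenv().ok(); // load DATABASE_URL and friends from .env
    rocket::build().mount("/", routes![health])
}
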
compose.yml (new file, 27 lines)
@@ -0,0 +1,27 @@
services:
  db:
    image: postgres
    #volumes:
    #- ./dbdata:/var/lib/postgresql/data
    ports:
      - 5432:5432
    environment:
      POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
      POSTGRES_USER: ${POSTGRES_USERNAME}
      POSTGRES_DB: ${POSTGRES_DB}
    healthcheck:
      test: [ "CMD", "pg_isready" ]
      interval: 10s
      timeout: 5s
      retries: 5

  pgweb:
    image: sosedoff/pgweb
    ports:
      - 8081:8081
    environment:
      PGWEB_DATABASE_URL: postgres://${POSTGRES_USERNAME}:${POSTGRES_PASSWORD}@db/${POSTGRES_DB}?sslmode=disable
    depends_on:
      db:
        condition: service_healthy
        restart: true

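Both services read POSTGRES_PASSWORD, POSTGRES_USERNAME, and POSTGRES_DB from the host environment; Compose will also pick them up from a .env file, which is presumably why .env is added to .gitignore above. The Rust side then needs the same credentials a second time as a DATABASE_URL connection string (for example postgres://USER:PASSWORD@localhost:5432/DB, with the placeholders filled in), since the db service publishes port 5432 to the host.
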
diesel.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli

[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId"]

[migrations_directory]
dir = "migrations"

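With this config in place the usual Diesel CLI workflow applies: diesel setup creates the database and runs pending migrations, diesel migration run applies the migrations added below, and the [print_schema] section makes the CLI regenerate src/schema.rs (the generated file at the end of this commit) whenever migrations are run.
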
migrations/.keep (new empty file)

migrations/00000000000000_diesel_initial_setup/down.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.

DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

migrations/00000000000000_diesel_initial_setup/up.sql (new file, 36 lines)
@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.




-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
    EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
                    FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
    IF (
        NEW IS DISTINCT FROM OLD AND
        NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
    ) THEN
        NEW.updated_at := current_timestamp;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

migrations/2023-12-22-135046_create_users/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE users;

migrations/2023-12-22-135046_create_users/up.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
CREATE TABLE users (
  id SERIAL PRIMARY KEY,
  name VARCHAR NOT NULL,
  email VARCHAR NOT NULL
);

migrations/2023-12-22-135650_create_categories/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE categories;

migrations/2023-12-22-135650_create_categories/up.sql (new file, 5 lines)
@@ -0,0 +1,5 @@
CREATE TABLE categories (
  id SERIAL PRIMARY KEY,
  name VARCHAR NOT NULL,
  user_id SERIAL REFERENCES users(id)
);

migrations/2023-12-22-135844_create_series/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE series;

migrations/2023-12-22-135844_create_series/up.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
CREATE TABLE series (
  id SERIAL PRIMARY KEY,
  name VARCHAR NOT NULL,
  repeat INTEGER NOT NULL,
  good BOOLEAN NOT NULL,
  category_id SERIAL REFERENCES categories(id)
);

migrations/2023-12-22-141631_create_series_points/down.sql (new file, 1 line)
@@ -0,0 +1 @@
DROP TABLE series_points;

migrations/2023-12-22-141631_create_series_points/up.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
CREATE TABLE series_points (
  id SERIAL PRIMARY KEY,
  timestamp TIMESTAMP WITHOUT TIME ZONE NOT NULL,
  value INTEGER NOT NULL,
  series_id SERIAL REFERENCES series(id)
);

README.md
@@ -4,10 +4,8 @@

## Why

- Cross platform (just a web app)
- Simple
- Flexible (tracks whatever)
- No sign up required
- Can sign up if you want
- Self-hostable
- CalDAV reminders and calendars to remind you to build habits
- Rust :crab: :muscle:

src/db.rs (30 lines changed)
@@ -1,4 +1,5 @@
pub mod json;
//pub mod json;
pub mod pg;

use crate::models::*;
use serde_json;
@@ -9,6 +10,8 @@ pub enum Error {
    Generic(String),
    FsIo(String),
    Json(String),
    PgDb(String),
    Parsing(String),
}

impl From<io::Error> for Error {
@@ -23,19 +26,22 @@ impl From<serde_json::Error> for Error {
    }
}

impl From<diesel::result::Error> for Error {
    fn from(e: diesel::result::Error) -> Self {
        Self::PgDb(e.to_string())
    }
}

pub trait Storage {
    fn new() -> Self;
    fn add_category(&mut self, category: NewCategory) -> Result<i32, Error>;
    fn add_series(&mut self, category_id: i32, series: NewSeries) -> Result<i32, Error>;
    fn add_series_point(
        &mut self,
        category_id: i32,
        series_id: i32,
        series_point: NewSeriesPoint,
    ) -> Result<i32, Error>;
    //fn add_user(&mut self, id: i32, user: NewUser) -> Result<User, Error>;
    fn get_category(&self, id: i32) -> Option<Category>;
    fn get_series(&self, category_id: i32, series_id: i32) -> Option<Series>;
    //fn get_user(&self, id: i32) -> Option<User>;
    fn add_series(&mut self, series: NewSeries) -> Result<i32, Error>;
    fn add_series_point(&mut self, series_point: NewSeriesPoint) -> Result<i32, Error>;
    fn add_user(&mut self, user: NewUser) -> Result<i32, Error>;
    fn get_categories(&mut self, filter: CategoryFilter) -> Result<Vec<Category>, Error>;
    fn get_series(&mut self, filter: SeriesFilter) -> Result<Vec<Series>, Error>;
    fn get_series_points(&mut self, filter: SeriesPointFilter) -> Result<Vec<SeriesPoint>, Error>;
    fn get_users(&mut self, filter: UserFilter) -> Result<Vec<User>, Error>;
    //fn update_category(&mut self, id: i32, changeset: CategoryChangeset) -> Result<(), Error>;
    //fn update_series(&mut self, id: i32, changeset: SeriesChangeset) -> Result<(), Error>;
    //fn update_user(&mut self, id: i32, changeset: UserChangeset) -> Result<(), Error>;

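The trait has shifted from id-based lookups to filter structs, so a caller now builds a filter and lets the backend turn it into a query. A rough usage sketch, assuming the PgDb backend added below (the function and values are illustrative, not repository code):

// Illustrative only; not code from the repository.
use crate::db::{pg::PgDb, Error, Storage};
use crate::models::{User, UserFilter};

fn example_lookup() -> Result<Vec<User>, Error> {
    let mut db = PgDb::new();
    db.get_users(UserFilter {
        id: None,
        name: Some("alvie".to_owned()),
        email: None,
    })
}
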
src/db/json.rs
@@ -6,15 +6,6 @@ pub struct JsonDb {

/// JsonDb is single user.
impl JsonDb {
    pub fn new(value: Option<String>) -> Self {
        match value {
            Some(value) => JsonDb { value },
            None => JsonDb {
                value: String::from("[]"),
            },
        }
    }

    fn load(&self) -> Result<Vec<Category>, Error> {
        match serde_json::from_str::<Vec<Category>>(&self.value) {
            Ok(d) => Ok(d),
@@ -35,6 +26,12 @@ impl JsonDb {
}

impl Storage for JsonDb {
    fn new() -> Self {
        JsonDb {
            value: String::from("[]"),
        }
    }

    fn add_category(&mut self, category: NewCategory) -> Result<i32, Error> {
        let mut max_id = 1;
        let mut data = self.load()?;

src/db/pg.rs (new file, 181 lines)
@@ -0,0 +1,181 @@
use super::{Error, Storage};
use crate::{models::*, schema};
use diesel::pg::PgConnection;
use diesel::prelude::*;
use dotenvy::dotenv;
use std::env;

pub struct PgDb {
    conn: PgConnection,
}

impl Storage for PgDb {
    fn new() -> Self {
        dotenv().ok();

        let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
        Self {
            conn: PgConnection::establish(&database_url)
                .unwrap_or_else(|_| panic!("Error connecting to {}", database_url)),
        }
    }

    fn add_user(&mut self, user: NewUser) -> Result<i32, Error> {
        use schema::users;
        let ret = diesel::insert_into(users::table)
            .values(&user)
            .returning(users::dsl::id)
            .get_result(&mut self.conn);

        match ret {
            Ok(val) => Ok(val),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn add_category(&mut self, category: NewCategory) -> Result<i32, Error> {
        use schema::categories;
        let ret = diesel::insert_into(categories::table)
            .values(&category)
            .returning(categories::dsl::id)
            .get_result(&mut self.conn);

        match ret {
            Ok(val) => Ok(val),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn add_series(&mut self, series: NewSeries) -> Result<i32, Error> {
        use schema::series;
        let ret = diesel::insert_into(series::table)
            .values(&series)
            .returning(series::dsl::id)
            .get_result(&mut self.conn);

        match ret {
            Ok(val) => Ok(val),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn add_series_point(&mut self, series_point: NewSeriesPoint) -> Result<i32, Error> {
        use schema::series_points;
        let ret = diesel::insert_into(series_points::table)
            .values(&series_point)
            .returning(series_points::dsl::id)
            .get_result(&mut self.conn);

        match ret {
            Ok(val) => Ok(val),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn get_categories(&mut self, filter: CategoryFilter) -> Result<Vec<Category>, Error> {
        use schema::categories;

        let mut query = categories::table.into_boxed();

        if let Some(val) = filter.id {
            query = query.filter(categories::id.eq(val));
        }

        if let Some(val) = filter.name {
            query = query.filter(categories::name.eq(val));
        }

        if let Some(val) = filter.user_id {
            query = query.filter(categories::user_id.eq(val));
        }

        match query.select(Category::as_select()).load(&mut self.conn) {
            Ok(q) => Ok(q),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn get_series(&mut self, filter: SeriesFilter) -> Result<Vec<Series>, Error> {
        use schema::series;

        let mut query = series::table.into_boxed();

        if let Some(val) = filter.id {
            query = query.filter(series::id.eq(val));
        }

        if let Some(val) = filter.name {
            query = query.filter(series::name.eq(val));
        }

        if let Some(val) = filter.repeat {
            query = query.filter(series::repeat.eq(val));
        }

        if let Some(val) = filter.good {
            query = query.filter(series::good.eq(val));
        }

        if let Some(val) = filter.category_id {
            query = query.filter(series::category_id.eq(val));
        }

        match query.select(Series::as_select()).load(&mut self.conn) {
            Ok(q) => Ok(q),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn get_series_points(&mut self, filter: SeriesPointFilter) -> Result<Vec<SeriesPoint>, Error> {
        use schema::series_points;

        let mut query = series_points::table.into_boxed();

        if let Some(val) = filter.id {
            query = query.filter(series_points::id.eq(val));
        }

        if let Some(val) = filter.timestamp_millis {
            match chrono::NaiveDateTime::from_timestamp_millis(val) {
                Some(val) => query = query.filter(series_points::timestamp.eq(val)),
                _ => return Err(Error::Parsing("Failed to parse timestamp".to_owned())),
            }
        }

        if let Some(val) = filter.value {
            query = query.filter(series_points::value.eq(val));
        }

        if let Some(val) = filter.series_id {
            query = query.filter(series_points::series_id.eq(val));
        }

        match query.select(SeriesPoint::as_select()).load(&mut self.conn) {
            Ok(q) => Ok(q),
            Err(e) => Err(Error::from(e)),
        }
    }

    fn get_users(&mut self, filter: UserFilter) -> Result<Vec<User>, Error> {
        use schema::users;

        let mut query = users::table.into_boxed();

        if let Some(val) = filter.id {
            query = query.filter(users::id.eq(val));
        }

        if let Some(val) = filter.name {
            query = query.filter(users::name.eq(val));
        }

        if let Some(val) = filter.email {
            query = query.filter(users::email.eq(val));
        }

        match query.select(User::as_select()).load(&mut self.conn) {
            Ok(q) => Ok(q),
            Err(e) => Err(Error::from(e)),
        }
    }
}

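Each method above ends with the same match that funnels diesel::result::Error through the From impl added in src/db.rs. Since that impl exists, every one of these blocks could collapse to a single map_err, for example the tail of add_user:

diesel::insert_into(users::table)
    .values(&user)
    .returning(users::dsl::id)
    .get_result(&mut self.conn)
    .map_err(Error::from)
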
src/lib.rs
@@ -1,2 +1,3 @@
pub mod db;
pub mod models;
pub mod schema;

src/models/mod.rs
@@ -1,11 +1,9 @@
mod category;
mod series;
mod series_point;
mod series_type;
mod user;

pub use category::*;
pub use series::*;
pub use series_point::*;
pub use series_type::*;
pub use user::*;

src/models/category.rs
@@ -1,33 +1,34 @@
use crate::models::Series;
use crate::schema::categories;
use diesel;
use diesel::prelude::*;
use rocket::form::FromForm;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, PartialEq)]
#[diesel(table_name = categories)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Category {
    pub id: i32,
    pub name: String,
    pub series: Vec<Series>,
    pub user_id: i32,
}

impl Category {
    pub fn new(id: i32, new: NewCategory) -> Category {
        Category {
            id,
            name: new.name,
            series: new.series,
        }
    }
impl Category {}

    pub fn add_series(&mut self, series: Series) {
        self.series.push(series);
    }
#[derive(FromForm)]
pub struct CategoryFilter {
    pub id: Option<i32>,
    pub name: Option<String>,
    pub user_id: Option<i32>,
}

pub struct CategoryChangeset {
    pub name: Option<String>,
    pub series: Option<Vec<Series>>,
}

#[derive(Insertable, Deserialize, FromForm)]
#[diesel(table_name = categories)]
pub struct NewCategory {
    pub name: String,
    pub series: Vec<Series>,
    pub user_id: i32,
}

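CategoryFilter deriving FromForm suggests the filters are meant to be bound straight from query strings. A hypothetical Rocket handler (not part of this commit; names, route, and the lack of shared state are all assumptions) could forward one to the Storage trait like this:

// Hypothetical endpoint, not repository code.
use crate::db::{pg::PgDb, Storage};
use crate::models::{Category, CategoryFilter};
use rocket::serde::json::Json;

#[rocket::get("/categories?<filter..>")]
fn list_categories(filter: CategoryFilter) -> Option<Json<Vec<Category>>> {
    let mut db = PgDb::new();
    db.get_categories(filter).ok().map(Json)
}
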
src/models/series.rs
@@ -1,45 +1,45 @@
use super::series_point::SeriesPoint;
use chrono;
use crate::schema::series;
use diesel;
use diesel::prelude::*;
use rocket::form::FromForm;
use serde::{Deserialize, Serialize};
use serde_with;

#[serde_with::serde_as]
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, PartialEq)]
#[diesel(table_name = series)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Series {
    pub id: i32,
    pub name: String,
    #[serde_as(as = "serde_with::DurationSeconds<i64>")]
    pub repeat: chrono::Duration,
    pub repeat: i32,
    pub good: bool,
    pub points: Vec<SeriesPoint>,
    pub category_id: i32,
}

impl Series {
    pub fn new(id: i32, series: NewSeries) -> Series {
        Series {
            id,
            name: series.name,
            repeat: series.repeat,
            good: series.good,
            points: series.points,
        }
    }

    pub fn add_point(&mut self, point: SeriesPoint) {
        self.points.push(point);
    }
#[derive(FromForm)]
pub struct SeriesFilter {
    pub id: Option<i32>,
    pub name: Option<String>,
    pub repeat: Option<i32>,
    pub good: Option<bool>,
    pub category_id: Option<i32>,
}

#[derive(AsChangeset)]
#[diesel(table_name = series)]
pub struct SeriesChangeset {
    pub name: Option<String>,
    pub repeat: Option<chrono::Duration>,
    pub repeat: Option<i32>,
    pub good: Option<bool>,
    pub points: Option<Vec<SeriesPoint>>,
    pub category_id: Option<i32>,
}

#[derive(Insertable, Deserialize)]
#[diesel(table_name = series)]
pub struct NewSeries {
    pub name: String,
    pub repeat: chrono::Duration,
    pub repeat: i32,
    pub good: bool,
    pub points: Vec<SeriesPoint>,
    pub category_id: i32,
}

src/models/series_point.rs
@@ -1,25 +1,32 @@
use super::series_type::SeriesType;
use crate::schema::series_points;
use chrono;
use diesel;
use diesel::prelude::*;
use rocket::form::FromForm;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, PartialEq)]
#[diesel(table_name = series_points)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct SeriesPoint {
    pub id: i32,
    pub timestamp: chrono::NaiveDateTime,
    pub value: SeriesType,
    pub value: i32,
    pub series_id: i32,
}

impl SeriesPoint {
    pub fn new(id: i32, new: NewSeriesPoint) -> SeriesPoint {
        SeriesPoint {
            id,
            timestamp: new.timestamp,
            value: new.value,
        }
    }
#[derive(FromForm)]
pub struct SeriesPointFilter {
    pub id: Option<i32>,
    pub timestamp_millis: Option<i64>,
    pub value: Option<i32>,
    pub series_id: Option<i32>,
}

#[derive(Insertable, Deserialize)]
#[diesel(table_name = series_points)]
pub struct NewSeriesPoint {
    pub timestamp: chrono::NaiveDateTime,
    pub value: SeriesType,
    pub value: i32,
    pub series_id: i32,
}

src/models/series_type.rs (file deleted)
@@ -1,9 +0,0 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
pub enum SeriesType {
    Bool(bool),
    Count(u32),
    Signed(i32),
    Float(f32),
}

src/models/user.rs
@@ -1,17 +1,34 @@
use crate::schema::users;
use diesel;
use diesel::prelude::*;
use rocket::form::FromForm;
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Queryable, Selectable, Identifiable, PartialEq)]
#[diesel(table_name = users)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct User {
    pub id: i32,
    pub name: String,
    pub email: String,
}

#[derive(FromForm)]
pub struct UserFilter {
    pub id: Option<i32>,
    pub name: Option<String>,
    pub email: Option<String>,
}

#[derive(AsChangeset)]
#[diesel(table_name = users)]
pub struct UserChangeset {
    pub name: Option<String>,
    pub email: Option<String>,
}

#[derive(Insertable, Deserialize)]
#[diesel(table_name = users)]
pub struct NewUser {
    pub name: String,
    pub email: String,

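UserChangeset derives AsChangeset but nothing consumes it yet, since the update_* methods on the Storage trait are still commented out. A sketch of the usual Diesel update pattern it could back (an assumption, not implemented in this commit):

// Sketch only; nothing in this commit calls it yet.
use crate::schema::users;
use diesel::pg::PgConnection;
use diesel::prelude::*;

fn update_user(conn: &mut PgConnection, id: i32, changes: UserChangeset) -> diesel::QueryResult<usize> {
    diesel::update(users::table.find(id))
        .set(&changes)
        .execute(conn)
}
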
src/schema.rs (new file, 47 lines)
@@ -0,0 +1,47 @@
// @generated automatically by Diesel CLI.

diesel::table! {
    categories (id) {
        id -> Int4,
        name -> Varchar,
        user_id -> Int4,
    }
}

diesel::table! {
    series (id) {
        id -> Int4,
        name -> Varchar,
        repeat -> Int4,
        good -> Bool,
        category_id -> Int4,
    }
}

diesel::table! {
    series_points (id) {
        id -> Int4,
        timestamp -> Timestamp,
        value -> Int4,
        series_id -> Int4,
    }
}

diesel::table! {
    users (id) {
        id -> Int4,
        name -> Varchar,
        email -> Varchar,
    }
}

diesel::joinable!(categories -> users (user_id));
diesel::joinable!(series -> categories (category_id));
diesel::joinable!(series_points -> series (series_id));

diesel::allow_tables_to_appear_in_same_query!(
    categories,
    series,
    series_points,
    users,
);

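The joinable! and allow_tables_to_appear_in_same_query! declarations are what let queries span tables. For instance, joining each series to its category name could look like the following sketch (illustrative, not part of the commit):

// Illustrative only; not repository code.
use crate::schema::{categories, series};
use diesel::pg::PgConnection;
use diesel::prelude::*;

fn series_with_category(conn: &mut PgConnection) -> diesel::QueryResult<Vec<(String, String)>> {
    series::table
        .inner_join(categories::table)
        .select((series::name, categories::name))
        .load(conn)
}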