minimal working example with diesel

This commit is contained in:
2026-01-28 15:43:22 +01:00
parent 25beef5cca
commit 361bd7bc4a
17 changed files with 370 additions and 820 deletions

3
.gitignore vendored
View File

@@ -1,2 +1,3 @@
/target
.vscode
.vscode
*.env

923
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -8,4 +8,6 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
# Caret requirement: ">=4.0.0, <5.0.0" — same range the previous "4.x.x"
# spelling intended, in the standard Cargo form.
actix-web = "4"
bigdecimal = { version = "0.4.10", features = ["serde"] }
# NOTE(review): sqlx looks like a leftover from the pre-diesel setup this
# commit replaces — TODO confirm nothing still references it, then remove.
sqlx = "0.8.6"
diesel = { version = "2.3.6", default-features = false, features = ["postgres", "serde_json", "numeric"] }
dotenvy = "0.15.7"
# NOTE(review): this pulls the crates.io crate named "schema"; the project's
# own `src/schema.rs` module needs no dependency entry — presumably
# unintended. TODO confirm and drop.
schema = "0.1.0"

9
diesel.toml Normal file
View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "migrations"

0
migrations/.diesel_lock Normal file
View File

0
migrations/.keep Normal file
View File

View File

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -0,0 +1,2 @@
-- Revert the locations migration (mirror of up.sql): drop the table first,
-- then the enum type it references.
DROP TABLE locations;
-- CASCADE removes anything still depending on the enum type.
-- Fixed: the statement previously lacked its terminating semicolon.
DROP TYPE battery_status_enum CASCADE;

View File

@@ -0,0 +1,18 @@
-- Postgres enum backing the Rust BatteryStatus type.
CREATE TYPE battery_status_enum AS ENUM ('unknown', 'unplugged', 'charging', 'full');

-- One row per GPS fix, keyed by its bigint timestamp.
CREATE TABLE locations (
    timestamp bigint PRIMARY KEY,
    latitude numeric(9,6) NOT NULL,
    longitude numeric(9,6) NOT NULL,
    accuracy numeric(7,2) NOT NULL,
    altitude numeric(7,2),
    velocity numeric(7,2),
    battery_level smallint NOT NULL,
    bearing numeric(6,3),
    battery_status battery_status_enum DEFAULT 'unknown' NOT NULL,
    CONSTRAINT Location_battery_level_check CHECK ((battery_level >= 0) AND (battery_level <= 100)),
    CONSTRAINT Location_bearing_check CHECK ((bearing >= (0)::numeric) AND (bearing <= (360)::numeric))
);
-- Changes from the previous version:
--  * the separate "locations_timestamp" index was removed — the PRIMARY KEY
--    already creates a unique btree index on "timestamp", so it was redundant
--    (duplicate index = extra write cost for no read benefit);
--  * the legacy "WITH (oids = false)" clause was removed — table OIDs were
--    dropped in PostgreSQL 12 and the clause is at best a no-op.

View File

@@ -1,20 +1,20 @@
use actix_web::web::Json;
use serde::{Deserialize, Serialize};
include!("../models/location.rs");
include!("../models.rs");
/// GET handler stub: returns the list of locations — currently always an
/// empty list — serialized as JSON.
/// NOTE(review): no database read yet; presumably a diesel query goes here.
async fn get_locations() -> impl Responder {
    Json(Vec::<Location>::new())
}
// async fn get_locations() -> impl Responder {
// let locations: Vec<Location> = vec![];
// Json(locations)
// }
/// POST handler stub: deserializes a `Location` from the request body and
/// echoes it back as JSON.
/// NOTE(review): nothing is persisted yet — presumably a diesel insert
/// belongs here. TODO confirm intended semantics before wiring routes.
async fn create_location(Json(location): Json<Location>) -> impl Responder {
    Json(location)
}
// async fn create_location(Json(location): Json<Location>) -> impl Responder {
// Json(location)
// }
/// PUT/PATCH handler stub: deserializes a `Location` and echoes it back as
/// JSON without modifying any stored state.
/// NOTE(review): identical body to `create_location` — an update query is
/// presumably still to come.
async fn update_location(Json(location): Json<Location>) -> impl Responder {
    Json(location)
}
// async fn update_location(Json(location): Json<Location>) -> impl Responder {
// Json(location)
// }
/// DELETE handler stub: acknowledges deletion of the location identified by
/// `timestamp` with a plain-text message (no database delete is performed).
async fn delete_location(timestamp: i64) -> impl Responder {
    let reply = format!("Deleted location with timestamp: {}", timestamp);
    reply
}
// async fn delete_location(timestamp: i64) -> impl Responder {
// format!("Deleted location with timestamp: {}", timestamp)
// }

25
src/lib.rs Normal file
View File

@@ -0,0 +1,25 @@
use diesel::prelude::*;
use dotenvy::dotenv;
use std::{env};
use std::io::{stdout, Write};
pub mod models;
pub mod schema;
/// Establishes a PostgreSQL connection from the `DATABASE_URL` environment
/// variable, loading a `.env` file first when one is present.
///
/// # Panics
/// Panics when `DATABASE_URL` is unset or the connection attempt fails —
/// acceptable here because the application cannot run without a database.
pub fn establish_connection() -> PgConnection {
    // Best-effort: a missing .env file is fine if the variable is already
    // set in the environment, so the Result is deliberately ignored.
    dotenv().ok();
    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    // Fixed: the previous `println!("{}", database_url)` leaked the URL —
    // which typically embeds the database password — to stdout/logs.
    PgConnection::establish(&database_url)
        .unwrap_or_else(|_| panic!("Error connecting to {}", database_url))
}

View File

@@ -1,19 +1,39 @@
use actix_web::{web, App, HttpRequest, HttpServer, Responder};
use colota_backend::establish_connection;
use diesel::prelude::*;
use dotenvy::dotenv;
use std::env;
pub use colota_backend::schema;
pub use colota_backend::models;
include!("handlers/locations.rs");
async fn index(req: HttpRequest) -> impl Responder { "Connection successful" }
async fn index() -> impl Responder { "Connection successful" }
#[actix_web::main]
async fn main() -> std::io::Result<()> {
use self::schema::locations::dsl::*;
let connection = &mut establish_connection();
let results = locations
.select(Location::as_select())
.load(connection)
.expect("Error loading locations");
println!("Displaying {} locations", results.len());
HttpServer::new(|| {
App::new()
.route("/", web::get().to(index))
.service(
web::scope("/api/v1")
.service(web::resource("/locations").to(|| {
get_locations()
}))
.service(web::resource("/locations").route(web::post().to(create_location)))
// .service(web::resource("/locations").to(|| {
// // get_locations()
// }))
// .service(web::resource("/locations").route(web::post().to(create_location)))
)
})
.bind("127.0.0.1:8080")?

57
src/models.rs Normal file
View File

@@ -0,0 +1,57 @@
use bigdecimal::BigDecimal;
use diesel::deserialize::{self, FromSql, FromSqlRow};
use diesel::pg::{Pg, PgValue};
use diesel::serialize::{IsNull, Output, ToSql};
use diesel::*;
use std::io::Write;
use diesel::expression::AsExpression;
use crate::schema::sql_types::BatteryStatusEnum;
/// A single GPS fix as stored in the `locations` table.
///
/// NOTE(review): with `#[derive(Queryable, Selectable)]` the select clause is
/// generated from this field order (`as_select()`), so the order is
/// load-bearing — do not reorder fields casually.
#[derive(Queryable, Selectable)]
#[diesel(table_name = crate::schema::locations)]
#[diesel(check_for_backend(diesel::pg::Pg))]
struct Location {
    latitude: BigDecimal,          // numeric(9,6), NOT NULL in the migration
    longitude: BigDecimal,         // numeric(9,6), NOT NULL
    accuracy: BigDecimal,          // numeric(7,2), NOT NULL
    altitude: Option<BigDecimal>,  // numeric(7,2), nullable
    velocity: Option<BigDecimal>,  // numeric(7,2), nullable
    battery_level: i16,            // smallint; DB check constrains 0..=100
    battery_status: BatteryStatus, // custom Postgres enum `battery_status_enum`
    timestamp: i64,                // bigint primary key — epoch-style value,
                                   // TODO confirm units (seconds vs millis)
    bearing: Option<BigDecimal>,   // numeric(6,3); DB check constrains 0..=360
}
/// Charging state reported alongside a location fix.
///
/// Maps 1:1 to the Postgres enum type `battery_status_enum`
/// ('unknown' | 'unplugged' | 'charging' | 'full'); the `ToSql`/`FromSql`
/// impls in this file define the wire mapping.
#[derive(Debug, FromSqlRow, AsExpression)]
#[diesel(sql_type = BatteryStatusEnum)]
enum BatteryStatus {
    Unknown,
    Unplugged,
    Charging,
    Full,
}
/// Serializes a `BatteryStatus` into its Postgres enum label.
impl ToSql<BatteryStatusEnum, Pg> for BatteryStatus {
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, Pg>) -> serialize::Result {
        // Pick the on-the-wire label, then emit it with a single write.
        let label: &'static [u8] = match self {
            BatteryStatus::Unknown => b"unknown",
            BatteryStatus::Unplugged => b"unplugged",
            BatteryStatus::Charging => b"charging",
            BatteryStatus::Full => b"full",
        };
        out.write_all(label)?;
        Ok(IsNull::No)
    }
}
/// Parses a Postgres `battery_status_enum` label back into a `BatteryStatus`.
impl FromSql<BatteryStatusEnum, Pg> for BatteryStatus {
    fn from_sql(bytes: PgValue<'_>) -> deserialize::Result<Self> {
        // Map the raw label to a variant; bail out early on anything unknown.
        let status = match bytes.as_bytes() {
            b"unknown" => BatteryStatus::Unknown,
            b"unplugged" => BatteryStatus::Unplugged,
            b"charging" => BatteryStatus::Charging,
            b"full" => BatteryStatus::Full,
            _ => return Err("Unrecognized enum variant".into()),
        };
        Ok(status)
    }
}

View File

@@ -1,10 +0,0 @@
use sqlx::Type;
#[derive(Debug, Clone, Copy, Serialize, Deserialize, Type)]
#[sqlx(type_name = "battery_status", rename_all = "lowercase")]
pub enum BatteryStatus {
Unknown,
Unplugged,
Charging,
Full,
}

View File

@@ -1,15 +0,0 @@
use bigdecimal::BigDecimal;
include!("battery_status.rs");
#[derive(Deserialize, Serialize)]
struct Location {
latitude: BigDecimal,
longitude: BigDecimal,
accuracy: Option<BigDecimal>,
altitude: Option<BigDecimal>,
velocity: Option<BigDecimal>,
battery_level: i16,
battery_status: BatteryStatus,
timestamp: i64,
bearing: Option<BigDecimal>,
}

24
src/schema.rs Normal file
View File

@@ -0,0 +1,24 @@
// @generated automatically by Diesel CLI.
pub mod sql_types {
#[derive(diesel::query_builder::QueryId, Clone, diesel::sql_types::SqlType)]
#[diesel(postgres_type(name = "battery_status_enum"))]
pub struct BatteryStatusEnum;
}
diesel::table! {
use diesel::sql_types::*;
use super::sql_types::BatteryStatusEnum;
locations (timestamp) {
timestamp -> Int8,
latitude -> Numeric,
longitude -> Numeric,
accuracy -> Numeric,
altitude -> Nullable<Numeric>,
velocity -> Nullable<Numeric>,
battery_level -> Int2,
bearing -> Nullable<Numeric>,
battery_status -> BatteryStatusEnum,
}
}