From 9ca25cd5c2f4c6447405bfdb61d0db2e08bf1618 Mon Sep 17 00:00:00 2001
From: Matt Hammerly
Date: Thu, 15 Feb 2024 15:53:24 -0800
Subject: [PATCH] switch from diesel to rusqlite, add initial report builder

---
 Cargo.toml                  |  10 +-
 .../down.sql                |   0
 .../up.sql                  |   0
 src/report.rs               |  91 +---
 src/report/models.rs        |  73 +++-
 src/report/schema.rs        |  79 ----
 src/report/sqlite_report.rs | 401 ++++++++++++++++++
 7 files changed, 477 insertions(+), 177 deletions(-)
 rename migrations/{2024-01-09-033635_init => 01-init}/down.sql (100%)
 rename migrations/{2024-01-09-033635_init => 01-init}/up.sql (100%)
 delete mode 100644 src/report/schema.rs
 create mode 100644 src/report/sqlite_report.rs

diff --git a/Cargo.toml b/Cargo.toml
index ca7ec6d..30638d8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,9 +4,13 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-diesel = { version = "2.1.4", features = ["sqlite"] }
-diesel_migrations = "2.1.0"
-diesel-derive-enum = { version = "2.1.0", features = ["sqlite"] }
+rusqlite = { version = "0.30.0", features = ["bundled"] }
+rusqlite_migration = { version = "1.1.0", features = ["from-directory"] }
+
+include_dir = "0.7.3"
+lazy_static = "1.4.0"
+strum = "0.26.1"
+strum_macros = "0.26.1"
 
 winnow = "0.5.34"
 
diff --git a/migrations/2024-01-09-033635_init/down.sql b/migrations/01-init/down.sql
similarity index 100%
rename from migrations/2024-01-09-033635_init/down.sql
rename to migrations/01-init/down.sql
diff --git a/migrations/2024-01-09-033635_init/up.sql b/migrations/01-init/up.sql
similarity index 100%
rename from migrations/2024-01-09-033635_init/up.sql
rename to migrations/01-init/up.sql
diff --git a/src/report.rs b/src/report.rs
index 2c16c8a..d716af2 100644
--- a/src/report.rs
+++ b/src/report.rs
@@ -1,80 +1,25 @@
-use std::path::PathBuf;
-
-use diesel::{sqlite::SqliteConnection, Connection};
-use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
-
-pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("migrations");
-
 pub mod models;
-pub mod schema;
-
-pub struct Report {
-    pub filename: PathBuf,
-    pub conn: SqliteConnection,
-}
-
-impl Report {
-    pub fn new(filename: PathBuf) -> Report {
-        // TODO: handle errors/results properly
-        let mut conn = SqliteConnection::establish(filename.to_str().unwrap())
-            .ok()
-            .unwrap();
-        conn.run_pending_migrations(MIGRATIONS).ok();
-
-        Report { filename, conn }
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use diesel::{QueryDsl, RunQueryDsl, SelectableHelper};
-    use tempfile::TempDir;
-
-    use super::*;
-
-    struct Ctx {
-        temp_dir: TempDir,
-    }
-
-    fn setup() -> Ctx {
-        Ctx {
-            temp_dir: TempDir::new().ok().unwrap(),
-        }
-    }
-
-    #[test]
-    fn test_new_report() {
-        let ctx = setup();
-        let db_file = ctx.temp_dir.path().join("db.sqlite");
-        assert!(!db_file.exists());
-        let mut report = Report::new(db_file);
-        assert!(!report.conn.has_pending_migration(MIGRATIONS).ok().unwrap());
-    }
+mod sqlite_report;
+use rusqlite::Result;
+pub use sqlite_report::*;
 
-    #[test]
-    fn test_existing_report() {
-        let ctx = setup();
-        let db_file = ctx.temp_dir.path().join("db.sqlite");
-        assert!(!db_file.exists());
+pub trait Report {}
 
-        let mut new_report = Report::new(db_file.clone());
+pub trait ReportBuilder<R: Report> {
+    fn insert_file(&mut self, file: models::SourceFile) -> Result<models::SourceFile>;
+    fn insert_context(&mut self, context: models::Context) -> Result<models::Context>;
 
-        let mock_context = models::Context {
-            id: 0,
-            context_type: schema::ContextType::TestCase,
-            name: "mock_context".to_string(),
-        };
-        diesel::insert_into(schema::context::table)
-            .values(&mock_context)
-            .execute(&mut new_report.conn)
-            .expect("failed to add mock context");
+    fn insert_line(
+        &mut self,
+        line: models::LineStatus,
+        context: &models::Context,
+    ) -> Result<models::LineStatus>;
+    fn insert_branch(
+        &mut self,
+        branch: models::BranchStatus,
+        context: &models::Context,
+    ) -> Result<models::BranchStatus>;
 
-        let mut existing_report = Report::new(db_file.clone());
-        let contexts = schema::context::table
-            .select(models::Context::as_select())
-            .load(&mut existing_report.conn)
-            .expect("error loading contexts");
-        assert!(contexts.len() == 1);
-    }
+    fn build(self) -> R;
 }
diff --git a/src/report/models.rs b/src/report/models.rs
index 85a8203..40b78f5 100644
--- a/src/report/models.rs
+++ b/src/report/models.rs
@@ -1,51 +1,80 @@
-use diesel::{Associations, Identifiable, Insertable, Queryable, Selectable};
+use rusqlite::types::{FromSql, FromSqlError, FromSqlResult, ToSql, ToSqlOutput, ValueRef};
+use strum_macros::{Display, EnumString};
 
-use crate::report::schema::*;
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum CoverageStatus {
+    Hit = 1,
+    Miss,
+    Partial,
+}
+
+impl ToSql for CoverageStatus {
+    fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
+        Ok((*self as i32).into())
+    }
+}
+
+impl FromSql for CoverageStatus {
+    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
+        let variant = match value.as_i64()? {
+            1 => CoverageStatus::Hit,
+            2 => CoverageStatus::Miss,
+            3 => CoverageStatus::Partial,
+            _ => panic!("Uh oh"),
+        };
+        Ok(variant)
+    }
+}
+
+#[derive(EnumString, Display, Debug, PartialEq)]
+pub enum ContextType {
+    TestCase,
+    Upload,
+}
+
+impl ToSql for ContextType {
+    fn to_sql(&self) -> rusqlite::Result<ToSqlOutput<'_>> {
+        Ok(self.to_string().into())
+    }
+}
+
+impl FromSql for ContextType {
+    fn column_result(value: ValueRef<'_>) -> FromSqlResult<Self> {
+        value
+            .as_str()?
+            .parse()
+            .map_err(|e| FromSqlError::Other(Box::new(e)))
+    }
+}
 
-#[derive(Queryable, Identifiable, Selectable, Insertable, Debug, PartialEq)]
-#[diesel(table_name = source_file)]
 pub struct SourceFile {
-    pub id: i32,
+    pub id: Option<i32>,
     pub path: String,
 }
 
-#[derive(Queryable, Identifiable, Selectable, Associations, Insertable, Debug, PartialEq)]
-#[diesel(belongs_to(SourceFile))]
-#[diesel(table_name = line_status)]
 pub struct LineStatus {
-    pub id: i32,
+    pub id: Option<i32>,
     pub source_file_id: i32,
     pub line_no: i32,
     pub coverage_status: CoverageStatus,
 }
 
-#[derive(Queryable, Identifiable, Selectable, Associations, Insertable, Debug, PartialEq)]
-#[diesel(belongs_to(SourceFile))]
-#[diesel(table_name = branch_status)]
 pub struct BranchStatus {
-    pub id: i32,
+    pub id: Option<i32>,
     pub source_file_id: i32,
     pub start_line_no: i32,
     pub end_line_no: i32,
     pub coverage_status: CoverageStatus,
 }
 
-#[derive(Queryable, Identifiable, Selectable, Associations, Insertable, Debug, PartialEq)]
-#[diesel(belongs_to(Context))]
-#[diesel(belongs_to(LineStatus, foreign_key = line_id))]
-#[diesel(belongs_to(BranchStatus, foreign_key = branch_id))]
-#[diesel(table_name = context_assoc)]
-#[diesel(primary_key(context_id, line_id, branch_id))]
 pub struct ContextAssoc {
     pub context_id: i32,
     pub line_id: Option<i32>,
     pub branch_id: Option<i32>,
 }
 
-#[derive(Queryable, Identifiable, Selectable, Insertable, Debug, PartialEq)]
-#[diesel(table_name = context)]
 pub struct Context {
-    pub id: i32,
+    pub id: Option<i32>,
     pub context_type: ContextType,
     pub name: String,
 }
diff --git a/src/report/schema.rs b/src/report/schema.rs
deleted file mode 100644
index 91dd3d5..0000000
--- a/src/report/schema.rs
+++ /dev/null
@@ -1,79 +0,0 @@
-use diesel::sql_types::SqlType;
-
-#[derive(diesel_derive_enum::DbEnum, Debug, PartialEq)]
-pub enum CoverageStatus {
-    Hit,
-    Miss,
-    Partial,
-}
-
-#[derive(diesel_derive_enum::DbEnum, Debug, PartialEq)]
-pub enum ContextType {
-    TestCase,
-    Upload,
-}
-
-diesel::table! {
-    use diesel::sql_types::{VarChar, Integer};
-    source_file (id) {
-        id -> Integer,
-        path -> VarChar,
-    }
-}
-
-diesel::table! {
-    use diesel::sql_types::Integer;
-    use super::CoverageStatusMapping;
-    line_status (id) {
-        id -> Integer,
-        source_file_id -> Integer,
-        line_no -> Integer, // BigInt?
-        coverage_status -> CoverageStatusMapping,
-    }
-}
-
-diesel::table! {
-    use diesel::sql_types::Integer;
-    use super::CoverageStatusMapping;
-    branch_status (id) {
-        id -> Integer,
-        source_file_id -> Integer,
-        start_line_no -> Integer, // BigInt?
-        end_line_no -> Integer, // BigInt?
-        coverage_status -> CoverageStatusMapping,
-    }
-}
-
-diesel::table! {
-    use diesel::sql_types::{Nullable, Integer};
-    context_assoc (context_id, line_id, branch_id) {
-        context_id -> Integer,
-        line_id -> Nullable<Integer>,
-        branch_id -> Nullable<Integer>,
-    }
-}
-
-diesel::table! {
-    use diesel::sql_types::{VarChar, Integer};
-    use super::ContextTypeMapping;
-    context (id) {
-        id -> Integer,
-        context_type -> ContextTypeMapping,
-        name -> VarChar,
-    }
-}
-
-diesel::joinable!(line_status -> source_file (source_file_id));
-diesel::joinable!(branch_status -> source_file (source_file_id));
-
-diesel::joinable!(context_assoc -> line_status (line_id));
-diesel::joinable!(context_assoc -> branch_status (branch_id));
-diesel::joinable!(context_assoc -> context (context_id));
-
-diesel::allow_tables_to_appear_in_same_query!(
-    source_file,
-    line_status,
-    branch_status,
-    context_assoc,
-    context
-);
diff --git a/src/report/sqlite_report.rs b/src/report/sqlite_report.rs
new file mode 100644
index 0000000..766d15d
--- /dev/null
+++ b/src/report/sqlite_report.rs
@@ -0,0 +1,401 @@
+use std::path::PathBuf;
+
+use include_dir::{include_dir, Dir};
+use lazy_static::lazy_static;
+use rusqlite::{Connection, Result};
+use rusqlite_migration::Migrations;
+
+use crate::report::{models, Report, ReportBuilder};
+
+static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/migrations");
+
+lazy_static! {
+    pub static ref MIGRATIONS: Migrations<'static> =
+        Migrations::from_directory(&MIGRATIONS_DIR).unwrap();
+}
+
+pub struct SqliteReport {
+    pub filename: PathBuf,
+    pub conn: Connection,
+}
+
+fn open_database(filename: &PathBuf) -> Connection {
+    let mut conn = Connection::open(&filename).expect("error opening database");
+    MIGRATIONS
+        .to_latest(&mut conn)
+        .expect("error applying migrations");
+
+    conn
+}
+
+impl SqliteReport {
+    pub fn new(filename: PathBuf) -> SqliteReport {
+        let conn = open_database(&filename);
+        SqliteReport { filename, conn }
+    }
+}
+
+impl Report for SqliteReport {}
+
+pub struct SqliteReportBuilder {
+    pub filename: PathBuf,
+    pub conn: Connection,
+}
+
+impl SqliteReportBuilder {
+    pub fn new(filename: PathBuf) -> SqliteReportBuilder {
+        let conn = open_database(&filename);
+        SqliteReportBuilder { filename, conn }
+    }
+}
+
+impl ReportBuilder<SqliteReport> for SqliteReportBuilder {
+    fn insert_file(&mut self, file: models::SourceFile) -> Result<models::SourceFile> {
+        let mut stmt = self
+            .conn
+            // TODO: memoize prepared statements
+            .prepare("INSERT INTO source_file (path) VALUES (?1) RETURNING id, path")?;
+        stmt.query_row([file.path], |row| {
+            Ok(models::SourceFile {
+                id: row.get(0)?,
+                path: row.get(1)?,
+            })
+        })
+    }
+
+    fn insert_context(&mut self, context: models::Context) -> Result<models::Context> {
+        // TODO: memoize prepared statements
+        let mut stmt = self.conn.prepare("INSERT INTO context (context_type, name) VALUES (?1, ?2) RETURNING id, context_type, name")?;
+        stmt.query_row([context.context_type.to_string(), context.name], |row| {
+            Ok(models::Context {
+                id: row.get(0)?,
+                context_type: row.get(1)?,
+                name: row.get(2)?,
+            })
+        })
+    }
+
+    fn insert_line(
+        &mut self,
+        line: models::LineStatus,
+        context: &models::Context,
+    ) -> Result<models::LineStatus> {
+        // TODO: memoize prepared statements
+        let mut stmt = self.conn.prepare("INSERT INTO line_status (source_file_id, line_no, coverage_status) VALUES (?1, ?2, ?3) RETURNING id, source_file_id, line_no, coverage_status")?;
+        let line = stmt.query_row(
+            [
+                line.source_file_id,
+                line.line_no,
+                line.coverage_status as i32,
+            ],
+            |row| {
+                Ok(models::LineStatus {
+                    id: row.get(0)?,
+                    source_file_id: row.get(1)?,
+                    line_no: row.get(2)?,
+                    coverage_status: row.get(3)?,
+                })
+            },
+        )?;
+
+        match (line.id, context.id) {
+            (Some(line_id), Some(context_id)) => {
+                let mut context_assoc = self
+                    .conn
+                    // TODO: memoize prepared statements
.prepare("INSERT INTO context_assoc (context_id, line_id) VALUES (?1, ?2)")?; + context_assoc.execute([context_id, line_id])?; + Ok(line) + } + _ => { + // TODO create an error type since there isn't a corresponding rusqlite::Error + // type + panic!("missing line.id and/or context.id"); + } + } + } + + fn insert_branch( + &mut self, + branch: models::BranchStatus, + context: &models::Context, + ) -> Result { + // TODO: memoize prepared statements + let mut stmt = self.conn.prepare("INSERT INTO branch_status (source_file_id, start_line_no, end_line_no, coverage_status) VALUES (?1, ?2, ?3, ?4) RETURNING id, source_file_id, start_line_no, end_line_no, coverage_status")?; + let branch = stmt.query_row( + [ + branch.source_file_id, + branch.start_line_no, + branch.end_line_no, + branch.coverage_status as i32, + ], + |row| { + Ok(models::BranchStatus { + id: row.get(0)?, + source_file_id: row.get(1)?, + start_line_no: row.get(2)?, + end_line_no: row.get(3)?, + coverage_status: row.get(4)?, + }) + }, + )?; + + match (branch.id, context.id) { + (Some(branch_id), Some(context_id)) => { + let mut context_assoc = self + .conn + // TODO: memoize prepared statements + .prepare("INSERT INTO context_assoc (context_id, branch_id) VALUES (?1, ?2)")?; + context_assoc.execute([context_id, branch_id])?; + Ok(branch) + } + _ => { + // TODO create an error type since there isn't a corresponding rusqlite::Error + // type + panic!("missing branch.id and/or context.id"); + } + } + } + + fn build(self) -> SqliteReport { + SqliteReport { + filename: self.filename, + conn: self.conn, + } + } +} + +#[cfg(test)] +mod tests { + use std::num::NonZeroUsize; + + use rusqlite_migration::SchemaVersion; + use tempfile::TempDir; + + use super::*; + + struct Ctx { + temp_dir: TempDir, + } + + fn setup() -> Ctx { + Ctx { + temp_dir: TempDir::new().ok().unwrap(), + } + } + + #[test] + fn test_open_database_new_file_runs_migrations() { + let ctx = setup(); + let db_file = ctx.temp_dir.path().join("db.sqlite"); + assert!(!db_file.exists()); + + let conn = open_database(&db_file); + assert_eq!( + MIGRATIONS.current_version(&conn), + Ok(SchemaVersion::Inside(NonZeroUsize::new(1).unwrap())) + ); + } + + #[test] + fn test_open_database_existing_file() { + let ctx = setup(); + let db_file = ctx.temp_dir.path().join("db.sqlite"); + assert!(!db_file.exists()); + + { + let conn = open_database(&db_file); + let _ = conn.execute( + "INSERT INTO source_file (id, path) VALUES (?1, ?2)", + (1, "src/report.rs"), + ); + } + + let conn = open_database(&db_file); + let (id, path): (i32, String) = conn + .query_row("SELECT id, path FROM source_file", [], |row| { + Ok((row.get(0).unwrap(), row.get(1).unwrap())) + }) + .unwrap(); + assert_eq!(id, 1); + assert_eq!(path, "src/report.rs"); + } + + mod sqlite_report { + use super::*; + + #[test] + fn test_new_report_runs_migrations() { + let ctx = setup(); + let db_file = ctx.temp_dir.path().join("db.sqlite"); + assert!(!db_file.exists()); + + let report = SqliteReport::new(db_file); + assert_eq!( + MIGRATIONS.current_version(&report.conn), + Ok(SchemaVersion::Inside(NonZeroUsize::new(1).unwrap())) + ); + } + } + + mod sqlite_report_builder { + use super::*; + + #[test] + fn test_new_report_builder_runs_migrations() { + let ctx = setup(); + let db_file = ctx.temp_dir.path().join("db.sqlite"); + assert!(!db_file.exists()); + + let report_builder = SqliteReportBuilder::new(db_file); + assert_eq!( + MIGRATIONS.current_version(&report_builder.conn), + Ok(SchemaVersion::Inside(NonZeroUsize::new(1).unwrap())) 
+            );
+        }
+
+        #[test]
+        fn test_insert_file() {
+            let ctx = setup();
+            let db_file = ctx.temp_dir.path().join("db.sqlite");
+            let mut report_builder = SqliteReportBuilder::new(db_file);
+
+            let file = models::SourceFile {
+                id: None,
+                path: "src/report.rs".to_string(),
+            };
+            let file = report_builder
+                .insert_file(file)
+                .expect("error inserting file");
+            assert_eq!(file.id, Some(1));
+            assert_eq!(file.path, "src/report.rs");
+        }
+
+        #[test]
+        fn test_insert_context() {
+            let ctx = setup();
+            let db_file = ctx.temp_dir.path().join("db.sqlite");
+            let mut report_builder = SqliteReportBuilder::new(db_file);
+
+            let context = models::Context {
+                id: None,
+                context_type: models::ContextType::Upload,
+                name: "foo".to_string(),
+            };
+            let context = report_builder
+                .insert_context(context)
+                .expect("error inserting context");
+            assert_eq!(context.id, Some(1));
+            assert_eq!(context.context_type, models::ContextType::Upload);
+            assert_eq!(context.name, "foo");
+        }
+
+        #[test]
+        fn test_insert_line() {
+            let ctx = setup();
+            let db_file = ctx.temp_dir.path().join("db.sqlite");
+            let mut report_builder = SqliteReportBuilder::new(db_file);
+
+            let context = models::Context {
+                id: None,
+                context_type: models::ContextType::Upload,
+                name: "foo".to_string(),
+            };
+            let context = report_builder
+                .insert_context(context)
+                .expect("error inserting context");
+
+            let file = models::SourceFile {
+                id: None,
+                path: "src/report.rs".to_string(),
+            };
+            let file = report_builder
+                .insert_file(file)
+                .expect("error inserting file");
+
+            let line = models::LineStatus {
+                id: None,
+                source_file_id: file.id.expect("failed to get file id"),
+                line_no: 1,
+                coverage_status: models::CoverageStatus::Hit,
+            };
+            let line = report_builder
+                .insert_line(line, &context)
+                .expect("error inserting line");
+            assert_eq!(line.id, Some(1));
+            assert_eq!(line.source_file_id, 1);
+            assert_eq!(line.line_no, 1);
+            assert_eq!(line.coverage_status, models::CoverageStatus::Hit);
+
+            let line_context_assoc = report_builder.conn.query_row(
+                "SELECT context_id, line_id FROM context_assoc WHERE context_id=?1 AND line_id=?2",
+                [context.id.unwrap(), line.id.unwrap()],
+                |row| {
+                    Ok(models::ContextAssoc {
+                        context_id: row.get(0)?,
+                        line_id: Some(row.get(1)?),
+                        branch_id: None,
+                    })
+                },
+            ).expect("error fetching context_assoc");
+            assert_eq!(line_context_assoc.context_id, context.id.unwrap());
+            assert_eq!(line_context_assoc.line_id, line.id);
+            assert_eq!(line_context_assoc.branch_id, None);
+        }
+
+        #[test]
+        fn test_insert_branch() {
+            let ctx = setup();
+            let db_file = ctx.temp_dir.path().join("db.sqlite");
+            let mut report_builder = SqliteReportBuilder::new(db_file);
+
+            let context = models::Context {
+                id: None,
+                context_type: models::ContextType::Upload,
+                name: "foo".to_string(),
+            };
+            let context = report_builder
+                .insert_context(context)
+                .expect("error inserting context");
+
+            let file = models::SourceFile {
+                id: None,
+                path: "src/report.rs".to_string(),
+            };
+            let file = report_builder
+                .insert_file(file)
+                .expect("error inserting file");
+
+            let branch = models::BranchStatus {
+                id: None,
+                source_file_id: file.id.expect("failed to get file id"),
+                start_line_no: 1,
+                end_line_no: 2,
+                coverage_status: models::CoverageStatus::Hit,
+            };
+            let branch = report_builder
+                .insert_branch(branch, &context)
+                .expect("error inserting branch");
+            assert_eq!(branch.id, Some(1));
+            assert_eq!(branch.source_file_id, 1);
+            assert_eq!(branch.start_line_no, 1);
+            assert_eq!(branch.end_line_no, 2);
+            assert_eq!(branch.coverage_status, models::CoverageStatus::Hit);
+
+            let branch_context_assoc = report_builder.conn.query_row(
+                "SELECT context_id, branch_id FROM context_assoc WHERE context_id=?1 AND branch_id=?2",
+                [context.id.unwrap(), branch.id.unwrap()],
+                |row| {
+                    Ok(models::ContextAssoc {
+                        context_id: row.get(0)?,
+                        branch_id: Some(row.get(1)?),
+                        line_id: None,
+                    })
+                },
+            ).expect("error fetching context_assoc");
+            assert_eq!(branch_context_assoc.context_id, context.id.unwrap());
+            assert_eq!(branch_context_assoc.branch_id, branch.id);
+            assert_eq!(branch_context_assoc.line_id, None);
+        }
+    }
+}
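
Usage note (not part of the patch): a minimal sketch of how the pieces introduced here fit together, mirroring the flow the new tests exercise. The function name build_example_report and the literal paths/values are hypothetical, and the snippet assumes it is compiled inside this crate so the crate::report paths resolve.

    use std::path::PathBuf;

    use crate::report::{models, ReportBuilder, SqliteReport, SqliteReportBuilder};

    // Hypothetical end-to-end use of the builder API added in this patch.
    fn build_example_report(db_path: PathBuf) -> rusqlite::Result<SqliteReport> {
        // Opening the builder creates the SQLite file and applies the migrations.
        let mut builder = SqliteReportBuilder::new(db_path);

        // Contexts and files go in first so the ids SQLite assigns can be
        // attached to line/branch records.
        let context = builder.insert_context(models::Context {
            id: None,
            context_type: models::ContextType::Upload,
            name: "upload-1".to_string(),
        })?;
        let file = builder.insert_file(models::SourceFile {
            id: None,
            path: "src/report.rs".to_string(),
        })?;

        builder.insert_line(
            models::LineStatus {
                id: None,
                source_file_id: file.id.expect("file id not assigned"),
                line_no: 1,
                coverage_status: models::CoverageStatus::Hit,
            },
            &context,
        )?;

        // Consume the builder and hand back the finished report.
        Ok(builder.build())
    }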