diff --git a/Cargo.lock b/Cargo.lock index 96342d36..f9c46446 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2622,14 +2622,17 @@ dependencies = [ name = "pg_typecheck" version = "0.0.0" dependencies = [ - "async-std", - "pg_base_db", + "insta", + "pg_console", + "pg_diagnostics", "pg_query_ext", "pg_schema_cache", - "pg_syntax", "pg_test_utils", "sqlx", "text-size", + "tokio", + "tree-sitter", + "tree_sitter_sql", ] [[package]] @@ -2650,6 +2653,7 @@ dependencies = [ "pg_query_ext", "pg_schema_cache", "pg_statement_splitter", + "pg_typecheck", "rustc-hash 2.1.0", "serde", "serde_json", diff --git a/crates/pg_completions/src/relevance.rs b/crates/pg_completions/src/relevance.rs index 5227e9bf..108f7c7f 100644 --- a/crates/pg_completions/src/relevance.rs +++ b/crates/pg_completions/src/relevance.rs @@ -70,7 +70,7 @@ impl CompletionRelevance<'_> { Some(ct) => ct, }; - let has_mentioned_tables = ctx.mentioned_relations.len() > 0; + let has_mentioned_tables = !ctx.mentioned_relations.is_empty(); self.score += match self.data { CompletionRelevanceData::Table(_) => match clause_type { diff --git a/crates/pg_completions/src/test_helper.rs b/crates/pg_completions/src/test_helper.rs index 83f9cdd9..1449c2d0 100644 --- a/crates/pg_completions/src/test_helper.rs +++ b/crates/pg_completions/src/test_helper.rs @@ -51,7 +51,7 @@ pub(crate) async fn get_test_deps( .set_language(tree_sitter_sql::language()) .expect("Error loading sql language"); - let tree = parser.parse(&input.to_string(), None).unwrap(); + let tree = parser.parse(input.to_string(), None).unwrap(); (tree, schema_cache) } diff --git a/crates/pg_diagnostics_categories/src/categories.rs b/crates/pg_diagnostics_categories/src/categories.rs index 12b25b73..40795789 100644 --- a/crates/pg_diagnostics_categories/src/categories.rs +++ b/crates/pg_diagnostics_categories/src/categories.rs @@ -26,6 +26,7 @@ define_categories! 
{ "internalError/fs", "flags/invalid", "project", + "typecheck", "internalError/panic", "syntax", "dummy", diff --git a/crates/pg_test_utils/src/test_database.rs b/crates/pg_test_utils/src/test_database.rs index a828f3fd..b3e5825c 100644 --- a/crates/pg_test_utils/src/test_database.rs +++ b/crates/pg_test_utils/src/test_database.rs @@ -4,9 +4,7 @@ use uuid::Uuid; // TODO: Work with proper config objects instead of a connection_string. // With the current implementation, we can't parse the password from the connection string. pub async fn get_new_test_db() -> PgPool { - dotenv::dotenv() - .ok() - .expect("Unable to load .env file for tests"); + dotenv::dotenv().expect("Unable to load .env file for tests"); let connection_string = std::env::var("DATABASE_URL").expect("DATABASE_URL not set"); let password = std::env::var("DB_PASSWORD").unwrap_or("postgres".into()); diff --git a/crates/pg_treesitter_queries/src/lib.rs b/crates/pg_treesitter_queries/src/lib.rs index 8d29db38..8edec1fa 100644 --- a/crates/pg_treesitter_queries/src/lib.rs +++ b/crates/pg_treesitter_queries/src/lib.rs @@ -21,7 +21,7 @@ impl<'a> TreeSitterQueriesExecutor<'a> { #[allow(private_bounds)] pub fn add_query_results>(&mut self) { - let mut results = Q::execute(self.root_node, &self.stmt); + let mut results = Q::execute(self.root_node, self.stmt); self.results.append(&mut results); } @@ -104,9 +104,9 @@ where let mut parser = tree_sitter::Parser::new(); parser.set_language(tree_sitter_sql::language()).unwrap(); - let tree = parser.parse(&sql, None).unwrap(); + let tree = parser.parse(sql, None).unwrap(); - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), &sql); + let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); executor.add_query_results::(); @@ -152,7 +152,7 @@ on sq1.id = pt.id; let mut parser = tree_sitter::Parser::new(); parser.set_language(tree_sitter_sql::language()).unwrap(); - let tree = parser.parse(&sql, None).unwrap(); + let tree = 
parser.parse(sql, None).unwrap(); let range = { @@ -172,7 +172,7 @@ on sq1.id = pt.id; cursor.node().range() }; - let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), &sql); + let mut executor = TreeSitterQueriesExecutor::new(tree.root_node(), sql); executor.add_query_results::(); diff --git a/crates/pg_treesitter_queries/src/queries/mod.rs b/crates/pg_treesitter_queries/src/queries/mod.rs index 92e3b06c..98b55e03 100644 --- a/crates/pg_treesitter_queries/src/queries/mod.rs +++ b/crates/pg_treesitter_queries/src/queries/mod.rs @@ -7,7 +7,7 @@ pub enum QueryResult<'a> { Relation(RelationMatch<'a>), } -impl<'a> QueryResult<'a> { +impl QueryResult<'_> { pub fn within_range(&self, range: &tree_sitter::Range) -> bool { match self { Self::Relation(rm) => { diff --git a/crates/pg_treesitter_queries/src/queries/relations.rs b/crates/pg_treesitter_queries/src/queries/relations.rs index 2ca27a05..7765c054 100644 --- a/crates/pg_treesitter_queries/src/queries/relations.rs +++ b/crates/pg_treesitter_queries/src/queries/relations.rs @@ -5,7 +5,7 @@ use crate::{Query, QueryResult}; use super::QueryTryFrom; static TS_QUERY: LazyLock = LazyLock::new(|| { - static QUERY_STR: &'static str = r#" + static QUERY_STR: &str = r#" (relation (object_reference . 
@@ -15,7 +15,7 @@ static TS_QUERY: LazyLock = LazyLock::new(|| { )+ ) "#; - tree_sitter::Query::new(tree_sitter_sql::language(), &QUERY_STR).expect("Invalid TS Query") + tree_sitter::Query::new(tree_sitter_sql::language(), QUERY_STR).expect("Invalid TS Query") }); #[derive(Debug)] @@ -24,7 +24,7 @@ pub struct RelationMatch<'a> { pub(crate) table: tree_sitter::Node<'a>, } -impl<'a> RelationMatch<'a> { +impl RelationMatch<'_> { pub fn get_schema(&self, sql: &str) -> Option { let str = self .schema @@ -48,7 +48,7 @@ impl<'a> TryFrom<&'a QueryResult<'a>> for &'a RelationMatch<'a> { fn try_from(q: &'a QueryResult<'a>) -> Result { match q { - QueryResult::Relation(r) => Ok(&r), + QueryResult::Relation(r) => Ok(r), #[allow(unreachable_patterns)] _ => Err("Invalid QueryResult type".into()), diff --git a/crates/pg_typecheck/Cargo.toml b/crates/pg_typecheck/Cargo.toml index a66023c9..1845636d 100644 --- a/crates/pg_typecheck/Cargo.toml +++ b/crates/pg_typecheck/Cargo.toml @@ -12,16 +12,16 @@ version = "0.0.0" [dependencies] -pg_base_db.workspace = true +insta = "1.31.0" +pg_console.workspace = true +pg_diagnostics.workspace = true pg_query_ext.workspace = true pg_schema_cache.workspace = true -pg_syntax.workspace = true +sqlx.workspace = true text-size.workspace = true - -sqlx.workspace = true - -async-std = "1.12.0" - +tokio.workspace = true +tree-sitter.workspace = true +tree_sitter_sql.workspace = true [dev-dependencies] pg_test_utils.workspace = true diff --git a/crates/pg_typecheck/src/diagnostics.rs b/crates/pg_typecheck/src/diagnostics.rs new file mode 100644 index 00000000..dad260be --- /dev/null +++ b/crates/pg_typecheck/src/diagnostics.rs @@ -0,0 +1,217 @@ +use std::io; + +use pg_console::markup; +use pg_diagnostics::{Advices, Diagnostic, LogCategory, MessageAndDescription, Severity, Visit}; +use sqlx::postgres::{PgDatabaseError, PgSeverity}; +use text_size::TextRange; + +/// A specialized diagnostic for the typechecker. 
+/// +/// Type diagnostics are always **errors**. +#[derive(Clone, Debug, Diagnostic)] +#[diagnostic(category = "typecheck")] +pub struct TypecheckDiagnostic { + #[location(span)] + span: Option, + #[description] + #[message] + message: MessageAndDescription, + #[advice] + advices: TypecheckAdvices, + #[severity] + severity: Severity, +} + +#[derive(Debug, Clone)] +struct TypecheckAdvices { + code: String, + schema: Option, + table: Option, + column: Option, + data_type: Option, + constraint: Option, + line: Option, + file: Option, + detail: Option, + routine: Option, + where_: Option, + hint: Option, +} + +impl Advices for TypecheckAdvices { + fn record(&self, visitor: &mut dyn Visit) -> io::Result<()> { + // First, show the error code + visitor.record_log( + LogCategory::Error, + &markup! { "Error Code: " {&self.code} }, + )?; + + // Show detailed message if available + if let Some(detail) = &self.detail { + visitor.record_log(LogCategory::Info, &detail)?; + } + + // Show object location information + if let (Some(schema), Some(table)) = (&self.schema, &self.table) { + let mut location = format!("In table: {schema}.{table}"); + if let Some(column) = &self.column { + location.push_str(&format!(", column: {column}")); + } + visitor.record_log(LogCategory::Info, &location)?; + } + + // Show constraint information + if let Some(constraint) = &self.constraint { + visitor.record_log( + LogCategory::Info, + &markup! { "Constraint: " {constraint} }, + )?; + } + + // Show data type information + if let Some(data_type) = &self.data_type { + visitor.record_log( + LogCategory::Info, + &markup! { "Data type: " {data_type} }, + )?; + } + + // Show context information + if let Some(where_) = &self.where_ { + visitor.record_log(LogCategory::Info, &markup! { "Context:\n"{where_}"" })?; + } + + // Show hint if available + if let Some(hint) = &self.hint { + visitor.record_log(LogCategory::Info, &markup! 
{ "Hint: "{hint}"" })?; + } + + // Show source location if available + if let (Some(file), Some(line)) = (&self.file, &self.line) { + if let Some(routine) = &self.routine { + visitor.record_log( + LogCategory::Info, + &markup! { "Source: "{file}":"{line}" in "{routine}"" }, + )?; + } else { + visitor.record_log(LogCategory::Info, &markup! { "Source: "{file}":"{line}"" })?; + } + } + + Ok(()) + } +} + +pub(crate) fn create_type_error( + pg_err: &PgDatabaseError, + ts: Option<&tree_sitter::Tree>, +) -> TypecheckDiagnostic { + let position = pg_err.position().and_then(|pos| match pos { + sqlx::postgres::PgErrorPosition::Original(pos) => Some(pos - 1), + _ => None, + }); + + let range = position.and_then(|pos| { + ts.and_then(|tree| { + tree.root_node() + .named_descendant_for_byte_range(pos, pos) + .map(|node| { + TextRange::new( + node.start_byte().try_into().unwrap(), + node.end_byte().try_into().unwrap(), + ) + }) + }) + }); + + let severity = match pg_err.severity() { + PgSeverity::Panic => Severity::Error, + PgSeverity::Fatal => Severity::Error, + PgSeverity::Error => Severity::Error, + PgSeverity::Warning => Severity::Warning, + PgSeverity::Notice => Severity::Hint, + PgSeverity::Debug => Severity::Hint, + PgSeverity::Info => Severity::Information, + PgSeverity::Log => Severity::Information, + }; + + TypecheckDiagnostic { + message: pg_err.to_string().into(), + severity, + span: range, + advices: TypecheckAdvices { + code: pg_err.code().to_string(), + hint: pg_err.hint().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + schema: pg_err.schema().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + table: pg_err.table().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + detail: pg_err.detail().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + column: pg_err.column().and_then(|s| { + if !s.is_empty() { + 
Some(s.to_string()) + } else { + None + } + }), + data_type: pg_err.data_type().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + constraint: pg_err.constraint().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + line: pg_err.line(), + file: pg_err.file().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + routine: pg_err.routine().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + where_: pg_err.r#where().and_then(|s| { + if !s.is_empty() { + Some(s.to_string()) + } else { + None + } + }), + }, + } +} diff --git a/crates/pg_typecheck/src/lib.rs b/crates/pg_typecheck/src/lib.rs index e160159f..9c9b8ff4 100644 --- a/crates/pg_typecheck/src/lib.rs +++ b/crates/pg_typecheck/src/lib.rs @@ -1,15 +1,19 @@ +mod diagnostics; + +use diagnostics::create_type_error; +pub use diagnostics::TypecheckDiagnostic; use sqlx::postgres::PgDatabaseError; pub use sqlx::postgres::PgSeverity; use sqlx::Executor; use sqlx::PgPool; use text_size::TextRange; -use text_size::TextSize; -pub struct TypecheckerParams<'a> { +#[derive(Debug)] +pub struct TypecheckParams<'a> { pub conn: &'a PgPool, pub sql: &'a str, - pub enriched_ast: Option<&'a pg_syntax::AST>, pub ast: &'a pg_query_ext::NodeEnum, + pub tree: Option<&'a tree_sitter::Tree>, } #[derive(Debug, Clone)] @@ -25,90 +29,27 @@ pub struct TypeError { pub constraint: Option, } -pub async fn check_sql<'a>(params: TypecheckerParams<'a>) -> Vec { - let mut errs = vec![]; - - // prpeared statements work only for select, insert, update, delete, and cte - if match params.ast { - pg_query_ext::NodeEnum::SelectStmt(_) => false, - pg_query_ext::NodeEnum::InsertStmt(_) => false, - pg_query_ext::NodeEnum::UpdateStmt(_) => false, - pg_query_ext::NodeEnum::DeleteStmt(_) => false, - pg_query_ext::NodeEnum::CommonTableExpr(_) => false, - _ => true, - } { - return errs; +pub async fn check_sql(params: 
TypecheckParams<'_>) -> Option { + // Check if the AST is not a supported statement type + if !matches!( + params.ast, + pg_query_ext::NodeEnum::SelectStmt(_) + | pg_query_ext::NodeEnum::InsertStmt(_) + | pg_query_ext::NodeEnum::UpdateStmt(_) + | pg_query_ext::NodeEnum::DeleteStmt(_) + | pg_query_ext::NodeEnum::CommonTableExpr(_) + ) { + return None; } let res = params.conn.prepare(params.sql).await; - if res.is_err() { - if let sqlx::Error::Database(err) = res.as_ref().unwrap_err() { + match res { + Ok(_) => None, + Err(sqlx::Error::Database(err)) => { let pg_err = err.downcast_ref::(); - - let position = match pg_err.position() { - Some(sqlx::postgres::PgErrorPosition::Original(pos)) => Some(pos - 1), - _ => None, - }; - - let range = match params.enriched_ast { - Some(ast) => { - if position.is_none() { - None - } else { - ast.covering_node(TextRange::empty( - TextSize::try_from(position.unwrap()).unwrap(), - )) - .map(|node| node.range()) - } - } - None => None, - }; - - errs.push(TypeError { - message: pg_err.message().to_string(), - code: pg_err.code().to_string(), - severity: pg_err.severity(), - position, - range, - table: pg_err.table().map(|s| s.to_string()), - column: pg_err.column().map(|s| s.to_string()), - data_type: pg_err.data_type().map(|s| s.to_string()), - constraint: pg_err.constraint().map(|s| s.to_string()), - }); + Some(create_type_error(pg_err, params.tree)) } - } - - errs -} - -#[cfg(test)] -mod tests { - use async_std::task::block_on; - use pg_test_utils::test_database::get_new_test_db; - - use crate::{check_sql, TypecheckerParams}; - - #[test] - fn test_basic_type() { - let input = "select id, unknown from contact;"; - - let test_db = block_on(get_new_test_db()); - - let root = pg_query_ext::parse(input).unwrap(); - let ast = pg_syntax::parse_syntax(input, &root).ast; - - let errs = block_on(check_sql(TypecheckerParams { - conn: &test_db, - sql: input, - ast: &root, - enriched_ast: Some(&ast), - })); - - assert_eq!(errs.len(), 1); - - let 
e = &errs[0]; - - assert_eq!(&input[e.range.unwrap()], "contact"); + Err(_) => None, } } diff --git a/crates/pg_typecheck/tests/diagnostics.rs b/crates/pg_typecheck/tests/diagnostics.rs new file mode 100644 index 00000000..4295e310 --- /dev/null +++ b/crates/pg_typecheck/tests/diagnostics.rs @@ -0,0 +1,67 @@ +use pg_console::{ + fmt::{Formatter, HTML}, + markup, +}; +use pg_diagnostics::PrintDiagnostic; +use pg_test_utils::test_database::get_new_test_db; +use pg_typecheck::{check_sql, TypecheckParams}; +use sqlx::Executor; + +async fn test(name: &str, query: &str, setup: &str) { + let test_db = get_new_test_db().await; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + let mut parser = tree_sitter::Parser::new(); + parser + .set_language(tree_sitter_sql::language()) + .expect("Error loading sql language"); + + let root = pg_query_ext::parse(query).unwrap(); + let tree = parser.parse(query, None); + + let conn = &test_db; + let result = check_sql(TypecheckParams { + conn, + sql: query, + ast: &root, + tree: tree.as_ref(), + }) + .await; + + let mut content = vec![]; + let mut writer = HTML::new(&mut content); + + Formatter::new(&mut writer) + .write_markup(markup! 
{ + {PrintDiagnostic::simple(&result.unwrap())} + }) + .unwrap(); + + let content = String::from_utf8(content).unwrap(); + insta::with_settings!({ + prepend_module_to_snapshot => false, + }, { + insta::assert_snapshot!(name, content); + }); +} + +#[tokio::test] +async fn invalid_column() { + test( + "invalid_column", + "select id, unknown from contacts;", + r#" + create table public.contacts ( + id serial primary key, + name varchar(255) not null, + is_vegetarian bool default false, + middle_name varchar(255) + ); + "#, + ) + .await; +} diff --git a/crates/pg_typecheck/tests/snapshots/invalid_column.snap b/crates/pg_typecheck/tests/snapshots/invalid_column.snap new file mode 100644 index 00000000..87796fb4 --- /dev/null +++ b/crates/pg_typecheck/tests/snapshots/invalid_column.snap @@ -0,0 +1,12 @@ +--- +source: crates/pg_typecheck/tests/diagnostics.rs +expression: content +snapshot_kind: text +--- +typecheck ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ + + column "unknown" does not exist + + Error Code: 42703 + + Source: parse_relation.c:3716 in errorMissingColumn diff --git a/crates/pg_workspace/Cargo.toml b/crates/pg_workspace/Cargo.toml index 2dc60e99..69ccc89c 100644 --- a/crates/pg_workspace/Cargo.toml +++ b/crates/pg_workspace/Cargo.toml @@ -26,6 +26,7 @@ pg_fs = { workspace = true, features = ["serde"] } pg_query_ext = { workspace = true } pg_schema_cache = { workspace = true } pg_statement_splitter = { workspace = true } +pg_typecheck = { workspace = true } rustc-hash = { workspace = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["raw_value"] } diff --git a/crates/pg_workspace/src/workspace/server.rs b/crates/pg_workspace/src/workspace/server.rs index 0e58ec6e..d1173c5f 100644 --- a/crates/pg_workspace/src/workspace/server.rs +++ b/crates/pg_workspace/src/workspace/server.rs @@ -4,12 +4,14 @@ use analyser::AnalyserVisitorBuilder; use 
change::StatementChange; use dashmap::{DashMap, DashSet}; use document::{Document, Statement}; +use futures::{stream, StreamExt}; use pg_analyse::{AnalyserOptions, AnalysisFilter}; use pg_analyser::{Analyser, AnalyserConfig, AnalyserContext}; use pg_diagnostics::{serde::Diagnostic as SDiagnostic, Diagnostic, DiagnosticExt, Severity}; use pg_fs::{ConfigName, PgLspPath}; use pg_query::PgQueryStore; use pg_schema_cache::SchemaCache; +use pg_typecheck::TypecheckParams; use sqlx::PgPool; use std::sync::LazyLock; use tokio::runtime::Runtime; @@ -336,50 +338,97 @@ impl Workspace for WorkspaceServer { filter, }); - let diagnostics: Vec = doc - .iter_statements_with_range() - .flat_map(|(stmt, r)| { - let mut stmt_diagnostics = vec![]; - - stmt_diagnostics.extend(self.pg_query.get_diagnostics(&stmt)); - let ast = self.pg_query.get_ast(&stmt); - if let Some(ast) = ast { - stmt_diagnostics.extend( - analyser - .run(AnalyserContext { root: &ast }) - .into_iter() - .map(SDiagnostic::new) - .collect::>(), - ); + let mut diagnostics: Vec = vec![]; + + // run diagnostics for each statement in parallel if its mostly i/o work + if let Ok(connection) = self.connection.read() { + if let Some(pool) = connection.get_pool() { + let typecheck_params: Vec<_> = doc + .iter_statements_with_text_and_range() + .map(|(stmt, range, text)| { + let ast = self.pg_query.get_ast(&stmt); + let tree = self.tree_sitter.get_parse_tree(&stmt); + (text.to_string(), ast, tree, *range) + }) + .collect(); + + let pool_clone = pool.clone(); + let path_clone = params.path.clone(); + let async_results = run_async(async move { + stream::iter(typecheck_params) + .map(|(text, ast, tree, range)| { + let pool = pool_clone.clone(); + let path = path_clone.clone(); + async move { + if let Some(ast) = ast { + pg_typecheck::check_sql(TypecheckParams { + conn: &pool, + sql: &text, + ast: &ast, + tree: tree.as_deref(), + }) + .await + .map(|d| { + let r = d.location().span.map(|span| span + range.start()); + + 
d.with_file_path(path.as_path().display().to_string()) + .with_file_span(r.unwrap_or(range)) + }) + } else { + None + } + } + }) + .buffer_unordered(10) + .collect::>() + .await + })?; + + for result in async_results.into_iter().flatten() { + diagnostics.push(SDiagnostic::new(result)); } + } + } - stmt_diagnostics - .into_iter() - .map(|d| { - // We do now check if the severity of the diagnostics should be changed. - // The configuration allows to change the severity of the diagnostics emitted by rules. - let severity = d - .category() - .filter(|category| category.name().starts_with("lint/")) - .map_or_else( - || d.severity(), - |category| { - settings - .as_ref() - .get_severity_from_rule_code(category) - .unwrap_or(Severity::Warning) - }, - ); - - SDiagnostic::new( - d.with_file_path(params.path.as_path().display().to_string()) - .with_file_span(r) - .with_severity(severity), - ) - }) - .collect::>() - }) - .collect(); + diagnostics.extend(doc.iter_statements_with_range().flat_map(|(stmt, r)| { + let mut stmt_diagnostics = self.pg_query.get_diagnostics(&stmt); + + let ast = self.pg_query.get_ast(&stmt); + + if let Some(ast) = ast { + stmt_diagnostics.extend( + analyser + .run(AnalyserContext { root: &ast }) + .into_iter() + .map(SDiagnostic::new) + .collect::>(), + ); + } + + stmt_diagnostics + .into_iter() + .map(|d| { + let severity = d + .category() + .filter(|category| category.name().starts_with("lint/")) + .map_or_else( + || d.severity(), + |category| { + settings + .as_ref() + .get_severity_from_rule_code(category) + .unwrap_or(Severity::Warning) + }, + ); + + SDiagnostic::new( + d.with_file_path(params.path.as_path().display().to_string()) + .with_file_span(r) + .with_severity(severity), + ) + }) + .collect::>() + })); let errors = diagnostics .iter() diff --git a/docker-compose.yml b/docker-compose.yml index da81ea03..fede1d66 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,3 @@ -version: "3.8" services: db: # 
postgres://postgres:postgres@127.0.0.1:5432/postgres diff --git a/rustfmt.toml b/rustfmt.toml index 43d4840c..a328d250 100644 --- a/rustfmt.toml +++ b/rustfmt.toml @@ -1 +1,2 @@ newline_style = "Unix" +edition = "2021"