Skip to content
Prev Previous commit
Next Next commit
resolve
  • Loading branch information
juleswritescode committed Apr 12, 2025
commit 1a7cc0fec71f3f58ae9a167380c43285dcdcee3e
1 change: 0 additions & 1 deletion Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

7 changes: 0 additions & 7 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,6 @@ syn = "1.0.109"
termcolor = "1.4.1"
test-log = "0.2.17"
tokio = { version = "1.40.0", features = ["full"] }
tower-lsp = "0.20.0"
tracing = { version = "0.1.40", default-features = false, features = ["std"] }
tracing-bunyan-formatter = { version = "0.3.10" }
tracing-subscriber = "0.3.18"
Expand All @@ -57,7 +56,6 @@ unicode-width = "0.1.12"
# postgres specific crates
pgt_analyse = { path = "./crates/pgt_analyse", version = "0.0.0" }
pgt_analyser = { path = "./crates/pgt_analyser", version = "0.0.0" }
pgt_base_db = { path = "./crates/pgt_base_db", version = "0.0.0" }
pgt_cli = { path = "./crates/pgt_cli", version = "0.0.0" }
pgt_completions = { path = "./crates/pgt_completions", version = "0.0.0" }
pgt_configuration = { path = "./crates/pgt_configuration", version = "0.0.0" }
Expand All @@ -69,9 +67,7 @@ pgt_flags = { path = "./crates/pgt_flags", version = "0.0.0" }
pgt_fs = { path = "./crates/pgt_fs", version = "0.0.0" }
pgt_lexer = { path = "./crates/pgt_lexer", version = "0.0.0" }
pgt_lexer_codegen = { path = "./crates/pgt_lexer_codegen", version = "0.0.0" }
pgt_lint = { path = "./crates/pgt_lint", version = "0.0.0" }
pgt_lsp = { path = "./crates/pgt_lsp", version = "0.0.0" }
pgt_lsp_converters = { path = "./crates/pgt_lsp_converters", version = "0.0.0" }
pgt_markup = { path = "./crates/pgt_markup", version = "0.0.0" }
pgt_query_ext = { path = "./crates/pgt_query_ext", version = "0.0.0" }
pgt_query_ext_codegen = { path = "./crates/pgt_query_ext_codegen", version = "0.0.0" }
Expand All @@ -81,14 +77,11 @@ pgt_statement_splitter = { path = "./crates/pgt_statement_splitter", version
pgt_text_edit = { path = "./crates/pgt_text_edit", version = "0.0.0" }
pgt_text_size = { path = "./crates/pgt_text_size", version = "0.0.0" }
pgt_treesitter_queries = { path = "./crates/pgt_treesitter_queries", version = "0.0.0" }
pgt_type_resolver = { path = "./crates/pgt_type_resolver", version = "0.0.0" }
pgt_typecheck = { path = "./crates/pgt_typecheck", version = "0.0.0" }
pgt_workspace = { path = "./crates/pgt_workspace", version = "0.0.0" }

pgt_test_macros = { path = "./crates/pgt_test_macros" }
pgt_test_utils = { path = "./crates/pgt_test_utils" }

docs_codegen = { path = "./docs/codegen", version = "0.0.0" }

[profile.dev.package]
insta.opt-level = 3
13 changes: 4 additions & 9 deletions crates/pgt_completions/src/sanitization.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,9 @@ where
type Error = String;

fn try_from(params: CompletionParams<'larger>) -> Result<Self, Self::Error> {
let tree = match &params.tree {
Some(tree) => tree,
None => return Err("Tree required for autocompletions.".to_string()),
};

if cursor_inbetween_nodes(tree, params.position)
|| cursor_prepared_to_write_token_after_last_node(tree, params.position)
|| cursor_before_semicolon(tree, params.position)
if cursor_inbetween_nodes(params.tree, params.position)
|| cursor_prepared_to_write_token_after_last_node(params.tree, params.position)
|| cursor_before_semicolon(params.tree, params.position)
{
Ok(SanitizedCompletionParams::with_adjusted_sql(params))
} else {
Expand Down Expand Up @@ -75,7 +70,7 @@ where
position: params.position,
text: params.text.clone(),
schema: params.schema,
tree: Cow::Borrowed(params.tree.unwrap()),
tree: Cow::Borrowed(params.tree),
}
}

Expand Down
1 change: 0 additions & 1 deletion crates/pgt_workspace/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ futures = "0.3.31"
globset = "0.4.16"

ignore = { workspace = true }
itertools = { version = "0.14.0" }
pgt_analyse = { workspace = true, features = ["serde"] }
pgt_analyser = { workspace = true }
pgt_completions = { workspace = true }
Expand Down
106 changes: 52 additions & 54 deletions crates/pgt_workspace/src/features/completions.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
use itertools::Itertools;
use std::sync::Arc;

use pgt_completions::CompletionItem;
use pgt_fs::PgTPath;
use pgt_text_size::{TextRange, TextSize};

use crate::workspace::{Document, Statement, StatementId};
use crate::workspace::{GetCompletionsFilter, GetCompletionsMapper, ParsedDocument, StatementId};

#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
Expand All @@ -29,79 +30,64 @@ impl IntoIterator for CompletionsResult {
}

pub(crate) fn get_statement_for_completions<'a>(
doc: &'a Document,
doc: &'a ParsedDocument,
position: TextSize,
) -> Option<(Statement, &'a TextRange, &'a str)> {
let count = doc.statement_count();
) -> Option<(StatementId, TextRange, String, Arc<tree_sitter::Tree>)> {
let count = doc.count();
// no arms no cookies
if count == 0 {
return None;
}

/*
* We allow an offset of two for the statement:
*
* select * from | <-- we want to suggest items for the next token.
*
* However, if the current statement is terminated by a semicolon, we don't apply any
* offset.
*
* select * from users; | <-- no autocompletions here.
*/
let matches_expanding_range = |stmt_id: StatementId, range: &TextRange, position: TextSize| {
let measuring_range = if doc.is_terminated_by_semicolon(stmt_id).unwrap() {
*range
} else {
range.checked_expand_end(2.into()).unwrap_or(*range)
};
measuring_range.contains(position)
};
let mut eligible_statements = doc.iter_with_filter(
GetCompletionsMapper,
GetCompletionsFilter {
cursor_position: position,
},
);

if count == 1 {
let (stmt, range, txt) = doc.iter_statements_with_text_and_range().next().unwrap();
if matches_expanding_range(stmt.id, range, position) {
Some((stmt, range, txt))
} else {
None
}
eligible_statements.next()
} else {
/*
* If we have multiple statements, we want to make sure that we do not overlap
* with the next one.
*
* select 1 |select 1;
*/
let mut stmts = doc.iter_statements_with_text_and_range().tuple_windows();
stmts
.find(|((current_stmt, rcurrent, _), (_, rnext, _))| {
let overlaps_next = rnext.contains(position);
matches_expanding_range(current_stmt.id, rcurrent, position) && !overlaps_next
})
.map(|t| t.0)
let mut prev_stmt = None;

for current_stmt in eligible_statements {
/*
* If we have multiple statements, we want to make sure that we do not overlap
* with the next one.
*
* select 1 |select 1;
*/
if prev_stmt.is_some_and(|_| current_stmt.1.contains(position)) {
return None;
}
prev_stmt = Some(current_stmt)
}

prev_stmt
}
}

#[cfg(test)]
mod tests {
use itertools::Itertools;
use pgt_fs::PgTPath;
use pgt_text_size::TextSize;

use crate::workspace::Document;
use crate::workspace::ParsedDocument;

use super::get_statement_for_completions;

static CURSOR_POSITION: &str = "€";

fn get_doc_and_pos(sql: &str) -> (Document, TextSize) {
fn get_doc_and_pos(sql: &str) -> (ParsedDocument, TextSize) {
let pos = sql
.find(CURSOR_POSITION)
.expect("Please add cursor position to test sql");

let pos: u32 = pos.try_into().unwrap();

(
Document::new(
ParsedDocument::new(
PgTPath::new("test.sql"),
sql.replace(CURSOR_POSITION, "").into(),
5,
Expand All @@ -125,7 +111,7 @@ mod tests {

let (doc, position) = get_doc_and_pos(sql.as_str());

let (_, _, text) =
let (_, _, text, _) =
get_statement_for_completions(&doc, position).expect("Expected Statement");

assert_eq!(text, "update users set email = 'myemail@com';")
Expand All @@ -137,7 +123,10 @@ mod tests {

let (doc, position) = get_doc_and_pos(sql.as_str());

assert_eq!(get_statement_for_completions(&doc, position), None);
assert!(matches!(
get_statement_for_completions(&doc, position),
None
));
}

#[test]
Expand All @@ -147,9 +136,12 @@ mod tests {
let (doc, position) = get_doc_and_pos(sql.as_str());

// make sure these are parsed as two
assert_eq!(doc.iter_statements().try_len().unwrap(), 2);
assert_eq!(doc.count(), 2);

assert_eq!(get_statement_for_completions(&doc, position), None);
assert!(matches!(
get_statement_for_completions(&doc, position),
None
));
}

#[test]
Expand All @@ -158,7 +150,7 @@ mod tests {

let (doc, position) = get_doc_and_pos(sql.as_str());

let (_, _, text) =
let (_, _, text, _) =
get_statement_for_completions(&doc, position).expect("Expected Statement");

assert_eq!(text, "select * from ;")
Expand All @@ -170,7 +162,7 @@ mod tests {

let (doc, position) = get_doc_and_pos(sql.as_str());

let (_, _, text) =
let (_, _, text, _) =
get_statement_for_completions(&doc, position).expect("Expected Statement");

assert_eq!(text, "select * from")
Expand All @@ -182,7 +174,10 @@ mod tests {

let (doc, position) = get_doc_and_pos(sql.as_str());

assert_eq!(get_statement_for_completions(&doc, position), None);
assert!(matches!(
get_statement_for_completions(&doc, position),
None
));
}

#[test]
Expand All @@ -191,6 +186,9 @@ mod tests {

let (doc, position) = get_doc_and_pos(sql.as_str());

assert_eq!(get_statement_for_completions(&doc, position), None);
assert!(matches!(
get_statement_for_completions(&doc, position),
None
));
}
}
3 changes: 2 additions & 1 deletion crates/pgt_workspace/src/workspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,8 @@ use crate::{
mod client;
mod server;

pub(crate) use server::document::{Document, Statement, StatementId};
pub use server::StatementId;
pub(crate) use server::parsed_document::*;

#[derive(Debug, serde::Serialize, serde::Deserialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
Expand Down
57 changes: 14 additions & 43 deletions crates/pgt_workspace/src/workspace/server.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,11 @@ use analyser::AnalyserVisitorBuilder;
use async_helper::run_async;
use dashmap::DashMap;
use db_connection::DbConnection;
use document::{Document, Statement};
use document::Document;
use futures::{StreamExt, stream};
use parsed_document::{
AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, ExecuteStatementMapper,
GetCompletionsMapper, ParsedDocument, SyncDiagnosticsMapper,
ParsedDocument, SyncDiagnosticsMapper,
};
use pgt_analyse::{AnalyserOptions, AnalysisFilter};
use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext};
Expand All @@ -29,7 +29,7 @@ use crate::{
self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction,
CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult,
},
completions::{self, CompletionsResult, GetCompletionsParams},
completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions},
diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult},
},
settings::{Settings, SettingsHandle, SettingsHandleMut},
Expand All @@ -48,7 +48,7 @@ mod change;
mod db_connection;
pub(crate) mod document;
mod migration;
mod parsed_document;
pub(crate) mod parsed_document;
mod pg_query;
mod schema_cache_manager;
mod sql_function;
Expand Down Expand Up @@ -469,7 +469,7 @@ impl Workspace for WorkspaceServer {
&self,
params: GetCompletionsParams,
) -> Result<CompletionsResult, WorkspaceError> {
let parser = self
let parsed_doc = self
.parsed_documents
.get(&params.path)
.ok_or(WorkspaceError::not_found())?;
Expand All @@ -482,52 +482,23 @@ impl Workspace for WorkspaceServer {
}
};

let doc = self
.documents
.get(&params.path)
.ok_or(WorkspaceError::not_found())?;

let (statement, stmt_range, text) =
match completions::get_statement_for_completions(&doc, params.position) {
None => return Ok(CompletionsResult::default()),
Some(s) => s,
};

// `offset` is the position in the document,
// but we need the position within the *statement*.
let position = params.position - stmt_range.start();

let tree = self.tree_sitter.get_parse_tree(&statement);

tracing::debug!(
"Found the statement. We're looking for position {:?}. Statement Range {:?} to {:?}. Statement: {:?}",
position,
stmt_range.start(),
stmt_range.end(),
text
);

let schema_cache = self.schema_cache.load(pool)?;

let items = parser
.iter_with_filter(
GetCompletionsMapper,
CursorPositionFilter::new(params.position),
)
.flat_map(|(_id, range, content, cst)| {
// `offset` is the position in the document,
// but we need the position within the *statement*.
match get_statement_for_completions(&parsed_doc, params.position) {
None => Ok(CompletionsResult::default()),
Some((_id, range, content, cst)) => {
let position = params.position - range.start();
pgt_completions::complete(pgt_completions::CompletionParams {

let items = pgt_completions::complete(pgt_completions::CompletionParams {
position,
schema: schema_cache.as_ref(),
tree: &cst,
text: content,
})
})
.collect();
});

Ok(CompletionsResult { items })
Ok(CompletionsResult { items })
}
}
}
}

Expand Down
Loading
Loading
You are viewing a condensed version of this merge commit. You can view the full changes here.