-mod expr_parser;
 mod parser;
-mod syntax;
-
-use cstree::syntax::SyntaxNode;
-use parser::Parser;
-use std::fs;
-
-use crate::syntax::SyntaxKind;
-
-fn main() {
-    let source = fs::read_to_string("./src/example.sql").unwrap();
-    println!("{:?}", source);
-    let mut parser = Parser::new(&source);
-    parser.parse().unwrap();
-    let (tree, interner) = parser.finish();
-    let root = SyntaxNode::<SyntaxKind>::new_root_with_resolver(tree, interner);
-    dbg!(root);
-
-    // https://github.com/domenicquirl/cstree
-    // https://ericlippert.com/2012/06/08/red-green-trees/
-    //
-    // So, for example, to parse a struct definition the parser first "enters" the struct definition node, then parses the struct keyword and type name, then parses each field, and finally "finishes" parsing the struct node.
-    //
-    // 1. lexer: parse string into tokens. cstree will allow us to just move forward until next
-    //    statement. also, for comments, we should be able to store them separately since we are
-    //    just walking over the source code. tokens should be expr, newlines, comments.
-    //    does not work because lexer is "dumb". Token != SyntaxKind, so maybe we do not
-    //    need a real lexer.
-    // 2. parser: parse tokens into cst with cstree. nodes are not typed, and we should be able to
-    //    use pg_query to parse string, and turn that into SyntaxKind tokens.
-    //
-    //
-    // Notes:
-    // - maybe we do not need a real lexer to parse into statements. we can just use simple string
-    //   operations? or maybe a lexer but with metadata on tokens, because normally a token
-    //   translates into a constant, which is not what we want. instead, we want a token Expr to
-    //   hold the expression string.
-
-    // problem: comments
-    // general problem: declarative parsing by token will, based on initial research, not work well because we have tokens
-    // within tokens (a comment can be within a sql query)
-    // let parser = any::<_, extra::Err<Simple<char>>>()
-    //     .and_is(just(';').not())
-    //     .repeated()
-    //     .collect::<String>()
-    //     .padded()
-    //     .separated_by(just(';'))
-    //     .collect::<Vec<String>>();
-    //
-    // let comment = just("--")
-    //     .then(
-    //         any::<_, extra::Err<Simple<char>>>()
-    //             .and_is(just('\n').not())
-    //             .repeated(),
-    //     )
-    //     .padded();
-    //
-    // let comments = comment.parse(source.as_str());
-    // let result = parser.parse(source.as_str());
-    //
-    // println!("{:?}", source);
-    // println!("{:?}", result);
-    // println!("{:?}", comments);
-    //
-    // let pg_query_result = pg_query::parse("SELECT * FROM contacts").unwrap();
-    //
-    // println!("{:?}", pg_query_result.protobuf.nodes());
+mod semantic_token;
+use crate::parser::syntax::SyntaxKind;
+use dashmap::DashMap;
+use parser::parser::Parser;
+use ropey::Rope;
+use semantic_token::LEGEND_TYPE;
+use serde_json::Value;
+use tower_lsp::jsonrpc::Result;
+use tower_lsp::lsp_types::*;
+use tower_lsp::{Client, LanguageServer, LspService, Server};
+
+#[derive(Debug)]
+struct Backend {
+    client: Client,
+    // ast_map: DashMap<String, HashMap<String, Func>>,
+    document_map: DashMap<String, Rope>,
+    // semantic_token_map: DashMap<String, Vec<ImCompleteSemanticToken>>,
+}
+
+#[tower_lsp::async_trait]
+impl LanguageServer for Backend {
+    async fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {
+        Ok(InitializeResult {
+            server_info: None,
+            offset_encoding: None,
+            capabilities: ServerCapabilities {
+                // inlay_hint_provider: Some(OneOf::Left(true)),
+                // text_document_sync: Some(TextDocumentSyncCapability::Kind(
+                //     TextDocumentSyncKind::FULL,
+                // )),
+                // completion_provider: Some(CompletionOptions {
+                //     resolve_provider: Some(false),
+                //     trigger_characters: Some(vec![".".to_string()]),
+                //     work_done_progress_options: Default::default(),
+                //     all_commit_characters: None,
+                //     completion_item: None,
+                // }),
+                // execute_command_provider: Some(ExecuteCommandOptions {
+                //     commands: vec!["dummy.do_something".to_string()],
+                //     work_done_progress_options: Default::default(),
+                // }),
+                // workspace: Some(WorkspaceServerCapabilities {
+                //     workspace_folders: Some(WorkspaceFoldersServerCapabilities {
+                //         supported: Some(true),
+                //         change_notifications: Some(OneOf::Left(true)),
+                //     }),
+                //     file_operations: None,
+                // }),
+                semantic_tokens_provider: Some(
+                    SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
+                        SemanticTokensRegistrationOptions {
+                            text_document_registration_options: {
+                                TextDocumentRegistrationOptions {
+                                    document_selector: Some(vec![DocumentFilter {
+                                        language: Some("nrs".to_string()),
+                                        scheme: Some("file".to_string()),
+                                        pattern: None,
+                                    }]),
+                                }
+                            },
+                            semantic_tokens_options: SemanticTokensOptions {
+                                work_done_progress_options: WorkDoneProgressOptions::default(),
+                                legend: SemanticTokensLegend {
+                                    token_types: LEGEND_TYPE.into(),
+                                    token_modifiers: vec![],
+                                },
+                                range: Some(true),
+                                full: Some(SemanticTokensFullOptions::Bool(true)),
+                            },
+                            static_registration_options: StaticRegistrationOptions::default(),
+                        },
+                    ),
+                ),
+                // definition: Some(GotoCapability::default()),
+                // definition_provider: Some(OneOf::Left(true)),
+                // references_provider: Some(OneOf::Left(true)),
+                // rename_provider: Some(OneOf::Left(true)),
+                ..ServerCapabilities::default()
+            },
+        })
+    }
+
+    async fn initialized(&self, _: InitializedParams) {
+        self.client
+            .log_message(MessageType::INFO, "initialized!")
+            .await;
+    }
+
+    async fn shutdown(&self) -> Result<()> {
+        Ok(())
+    }
+
+    async fn did_open(&self, params: DidOpenTextDocumentParams) {
+        self.client
+            .log_message(MessageType::INFO, "file opened!")
+            .await;
+        self.on_change(TextDocumentItem {
+            uri: params.text_document.uri,
+            text: params.text_document.text,
+            version: params.text_document.version,
+        })
+        .await
+    }
+
+    async fn did_change(&self, mut params: DidChangeTextDocumentParams) {
+        self.on_change(TextDocumentItem {
+            uri: params.text_document.uri,
+            text: std::mem::take(&mut params.content_changes[0].text),
+            version: params.text_document.version,
+        })
+        .await
+    }
+
+    async fn did_save(&self, _: DidSaveTextDocumentParams) {
+        self.client
+            .log_message(MessageType::INFO, "file saved!")
+            .await;
+    }
+    async fn did_close(&self, _: DidCloseTextDocumentParams) {
+        self.client
+            .log_message(MessageType::INFO, "file closed!")
+            .await;
+    }
+
+    async fn semantic_tokens_full(
+        &self,
+        params: SemanticTokensParams,
+    ) -> Result<Option<SemanticTokensResult>> {
+        return Ok(None);
+    }
+
+    async fn semantic_tokens_range(
+        &self,
+        params: SemanticTokensRangeParams,
+    ) -> Result<Option<SemanticTokensRangeResult>> {
+        return Ok(None);
+    }
+
+    async fn did_change_configuration(&self, _: DidChangeConfigurationParams) {
+        self.client
+            .log_message(MessageType::INFO, "configuration changed!")
+            .await;
+    }
+
+    async fn did_change_workspace_folders(&self, _: DidChangeWorkspaceFoldersParams) {
+        self.client
+            .log_message(MessageType::INFO, "workspace folders changed!")
+            .await;
+    }
+
+    async fn did_change_watched_files(&self, _: DidChangeWatchedFilesParams) {
+        self.client
+            .log_message(MessageType::INFO, "watched files have changed!")
+            .await;
+    }
+
+    async fn execute_command(&self, _: ExecuteCommandParams) -> Result<Option<Value>> {
+        self.client
+            .log_message(MessageType::INFO, "command executed!")
+            .await;
+
+        match self.client.apply_edit(WorkspaceEdit::default()).await {
+            Ok(res) if res.applied => self.client.log_message(MessageType::INFO, "applied").await,
+            Ok(_) => self.client.log_message(MessageType::INFO, "rejected").await,
+            Err(err) => self.client.log_message(MessageType::ERROR, err).await,
+        }
+
+        Ok(None)
+    }
+}
+
+struct TextDocumentItem {
+    uri: Url,
+    text: String,
+    version: i32,
+}
+impl Backend {
+    async fn on_change(&self, params: TextDocumentItem) {
+        let rope = ropey::Rope::from_str(&params.text);
+        self.document_map
+            .insert(params.uri.to_string(), rope.clone());
+        // let (ast, errors, semantic_tokens) = parse(&params.text);
+
+        // let diagnostics = errors
+        //     .into_iter()
+        //     .filter_map(|item| {
+        //         let (message, span) = match item.reason() {
+        //             chumsky::error::SimpleReason::Unclosed { span, delimiter } => {
+        //                 (format!("Unclosed delimiter {}", delimiter), span.clone())
+        //             }
+        //             chumsky::error::SimpleReason::Unexpected => (
+        //                 format!(
+        //                     "{}, expected {}",
+        //                     if item.found().is_some() {
+        //                         "Unexpected token in input"
+        //                     } else {
+        //                         "Unexpected end of input"
+        //                     },
+        //                     if item.expected().len() == 0 {
+        //                         "something else".to_string()
+        //                     } else {
+        //                         item.expected()
+        //                             .map(|expected| match expected {
+        //                                 Some(expected) => expected.to_string(),
+        //                                 None => "end of input".to_string(),
+        //                             })
+        //                             .collect::<Vec<_>>()
+        //                             .join(", ")
+        //                     }
+        //                 ),
+        //                 item.span(),
+        //             ),
+        //             chumsky::error::SimpleReason::Custom(msg) => (msg.to_string(), item.span()),
+        //         };
+        //
+        //         || -> Option<Diagnostic> {
+        //             // let start_line = rope.try_char_to_line(span.start)?;
+        //             // let first_char = rope.try_line_to_char(start_line)?;
+        //             // let start_column = span.start - first_char;
+        //             let start_position = offset_to_position(span.start, &rope)?;
+        //             let end_position = offset_to_position(span.end, &rope)?;
+        //             // let end_line = rope.try_char_to_line(span.end)?;
+        //             // let first_char = rope.try_line_to_char(end_line)?;
+        //             // let end_column = span.end - first_char;
+        //             Some(Diagnostic::new_simple(
+        //                 Range::new(start_position, end_position),
+        //                 message,
+        //             ))
+        //         }()
+        //     })
+        //     .collect::<Vec<_>>();
+        //
+        // self.client
+        //     .publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
+        //     .await;
+
+        // if let Some(ast) = ast {
+        //     self.ast_map.insert(params.uri.to_string(), ast);
+        // }
+        // self.client
+        //     .log_message(MessageType::INFO, &format!("{:?}", semantic_tokens))
+        //     .await;
+        // self.semantic_token_map
+        //     .insert(params.uri.to_string(), semantic_tokens);
+    }
+}
+
+#[tokio::main]
+async fn main() {
+    env_logger::init();
+
+    let stdin = tokio::io::stdin();
+    let stdout = tokio::io::stdout();
+
+    let (service, socket) = LspService::build(|client| Backend {
+        client,
+        // ast_map: DashMap::new(),
+        document_map: DashMap::new(),
+        // semantic_token_map: DashMap::new(),
+    })
+    .finish();
+
+    Server::new(stdin, stdout, socket).serve(service).await;
 }
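
Note: the new server builds its SemanticTokensLegend from LEGEND_TYPE, which comes from the semantic_token module referenced by this file but not shown in the diff. As a rough, hypothetical sketch of what that module presumably exports (the concrete list of token types is an assumption, not taken from this commit):

// Hypothetical sketch of semantic_token.rs (not part of this diff): a fixed
// legend of token types; the server later reports tokens as indices into it.
use tower_lsp::lsp_types::SemanticTokenType;

pub const LEGEND_TYPE: &[SemanticTokenType] = &[
    SemanticTokenType::KEYWORD,
    SemanticTokenType::STRING,
    SemanticTokenType::NUMBER,
    SemanticTokenType::COMMENT,
    SemanticTokenType::OPERATOR,
];

With a slice like this, LEGEND_TYPE.into() in initialize converts it into the Vec<SemanticTokenType> expected by SemanticTokensLegend::token_types.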
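Note: the commented-out diagnostics code in on_change calls an offset_to_position helper that is likewise not part of this diff. A minimal sketch, assuming it maps a character offset to an LSP Position via the ropey::Rope built from the document text:

// Hypothetical helper assumed by the commented-out diagnostics code:
// converts a character offset into a zero-based LSP line/column position.
use ropey::Rope;
use tower_lsp::lsp_types::Position;

fn offset_to_position(offset: usize, rope: &Rope) -> Option<Position> {
    let line = rope.try_char_to_line(offset).ok()?;
    let first_char_of_line = rope.try_line_to_char(line).ok()?;
    let column = offset - first_char_of_line;
    Some(Position::new(line as u32, column as u32))
}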