Commit 7266ef8

psteinroe and juleswritescode authored Mar 17, 2025
feat: js bindings (#240)
* chore: add our own text-size
* fix: test
* chore: cleanup feature gates
* chore: cleanup remaining feature gates
* refactor: migrate to json config
* fix: lint
* feat: add schema field
* migrate to jsonc config
* feat: wasm attempt 2
* make pglt_wasm crate build
* setup bun monorepo
* feat: bindings
* adapt generate packages
* fix: add pg to job
* fix: run docs codegen
* fixes
* fixes
* fixes
* Update configuration.rs

Co-authored-by: Julian Domke <[email protected]>

* add unit tests
* fix: replace_secion
* fix: remove wasm tools
* remove musl
* remove biome ref
* remove biome ref
* refactor: migrate to jsonc config (#239)
* chore: add our own text-size
* fix: test
* chore: cleanup feature gates
* chore: cleanup remaining feature gates
* refactor: migrate to json config
* fix: lint
* feat: add schema field
* migrate to jsonc config
* fix: run docs codegen
* Update configuration.rs

Co-authored-by: Julian Domke <[email protected]>

* add unit tests
* fix: replace_secion

---------

Co-authored-by: Julian Domke <[email protected]>

---------

Co-authored-by: Julian Domke <[email protected]>
1 parent: 6a3d573. Commit: 7266ef8.

38 files changed: +2850, -590 lines
 

.github/workflows/pull_request.yml (+46, -26)

@@ -12,6 +12,8 @@ on:
  - "Cargo.lock"
  - "rust-toolchain.toml"
  - "rustfmt.toml"
+ # or in js packages
+ - "packages/**"

  concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.ref }}

@@ -40,10 +42,15 @@ jobs:
  cache-base: main
  env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Setup Biome
+ uses: biomejs/setup-biome@v2
+ with:
+ version: latest
  - name: Run format
  run: |
  cargo fmt --all --check
  taplo format --check
+ biome format

  actionlint:
  name: Lint GitHub Actions

@@ -84,36 +91,15 @@ jobs:
  cache-base: main
  env:
  GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ - name: Setup Biome
+ uses: biomejs/setup-biome@v2
+ with:
+ version: latest
  - name: Run Lints
  run: |
  cargo clippy
  cargo run -p rules_check
-
- # check-dependencies:
- # name: Check Dependencies
- # runs-on: ubuntu-latest
- # services:
- # postgres:
- # image: postgres:latest
- # env:
- # POSTGRES_USER: postgres
- # POSTGRES_PASSWORD: postgres
- # POSTGRES_DB: postgres
- # ports:
- # - 5432:5432
- # steps:
- # - name: Checkout PR Branch
- # uses: actions/checkout@v4
- # with:
- # submodules: true
- # - name: Free Disk Space
- # uses: ./.github/actions/free-disk-space
- # - name: Install toolchain
- # run: rustup toolchain install nightly
- # - name: Install udeps
- # run: cargo install cargo-udeps --locked
- # - name: Detect unused dependencies using udeps
- # run: cargo +nightly udeps --all-targets
+ biome lint

  test:
  name: Test

@@ -145,6 +131,40 @@ jobs:
  - name: Run tests
  run: cargo test --workspace

+ test-js-bindings:
+ name: Test JS Bindings
+ runs-on: ubuntu-latest
+ services:
+ postgres:
+ image: postgres:latest
+ env:
+ POSTGRES_USER: postgres
+ POSTGRES_PASSWORD: postgres
+ POSTGRES_DB: postgres
+ ports:
+ - 5432:5432
+ steps:
+ - name: Checkout PR branch
+ uses: actions/checkout@v4
+ with:
+ submodules: true
+ - name: Free Disk Space
+ uses: ./.github/actions/free-disk-space
+ - name: Install toolchain
+ uses: moonrepo/setup-rust@v1
+ - name: Build main binary
+ run: cargo build -p pglt_cli --release
+ - name: Setup Bun
+ uses: oven-sh/setup-bun@v2
+ - name: Install JS dependencies
+ run: bun install
+ - name: Build TypeScript code
+ working-directory: packages/@pglt/backend-jsonrpc
+ run: bun run build
+ - name: Run JS tests
+ working-directory: packages/@pglt/backend-jsonrpc
+ run: bun run test
+
  codegen:
  name: Check Codegen
  runs-on: ubuntu-latest
.gitignore (+5, -1)

@@ -18,4 +18,8 @@ target/
  .DS_Store
  desktop.ini

- *.log
+ *.log
+
+ node_modules/
+
+ **/dist/
Cargo.lock (+280, -242; generated file, diff not rendered)

Cargo.toml (+6, -3)

@@ -17,10 +17,14 @@ rust-version = "1.85.0"
  anyhow = "1.0.92"
  biome_deserialize = "0.6.0"
  biome_deserialize_macros = "0.6.0"
+ biome_js_factory = "0.5.7"
+ biome_js_formatter = "0.5.7"
+ biome_js_syntax = "0.5.7"
+ biome_rowan = "0.5.7"
  biome_string_case = "0.5.8"
  bpaf = { version = "0.9.15", features = ["derive"] }
  crossbeam = "0.8.4"
- enumflags2 = "0.7.10"
+ enumflags2 = "0.7.11"
  ignore = "0.4.23"
  indexmap = { version = "2.6.0", features = ["serde"] }
  insta = "1.31.0"

@@ -35,11 +39,10 @@ serde = "1.0.195"
  serde_json = "1.0.114"
  similar = "2.6.0"
  smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] }
- sqlx = { version = "0.8.2", features = ["runtime-async-std", "tls-rustls", "postgres", "json"] }
+ sqlx = { version = "0.8.2", features = ["runtime-tokio", "tls-native-tls", "postgres", "json"] }
  syn = "1.0.109"
  termcolor = "1.4.1"
  tokio = { version = "1.40.0", features = ["full"] }
- toml = "0.8.19"
  tower-lsp = "0.20.0"
  tracing = { version = "0.1.40", default-features = false, features = ["std"] }
  tracing-subscriber = "0.3.18"
biome.jsonc (new file, +31)

{
  "$schema": "https://biomejs.dev/schemas/1.9.4/schema.json",
  "vcs": {
    "enabled": false,
    "clientKind": "git",
    "useIgnoreFile": false
  },
  "files": {
    "ignoreUnknown": false,
    "ignore": [],
    "include": ["packages/**/*"]
  },
  "formatter": {
    "enabled": true,
    "indentStyle": "tab"
  },
  "organizeImports": {
    "enabled": true
  },
  "linter": {
    "enabled": true,
    "rules": {
      "recommended": true
    }
  },
  "javascript": {
    "formatter": {
      "quoteStyle": "double"
    }
  }
}
bun.lock (new file, +77)

{
  "lockfileVersion": 1,
  "workspaces": {
    "": {
      "name": "postgres_lsp",
      "devDependencies": {
        "@biomejs/biome": "1.9.4",
        "@types/bun": "latest",
      },
      "peerDependencies": {
        "typescript": "^5",
      },
    },
    "packages/@pglt/backend-jsonrpc": {
      "name": "@pglt/backend-jsonrpc",
      "optionalDependencies": {
        "@pglt/cli-darwin-arm64": "<placeholder>",
        "@pglt/cli-darwin-x64": "<placeholder>",
        "@pglt/cli-linux-arm64": "<placeholder>",
        "@pglt/cli-linux-arm64-musl": "<placeholder>",
        "@pglt/cli-linux-x64": "<placeholder>",
        "@pglt/cli-linux-x64-musl": "<placeholder>",
        "@pglt/cli-win32-arm64": "<placeholder>",
        "@pglt/cli-win32-x64": "<placeholder>",
      },
    },
    "packages/@pglt/pglt": {
      "name": "pglt",
      "bin": {
        "pglt": "bin/pglt",
      },
      "optionalDependencies": {
        "pglt-aarch64-apple-darwin": "<placeholder>",
        "pglt-aarch64-linux-gnu": "<placeholder>",
        "pglt-aarch64-windows-msvc": "<placeholder>",
        "pglt-x86_64-apple-darwin": "<placeholder>",
        "pglt-x86_64-linux-gnu": "<placeholder>",
        "pglt-x86_64-windows-msvc": "<placeholder>",
      },
    },
  },
  "packages": {
    "@biomejs/biome": ["@biomejs/biome@1.9.4", "", { "optionalDependencies": { "@biomejs/cli-darwin-arm64": "1.9.4", "@biomejs/cli-darwin-x64": "1.9.4", "@biomejs/cli-linux-arm64": "1.9.4", "@biomejs/cli-linux-arm64-musl": "1.9.4", "@biomejs/cli-linux-x64": "1.9.4", "@biomejs/cli-linux-x64-musl": "1.9.4", "@biomejs/cli-win32-arm64": "1.9.4", "@biomejs/cli-win32-x64": "1.9.4" }, "bin": { "biome": "bin/biome" } }, "sha512-1rkd7G70+o9KkTn5KLmDYXihGoTaIGO9PIIN2ZB7UJxFrWw04CZHPYiMRjYsaDvVV7hP1dYNRLxSANLaBFGpog=="],

    "@biomejs/cli-darwin-arm64": ["@biomejs/cli-darwin-arm64@1.9.4", "", { "os": "darwin", "cpu": "arm64" }, "sha512-bFBsPWrNvkdKrNCYeAp+xo2HecOGPAy9WyNyB/jKnnedgzl4W4Hb9ZMzYNbf8dMCGmUdSavlYHiR01QaYR58cw=="],

    "@biomejs/cli-darwin-x64": ["@biomejs/cli-darwin-x64@1.9.4", "", { "os": "darwin", "cpu": "x64" }, "sha512-ngYBh/+bEedqkSevPVhLP4QfVPCpb+4BBe2p7Xs32dBgs7rh9nY2AIYUL6BgLw1JVXV8GlpKmb/hNiuIxfPfZg=="],

    "@biomejs/cli-linux-arm64": ["@biomejs/cli-linux-arm64@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-fJIW0+LYujdjUgJJuwesP4EjIBl/N/TcOX3IvIHJQNsAqvV2CHIogsmA94BPG6jZATS4Hi+xv4SkBBQSt1N4/g=="],

    "@biomejs/cli-linux-arm64-musl": ["@biomejs/cli-linux-arm64-musl@1.9.4", "", { "os": "linux", "cpu": "arm64" }, "sha512-v665Ct9WCRjGa8+kTr0CzApU0+XXtRgwmzIf1SeKSGAv+2scAlW6JR5PMFo6FzqqZ64Po79cKODKf3/AAmECqA=="],

    "@biomejs/cli-linux-x64": ["@biomejs/cli-linux-x64@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-lRCJv/Vi3Vlwmbd6K+oQ0KhLHMAysN8lXoCI7XeHlxaajk06u7G+UsFSO01NAs5iYuWKmVZjmiOzJ0OJmGsMwg=="],

    "@biomejs/cli-linux-x64-musl": ["@biomejs/cli-linux-x64-musl@1.9.4", "", { "os": "linux", "cpu": "x64" }, "sha512-gEhi/jSBhZ2m6wjV530Yy8+fNqG8PAinM3oV7CyO+6c3CEh16Eizm21uHVsyVBEB6RIM8JHIl6AGYCv6Q6Q9Tg=="],

    "@biomejs/cli-win32-arm64": ["@biomejs/cli-win32-arm64@1.9.4", "", { "os": "win32", "cpu": "arm64" }, "sha512-tlbhLk+WXZmgwoIKwHIHEBZUwxml7bRJgk0X2sPyNR3S93cdRq6XulAZRQJ17FYGGzWne0fgrXBKpl7l4M87Hg=="],

    "@biomejs/cli-win32-x64": ["@biomejs/cli-win32-x64@1.9.4", "", { "os": "win32", "cpu": "x64" }, "sha512-8Y5wMhVIPaWe6jw2H+KlEm4wP/f7EW3810ZLmDlrEEy5KvBsb9ECEfu/kMWD484ijfQ8+nIi0giMgu9g1UAuuA=="],

    "@pglt/backend-jsonrpc": ["@pglt/backend-jsonrpc@workspace:packages/@pglt/backend-jsonrpc"],

    "@types/bun": ["@types/bun@1.2.5", "", { "dependencies": { "bun-types": "1.2.5" } }, "sha512-w2OZTzrZTVtbnJew1pdFmgV99H0/L+Pvw+z1P67HaR18MHOzYnTYOi6qzErhK8HyT+DB782ADVPPE92Xu2/Opg=="],

    "@types/node": ["@types/node@22.13.10", "", { "dependencies": { "undici-types": "~6.20.0" } }, "sha512-I6LPUvlRH+O6VRUqYOcMudhaIdUVWfsjnZavnsraHvpBwaEyMN29ry+0UVJhImYL16xsscu0aske3yA+uPOWfw=="],

    "@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="],

    "bun-types": ["bun-types@1.2.5", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-3oO6LVGGRRKI4kHINx5PIdIgnLRb7l/SprhzqXapmoYkFl5m4j6EvALvbDVuuBFaamB46Ap6HCUxIXNLCGy+tg=="],

    "pglt": ["pglt@workspace:packages/@pglt/pglt"],

    "typescript": ["typescript@5.8.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ=="],

    "undici-types": ["undici-types@6.20.0", "", {}, "sha512-Ny6QZ2Nju20vw1SRHe3d9jVu6gJ+4e3+MMpqu7pqE5HT6WsTSlce++GQmK5UXS8mzV8DSYHrQH+Xrf2jVcuKNg=="],
  }
}
crates/pglt_completions/Cargo.toml (+4)

@@ -19,6 +19,7 @@ pglt_text_size.workspace = true

  pglt_schema_cache.workspace = true
  pglt_treesitter_queries.workspace = true
+ schemars = { workspace = true, optional = true }
  serde = { workspace = true, features = ["derive"] }
  serde_json = { workspace = true }
  tree-sitter.workspace = true

@@ -33,3 +34,6 @@ pglt_test_utils.workspace = true

  [lib]
  doctest = false
+
+ [features]
+ schema = ["dep:schemars"]
crates/pglt_completions/src/complete.rs (+1)

@@ -19,6 +19,7 @@ pub struct CompletionParams<'a> {
  }

  #[derive(Debug, Default, Serialize, Deserialize)]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  pub struct CompletionResult {
  pub(crate) items: Vec<CompletionItem>,
  }
crates/pglt_completions/src/item.rs (+3)

@@ -1,13 +1,16 @@
  use serde::{Deserialize, Serialize};

  #[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+ #[serde(rename_all = "camelCase")]
  pub enum CompletionItemKind {
  Table,
  Function,
  Column,
  }

  #[derive(Debug, Serialize, Deserialize)]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  pub struct CompletionItem {
  pub label: String,
  pub(crate) score: i32,
crates/pglt_diagnostics/src/diagnostic.rs (+2)

@@ -118,6 +118,7 @@ pub trait Diagnostic: Debug {
  Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize, Default,
  )]
  #[serde(rename_all = "camelCase")]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  /// The severity to associate to a diagnostic.
  pub enum Severity {
  /// Reports a hint.

@@ -165,6 +166,7 @@ impl Display for Severity {
  /// and help with the implementation of `serde` and `schemars` for tags.
  #[derive(Debug, Copy, Clone, Serialize, Deserialize)]
  #[serde(rename_all = "camelCase")]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  #[bitflags]
  #[repr(u8)]
  pub(super) enum DiagnosticTag {
crates/pglt_diagnostics/src/display/backtrace.rs (+21)

@@ -91,6 +91,17 @@ impl<'de> serde::Deserialize<'de> for Backtrace {
  }
  }

+ #[cfg(feature = "schema")]
+ impl schemars::JsonSchema for Backtrace {
+ fn schema_name() -> String {
+ String::from("Backtrace")
+ }
+
+ fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
+ <Vec<SerializedFrame>>::json_schema(r#gen)
+ }
+ }
+
  /// Internal representation of a [Backtrace], can be either a native backtrace
  /// instance or a vector of serialized frames.
  #[derive(Clone, Debug)]

@@ -292,6 +303,11 @@ pub(super) fn print_backtrace(

  /// Serializable representation of a backtrace frame.
  #[derive(Clone, Debug, Serialize, Deserialize)]
+ #[cfg_attr(
+ feature = "schema",
+ derive(schemars::JsonSchema),
+ schemars(rename = "BacktraceFrame")
+ )]
  #[cfg_attr(test, derive(Eq, PartialEq))]
  struct SerializedFrame {
  ip: u64,

@@ -309,6 +325,11 @@ impl From<&'_ backtrace::BacktraceFrame> for SerializedFrame {

  /// Serializable representation of a backtrace frame symbol.
  #[derive(Clone, Debug, Serialize, Deserialize)]
+ #[cfg_attr(
+ feature = "schema",
+ derive(schemars::JsonSchema),
+ schemars(rename = "BacktraceSymbol")
+ )]
  #[cfg_attr(test, derive(Eq, PartialEq))]
  struct SerializedSymbol {
  name: Option<String>,
crates/pglt_diagnostics/src/location.rs (+1)

@@ -39,6 +39,7 @@ impl Eq for Location<'_> {}

  /// Represents the resource a diagnostic is associated with.
  #[derive(Debug, Clone, Copy, Eq, PartialEq, Serialize, Deserialize)]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  #[serde(rename_all = "camelCase")]
  pub enum Resource<P> {
  /// The diagnostic is related to the content of the command line arguments.
crates/pglt_diagnostics/src/serde.rs (+17, -2)

@@ -15,7 +15,8 @@ use crate::{

  /// Serializable representation for a [Diagnostic](super::Diagnostic).
  #[derive(Clone, Debug, Serialize, Deserialize)]
- #[cfg_attr(not(target_arch = "wasm32"), serde(rename_all = "camelCase"))]
+ #[serde(rename_all = "camelCase")]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  #[cfg_attr(test, derive(Eq, PartialEq))]
  pub struct Diagnostic {
  category: Option<&'static Category>,

@@ -137,7 +138,8 @@ impl<D: super::Diagnostic + ?Sized> std::fmt::Display for PrintDescription<'_, D
  }

  #[derive(Clone, Debug, Serialize, Deserialize)]
- #[cfg_attr(not(target_arch = "wasm32"), serde(rename_all = "camelCase"))]
+ #[serde(rename_all = "camelCase")]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  #[cfg_attr(test, derive(Eq, PartialEq))]
  struct Location {
  path: Option<Resource<String>>,

@@ -160,6 +162,7 @@ impl From<super::Location<'_>> for Location {
  /// Implementation of [Visitor] collecting serializable [Advice] into a vector.
  #[derive(Clone, Debug, Serialize, Deserialize)]
  #[serde(rename_all = "camelCase")]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  #[cfg_attr(test, derive(Eq, PartialEq))]
  struct Advices {
  advices: Vec<Advice>,

@@ -246,6 +249,7 @@ impl super::Advices for Advices {
  /// advice types.
  #[derive(Clone, Debug, Serialize, Deserialize)]
  #[serde(rename_all = "camelCase")]
+ #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
  #[cfg_attr(test, derive(Eq, PartialEq))]
  enum Advice {
  Log(LogCategory, MarkupBuf),

@@ -354,6 +358,17 @@ impl<'de> Deserialize<'de> for DiagnosticTags {
  }
  }

+ #[cfg(feature = "schema")]
+ impl schemars::JsonSchema for DiagnosticTags {
+ fn schema_name() -> String {
+ String::from("DiagnosticTags")
+ }
+
+ fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> schemars::schema::Schema {
+ <Vec<DiagnosticTag>>::json_schema(r#gen)
+ }
+ }
+
  #[cfg(test)]
  mod tests {
  use std::io;
crates/pglt_lsp/src/handlers/completions.rs (+17, -16)

@@ -26,22 +26,23 @@ pub fn get_completions(
  pglt_lsp_converters::negotiated_encoding(client_capabilities),
  )?;

- let completion_result = match session
- .workspace
- .get_completions(workspace::CompletionParams {
- path,
- position: offset,
- }) {
- Ok(result) => result,
- Err(e) => match e {
- WorkspaceError::DatabaseConnectionError(_) => {
- return Ok(lsp_types::CompletionResponse::Array(vec![]));
- }
- _ => {
- return Err(e.into());
- }
- },
- };
+ let completion_result =
+ match session
+ .workspace
+ .get_completions(workspace::GetCompletionsParams {
+ path,
+ position: offset,
+ }) {
+ Ok(result) => result,
+ Err(e) => match e {
+ WorkspaceError::DatabaseConnectionError(_) => {
+ return Ok(lsp_types::CompletionResponse::Array(vec![]));
+ }
+ _ => {
+ return Err(e.into());
+ }
+ },
+ };

  let items: Vec<CompletionItem> = completion_result
  .into_iter()
crates/pglt_workspace/Cargo.toml (+14, -1)

@@ -38,9 +38,22 @@ tracing = { workspace = true, features = ["attributes", "log"]
  tree-sitter.workspace = true
  tree_sitter_sql.workspace = true

+ biome_js_factory = { workspace = true, optional = true }
+ biome_js_syntax = { workspace = true, optional = true }
+ biome_rowan = { workspace = true, optional = true }

  [features]
- schema = ["dep:schemars", "pglt_configuration/schema", "pglt_fs/schema"]
+ schema = [
+ "dep:schemars",
+ "dep:biome_rowan",
+ "dep:biome_js_syntax",
+ "dep:biome_js_factory",
+ "pglt_configuration/schema",
+ "pglt_diagnostics/schema",
+ "pglt_fs/schema",
+ "pglt_analyse/schema",
+ "pglt_completions/schema",
+ ]

  [dev-dependencies]
  tempfile = "3.15.0"
crates/pglt_workspace/src/lib.rs (+2)

@@ -9,6 +9,8 @@ pub mod dome;
  pub mod matcher;
  pub mod settings;
  pub mod workspace;
+ #[cfg(feature = "schema")]
+ pub mod workspace_types;

  pub use crate::diagnostics::{TransportError, WorkspaceError};
  pub use crate::workspace::Workspace;
crates/pglt_workspace/src/workspace.rs (+2, -71)

@@ -46,7 +46,7 @@ pub struct PullDiagnosticsParams {

  #[derive(Debug, serde::Serialize, serde::Deserialize)]
  #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
- pub struct CompletionParams {
+ pub struct GetCompletionsParams {
  /// The File for which a completion is requested.
  pub path: PgLTPath,
  /// The Cursor position in the file for which a completion is requested.

@@ -117,7 +117,7 @@ pub trait Workspace: Send + Sync + RefUnwindSafe {

  fn get_completions(
  &self,
- params: CompletionParams,
+ params: GetCompletionsParams,
  ) -> Result<pglt_completions::CompletionResult, WorkspaceError>;

  /// Update the global settings for this workspace

@@ -213,75 +213,6 @@ impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> {
  skip,
  })
  }
- //
- // pub fn pull_actions(
- // &self,
- // range: Option<TextRange>,
- // only: Vec<RuleSelector>,
- // skip: Vec<RuleSelector>,
- // suppression_reason: Option<String>,
- // ) -> Result<PullActionsResult, WorkspaceError> {
- // self.workspace.pull_actions(PullActionsParams {
- // path: self.path.clone(),
- // range,
- // only,
- // skip,
- // suppression_reason,
- // })
- // }
- //
- // pub fn format_file(&self) -> Result<Printed, WorkspaceError> {
- // self.workspace.format_file(FormatFileParams {
- // path: self.path.clone(),
- // })
- // }
- //
- // pub fn format_range(&self, range: TextRange) -> Result<Printed, WorkspaceError> {
- // self.workspace.format_range(FormatRangeParams {
- // path: self.path.clone(),
- // range,
- // })
- // }
- //
- // pub fn format_on_type(&self, offset: TextSize) -> Result<Printed, WorkspaceError> {
- // self.workspace.format_on_type(FormatOnTypeParams {
- // path: self.path.clone(),
- // offset,
- // })
- // }
- //
- // pub fn fix_file(
- // &self,
- // fix_file_mode: FixFileMode,
- // should_format: bool,
- // rule_categories: RuleCategories,
- // only: Vec<RuleSelector>,
- // skip: Vec<RuleSelector>,
- // suppression_reason: Option<String>,
- // ) -> Result<FixFileResult, WorkspaceError> {
- // self.workspace.fix_file(FixFileParams {
- // path: self.path.clone(),
- // fix_file_mode,
- // should_format,
- // only,
- // skip,
- // rule_categories,
- // suppression_reason,
- // })
- // }
- //
- // pub fn organize_imports(&self) -> Result<OrganizeImportsResult, WorkspaceError> {
- // self.workspace.organize_imports(OrganizeImportsParams {
- // path: self.path.clone(),
- // })
- // }
- //
- // pub fn search_pattern(&self, pattern: &PatternId) -> Result<SearchResults, WorkspaceError> {
- // self.workspace.search_pattern(SearchPatternParams {
- // path: self.path.clone(),
- // pattern: pattern.clone(),
- // })
- // }
  }

  impl<W: Workspace + ?Sized> Drop for FileGuard<'_, W> {
crates/pglt_workspace/src/workspace/client.rs (+1, -1)

@@ -126,7 +126,7 @@ where

  fn get_completions(
  &self,
- params: super::CompletionParams,
+ params: super::GetCompletionsParams,
  ) -> Result<pglt_completions::CompletionResult, WorkspaceError> {
  self.request("pglt/get_completions", params)
  }
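
The JavaScript bindings talk to this same endpoint over JSON-RPC. A minimal sketch of such a call, assuming a Transport that has already been connected and initialized as in createWorkspaceWithBinary() below; the file path, kind value and cursor offset are illustrative, and `position` is assumed to serialize as a plain numeric offset:

import { Transport } from "./transport";

// Ask the daemon for completions at a byte offset in an already-opened file.
// "pglt/get_completions" is the method name the Rust client registers above.
async function completionsAt(transport: Transport, offset: number) {
  return transport.request("pglt/get_completions", {
    path: { path: "test.sql", was_written: false, kind: ["Handleable"] },
    position: offset,
  });
}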

crates/pglt_workspace/src/workspace/server.rs (+1, -1)

@@ -397,7 +397,7 @@ impl Workspace for WorkspaceServer {
  #[tracing::instrument(level = "debug", skip(self))]
  fn get_completions(
  &self,
- params: super::CompletionParams,
+ params: super::GetCompletionsParams,
  ) -> Result<pglt_completions::CompletionResult, WorkspaceError> {
  tracing::debug!(
  "Getting completions for file {:?} at position {:?}",
crates/pglt_workspace/src/workspace_types.rs (new file, +471; large diff not rendered)
justfile (+9, -5)

@@ -11,33 +11,34 @@ install-tools:
  cargo install cargo-binstall
  cargo binstall cargo-insta taplo-cli
  cargo binstall --git "https://github.com/astral-sh/uv" uv
-
+ bun install

  # Upgrades the tools needed to develop
  upgrade-tools:
  cargo install cargo-binstall --force
  cargo binstall cargo-insta taplo-cli --force
  cargo binstall --git "https://github.com/astral-sh/uv" uv --force
+ bun install

  # Generates code generated files for the linter
  gen-lint:
  cargo run -p xtask_codegen -- analyser
  cargo run -p xtask_codegen -- configuration
- # cargo codegen-migrate
- # just gen-bindings
+ cargo run -p xtask_codegen -- bindings
  cargo run -p rules_check
+ cargo run -p docs_codegen
  just format

  # Creates a new lint rule in the given path, with the given name. Name has to be camel case. Group should be lowercase.
  new-lintrule group rulename:
  cargo run -p xtask_codegen -- new-lintrule --category=lint --name={{rulename}} --group={{group}}
  just gen-lint
- # just documentation

- # Format Rust files and TOML files
+ # Format Rust, JS and TOML files
  format:
  cargo fmt
  taplo format
+ bun biome format --write

  [unix]
  _touch file:

@@ -63,10 +64,12 @@ test-doc:
  lint:
  cargo clippy
  cargo run -p rules_check
+ bun biome lint

  lint-fix:
  cargo clippy --fix
  cargo run -p rules_check
+ bun biome lint --write

  serve-docs:
  uv sync

@@ -77,6 +80,7 @@ ready:
  git diff --exit-code --quiet
  cargo run -p xtask_codegen -- configuration
  cargo run -p docs_codegen
+ cargo run -p xtask_codegen -- bindings
  just lint-fix
  just format
  git diff --exit-code --quiet
package.json (new file, +17)

{
  "name": "@pglt/monorepo",
  "version": "0.0.0",
  "private": true,
  "devDependencies": {
    "@biomejs/biome": "1.9.4",
    "@types/bun": "latest"
  },
  "peerDependencies": {
    "typescript": "^5"
  },
  "workspaces": ["packages/@pglt/pglt", "packages/@pglt/backend-jsonrpc"],
  "keywords": [],
  "author": "Supabase Community",
  "license": "MIT OR Apache-2.0",
  "packageManager": "bun@1"
}

packages/@pglt/backend-jsonrpc/package.json (new file, +32)

{
  "name": "@pglt/backend-jsonrpc",
  "version": "<placeholder>",
  "main": "dist/index.js",
  "scripts": {
    "test": "bun test",
    "test:ci": "bun build && bun test",
    "build": "bun build ./src/index.ts --outdir ./dist --target node"
  },
  "files": ["dist/", "README.md"],
  "repository": {
    "type": "git",
    "url": "git+https://github.com/supabase-community/postgres_lsp.git",
    "directory": "packages/@pglt/backend-jsonrpc"
  },
  "author": "Supabase Community",
  "bugs": "ttps://github.com/supabase-community/postgres_lsp/issues",
  "description": "Bindings to the JSON-RPC Workspace API of the Postgres Language Tools daemon",
  "keywords": ["TypeScript", "Postgres"],
  "license": "MIT",
  "publishConfig": {
    "provenance": true
  },
  "optionalDependencies": {
    "@pglt/cli-win32-x64": "<placeholder>",
    "@pglt/cli-win32-arm64": "<placeholder>",
    "@pglt/cli-darwin-x64": "<placeholder>",
    "@pglt/cli-darwin-arm64": "<placeholder>",
    "@pglt/cli-linux-x64": "<placeholder>",
    "@pglt/cli-linux-arm64": "<placeholder>"
  }
}

packages/@pglt/backend-jsonrpc/src/command.ts (new file, +36)

/**
 * Gets the path of the binary for the current platform
 *
 * @returns Filesystem path to the binary, or null if no prebuilt distribution exists for the current platform
 */
export function getCommand(): string | null {
  const { platform, arch } = process;

  type PlatformPaths = {
    [P in NodeJS.Platform]?: {
      [A in NodeJS.Architecture]?: string;
    };
  };

  const PLATFORMS: PlatformPaths = {
    win32: {
      x64: "@pglt/cli-win32-x64/pglt.exe",
      arm64: "@pglt/cli-win32-arm64/pglt.exe",
    },
    darwin: {
      x64: "@pglt/cli-darwin-x64/pglt",
      arm64: "@pglt/cli-darwin-arm64/pglt",
    },
    linux: {
      x64: "@pglt/cli-linux-x64/pglt",
      arm64: "@pglt/cli-linux-arm64/pglt",
    },
  };

  const binPath = PLATFORMS?.[platform]?.[arch];
  if (!binPath) {
    return null;
  }

  return require.resolve(binPath);
}
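
A short usage sketch of the resolver above; the fallback handling shown here is illustrative and mirrors what createWorkspace() does in src/index.ts:

import { getCommand } from "./command";

// Resolve the platform-specific daemon binary installed via optionalDependencies.
const command = getCommand();

if (command === null) {
  // No prebuilt binary for this platform/arch combination; callers are
  // expected to degrade gracefully, as createWorkspace() does.
  console.error(`no pglt binary for ${process.platform}/${process.arch}`);
} else {
  // e.g. ".../node_modules/@pglt/cli-linux-x64/pglt" on Linux x64
  console.log(`using pglt daemon binary at ${command}`);
}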
packages/@pglt/backend-jsonrpc/src/index.ts (new file, +46)

import { getCommand } from "./command";
import { createSocket } from "./socket";
import { Transport } from "./transport";
import { type Workspace, createWorkspace as wrapTransport } from "./workspace";

/**
 * Create an instance of the Workspace client connected to a remote daemon
 * instance through the JSON-RPC protocol
 *
 * @returns A Workspace client, or null if the underlying platform is not supported
 */
export async function createWorkspace(): Promise<Workspace | null> {
  const command = getCommand();
  if (!command) {
    return null;
  }

  return createWorkspaceWithBinary(command);
}

/**
 * Create an instance of the Workspace client connected to a remote daemon
 * instance through the JSON-RPC protocol, using the provided command to spawn
 * the daemon if necessary
 *
 * @param command Path to the binary
 * @returns A Workspace client, or null if the underlying platform is not supported
 */
export async function createWorkspaceWithBinary(
  command: string,
): Promise<Workspace> {
  const socket = await createSocket(command);
  const transport = new Transport(socket);

  await transport.request("initialize", {
    capabilities: {},
    client_info: {
      name: "@pglt/backend-jsonrpc",
      version: "0.0.0",
    },
  });

  return wrapTransport(transport);
}

export * from "./workspace";
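
Putting the package together, a hedged end-to-end sketch of consuming the bindings; the request shapes mirror the Workspace API test further down, while the file name and contents are only examples:

import { createWorkspace } from "@pglt/backend-jsonrpc";

const workspace = await createWorkspace();
if (workspace === null) {
  throw new Error("unsupported platform: no prebuilt pglt binary");
}

// Open an in-memory SQL document on the daemon...
await workspace.openFile({
  path: { path: "example.sql", was_written: false, kind: ["Handleable"] },
  content: "select 1 from",
  version: 0,
});

// ...and pull diagnostics for it.
const { diagnostics } = await workspace.pullDiagnostics({
  path: { path: "example.sql", was_written: false, kind: ["Handleable"] },
  only: [],
  skip: [],
  max_diagnostics: 100,
  categories: [],
});
console.log(diagnostics.map((d) => d.description));

await workspace.closeFile({
  path: { path: "example.sql", was_written: false, kind: ["Handleable"] },
});
workspace.destroy();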
packages/@pglt/backend-jsonrpc/src/socket.ts (new file, +47)

import { spawn } from "node:child_process";
import { type Socket, connect } from "node:net";

function getSocket(command: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const process = spawn(command, ["__print_socket"], {
      stdio: "pipe",
    });

    process.on("error", reject);

    let pipeName = "";
    process.stdout.on("data", (data) => {
      pipeName += data.toString("utf-8");
    });

    process.on("exit", (code) => {
      if (code === 0) {
        resolve(pipeName.trimEnd());
      } else {
        reject(
          new Error(
            `Command '${command} __print_socket' exited with code ${code}`,
          ),
        );
      }
    });
  });
}

/**
 * Ensure the daemon server is running and create a Socket connected to the RPC channel
 *
 * @param command Path to the daemon binary
 * @returns Socket instance connected to the daemon
 */
export async function createSocket(command: string): Promise<Socket> {
  const path = await getSocket(command);
  const socket = connect(path);

  await new Promise((resolve, reject) => {
    socket.once("error", reject);
    socket.once("ready", resolve);
  });

  return socket;
}
packages/@pglt/backend-jsonrpc/src/transport.ts (new file, +293)

interface Socket {
  on(event: "data", fn: (data: Buffer) => void): void;
  write(data: Buffer): void;
  destroy(): void;
}

enum ReaderStateKind {
  Header = 0,
  Body = 1,
}

interface ReaderStateHeader {
  readonly kind: ReaderStateKind.Header;
  contentLength?: number;
  contentType?: string;
}

interface ReaderStateBody {
  readonly kind: ReaderStateKind.Body;
  readonly contentLength: number;
  readonly contentType?: string;
}

type ReaderState = ReaderStateHeader | ReaderStateBody;

interface JsonRpcRequest {
  jsonrpc: "2.0";
  id: number;
  method: string;
  params: unknown;
}

function isJsonRpcRequest(message: JsonRpcMessage): message is JsonRpcRequest {
  return (
    "id" in message &&
    typeof message.id === "number" &&
    "method" in message &&
    typeof message.method === "string" &&
    "params" in message
  );
}

interface JsonRpcNotification {
  jsonrpc: "2.0";
  method: string;
  params: unknown;
}

function isJsonRpcNotification(
  message: JsonRpcMessage,
): message is JsonRpcNotification {
  return (
    !("id" in message) &&
    "method" in message &&
    typeof message.method === "string" &&
    "params" in message
  );
}

type JsonRpcResponse =
  | {
      jsonrpc: "2.0";
      id: number;
      result: unknown;
    }
  | {
      jsonrpc: "2.0";
      id: number;
      error: unknown;
    };

function isJsonRpcResponse(
  message: JsonRpcMessage,
): message is JsonRpcResponse {
  return (
    "id" in message &&
    typeof message.id === "number" &&
    !("method" in message) &&
    ("result" in message || "error" in message)
  );
}

type JsonRpcMessage = JsonRpcRequest | JsonRpcNotification | JsonRpcResponse;

function isJsonRpcMessage(message: unknown): message is JsonRpcMessage {
  return (
    typeof message === "object" &&
    message !== null &&
    "jsonrpc" in message &&
    message.jsonrpc === "2.0"
  );
}

interface PendingRequest {
  resolve(result: unknown): void;
  reject(error: unknown): void;
}

const MIME_JSONRPC = "application/vscode-jsonrpc";

/**
 * Implements the daemon server JSON-RPC protocol over a Socket instance
 */
export class Transport {
  /**
   * Counter incremented for each outgoing request to generate a unique ID
   */
  private nextRequestId = 0;

  /**
   * Storage for the promise resolver functions of pending requests,
   * keyed by ID of the request
   */
  private pendingRequests: Map<number, PendingRequest> = new Map();

  constructor(private socket: Socket) {
    socket.on("data", (data) => {
      this.processIncoming(data);
    });
  }

  /**
   * Send a request to the remote server
   *
   * @param method Name of the remote method to call
   * @param params Parameters object the remote method should be called with
   * @return Promise resolving with the value returned by the remote method, or rejecting with an RPC error if the remote call failed
   */
  // biome-ignore lint/suspicious/noExplicitAny: if i change it to Promise<unknown> typescript breaks
  request(method: string, params: unknown): Promise<any> {
    return new Promise((resolve, reject) => {
      const id = this.nextRequestId++;
      this.pendingRequests.set(id, { resolve, reject });
      this.sendMessage({
        jsonrpc: "2.0",
        id,
        method,
        params,
      });
    });
  }

  /**
   * Send a notification message to the remote server
   *
   * @param method Name of the remote method to call
   * @param params Parameters object the remote method should be called with
   */
  notify(method: string, params: unknown) {
    this.sendMessage({
      jsonrpc: "2.0",
      method,
      params,
    });
  }

  /**
   * Destroy the internal socket instance for this Transport
   */
  destroy() {
    this.socket.destroy();
  }

  private sendMessage(message: JsonRpcMessage) {
    const body = Buffer.from(JSON.stringify(message));
    const headers = Buffer.from(
      `Content-Length: ${body.length}\r\nContent-Type: ${MIME_JSONRPC};charset=utf-8\r\n\r\n`,
    );
    this.socket.write(Buffer.concat([headers, body]));
  }

  private pendingData = Buffer.from("");
  private readerState: ReaderState = {
    kind: ReaderStateKind.Header,
  };

  private processIncoming(data: Buffer) {
    this.pendingData = Buffer.concat([this.pendingData, data]);

    while (this.pendingData.length > 0) {
      if (this.readerState.kind === ReaderStateKind.Header) {
        const lineBreakIndex = this.pendingData.indexOf("\n");
        if (lineBreakIndex < 0) {
          break;
        }

        const header = this.pendingData.subarray(0, lineBreakIndex + 1);
        this.pendingData = this.pendingData.subarray(lineBreakIndex + 1);
        this.processIncomingHeader(this.readerState, header.toString("utf-8"));
      } else if (this.pendingData.length >= this.readerState.contentLength) {
        const body = this.pendingData.subarray(
          0,
          this.readerState.contentLength,
        );
        this.pendingData = this.pendingData.subarray(
          this.readerState.contentLength,
        );
        this.processIncomingBody(body);

        this.readerState = {
          kind: ReaderStateKind.Header,
        };
      } else {
        break;
      }
    }
  }

  private processIncomingHeader(readerState: ReaderStateHeader, line: string) {
    if (line === "\r\n") {
      const { contentLength, contentType } = readerState;
      if (typeof contentLength !== "number") {
        throw new Error(
          "incoming message from the remote workspace is missing the Content-Length header",
        );
      }

      this.readerState = {
        kind: ReaderStateKind.Body,
        contentLength,
        contentType,
      };
      return;
    }

    const colonIndex = line.indexOf(":");
    if (colonIndex < 0) {
      throw new Error(`could not find colon token in "${line}"`);
    }

    const headerName = line.substring(0, colonIndex);
    const headerValue = line.substring(colonIndex + 1).trim();

    switch (headerName) {
      case "Content-Length": {
        const value = Number.parseInt(headerValue);
        readerState.contentLength = value;
        break;
      }
      case "Content-Type": {
        if (!headerValue.startsWith(MIME_JSONRPC)) {
          throw new Error(
            `invalid value for Content-Type expected "${MIME_JSONRPC}", got "${headerValue}"`,
          );
        }

        readerState.contentType = headerValue;
        break;
      }
      default:
        console.warn(`ignoring unknown header "${headerName}"`);
    }
  }

  private processIncomingBody(buffer: Buffer) {
    const data = buffer.toString("utf-8");
    const body = JSON.parse(data);

    if (isJsonRpcMessage(body)) {
      if (isJsonRpcRequest(body)) {
        // TODO: Not implemented at the moment
        return;
      }

      if (isJsonRpcNotification(body)) {
        // TODO: Not implemented at the moment
        return;
      }

      if (isJsonRpcResponse(body)) {
        const pendingRequest = this.pendingRequests.get(body.id);
        if (pendingRequest) {
          this.pendingRequests.delete(body.id);
          const { resolve, reject } = pendingRequest;
          if ("result" in body) {
            resolve(body.result);
          } else {
            reject(body.error);
          }
        } else {
          throw new Error(
            `could not find any pending request matching RPC response ID ${body.id}`,
          );
        }
        return;
      }
    }

    throw new Error(
      `failed to deserialize incoming message from remote workspace, "${data}" is not a valid JSON-RPC message body`,
    );
  }
}
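
For reference, a small sketch of the wire framing this class produces; the stub socket and the notification method name are illustrative only:

import { Transport } from "./transport";

// Anything with on/write/destroy satisfies the Socket interface above,
// so an in-memory stub is enough to observe the outgoing frames.
const frames: Buffer[] = [];
const transport = new Transport({
  on() {},
  write(data) {
    frames.push(data);
  },
  destroy() {},
});

// notify() frames the JSON-RPC body exactly like sendMessage() does:
// Content-Length and Content-Type headers, a blank line, then the JSON body.
transport.notify("example/notification", {});
console.log(frames[0].toString("utf-8"));
// prints roughly:
//   Content-Length: 61
//   Content-Type: application/vscode-jsonrpc;charset=utf-8
//
//   {"jsonrpc":"2.0","method":"example/notification","params":{}}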

packages/@pglt/backend-jsonrpc/src/workspace.ts (new file, +453; large diff not rendered)
@pglt/backend-jsonrpc Transport tests (new file, +160)

import { describe, expect, it, mock } from "bun:test";

import { Transport } from "../src/transport";

function makeMessage(body) {
  const content = JSON.stringify(body);
  return Buffer.from(
    `Content-Length: ${content.length}\r\nContent-Type: application/vscode-jsonrpc;charset=utf-8\r\n\r\n${content}`,
  );
}

describe("Transport Layer", () => {
  it("should encode requests into the socket", async () => {
    let onData = null;
    const socket = {
      on(event, fn) {
        expect(event).toBe("data");
        onData = fn;
      },
      write: mock(),
      destroy: mock(),
    };

    const transport = new Transport(socket);

    const result = transport.request("method", "params");

    expect(socket.write).toHaveBeenCalledWith(
      makeMessage({
        jsonrpc: "2.0",
        id: 0,
        method: "method",
        params: "params",
      }),
    );

    onData(
      makeMessage({
        jsonrpc: "2.0",
        id: 0,
        result: "result",
      }),
    );

    const response = await result;
    expect(response).toBe("result");

    transport.destroy();
    expect(socket.destroy).toHaveBeenCalledOnce();
  });

  it("should throw on missing Content-Length headers", async () => {
    let onData = null;
    const socket = {
      on(event, fn) {
        expect(event).toBe("data");
        onData = fn;
      },
      write: mock(),
      destroy: mock(),
    };

    const transport = new Transport(socket);

    expect(() => onData(Buffer.from("\r\n"))).toThrowError(
      "incoming message from the remote workspace is missing the Content-Length header",
    );

    transport.destroy();
    expect(socket.destroy).toHaveBeenCalledOnce();
  });

  it("should throw on missing colon token", async () => {
    let onData = null;
    const socket = {
      on(event, fn) {
        expect(event).toBe("data");
        onData = fn;
      },
      write: mock(),
      destroy: mock(),
    };

    const transport = new Transport(socket);

    expect(() => onData(Buffer.from("Content-Length\r\n"))).toThrowError(
      'could not find colon token in "Content-Length\r\n"',
    );

    transport.destroy();
    expect(socket.destroy).toHaveBeenCalledOnce();
  });

  it("should throw on invalid Content-Type", async () => {
    let onData = null;
    const socket = {
      on(event, fn) {
        expect(event).toBe("data");
        onData = fn;
      },
      write: mock(),
      destroy: mock(),
    };

    const transport = new Transport(socket);

    expect(() =>
      onData(Buffer.from("Content-Type: text/plain\r\n")),
    ).toThrowError(
      'invalid value for Content-Type expected "application/vscode-jsonrpc", got "text/plain"',
    );

    transport.destroy();
    expect(socket.destroy).toHaveBeenCalledOnce();
  });

  it("should throw on unknown request ID", async () => {
    let onData = null;
    const socket = {
      on(event, fn) {
        expect(event).toBe("data");
        onData = fn;
      },
      write: mock(),
      destroy: mock(),
    };

    const transport = new Transport(socket);

    expect(() =>
      onData(makeMessage({ jsonrpc: "2.0", id: 0, result: "result" })),
    ).toThrowError(
      "could not find any pending request matching RPC response ID 0",
    );

    transport.destroy();
    expect(socket.destroy).toHaveBeenCalledOnce();
  });

  it("should throw on invalid messages", async () => {
    let onData = null;
    const socket = {
      on(event, fn) {
        expect(event).toBe("data");
        onData = fn;
      },
      write: mock(),
      destroy: mock(),
    };

    const transport = new Transport(socket);

    expect(() => onData(makeMessage({}))).toThrowError(
      'failed to deserialize incoming message from remote workspace, "{}" is not a valid JSON-RPC message body',
    );

    transport.destroy();
    expect(socket.destroy).toHaveBeenCalledOnce();
  });
});

@pglt/backend-jsonrpc Workspace API test (new file, +54)

import { resolve } from "node:path";
import { fileURLToPath } from "node:url";
import { describe, expect, it } from "vitest";

import { createWorkspaceWithBinary } from "../dist";

describe("Workspace API", () => {
  it("should process remote requests", async () => {
    const extension = process.platform === "win32" ? ".exe" : "";
    const command = resolve(
      fileURLToPath(import.meta.url),
      "../../../../..",
      `target/release/pglt${extension}`,
    );

    const workspace = await createWorkspaceWithBinary(command);
    await workspace.openFile({
      path: {
        path: "test.sql",
        was_written: false,
        kind: ["Handleable"],
      },
      content: "select 1 from",
      version: 0,
    });

    const { diagnostics } = await workspace.pullDiagnostics({
      only: [],
      skip: [],
      max_diagnostics: 100,
      categories: [],
      path: {
        path: "test.sql",
        was_written: false,
        kind: ["Handleable"],
      },
    });

    expect(diagnostics).toHaveLength(1);
    expect(diagnostics[0].description).toBe(
      "Invalid statement: syntax error at end of input",
    );

    await workspace.closeFile({
      path: {
        path: "test.sql",
        was_written: false,
        kind: ["Handleable"],
      },
    });

    workspace.destroy();
  });
});

packages/@pglt/pglt/bin/pglt
File mode changed: 100644 to 100755.

packages/@pglt/pglt/package.json (+38, -41)

@@ -1,43 +1,40 @@
The file appears to be re-indented; apart from whitespace, the only change collapses the "files" array onto one line:

- "files": [
- "bin/pglt",
- "schema.json"
- ],
+ "files": ["bin/pglt", "schema.json"],

Content after the change:

{
  "name": "pglt",
  "version": "<placeholder>",
  "bin": {
    "pglt": "bin/pglt"
  },
  "repository": {
    "type": "git",
    "url": "git+https://github.com/supabase-community/postgres_lsp.git",
    "directory": "packages/@pglt/pglt"
  },
  "author": "Supabase Community",
  "contributors": [
    {
      "name": "Philipp Steinrötter",
      "url": "https://github.com/psteinroe"
    },
    {
      "name": "Julian Domke",
      "url": "https://github.com/juleswritescode"
    }
  ],
  "license": "MIT or Apache-2.0",
  "description": "A collection of language tools and a Language Server Protocol (LSP) implementation for Postgres, focusing on developer experience and reliable SQL tooling.",
  "files": ["bin/pglt", "schema.json"],
  "engines": {
    "node": ">=20"
  },
  "publishConfig": {
    "provenance": true
  },
  "optionalDependencies": {
    "pglt-x86_64-windows-msvc": "<placeholder>",
    "pglt-aarch64-windows-msvc": "<placeholder>",
    "pglt-x86_64-apple-darwin": "<placeholder>",
    "pglt-aarch64-apple-darwin": "<placeholder>",
    "pglt-x86_64-linux-gnu": "<placeholder>",
    "pglt-aarch64-linux-gnu": "<placeholder>"
  }
}

packages/@pglt/pglt/scripts/generate-packages.mjs (+178, -169; large diff not rendered)
tsconfig.json (new file, +27)

{
  "compilerOptions": {
    // Enable latest features
    "lib": ["ESNext", "DOM"],
    "target": "ESNext",
    "module": "ESNext",
    "moduleDetection": "force",
    "jsx": "react-jsx",
    "allowJs": true,

    // Bundler mode
    "moduleResolution": "bundler",
    "allowImportingTsExtensions": true,
    "verbatimModuleSyntax": true,
    "noEmit": true,

    // Best practices
    "strict": true,
    "skipLibCheck": true,
    "noFallthroughCasesInSwitch": true,

    // Some stricter flags (disabled by default)
    "noUnusedLocals": false,
    "noUnusedParameters": false,
    "noPropertyAccessFromIndexSignature": false
  }
}
xtask/codegen/Cargo.toml (+14, -9)

@@ -5,12 +5,17 @@ publish = false
  version = "0.0.0"

  [dependencies]
- anyhow = { workspace = true }
- biome_string_case = { workspace = true }
- bpaf = { workspace = true, features = ["derive"] }
- pglt_analyse = { workspace = true }
- pglt_analyser = { workspace = true }
- proc-macro2 = { workspace = true, features = ["span-locations"] }
- pulldown-cmark = { version = "0.12.2" }
- quote = "1.0.36"
- xtask = { path = '../', version = "0.0" }
+ anyhow = { workspace = true }
+ biome_js_factory = { workspace = true }
+ biome_js_formatter = { workspace = true }
+ biome_js_syntax = { workspace = true }
+ biome_rowan = { workspace = true }
+ biome_string_case = { workspace = true }
+ bpaf = { workspace = true, features = ["derive"] }
+ pglt_analyse = { workspace = true }
+ pglt_analyser = { workspace = true }
+ pglt_workspace = { workspace = true, features = ["schema"] }
+ proc-macro2 = { workspace = true, features = ["span-locations"] }
+ pulldown-cmark = { version = "0.12.2" }
+ quote = "1.0.36"
+ xtask = { path = '../', version = "0.0" }
xtask/codegen/src/generate_bindings.rs (new file, +434; large diff not rendered)
xtask/codegen/src/lib.rs (+5)

@@ -1,11 +1,13 @@
  //! Codegen tools. Derived from Biome's codegen

  mod generate_analyser;
+ mod generate_bindings;
  mod generate_configuration;
  mod generate_crate;
  mod generate_new_analyser_rule;

  pub use self::generate_analyser::generate_analyser;
+ pub use self::generate_bindings::generate_bindings;
  pub use self::generate_configuration::generate_rules_configuration;
  pub use self::generate_crate::generate_crate;
  pub use self::generate_new_analyser_rule::generate_new_analyser_rule;

@@ -52,6 +54,9 @@ pub fn to_capitalized(s: &str) -> String {
  #[derive(Debug, Clone, Bpaf)]
  #[bpaf(options)]
  pub enum TaskCommand {
+ /// Generate TypeScript definitions for the JavaScript bindings to the Workspace API
+ #[bpaf(command)]
+ Bindings,
  /// Generate factory functions for the analyser and the configuration of the analysers
  #[bpaf(command)]
  Analyser,
xtask/codegen/src/main.rs (+5, -2)

@@ -2,8 +2,8 @@ use xtask::Mode::Overwrite;
  use xtask::{project_root, pushd, Result};

  use xtask_codegen::{
- generate_analyser, generate_crate, generate_new_analyser_rule, generate_rules_configuration,
- task_command, TaskCommand,
+ generate_analyser, generate_bindings, generate_crate, generate_new_analyser_rule,
+ generate_rules_configuration, task_command, TaskCommand,
  };

  fn main() -> Result<()> {

@@ -27,6 +27,9 @@ fn main() -> Result<()> {
  TaskCommand::Configuration => {
  generate_rules_configuration(Overwrite)?;
  }
+ TaskCommand::Bindings => {
+ generate_bindings(Overwrite)?;
+ }
  }

  Ok(())
