Skip to content

Commit 4bfc1cc

Browse files
committed
chore: add parser error test
1 parent 5edc55f commit 4bfc1cc

File tree

5 files changed

+135
-29
lines changed

5 files changed

+135
-29
lines changed

package-lock.json

Lines changed: 58 additions & 21 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,10 +46,10 @@
4646
"crypto-js": "^4.0.0",
4747
"fastify": "^4.24.3",
4848
"fastify-metrics": "^10.0.0",
49-
"pg": "https://gitpkg.vercel.app/supabase/node-postgres/packages/pg?master",
49+
"pg": "npm:@supabase/pg@0.0.3",
5050
"pg-connection-string": "^2.7.0",
5151
"pg-format": "^1.0.4",
52-
"pg-protocol": "^1.7.0",
52+
"pg-protocol": "npm:@supabase/pg-protocol@0.0.2",
5353
"pgsql-parser": "^13.16.0",
5454
"pino": "^9.5.0",
5555
"postgres-array": "^3.0.1",

src/lib/PostgresMeta.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import { PoolConfig } from 'pg'
21
import * as Parser from './Parser.js'
32
import PostgresMetaColumnPrivileges from './PostgresMetaColumnPrivileges.js'
43
import PostgresMetaColumns from './PostgresMetaColumns.js'
@@ -20,7 +19,7 @@ import PostgresMetaTypes from './PostgresMetaTypes.js'
2019
import PostgresMetaVersion from './PostgresMetaVersion.js'
2120
import PostgresMetaViews from './PostgresMetaViews.js'
2221
import { init } from './db.js'
23-
import { PostgresMetaResult } from './types.js'
22+
import { PostgresMetaResult, PoolConfig } from './types.js'
2423

2524
export default class PostgresMeta {
2625
query: (sql: string) => Promise<PostgresMetaResult<any>>

src/lib/db.ts

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ pg.types.setTypeParser(1017, (x) => x) // _point
2222

2323
// Ensure any query will have an appropriate error handler on the pool to prevent connection errors
2424
// from bubbling up the stack and eventually killing the server
25-
const poolerQueryHandleError = (pool: pg.Pool, sql: string): Promise<pg.QueryResult<any>> => {
25+
const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise<pg.QueryResult<any>> => {
2626
return new Promise((resolve, reject) => {
2727
let rejected = false
2828
const connectionErrorHandler = (err: any) => {
@@ -38,8 +38,8 @@ const poolerQueryHandleError = (pool: pg.Pool, sql: string): Promise<pg.QueryRes
3838
}
3939
// This listener avoids uncaught exceptions for errors happening at the connection level within the stream
4040
// such as parse or RESULT_SIZE_EXCEEDED errors; instead, handle the error gracefully by bubbling it up to the caller
41-
pool.once('error', connectionErrorHandler)
42-
pool
41+
pgpool.once('error', connectionErrorHandler)
42+
pgpool
4343
.query(sql)
4444
.then((results: pg.QueryResult<any>) => {
4545
if (!rejected) {
@@ -196,7 +196,6 @@ ${' '.repeat(5 + lineNumber.toString().length + 2 + lineOffset)}^
196196
},
197197
}
198198
}
199-
200199
return { data: null, error: { code: error.code, message: error.message } }
201200
} finally {
202201
// If the error isn't a "DatabaseError" assume it's a connection related we kill the connection

test/server/result-size-limit.ts

Lines changed: 71 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -67,3 +67,74 @@ describe('test max-result-size limit', () => {
6767
expect(nextRes.json()).toHaveLength(50)
6868
})
6969
})
70+
71+
describe('test js parser error max result', () => {
72+
// Create a table with large data for testing
73+
beforeAll(async () => {
74+
// Create a table with a large text column
75+
await pgMeta.query(`
76+
CREATE TABLE very_large_data (
77+
id SERIAL PRIMARY KEY,
78+
data TEXT
79+
);
80+
`)
81+
82+
// Insert data that will exceed our limit (in tests it's set around ~20MB)
83+
await pgMeta.query(`
84+
INSERT INTO very_large_data (data)
85+
VALUES (repeat('x', 710 * 1024 * 1024)) -- 700+MB string will raise a JS exception at parse time
86+
`)
87+
})
88+
89+
afterAll(async () => {
90+
// Clean up the test table
91+
await pgMeta.query('DROP TABLE very_large_data;')
92+
})
93+
94+
test(
95+
'should not kill the server on underlying parser error',
96+
async () => {
97+
// Set a small maxResultSize (50MB)
98+
const res = await app.inject({
99+
method: 'POST',
100+
path: '/query',
101+
payload: { query: 'SELECT * FROM very_large_data;' },
102+
})
103+
104+
// Check that we get the proper error response from the underlying parser
105+
expect(res.json()).toMatchInlineSnapshot(`
106+
{
107+
"error": "exception received while handling packet: Error: Cannot create a string longer than 0x1fffffe8 characters
108+
",
109+
"formattedError": "exception received while handling packet: Error: Cannot create a string longer than 0x1fffffe8 characters
110+
",
111+
"length": 744488975,
112+
"message": "exception received while handling packet: Error: Cannot create a string longer than 0x1fffffe8 characters",
113+
"name": "error",
114+
}
115+
`)
116+
117+
// Verify that subsequent queries still work and the server isn't killed
118+
const nextRes = await app.inject({
119+
method: 'POST',
120+
path: '/query',
121+
payload: { query: 'SELECT * FROM todos;' },
122+
})
123+
expect(nextRes.json()).toMatchInlineSnapshot(`
124+
[
125+
{
126+
"details": "Star the repo",
127+
"id": 1,
128+
"user-id": 1,
129+
},
130+
{
131+
"details": "Watch the releases",
132+
"id": 2,
133+
"user-id": 2,
134+
},
135+
]
136+
`)
137+
},
138+
{ timeout: 20000 }
139+
)
140+
})

0 commit comments

Comments
 (0)