first commit

Myk
2025-07-31 23:47:20 +03:00
commit 2186b278a0
5149 changed files with 537218 additions and 0 deletions

1386
node_modules/postgres/README.md generated vendored Normal file

File diff suppressed because it is too large

233
node_modules/postgres/cf/polyfills.js generated vendored Normal file

@@ -0,0 +1,233 @@
import { EventEmitter } from 'node:events'
import { Buffer } from 'node:buffer'
const Crypto = globalThis.crypto
let ids = 1
const tasks = new Set()
const v4Seg = '(?:[0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])'
const v4Str = `(${v4Seg}[.]){3}${v4Seg}`
const IPv4Reg = new RegExp(`^${v4Str}$`)
const v6Seg = '(?:[0-9a-fA-F]{1,4})'
const IPv6Reg = new RegExp(
'^(' +
`(?:${v6Seg}:){7}(?:${v6Seg}|:)|` +
`(?:${v6Seg}:){6}(?:${v4Str}|:${v6Seg}|:)|` +
`(?:${v6Seg}:){5}(?::${v4Str}|(:${v6Seg}){1,2}|:)|` +
`(?:${v6Seg}:){4}(?:(:${v6Seg}){0,1}:${v4Str}|(:${v6Seg}){1,3}|:)|` +
`(?:${v6Seg}:){3}(?:(:${v6Seg}){0,2}:${v4Str}|(:${v6Seg}){1,4}|:)|` +
`(?:${v6Seg}:){2}(?:(:${v6Seg}){0,3}:${v4Str}|(:${v6Seg}){1,5}|:)|` +
`(?:${v6Seg}:){1}(?:(:${v6Seg}){0,4}:${v4Str}|(:${v6Seg}){1,6}|:)|` +
`(?::((?::${v6Seg}){0,5}:${v4Str}|(?::${v6Seg}){1,7}|:))` +
')(%[0-9a-zA-Z-.:]{1,})?$'
)
const textEncoder = new TextEncoder()
export const crypto = {
randomBytes: l => Crypto.getRandomValues(Buffer.alloc(l)),
pbkdf2Sync: async(password, salt, iterations, keylen) =>
Crypto.subtle.deriveBits(
{
name: 'PBKDF2',
hash: 'SHA-256',
salt,
iterations
},
await Crypto.subtle.importKey(
'raw',
textEncoder.encode(password),
'PBKDF2',
false,
['deriveBits']
),
keylen * 8,
['deriveBits']
),
createHash: type => ({
update: x => ({
digest: encoding => {
if (!(x instanceof Uint8Array)) {
x = textEncoder.encode(x)
}
let prom
if (type === 'sha256') {
prom = Crypto.subtle.digest('SHA-256', x)
} else if (type === 'md5') {
prom = Crypto.subtle.digest('md5', x)
} else {
throw Error(`createHash only supports sha256 or md5 in this environment, not ${type}`)
}
if (encoding === 'hex') {
return prom.then((arrayBuf) => Buffer.from(arrayBuf).toString('hex'))
} else if (encoding) {
throw Error(`createHash only supports hex encoding or unencoded in this environment, not ${encoding}`)
} else {
return prom
}
}
})
}),
createHmac: (type, key) => ({
update: x => ({
digest: async() =>
Buffer.from(
await Crypto.subtle.sign(
'HMAC',
await Crypto.subtle.importKey('raw', key, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign']),
textEncoder.encode(x)
)
)
})
})
}
export const performance = globalThis.performance
export const process = {
env: {}
}
export const os = {
userInfo() {
return { username: 'postgres' }
}
}
export const fs = {
readFile() {
throw new Error('Reading files not supported on CloudFlare')
}
}
export const net = {
isIP: (x) => IPv4Reg.test(x) ? 4 : IPv6Reg.test(x) ? 6 : 0,
Socket
}
export { setImmediate, clearImmediate }
export const tls = {
connect({ socket: tcp, servername }) {
tcp.writer.releaseLock()
tcp.reader.releaseLock()
tcp.readyState = 'upgrading'
tcp.raw = tcp.raw.startTls({ servername })
tcp.raw.closed.then(
() => tcp.emit('close'),
(e) => tcp.emit('error', e)
)
tcp.writer = tcp.raw.writable.getWriter()
tcp.reader = tcp.raw.readable.getReader()
tcp.writer.ready.then(() => {
tcp.read()
tcp.readyState = 'upgrade'
})
return tcp
}
}
function Socket() {
const tcp = Object.assign(new EventEmitter(), {
readyState: 'open',
raw: null,
writer: null,
reader: null,
connect,
write,
end,
destroy,
read
})
return tcp
async function connect(port, host) {
try {
tcp.readyState = 'opening'
const { connect } = await import('cloudflare:sockets')
tcp.raw = connect(host + ':' + port, tcp.ssl ? { secureTransport: 'starttls' } : {})
tcp.raw.closed.then(
() => {
tcp.readyState !== 'upgrade'
? close()
: ((tcp.readyState = 'open'), tcp.emit('secureConnect'))
},
(e) => tcp.emit('error', e)
)
tcp.writer = tcp.raw.writable.getWriter()
tcp.reader = tcp.raw.readable.getReader()
tcp.ssl ? readFirst() : read()
tcp.writer.ready.then(() => {
tcp.readyState = 'open'
tcp.emit('connect')
})
} catch (err) {
error(err)
}
}
function close() {
if (tcp.readyState === 'closed')
return
tcp.readyState = 'closed'
tcp.emit('close')
}
function write(data, cb) {
tcp.writer.write(data).then(cb, error)
return true
}
function end(data) {
return data
? tcp.write(data, () => tcp.raw.close())
: tcp.raw.close()
}
function destroy() {
tcp.destroyed = true
tcp.end()
}
async function read() {
try {
let done
, value
while (({ done, value } = await tcp.reader.read(), !done))
tcp.emit('data', Buffer.from(value))
} catch (err) {
error(err)
}
}
async function readFirst() {
const { value } = await tcp.reader.read()
tcp.emit('data', Buffer.from(value))
}
function error(err) {
tcp.emit('error', err)
tcp.emit('close')
}
}
function setImmediate(fn) {
const id = ids++
tasks.add(id)
queueMicrotask(() => {
if (tasks.has(id)) {
fn()
tasks.delete(id)
}
})
return id
}
function clearImmediate(id) {
tasks.delete(id)
}
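
A minimal usage sketch of the crypto polyfill above, run from an ES module (top-level await). Unlike Node's crypto, digests here resolve asynchronously because they are backed by WebCrypto; the input string is illustrative:

import { crypto } from './polyfills.js'

// 16 random bytes as a Buffer (getRandomValues fills the Buffer in place)
const nonce = crypto.randomBytes(16)

// sha256 hex digest - a Promise, since the polyfill wraps the async WebCrypto API
const hex = await crypto.createHash('sha256').update('hello').digest('hex')
console.log(nonce.length, hex.length) // 16 64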

79
node_modules/postgres/cf/src/bytes.js generated vendored Normal file

@@ -0,0 +1,79 @@
import { Buffer } from 'node:buffer'
const size = 256
let buffer = Buffer.allocUnsafe(size)
const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
const v = x.charCodeAt(0)
acc[x] = () => {
buffer[0] = v
b.i = 5
return b
}
return acc
}, {})
const b = Object.assign(reset, messages, {
N: String.fromCharCode(0),
i: 0,
inc(x) {
b.i += x
return b
},
str(x) {
const length = Buffer.byteLength(x)
fit(length)
b.i += buffer.write(x, b.i, length, 'utf8')
return b
},
i16(x) {
fit(2)
buffer.writeUInt16BE(x, b.i)
b.i += 2
return b
},
i32(x, i) {
if (i || i === 0) {
buffer.writeUInt32BE(x, i)
return b
}
fit(4)
buffer.writeUInt32BE(x, b.i)
b.i += 4
return b
},
z(x) {
fit(x)
buffer.fill(0, b.i, b.i + x)
b.i += x
return b
},
raw(x) {
buffer = Buffer.concat([buffer.subarray(0, b.i), x])
b.i = buffer.length
return b
},
end(at = 1) {
buffer.writeUInt32BE(b.i - at, at)
const out = buffer.subarray(0, b.i)
b.i = 0
buffer = Buffer.allocUnsafe(size)
return out
}
})
export default b
function fit(x) {
if (buffer.length - b.i < x) {
const prev = buffer
, length = prev.length
buffer = Buffer.allocUnsafe(length + (length >> 1) + x)
prev.copy(buffer)
}
}
function reset() {
b.i = 0
return b
}
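
A sketch of how the (suppressed) connection.js drives this builder: pick a message tag, append the payload, and end() back-fills the 4-byte length, which by protocol convention excludes the tag byte. The query text is illustrative:

import b from './bytes.js'

// simple-protocol Query message: 'Q' tag, int32 length, query text, NUL terminator
const msg = b().Q().str('select 1').z(1).end()
console.log(msg[0] === 'Q'.charCodeAt(0))           // true
console.log(msg.readUInt32BE(1) === msg.length - 1) // true - length excludes the tag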

1044
node_modules/postgres/cf/src/connection.js generated vendored Normal file

File diff suppressed because it is too large

53
node_modules/postgres/cf/src/errors.js generated vendored Normal file

@@ -0,0 +1,53 @@
export class PostgresError extends Error {
constructor(x) {
super(x.message)
this.name = this.constructor.name
Object.assign(this, x)
}
}
export const Errors = {
connection,
postgres,
generic,
notSupported
}
function connection(x, options, socket) {
const { host, port } = socket || options
const error = Object.assign(
new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
{
code: x,
errno: x,
address: options.path || host
}, options.path ? {} : { port: port }
)
Error.captureStackTrace(error, connection)
return error
}
function postgres(x) {
const error = new PostgresError(x)
Error.captureStackTrace(error, postgres)
return error
}
function generic(code, message) {
const error = Object.assign(new Error(code + ': ' + message), { code })
Error.captureStackTrace(error, generic)
return error
}
/* c8 ignore next 10 */
function notSupported(x) {
const error = Object.assign(
new Error(x + ' (B) is not supported'),
{
code: 'MESSAGE_NOT_SUPPORTED',
name: x
}
)
Error.captureStackTrace(error, notSupported)
return error
}
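
A small sketch of the factories above; note that only server-sent errors become PostgresError, while generic() returns a plain Error tagged with a code:

import { Errors, PostgresError } from './errors.js'

const err = Errors.generic('57014', 'canceling statement due to user request')
console.log(err.code)                     // '57014'
console.log(err.message)                  // '57014: canceling statement due to user request'
console.log(err instanceof PostgresError) // false - generic errors are plain Errors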

567
node_modules/postgres/cf/src/index.js generated vendored Normal file

@@ -0,0 +1,567 @@
import { process } from '../polyfills.js'
import { os } from '../polyfills.js'
import { fs } from '../polyfills.js'
import {
mergeUserTypes,
inferType,
Parameter,
Identifier,
Builder,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab
} from './types.js'
import Connection from './connection.js'
import { Query, CLOSE } from './query.js'
import Queue from './queue.js'
import { Errors, PostgresError } from './errors.js'
import Subscribe from './subscribe.js'
import largeObject from './large.js'
Object.assign(Postgres, {
PostgresError,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab,
BigInt: {
to: 20,
from: [20],
parse: x => BigInt(x), // eslint-disable-line
serialize: x => x.toString()
}
})
export default Postgres
function Postgres(a, b) {
const options = parseOptions(a, b)
, subscribe = options.no_subscribe || Subscribe(Postgres, { ...options })
let ending = false
const queries = Queue()
, connecting = Queue()
, reserved = Queue()
, closed = Queue()
, ended = Queue()
, open = Queue()
, busy = Queue()
, full = Queue()
, queues = { connecting, reserved, closed, ended, open, busy, full }
const connections = [...Array(options.max)].map(() => Connection(options, queues, { onopen, onend, onclose }))
const sql = Sql(handler)
Object.assign(sql, {
get parameters() { return options.parameters },
largeObject: largeObject.bind(null, sql),
subscribe,
CLOSE,
END: CLOSE,
PostgresError,
options,
reserve,
listen,
begin,
close,
end
})
return sql
function Sql(handler) {
handler.debug = options.debug
Object.entries(options.types).reduce((acc, [name, type]) => {
acc[name] = (x) => new Parameter(x, type.to)
return acc
}, typed)
Object.assign(sql, {
types: typed,
typed,
unsafe,
notify,
array,
json,
file
})
return sql
function typed(value, type) {
return new Parameter(value, type)
}
function sql(strings, ...args) {
const query = strings && Array.isArray(strings.raw)
? new Query(strings, args, handler, cancel)
: typeof strings === 'string' && !args.length
? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
: new Builder(strings, args)
return query
}
function unsafe(string, args = [], options = {}) {
arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
const query = new Query([string], args, handler, cancel, {
prepare: false,
...options,
simple: 'simple' in options ? options.simple : args.length === 0
})
return query
}
function file(path, args = [], options = {}) {
arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
const query = new Query([], args, (query) => {
fs.readFile(path, 'utf8', (err, string) => {
if (err)
return query.reject(err)
query.strings = [string]
handler(query)
})
}, cancel, {
...options,
simple: 'simple' in options ? options.simple : args.length === 0
})
return query
}
}
async function listen(name, fn, onlisten) {
const listener = { fn, onlisten }
const sql = listen.sql || (listen.sql = Postgres({
...options,
max: 1,
idle_timeout: null,
max_lifetime: null,
fetch_types: false,
onclose() {
Object.entries(listen.channels).forEach(([name, { listeners }]) => {
delete listen.channels[name]
Promise.all(listeners.map(l => listen(name, l.fn, l.onlisten).catch(() => { /* noop */ })))
})
},
onnotify(c, x) {
c in listen.channels && listen.channels[c].listeners.forEach(l => l.fn(x))
}
}))
const channels = listen.channels || (listen.channels = {})
, exists = name in channels
if (exists) {
channels[name].listeners.push(listener)
const result = await channels[name].result
listener.onlisten && listener.onlisten()
return { state: result.state, unlisten }
}
channels[name] = { result: sql`listen ${
sql.unsafe('"' + name.replace(/"/g, '""') + '"')
}`, listeners: [listener] }
const result = await channels[name].result
listener.onlisten && listener.onlisten()
return { state: result.state, unlisten }
async function unlisten() {
if (name in channels === false)
return
channels[name].listeners = channels[name].listeners.filter(x => x !== listener)
if (channels[name].listeners.length)
return
delete channels[name]
return sql`unlisten ${
sql.unsafe('"' + name.replace(/"/g, '""') + '"')
}`
}
}
async function notify(channel, payload) {
return await sql`select pg_notify(${ channel }, ${ '' + payload })`
}
async function reserve() {
const queue = Queue()
const c = open.length
? open.shift()
: await new Promise((resolve, reject) => {
const query = { reserve: resolve, reject }
queries.push(query)
closed.length && connect(closed.shift(), query)
})
move(c, reserved)
c.reserved = () => queue.length
? c.execute(queue.shift())
: move(c, reserved)
c.reserved.release = true
const sql = Sql(handler)
sql.release = () => {
c.reserved = null
onopen(c)
}
return sql
function handler(q) {
c.queue === full
? queue.push(q)
: c.execute(q) || move(c, full)
}
}
async function begin(options, fn) {
!fn && (fn = options, options = '')
const queries = Queue()
let savepoints = 0
, connection
, prepare = null
try {
await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).execute()
return await Promise.race([
scope(connection, fn),
new Promise((_, reject) => connection.onclose = reject)
])
} catch (error) {
throw error
}
async function scope(c, fn, name) {
const sql = Sql(handler)
sql.savepoint = savepoint
sql.prepare = x => prepare = x.replace(/[^a-z0-9$-_. ]/gi, '')
let uncaughtError
, result
name && await sql`savepoint ${ sql(name) }`
try {
result = await new Promise((resolve, reject) => {
const x = fn(sql)
Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
})
if (uncaughtError)
throw uncaughtError
} catch (e) {
await (name
? sql`rollback to ${ sql(name) }`
: sql`rollback`
)
throw e instanceof PostgresError && e.code === '25P02' && uncaughtError || e
}
if (!name) {
prepare
? await sql`prepare transaction '${ sql.unsafe(prepare) }'`
: await sql`commit`
}
return result
function savepoint(name, fn) {
if (name && Array.isArray(name.raw))
return savepoint(sql => sql.apply(sql, arguments))
arguments.length === 1 && (fn = name, name = null)
return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
}
function handler(q) {
q.catch(e => uncaughtError || (uncaughtError = e))
c.queue === full
? queries.push(q)
: c.execute(q) || move(c, full)
}
}
function onexecute(c) {
connection = c
move(c, reserved)
c.reserved = () => queries.length
? c.execute(queries.shift())
: move(c, reserved)
}
}
function move(c, queue) {
c.queue.remove(c)
queue.push(c)
c.queue = queue
queue === open
? c.idleTimer.start()
: c.idleTimer.cancel()
return c
}
function json(x) {
return new Parameter(x, 3802)
}
function array(x, type) {
if (!Array.isArray(x))
return array(Array.from(arguments))
return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
}
function handler(query) {
if (ending)
return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
if (open.length)
return go(open.shift(), query)
if (closed.length)
return connect(closed.shift(), query)
busy.length
? go(busy.shift(), query)
: queries.push(query)
}
function go(c, query) {
return c.execute(query)
? move(c, busy)
: move(c, full)
}
function cancel(query) {
return new Promise((resolve, reject) => {
query.state
? query.active
? Connection(options).cancel(query.state, resolve, reject)
: query.cancelled = { resolve, reject }
: (
queries.remove(query),
query.cancelled = true,
query.reject(Errors.generic('57014', 'canceling statement due to user request')),
resolve()
)
})
}
async function end({ timeout = null } = {}) {
if (ending)
return ending
await 1
let timer
return ending = Promise.race([
new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
Promise.all(connections.map(c => c.end()).concat(
listen.sql ? listen.sql.end({ timeout: 0 }) : [],
subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
))
]).then(() => clearTimeout(timer))
}
async function close() {
await Promise.all(connections.map(c => c.end()))
}
async function destroy(resolve) {
await Promise.all(connections.map(c => c.terminate()))
while (queries.length)
queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
resolve()
}
function connect(c, query) {
move(c, connecting)
c.connect(query)
return c
}
function onend(c) {
move(c, ended)
}
function onopen(c) {
if (queries.length === 0)
return move(c, open)
let max = Math.ceil(queries.length / (connecting.length + 1))
, ready = true
while (ready && queries.length && max-- > 0) {
const query = queries.shift()
if (query.reserve)
return query.reserve(c)
ready = c.execute(query)
}
ready
? move(c, busy)
: move(c, full)
}
function onclose(c, e) {
move(c, closed)
c.reserved = null
c.onclose && (c.onclose(e), c.onclose = null)
options.onclose && options.onclose(c.id)
queries.length && connect(c, queries.shift())
}
}
function parseOptions(a, b) {
if (a && a.shared)
return a
const env = process.env // eslint-disable-line
, o = (!a || typeof a === 'string' ? b : a) || {}
, { url, multihost } = parseUrl(a)
, query = [...url.searchParams].reduce((a, [b, c]) => (a[b] = c, a), {})
, host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
, port = o.port || url.port || env.PGPORT || 5432
, user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
o.no_prepare && (o.prepare = false)
query.sslmode && (query.ssl = query.sslmode, delete query.sslmode)
'timeout' in o && (console.log('The timeout option is deprecated, use idle_timeout instead'), o.idle_timeout = o.timeout) // eslint-disable-line
query.sslrootcert === 'system' && (query.ssl = 'verify-full')
const ints = ['idle_timeout', 'connect_timeout', 'max_lifetime', 'max_pipeline', 'backoff', 'keep_alive']
const defaults = {
max : 10,
ssl : false,
idle_timeout : null,
connect_timeout : 30,
max_lifetime : max_lifetime,
max_pipeline : 100,
backoff : backoff,
keep_alive : 60,
prepare : true,
debug : false,
fetch_types : true,
publications : 'alltables',
target_session_attrs: null
}
return {
host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
user : user,
pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
...Object.entries(defaults).reduce(
(acc, [k, d]) => {
const value = k in o ? o[k] : k in query
? (query[k] === 'disable' || query[k] === 'false' ? false : query[k])
: env['PG' + k.toUpperCase()] || d
acc[k] = typeof value === 'string' && ints.includes(k)
? +value
: value
return acc
},
{}
),
connection : {
application_name: env.PGAPPNAME || 'postgres.js',
...o.connection,
...Object.entries(query).reduce((acc, [k, v]) => (k in defaults || (acc[k] = v), acc), {})
},
types : o.types || {},
target_session_attrs: tsa(o, url, env),
onnotice : o.onnotice,
onnotify : o.onnotify,
onclose : o.onclose,
onparameter : o.onparameter,
socket : o.socket,
transform : parseTransform(o.transform || { undefined: undefined }),
parameters : {},
shared : { retries: 0, typeArrayMap: {} },
...mergeUserTypes(o.types)
}
}
function tsa(o, url, env) {
const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
return x
throw new Error('target_session_attrs ' + x + ' is not supported')
}
function backoff(retries) {
return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
}
function max_lifetime() {
return 60 * (30 + Math.random() * 30)
}
function parseTransform(x) {
return {
undefined: x.undefined,
column: {
from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
to: x.column && x.column.to
},
value: {
from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
to: x.value && x.value.to
},
row: {
from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
to: x.row && x.row.to
}
}
}
function parseUrl(url) {
if (!url || typeof url !== 'string')
return { url: { searchParams: new Map() } }
let host = url
host = host.slice(host.indexOf('://') + 3).split(/[?/]/)[0]
host = decodeURIComponent(host.slice(host.indexOf('@') + 1))
const urlObj = new URL(url.replace(host, host.split(',')[0]))
return {
url: {
username: decodeURIComponent(urlObj.username),
password: decodeURIComponent(urlObj.password),
host: urlObj.host,
hostname: urlObj.hostname,
port: urlObj.port,
pathname: urlObj.pathname,
searchParams: urlObj.searchParams
},
multihost: host.indexOf(',') > -1 && host
}
}
function osUsername() {
try {
return os.userInfo().username // eslint-disable-line
} catch (_) {
return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
}
}
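
A usage sketch of the factory above, assuming a reachable database; the URL, table, and column names are illustrative. Options not passed fall back to the parseOptions defaults (max: 10, connect_timeout: 30, and so on):

import postgres from './index.js'

const sql = postgres('postgres://user:pass@localhost:5432/mydb', {
  max: 5,          // pool size, default 10
  idle_timeout: 20 // close idle connections after 20s, default null (never)
})

const [user] = await sql`select name from users where id = ${ 1 }`
await sql.end({ timeout: 5 }) // drain the pool, force-terminate after 5s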

70
node_modules/postgres/cf/src/large.js generated vendored Normal file

@@ -0,0 +1,70 @@
import Stream from 'node:stream'
export default function largeObject(sql, oid, mode = 0x00020000 | 0x00040000) {
return new Promise(async(resolve, reject) => {
await sql.begin(async sql => {
let finish
!oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
const lo = {
writable,
readable,
close : () => sql`select lo_close(${ fd })`.then(finish),
tell : () => sql`select lo_tell64(${ fd })`,
read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
write : (x) => sql`select lowrite(${ fd }, ${ x })`,
truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
size : () => sql`
select
lo_lseek64(${ fd }, location, 0) as position,
seek.size
from (
select
lo_lseek64($1, 0, 2) as size,
tell.location
from (select lo_tell64($1) as location) tell
) seek
`
}
resolve(lo)
return new Promise(async r => finish = r)
async function readable({
highWaterMark = 2048 * 8,
start = 0,
end = Infinity
} = {}) {
let max = end - start
start && await lo.seek(start)
return new Stream.Readable({
highWaterMark,
async read(size) {
const l = size > max ? size - max : size
max -= size
const [{ data }] = await lo.read(l)
this.push(data)
if (data.length < size)
this.push(null)
}
})
}
async function writable({
highWaterMark = 2048 * 8,
start = 0
} = {}) {
start && await lo.seek(start)
return new Stream.Writable({
highWaterMark,
write(chunk, encoding, callback) {
lo.write(chunk).then(() => callback(), callback)
}
})
}
}).catch(reject)
})
}
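
A sketch of the helper above, where `sql` is assumed to be a connected instance (largeObject is bound to it in index.js); omitting the oid creates a new large object inside the wrapping transaction:

const lo = await sql.largeObject()   // lo_creat + lo_open under the hood
await lo.write(Buffer.from('hello'))
await lo.seek(0)
const [{ data }] = await lo.read(5)  // loread returns rows of { data }
await lo.close()                     // lo_close, then the transaction finishes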

173
node_modules/postgres/cf/src/query.js generated vendored Normal file

@@ -0,0 +1,173 @@
const originCache = new Map()
, originStackCache = new Map()
, originError = Symbol('OriginError')
export const CLOSE = {}
export class Query extends Promise {
constructor(strings, args, handler, canceller, options = {}) {
let resolve
, reject
super((a, b) => {
resolve = a
reject = b
})
this.tagged = Array.isArray(strings.raw)
this.strings = strings
this.args = args
this.handler = handler
this.canceller = canceller
this.options = options
this.state = null
this.statement = null
this.resolve = x => (this.active = false, resolve(x))
this.reject = x => (this.active = false, reject(x))
this.active = false
this.cancelled = null
this.executed = false
this.signature = ''
this[originError] = this.handler.debug
? new Error()
: this.tagged && cachedError(this.strings)
}
get origin() {
return (this.handler.debug
? this[originError].stack
: this.tagged && originStackCache.has(this.strings)
? originStackCache.get(this.strings)
: originStackCache.set(this.strings, this[originError].stack).get(this.strings)
) || ''
}
static get [Symbol.species]() {
return Promise
}
cancel() {
return this.canceller && (this.canceller(this), this.canceller = null)
}
simple() {
this.options.simple = true
this.options.prepare = false
return this
}
async readable() {
this.simple()
this.streaming = true
return this
}
async writable() {
this.simple()
this.streaming = true
return this
}
cursor(rows = 1, fn) {
this.options.simple = false
if (typeof rows === 'function') {
fn = rows
rows = 1
}
this.cursorRows = rows
if (typeof fn === 'function')
return (this.cursorFn = fn, this)
let prev
return {
[Symbol.asyncIterator]: () => ({
next: () => {
if (this.executed && !this.active)
return { done: true }
prev && prev()
const promise = new Promise((resolve, reject) => {
this.cursorFn = value => {
resolve({ value, done: false })
return new Promise(r => prev = r)
}
this.resolve = () => (this.active = false, resolve({ done: true }))
this.reject = x => (this.active = false, reject(x))
})
this.execute()
return promise
},
return() {
prev && prev(CLOSE)
return { done: true }
}
})
}
}
describe() {
this.options.simple = false
this.onlyDescribe = this.options.prepare = true
return this
}
stream() {
throw new Error('.stream has been renamed to .forEach')
}
forEach(fn) {
this.forEachFn = fn
this.handle()
return this
}
raw() {
this.isRaw = true
return this
}
values() {
this.isRaw = 'values'
return this
}
async handle() {
!this.executed && (this.executed = true) && await 1 && this.handler(this)
}
execute() {
this.handle()
return this
}
then() {
this.handle()
return super.then.apply(this, arguments)
}
catch() {
this.handle()
return super.catch.apply(this, arguments)
}
finally() {
this.handle()
return super.finally.apply(this, arguments)
}
}
function cachedError(xs) {
if (originCache.has(xs))
return originCache.get(xs)
const x = Error.stackTraceLimit
Error.stackTraceLimit = 4
originCache.set(xs, new Error())
Error.stackTraceLimit = x
return originCache.get(xs)
}
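
A consumption sketch for the cursor branch above: batches arrive as arrays of up to `rows` rows, and per the CLOSE export (handled in the suppressed connection.js), returning sql.CLOSE from the callback form stops the cursor early. The table and column names are illustrative:

// async-iterator form
for await (const rows of sql`select * from big_table`.cursor(100)) {
  // rows: up to 100 rows per batch
}

// callback form - return sql.CLOSE (aliased as END) to stop early
await sql`select * from big_table`.cursor(100, rows => {
  if (rows.some(r => r.flagged)) return sql.CLOSE
})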

31
node_modules/postgres/cf/src/queue.js generated vendored Normal file

@@ -0,0 +1,31 @@
export default Queue
function Queue(initial = []) {
let xs = initial.slice()
let index = 0
return {
get length() {
return xs.length - index
},
remove: (x) => {
const index = xs.indexOf(x)
return index === -1
? null
: (xs.splice(index, 1), x)
},
push: (x) => (xs.push(x), x),
shift: () => {
const out = xs[index++]
if (index === xs.length) {
index = 0
xs = []
} else {
xs[index - 1] = undefined
}
return out
}
}
}
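
The queue avoids Array#shift's O(n) re-indexing by advancing a read index instead; a quick sketch of the behaviour:

import Queue from './queue.js'

const q = Queue([1, 2])
q.push(3)
console.log(q.shift())  // 1 - read by index, no splice
console.log(q.length)   // 2 - the getter subtracts the consumed slots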

16
node_modules/postgres/cf/src/result.js generated vendored Normal file

@@ -0,0 +1,16 @@
export default class Result extends Array {
constructor() {
super()
Object.defineProperties(this, {
count: { value: null, writable: true },
state: { value: null, writable: true },
command: { value: null, writable: true },
columns: { value: null, writable: true },
statement: { value: null, writable: true }
})
}
static get [Symbol.species]() {
return Array
}
}
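
Because Result extends Array with non-enumerable metadata, query results behave as plain row arrays while still exposing command info; a sketch, assuming a connected `sql` instance:

const rows = await sql`select 1 as x`
console.log(rows[0].x)    // 1
console.log(rows.command) // 'SELECT'
console.log(rows.count)   // 1 - non-enumerable, so absent from JSON.stringify(rows)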

278
node_modules/postgres/cf/src/subscribe.js generated vendored Normal file

@@ -0,0 +1,278 @@
import { Buffer } from 'node:buffer'
const noop = () => { /* noop */ }
export default function Subscribe(postgres, options) {
const subscribers = new Map()
, slot = 'postgresjs_' + Math.random().toString(36).slice(2)
, state = {}
let connection
, stream
, ended = false
const sql = subscribe.sql = postgres({
...options,
transform: { column: {}, value: {}, row: {} },
max: 1,
fetch_types: false,
idle_timeout: null,
max_lifetime: null,
connection: {
...options.connection,
replication: 'database'
},
onclose: async function() {
if (ended)
return
stream = null
state.pid = state.secret = undefined
connected(await init(sql, slot, options.publications))
subscribers.forEach(event => event.forEach(({ onsubscribe }) => onsubscribe()))
},
no_subscribe: true
})
const end = sql.end
, close = sql.close
sql.end = async() => {
ended = true
stream && (await new Promise(r => (stream.once('close', r), stream.end())))
return end()
}
sql.close = async() => {
stream && (await new Promise(r => (stream.once('close', r), stream.end())))
return close()
}
return subscribe
async function subscribe(event, fn, onsubscribe = noop, onerror = noop) {
event = parseEvent(event)
if (!connection)
connection = init(sql, slot, options.publications)
const subscriber = { fn, onsubscribe }
const fns = subscribers.has(event)
? subscribers.get(event).add(subscriber)
: subscribers.set(event, new Set([subscriber])).get(event)
const unsubscribe = () => {
fns.delete(subscriber)
fns.size === 0 && subscribers.delete(event)
}
return connection.then(x => {
connected(x)
onsubscribe()
stream && stream.on('error', onerror)
return { unsubscribe, state, sql }
})
}
function connected(x) {
stream = x.stream
state.pid = x.state.pid
state.secret = x.state.secret
}
async function init(sql, slot, publications) {
if (!publications)
throw new Error('Missing publication names')
const xs = await sql.unsafe(
`CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
)
const [x] = xs
const stream = await sql.unsafe(
`START_REPLICATION SLOT ${ slot } LOGICAL ${
x.consistent_point
} (proto_version '1', publication_names '${ publications }')`
).writable()
const state = {
lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex')))
}
stream.on('data', data)
stream.on('error', error)
stream.on('close', sql.close)
return { stream, state: xs.state }
function error(e) {
console.error('Unexpected error during logical streaming - reconnecting', e) // eslint-disable-line
}
function data(x) {
if (x[0] === 0x77) {
parse(x.subarray(25), state, sql.options.parsers, handle, options.transform)
} else if (x[0] === 0x6b && x[17]) {
state.lsn = x.subarray(1, 9)
pong()
}
}
function handle(a, b) {
const path = b.relation.schema + '.' + b.relation.table
call('*', a, b)
call('*:' + path, a, b)
b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
call(b.command, a, b)
call(b.command + ':' + path, a, b)
b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
}
function pong() {
const x = Buffer.alloc(34)
x[0] = 'r'.charCodeAt(0)
x.fill(state.lsn, 1)
x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25)
stream.write(x)
}
}
function call(x, a, b) {
subscribers.has(x) && subscribers.get(x).forEach(({ fn }) => fn(a, b, x))
}
}
function Time(x) {
return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000)))
}
function parse(x, state, parsers, handle, transform) {
const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
Object.entries({
R: x => { // Relation
let i = 1
const r = state[x.readUInt32BE(i)] = {
schema: x.toString('utf8', i += 4, i = x.indexOf(0, i)) || 'pg_catalog',
table: x.toString('utf8', i + 1, i = x.indexOf(0, i + 1)),
columns: Array(x.readUInt16BE(i += 2)),
keys: []
}
i += 2
let columnIndex = 0
, column
while (i < x.length) {
column = r.columns[columnIndex++] = {
key: x[i++],
name: transform.column.from
? transform.column.from(x.toString('utf8', i, i = x.indexOf(0, i)))
: x.toString('utf8', i, i = x.indexOf(0, i)),
type: x.readUInt32BE(i += 1),
parser: parsers[x.readUInt32BE(i)],
atttypmod: x.readUInt32BE(i += 4)
}
column.key && r.keys.push(column)
i += 4
}
},
Y: () => { /* noop */ }, // Type
O: () => { /* noop */ }, // Origin
B: x => { // Begin
state.date = Time(x.readBigInt64BE(9))
state.lsn = x.subarray(1, 9)
},
I: x => { // Insert
let i = 1
const relation = state[x.readUInt32BE(i)]
const { row } = tuples(x, relation.columns, i += 7, transform)
handle(row, {
command: 'insert',
relation
})
},
D: x => { // Delete
let i = 1
const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
handle(key || x[i] === 79
? tuples(x, relation.columns, i += 3, transform).row
: null
, {
command: 'delete',
relation,
key
})
},
U: x => { // Update
let i = 1
const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
const xs = key || x[i] === 79
? tuples(x, relation.columns, i += 3, transform)
: null
xs && (i = xs.i)
const { row } = tuples(x, relation.columns, i + 3, transform)
handle(row, {
command: 'update',
relation,
key,
old: xs && xs.row
})
},
T: () => { /* noop */ }, // Truncate
C: () => { /* noop */ } // Commit
}).reduce(char, {})[x[0]](x)
}
function tuples(x, columns, xi, transform) {
let type
, column
, value
const row = transform.raw ? new Array(columns.length) : {}
for (let i = 0; i < columns.length; i++) {
type = x[xi++]
column = columns[i]
value = type === 110 // n
? null
: type === 117 // u
? undefined
: column.parser === undefined
? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
: column.parser.array === true
? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
: column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
transform.raw
? (row[i] = transform.raw === true
? value
: transform.value.from ? transform.value.from(value, column) : value)
: (row[column.name] = transform.value.from
? transform.value.from(value, column)
: value
)
}
return { i: xi, row: transform.row.from ? transform.row.from(row) : row }
}
function parseEvent(x) {
const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i) || []
if (!xs)
throw new Error('Malformed subscribe pattern: ' + x)
const [, command, path, key] = xs
return (command || '*')
+ (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '')
+ (key ? '=' + key : '')
}
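
A subscription sketch, assuming wal_level=logical on the server and a publication matching the `publications` option (default 'alltables'); the table name is illustrative. Patterns are parsed by parseEvent above, so a bare table name gets the public. schema prefix:

const { unsubscribe, state } = await sql.subscribe(
  'insert:users', // normalized to 'insert:public.users'
  (row, { command, relation }) => {
    // row is the inserted row; relation describes schema, table and columns
  },
  () => { /* onsubscribe: replication stream established */ }
)
// later
unsubscribe()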

368
node_modules/postgres/cf/src/types.js generated vendored Normal file

@@ -0,0 +1,368 @@
import { Buffer } from 'node:buffer'
import { Query } from './query.js'
import { Errors } from './errors.js'
export const types = {
string: {
to: 25,
from: null, // defaults to string
serialize: x => '' + x
},
number: {
to: 0,
from: [21, 23, 26, 700, 701],
serialize: x => '' + x,
parse: x => +x
},
json: {
to: 114,
from: [114, 3802],
serialize: x => JSON.stringify(x),
parse: x => JSON.parse(x)
},
boolean: {
to: 16,
from: 16,
serialize: x => x === true ? 't' : 'f',
parse: x => x === 't'
},
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
parse: x => new Date(x)
},
bytea: {
to: 17,
from: 17,
serialize: x => '\\x' + Buffer.from(x).toString('hex'),
parse: x => Buffer.from(x.slice(2), 'hex')
}
}
class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }}
export class Identifier extends NotTagged {
constructor(value) {
super()
this.value = escapeIdentifier(value)
}
}
export class Parameter extends NotTagged {
constructor(value, type, array) {
super()
this.value = value
this.type = type
this.array = array
}
}
export class Builder extends NotTagged {
constructor(first, rest) {
super()
this.first = first
this.rest = rest
}
build(before, parameters, types, options) {
const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
return keyword.i === -1
? escapeIdentifiers(this.first, options)
: keyword.fn(this.first, this.rest, parameters, types, options)
}
}
export function handleValue(x, parameters, types, options) {
let value = x instanceof Parameter ? x.value : x
if (value === undefined) {
x instanceof Parameter
? x.value = options.transform.undefined
: value = x = options.transform.undefined
if (value === undefined)
throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
}
return '$' + (types.push(
x instanceof Parameter
? (parameters.push(x.value), x.array
? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
: x.type
)
: (parameters.push(x), inferType(x))
))
}
const defaultHandlers = typeHandlers(types)
export function stringify(q, string, value, parameters, types, options) { // eslint-disable-line
for (let i = 1; i < q.strings.length; i++) {
string += (stringifyValue(string, value, parameters, types, options)) + q.strings[i]
value = q.args[i]
}
return string
}
function stringifyValue(string, value, parameters, types, o) {
return (
value instanceof Builder ? value.build(string, parameters, types, o) :
value instanceof Query ? fragment(value, parameters, types, o) :
value instanceof Identifier ? value.value :
value && value[0] instanceof Query ? value.reduce((acc, x) => acc + ' ' + fragment(x, parameters, types, o), '') :
handleValue(value, parameters, types, o)
)
}
function fragment(q, parameters, types, options) {
q.fragment = true
return stringify(q, q.strings[0], q.args[0], parameters, types, options)
}
function valuesBuilder(first, parameters, types, columns, options) {
return first.map(row =>
'(' + columns.map(column =>
stringifyValue('values', row[column], parameters, types, options)
).join(',') + ')'
).join(',')
}
function values(first, rest, parameters, types, options) {
const multi = Array.isArray(first[0])
const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
return valuesBuilder(multi ? first : [first], parameters, types, columns, options)
}
function select(first, rest, parameters, types, options) {
typeof first === 'string' && (first = [first].concat(rest))
if (Array.isArray(first))
return escapeIdentifiers(first, options)
let value
const columns = rest.length ? rest.flat() : Object.keys(first)
return columns.map(x => {
value = first[x]
return (
value instanceof Query ? fragment(value, parameters, types, options) :
value instanceof Identifier ? value.value :
handleValue(value, parameters, types, options)
) + ' as ' + escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x)
}).join(',')
}
const builders = Object.entries({
values,
in: (...xs) => {
const x = values(...xs)
return x === '()' ? '(null)' : x
},
select,
as: select,
returning: select,
'\\(': select,
update(first, rest, parameters, types, options) {
return (rest.length ? rest.flat() : Object.keys(first)).map(x =>
escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x) +
'=' + stringifyValue('values', first[x], parameters, types, options)
)
},
insert(first, rest, parameters, types, options) {
const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first)
return '(' + escapeIdentifiers(columns, options) + ')values' +
valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, columns, options)
}
}).map(([x, fn]) => ([new RegExp('((?:^|[\\s(])' + x + '(?:$|[\\s(]))(?![\\s\\S]*\\1)', 'i'), fn]))
function notTagged() {
throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
}
export const serializers = defaultHandlers.serializers
export const parsers = defaultHandlers.parsers
export const END = {}
function firstIsString(x) {
if (Array.isArray(x))
return firstIsString(x[0])
return typeof x === 'string' ? 1009 : 0
}
export const mergeUserTypes = function(types) {
const user = typeHandlers(types || {})
return {
serializers: Object.assign({}, serializers, user.serializers),
parsers: Object.assign({}, parsers, user.parsers)
}
}
function typeHandlers(types) {
return Object.keys(types).reduce((acc, k) => {
types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
if (types[k].serialize) {
acc.serializers[types[k].to] = types[k].serialize
types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize)
}
return acc
}, { parsers: {}, serializers: {} })
}
function escapeIdentifiers(xs, { transform: { column } }) {
return xs.map(x => escapeIdentifier(column.to ? column.to(x) : x)).join(',')
}
export const escapeIdentifier = function escape(str) {
return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
}
export const inferType = function inferType(x) {
return (
x instanceof Parameter ? x.type :
x instanceof Date ? 1184 :
x instanceof Uint8Array ? 17 :
(x === true || x === false) ? 16 :
typeof x === 'bigint' ? 20 :
Array.isArray(x) ? inferType(x[0]) :
0
)
}
const escapeBackslash = /\\/g
const escapeQuote = /"/g
function arrayEscape(x) {
return x
.replace(escapeBackslash, '\\\\')
.replace(escapeQuote, '\\"')
}
export const arraySerializer = function arraySerializer(xs, serializer, options, typarray) {
if (Array.isArray(xs) === false)
return xs
if (!xs.length)
return '{}'
const first = xs[0]
// Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
const delimiter = typarray === 1020 ? ';' : ','
if (Array.isArray(first) && !first.type)
return '{' + xs.map(x => arraySerializer(x, serializer, options, typarray)).join(delimiter) + '}'
return '{' + xs.map(x => {
if (x === undefined) {
x = options.transform.undefined
if (x === undefined)
throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
}
return x === null
? 'null'
: '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
}).join(delimiter) + '}'
}
const arrayParserState = {
i: 0,
char: null,
str: '',
quoted: false,
last: 0
}
export const arrayParser = function arrayParser(x, parser, typarray) {
arrayParserState.i = arrayParserState.last = 0
return arrayParserLoop(arrayParserState, x, parser, typarray)
}
function arrayParserLoop(s, x, parser, typarray) {
const xs = []
// Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
const delimiter = typarray === 1020 ? ';' : ','
for (; s.i < x.length; s.i++) {
s.char = x[s.i]
if (s.quoted) {
if (s.char === '\\') {
s.str += x[++s.i]
} else if (s.char === '"') {
xs.push(parser ? parser(s.str) : s.str)
s.str = ''
s.quoted = x[s.i + 1] === '"'
s.last = s.i + 2
} else {
s.str += s.char
}
} else if (s.char === '"') {
s.quoted = true
} else if (s.char === '{') {
s.last = ++s.i
xs.push(arrayParserLoop(s, x, parser, typarray))
} else if (s.char === '}') {
s.quoted = false
s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
s.last = s.i + 1
break
} else if (s.char === delimiter && s.p !== '}' && s.p !== '"') {
xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
s.last = s.i + 1
}
s.p = s.char
}
s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
return xs
}
export const toCamel = x => {
let str = x[0]
for (let i = 1; i < x.length; i++)
str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
return str
}
export const toPascal = x => {
let str = x[0].toUpperCase()
for (let i = 1; i < x.length; i++)
str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
return str
}
export const toKebab = x => x.replace(/_/g, '-')
export const fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
export const fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
export const fromKebab = x => x.replace(/-/g, '_')
function createJsonTransform(fn) {
return function jsonTransform(x, column) {
return typeof x === 'object' && x !== null && (column.type === 114 || column.type === 3802)
? Array.isArray(x)
? x.map(x => jsonTransform(x, column))
: Object.entries(x).reduce((acc, [k, v]) => Object.assign(acc, { [fn(k)]: jsonTransform(v, column) }), {})
: x
}
}
toCamel.column = { from: toCamel }
toCamel.value = { from: createJsonTransform(toCamel) }
fromCamel.column = { to: fromCamel }
export const camel = { ...toCamel }
camel.column.to = fromCamel
toPascal.column = { from: toPascal }
toPascal.value = { from: createJsonTransform(toPascal) }
fromPascal.column = { to: fromPascal }
export const pascal = { ...toPascal }
pascal.column.to = fromPascal
toKebab.column = { from: toKebab }
toKebab.value = { from: createJsonTransform(toKebab) }
fromKebab.column = { to: fromKebab }
export const kebab = { ...toKebab }
kebab.column.to = fromKebab
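
A sketch of the name transforms above; `camel` bundles both directions, so snake_case columns surface as camelCase and parameters map back:

import { toCamel, fromCamel } from './types.js'

console.log(toCamel('created_at'))  // 'createdAt'
console.log(fromCamel('createdAt')) // 'created_at'

// wired up via the transform option, e.g.:
// const sql = postgres({ transform: postgres.camel })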

1
node_modules/postgres/cjs/package.json generated vendored Normal file

@@ -0,0 +1 @@
{"type":"commonjs"}

78
node_modules/postgres/cjs/src/bytes.js generated vendored Normal file

@@ -0,0 +1,78 @@
const size = 256
let buffer = Buffer.allocUnsafe(size)
const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
const v = x.charCodeAt(0)
acc[x] = () => {
buffer[0] = v
b.i = 5
return b
}
return acc
}, {})
const b = Object.assign(reset, messages, {
N: String.fromCharCode(0),
i: 0,
inc(x) {
b.i += x
return b
},
str(x) {
const length = Buffer.byteLength(x)
fit(length)
b.i += buffer.write(x, b.i, length, 'utf8')
return b
},
i16(x) {
fit(2)
buffer.writeUInt16BE(x, b.i)
b.i += 2
return b
},
i32(x, i) {
if (i || i === 0) {
buffer.writeUInt32BE(x, i)
return b
}
fit(4)
buffer.writeUInt32BE(x, b.i)
b.i += 4
return b
},
z(x) {
fit(x)
buffer.fill(0, b.i, b.i + x)
b.i += x
return b
},
raw(x) {
buffer = Buffer.concat([buffer.subarray(0, b.i), x])
b.i = buffer.length
return b
},
end(at = 1) {
buffer.writeUInt32BE(b.i - at, at)
const out = buffer.subarray(0, b.i)
b.i = 0
buffer = Buffer.allocUnsafe(size)
return out
}
})
module.exports = b
function fit(x) {
if (buffer.length - b.i < x) {
const prev = buffer
, length = prev.length
buffer = Buffer.allocUnsafe(length + (length >> 1) + x)
prev.copy(buffer)
}
}
function reset() {
b.i = 0
return b
}

1042
node_modules/postgres/cjs/src/connection.js generated vendored Normal file

File diff suppressed because it is too large

53
node_modules/postgres/cjs/src/errors.js generated vendored Normal file

@@ -0,0 +1,53 @@
const PostgresError = module.exports.PostgresError = class PostgresError extends Error {
constructor(x) {
super(x.message)
this.name = this.constructor.name
Object.assign(this, x)
}
}
const Errors = module.exports.Errors = {
connection,
postgres,
generic,
notSupported
}
function connection(x, options, socket) {
const { host, port } = socket || options
const error = Object.assign(
new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
{
code: x,
errno: x,
address: options.path || host
}, options.path ? {} : { port: port }
)
Error.captureStackTrace(error, connection)
return error
}
function postgres(x) {
const error = new PostgresError(x)
Error.captureStackTrace(error, postgres)
return error
}
function generic(code, message) {
const error = Object.assign(new Error(code + ': ' + message), { code })
Error.captureStackTrace(error, generic)
return error
}
/* c8 ignore next 10 */
function notSupported(x) {
const error = Object.assign(
new Error(x + ' (B) is not supported'),
{
code: 'MESSAGE_NOT_SUPPORTED',
name: x
}
)
Error.captureStackTrace(error, notSupported)
return error
}

566
node_modules/postgres/cjs/src/index.js generated vendored Normal file

@@ -0,0 +1,566 @@
const os = require('os')
const fs = require('fs')
const {
mergeUserTypes,
inferType,
Parameter,
Identifier,
Builder,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab
} = require('./types.js')
const Connection = require('./connection.js')
const { Query, CLOSE } = require('./query.js')
const Queue = require('./queue.js')
const { Errors, PostgresError } = require('./errors.js')
const Subscribe = require('./subscribe.js')
const largeObject = require('./large.js')
Object.assign(Postgres, {
PostgresError,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab,
BigInt: {
to: 20,
from: [20],
parse: x => BigInt(x), // eslint-disable-line
serialize: x => x.toString()
}
})
module.exports = Postgres
function Postgres(a, b) {
const options = parseOptions(a, b)
, subscribe = options.no_subscribe || Subscribe(Postgres, { ...options })
let ending = false
const queries = Queue()
, connecting = Queue()
, reserved = Queue()
, closed = Queue()
, ended = Queue()
, open = Queue()
, busy = Queue()
, full = Queue()
, queues = { connecting, reserved, closed, ended, open, busy, full }
const connections = [...Array(options.max)].map(() => Connection(options, queues, { onopen, onend, onclose }))
const sql = Sql(handler)
Object.assign(sql, {
get parameters() { return options.parameters },
largeObject: largeObject.bind(null, sql),
subscribe,
CLOSE,
END: CLOSE,
PostgresError,
options,
reserve,
listen,
begin,
close,
end
})
return sql
function Sql(handler) {
handler.debug = options.debug
Object.entries(options.types).reduce((acc, [name, type]) => {
acc[name] = (x) => new Parameter(x, type.to)
return acc
}, typed)
Object.assign(sql, {
types: typed,
typed,
unsafe,
notify,
array,
json,
file
})
return sql
function typed(value, type) {
return new Parameter(value, type)
}
function sql(strings, ...args) {
const query = strings && Array.isArray(strings.raw)
? new Query(strings, args, handler, cancel)
: typeof strings === 'string' && !args.length
? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
: new Builder(strings, args)
return query
}
function unsafe(string, args = [], options = {}) {
arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
const query = new Query([string], args, handler, cancel, {
prepare: false,
...options,
simple: 'simple' in options ? options.simple : args.length === 0
})
return query
}
function file(path, args = [], options = {}) {
arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
const query = new Query([], args, (query) => {
fs.readFile(path, 'utf8', (err, string) => {
if (err)
return query.reject(err)
query.strings = [string]
handler(query)
})
}, cancel, {
...options,
simple: 'simple' in options ? options.simple : args.length === 0
})
return query
}
}
async function listen(name, fn, onlisten) {
const listener = { fn, onlisten }
const sql = listen.sql || (listen.sql = Postgres({
...options,
max: 1,
idle_timeout: null,
max_lifetime: null,
fetch_types: false,
onclose() {
Object.entries(listen.channels).forEach(([name, { listeners }]) => {
delete listen.channels[name]
Promise.all(listeners.map(l => listen(name, l.fn, l.onlisten).catch(() => { /* noop */ })))
})
},
onnotify(c, x) {
c in listen.channels && listen.channels[c].listeners.forEach(l => l.fn(x))
}
}))
const channels = listen.channels || (listen.channels = {})
, exists = name in channels
if (exists) {
channels[name].listeners.push(listener)
const result = await channels[name].result
listener.onlisten && listener.onlisten()
return { state: result.state, unlisten }
}
channels[name] = { result: sql`listen ${
sql.unsafe('"' + name.replace(/"/g, '""') + '"')
}`, listeners: [listener] }
const result = await channels[name].result
listener.onlisten && listener.onlisten()
return { state: result.state, unlisten }
async function unlisten() {
if (name in channels === false)
return
channels[name].listeners = channels[name].listeners.filter(x => x !== listener)
if (channels[name].listeners.length)
return
delete channels[name]
return sql`unlisten ${
sql.unsafe('"' + name.replace(/"/g, '""') + '"')
}`
}
}
async function notify(channel, payload) {
return await sql`select pg_notify(${ channel }, ${ '' + payload })`
}
async function reserve() {
const queue = Queue()
const c = open.length
? open.shift()
: await new Promise((resolve, reject) => {
const query = { reserve: resolve, reject }
queries.push(query)
closed.length && connect(closed.shift(), query)
})
move(c, reserved)
c.reserved = () => queue.length
? c.execute(queue.shift())
: move(c, reserved)
c.reserved.release = true
const sql = Sql(handler)
sql.release = () => {
c.reserved = null
onopen(c)
}
return sql
function handler(q) {
c.queue === full
? queue.push(q)
: c.execute(q) || move(c, full)
}
}
async function begin(options, fn) {
!fn && (fn = options, options = '')
const queries = Queue()
let savepoints = 0
, connection
, prepare = null
try {
await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).execute()
return await Promise.race([
scope(connection, fn),
new Promise((_, reject) => connection.onclose = reject)
])
} catch (error) {
throw error
}
async function scope(c, fn, name) {
const sql = Sql(handler)
sql.savepoint = savepoint
sql.prepare = x => prepare = x.replace(/[^a-z0-9$-_. ]/gi, '')
let uncaughtError
, result
name && await sql`savepoint ${ sql(name) }`
try {
result = await new Promise((resolve, reject) => {
const x = fn(sql)
Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
})
if (uncaughtError)
throw uncaughtError
} catch (e) {
await (name
? sql`rollback to ${ sql(name) }`
: sql`rollback`
)
throw e instanceof PostgresError && e.code === '25P02' && uncaughtError || e
}
if (!name) {
prepare
? await sql`prepare transaction '${ sql.unsafe(prepare) }'`
: await sql`commit`
}
return result
function savepoint(name, fn) {
if (name && Array.isArray(name.raw))
return savepoint(sql => sql.apply(sql, arguments))
arguments.length === 1 && (fn = name, name = null)
return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
}
function handler(q) {
q.catch(e => uncaughtError || (uncaughtError = e))
c.queue === full
? queries.push(q)
: c.execute(q) || move(c, full)
}
}
function onexecute(c) {
connection = c
move(c, reserved)
c.reserved = () => queries.length
? c.execute(queries.shift())
: move(c, reserved)
}
}
function move(c, queue) {
c.queue.remove(c)
queue.push(c)
c.queue = queue
queue === open
? c.idleTimer.start()
: c.idleTimer.cancel()
return c
}
function json(x) {
return new Parameter(x, 3802)
}
function array(x, type) {
if (!Array.isArray(x))
return array(Array.from(arguments))
return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
}
function handler(query) {
if (ending)
return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
if (open.length)
return go(open.shift(), query)
if (closed.length)
return connect(closed.shift(), query)
busy.length
? go(busy.shift(), query)
: queries.push(query)
}
function go(c, query) {
return c.execute(query)
? move(c, busy)
: move(c, full)
}
function cancel(query) {
return new Promise((resolve, reject) => {
query.state
? query.active
? Connection(options).cancel(query.state, resolve, reject)
: query.cancelled = { resolve, reject }
: (
queries.remove(query),
query.cancelled = true,
query.reject(Errors.generic('57014', 'canceling statement due to user request')),
resolve()
)
})
}
async function end({ timeout = null } = {}) {
if (ending)
return ending
await 1
let timer
return ending = Promise.race([
new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
Promise.all(connections.map(c => c.end()).concat(
listen.sql ? listen.sql.end({ timeout: 0 }) : [],
subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
))
]).then(() => clearTimeout(timer))
}
async function close() {
await Promise.all(connections.map(c => c.end()))
}
async function destroy(resolve) {
await Promise.all(connections.map(c => c.terminate()))
while (queries.length)
queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
resolve()
}
function connect(c, query) {
move(c, connecting)
c.connect(query)
return c
}
function onend(c) {
move(c, ended)
}
function onopen(c) {
if (queries.length === 0)
return move(c, open)
let max = Math.ceil(queries.length / (connecting.length + 1))
, ready = true
while (ready && queries.length && max-- > 0) {
const query = queries.shift()
if (query.reserve)
return query.reserve(c)
ready = c.execute(query)
}
ready
? move(c, busy)
: move(c, full)
}
function onclose(c, e) {
move(c, closed)
c.reserved = null
c.onclose && (c.onclose(e), c.onclose = null)
options.onclose && options.onclose(c.id)
queries.length && connect(c, queries.shift())
}
}
function parseOptions(a, b) {
if (a && a.shared)
return a
const env = process.env // eslint-disable-line
, o = (!a || typeof a === 'string' ? b : a) || {}
, { url, multihost } = parseUrl(a)
, query = [...url.searchParams].reduce((a, [b, c]) => (a[b] = c, a), {})
, host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
, port = o.port || url.port || env.PGPORT || 5432
, user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
o.no_prepare && (o.prepare = false)
query.sslmode && (query.ssl = query.sslmode, delete query.sslmode)
'timeout' in o && (console.log('The timeout option is deprecated, use idle_timeout instead'), o.idle_timeout = o.timeout) // eslint-disable-line
query.sslrootcert === 'system' && (query.ssl = 'verify-full')
const ints = ['idle_timeout', 'connect_timeout', 'max_lifetime', 'max_pipeline', 'backoff', 'keep_alive']
const defaults = {
max : 10,
ssl : false,
idle_timeout : null,
connect_timeout : 30,
max_lifetime : max_lifetime,
max_pipeline : 100,
backoff : backoff,
keep_alive : 60,
prepare : true,
debug : false,
fetch_types : true,
publications : 'alltables',
target_session_attrs: null
}
return {
host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
user : user,
pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
...Object.entries(defaults).reduce(
(acc, [k, d]) => {
const value = k in o ? o[k] : k in query
? (query[k] === 'disable' || query[k] === 'false' ? false : query[k])
: env['PG' + k.toUpperCase()] || d
acc[k] = typeof value === 'string' && ints.includes(k)
? +value
: value
return acc
},
{}
),
connection : {
application_name: env.PGAPPNAME || 'postgres.js',
...o.connection,
...Object.entries(query).reduce((acc, [k, v]) => (k in defaults || (acc[k] = v), acc), {})
},
types : o.types || {},
target_session_attrs: tsa(o, url, env),
onnotice : o.onnotice,
onnotify : o.onnotify,
onclose : o.onclose,
onparameter : o.onparameter,
socket : o.socket,
transform : parseTransform(o.transform || { undefined: undefined }),
parameters : {},
shared : { retries: 0, typeArrayMap: {} },
...mergeUserTypes(o.types)
}
}
function tsa(o, url, env) {
const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
return x
throw new Error('target_session_attrs ' + x + ' is not supported')
}
function backoff(retries) {
return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
}
function max_lifetime() {
return 60 * (30 + Math.random() * 30)
}
function parseTransform(x) {
return {
undefined: x.undefined,
column: {
from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
to: x.column && x.column.to
},
value: {
from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
to: x.value && x.value.to
},
row: {
from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
to: x.row && x.row.to
}
}
}
function parseUrl(url) {
if (!url || typeof url !== 'string')
return { url: { searchParams: new Map() } }
let host = url
host = host.slice(host.indexOf('://') + 3).split(/[?/]/)[0]
host = decodeURIComponent(host.slice(host.indexOf('@') + 1))
const urlObj = new URL(url.replace(host, host.split(',')[0]))
return {
url: {
username: decodeURIComponent(urlObj.username),
password: decodeURIComponent(urlObj.password),
host: urlObj.host,
hostname: urlObj.hostname,
port: urlObj.port,
pathname: urlObj.pathname,
searchParams: urlObj.searchParams
},
multihost: host.indexOf(',') > -1 && host
}
}
function osUsername() {
try {
return os.userInfo().username // eslint-disable-line
} catch (_) {
return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
}
}
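A minimal usage sketch of the option parsing above (hostnames and credentials are hypothetical): a connection URL and an explicit options object resolve to the same configuration. Query-string parameters are merged against PG* environment variables and the defaults table, and keys listed in `ints` are coerced to numbers.

const postgres = require('postgres')

// URL form: idle_timeout arrives as a query-string value and is coerced with +
const sql = postgres('postgres://alice:secret@db.example.com:5432/mydb?idle_timeout=20')

// Equivalent explicit form. host may also be 'h1:5432,h2:5433' for multihost
// failover, which parseOptions splits into parallel host/port arrays.
const sql2 = postgres({
  host: 'db.example.com',
  port: 5432,
  user: 'alice',
  pass: 'secret',
  database: 'mydb',
  idle_timeout: 20
})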

70
node_modules/postgres/cjs/src/large.js generated vendored Normal file

@@ -0,0 +1,70 @@
const Stream = require('stream')
module.exports = largeObject
function largeObject(sql, oid, mode = 0x00020000 | 0x00040000) {
return new Promise(async(resolve, reject) => {
await sql.begin(async sql => {
let finish
!oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
const lo = {
writable,
readable,
close : () => sql`select lo_close(${ fd })`.then(finish),
tell : () => sql`select lo_tell64(${ fd })`,
read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
write : (x) => sql`select lowrite(${ fd }, ${ x })`,
truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
size : () => sql`
select
lo_lseek64(${ fd }, location, 0) as position,
seek.size
from (
select
lo_lseek64($1, 0, 2) as size,
tell.location
from (select lo_tell64($1) as location) tell
) seek
`
}
resolve(lo)
return new Promise(async r => finish = r)
async function readable({
highWaterMark = 2048 * 8,
start = 0,
end = Infinity
} = {}) {
let max = end - start
start && await lo.seek(start)
return new Stream.Readable({
highWaterMark,
async read(size) {
const l = size > max ? size - max : size
max -= size
const [{ data }] = await lo.read(l)
this.push(data)
if (data.length < size)
this.push(null)
}
})
}
async function writable({
highWaterMark = 2048 * 8,
start = 0
} = {}) {
start && await lo.seek(start)
return new Stream.Writable({
highWaterMark,
write(chunk, encoding, callback) {
lo.write(chunk).then(() => callback(), callback)
}
})
}
}).catch(reject)
})
}
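A hedged usage sketch of the large-object helper above, assuming `sql` is a connected postgres() instance. Omitting the oid allocates one via lo_creat(-1), and everything runs inside the transaction that largeObject opens, held until close() resolves finish.

const lo = await sql.largeObject()    // sql.largeObject is bound to sql in index.js
await lo.write(Buffer.from('hello')) // lowrite(fd, ...)
await lo.seek(0)                     // lo_lseek64(fd, 0, 0)
const [{ data }] = await lo.read(5)  // Buffer containing 'hello'
await lo.close()                     // lo_close(fd), then commits via finish()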

173
node_modules/postgres/cjs/src/query.js generated vendored Normal file

@@ -0,0 +1,173 @@
const originCache = new Map()
, originStackCache = new Map()
, originError = Symbol('OriginError')
const CLOSE = module.exports.CLOSE = {}
const Query = module.exports.Query = class Query extends Promise {
constructor(strings, args, handler, canceller, options = {}) {
let resolve
, reject
super((a, b) => {
resolve = a
reject = b
})
this.tagged = Array.isArray(strings.raw)
this.strings = strings
this.args = args
this.handler = handler
this.canceller = canceller
this.options = options
this.state = null
this.statement = null
this.resolve = x => (this.active = false, resolve(x))
this.reject = x => (this.active = false, reject(x))
this.active = false
this.cancelled = null
this.executed = false
this.signature = ''
this[originError] = this.handler.debug
? new Error()
: this.tagged && cachedError(this.strings)
}
get origin() {
return (this.handler.debug
? this[originError].stack
: this.tagged && originStackCache.has(this.strings)
? originStackCache.get(this.strings)
: originStackCache.set(this.strings, this[originError].stack).get(this.strings)
) || ''
}
static get [Symbol.species]() {
return Promise
}
cancel() {
return this.canceller && (this.canceller(this), this.canceller = null)
}
simple() {
this.options.simple = true
this.options.prepare = false
return this
}
async readable() {
this.simple()
this.streaming = true
return this
}
async writable() {
this.simple()
this.streaming = true
return this
}
cursor(rows = 1, fn) {
this.options.simple = false
if (typeof rows === 'function') {
fn = rows
rows = 1
}
this.cursorRows = rows
if (typeof fn === 'function')
return (this.cursorFn = fn, this)
let prev
return {
[Symbol.asyncIterator]: () => ({
next: () => {
if (this.executed && !this.active)
return { done: true }
prev && prev()
const promise = new Promise((resolve, reject) => {
this.cursorFn = value => {
resolve({ value, done: false })
return new Promise(r => prev = r)
}
this.resolve = () => (this.active = false, resolve({ done: true }))
this.reject = x => (this.active = false, reject(x))
})
this.execute()
return promise
},
return() {
prev && prev(CLOSE)
return { done: true }
}
})
}
}
describe() {
this.options.simple = false
this.onlyDescribe = this.options.prepare = true
return this
}
stream() {
throw new Error('.stream has been renamed to .forEach')
}
forEach(fn) {
this.forEachFn = fn
this.handle()
return this
}
raw() {
this.isRaw = true
return this
}
values() {
this.isRaw = 'values'
return this
}
async handle() {
!this.executed && (this.executed = true) && await 1 && this.handler(this)
}
execute() {
this.handle()
return this
}
then() {
this.handle()
return super.then.apply(this, arguments)
}
catch() {
this.handle()
return super.catch.apply(this, arguments)
}
finally() {
this.handle()
return super.finally.apply(this, arguments)
}
}
function cachedError(xs) {
if (originCache.has(xs))
return originCache.get(xs)
const x = Error.stackTraceLimit
Error.stackTraceLimit = 4
originCache.set(xs, new Error())
Error.stackTraceLimit = x
return originCache.get(xs)
}
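A sketch of the lazy-execution and cursor paths above (table names hypothetical, `sql` assumed connected). Because then/catch/finally all route through handle(), a Query only hits the wire once it is awaited, and cursor() without a callback hands back an async iterator driven by cursorFn.

const users = await sql`select * from users` // executes here, not at construction

// Each iteration resolves one batch of `rows` rows; breaking out of the loop
// invokes return(), which closes the cursor by resolving prev with CLOSE.
for await (const [row] of sql`select * from big_table`.cursor(1)) {
  if (row.id > 100)
    break
}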

31
node_modules/postgres/cjs/src/queue.js generated vendored Normal file

@@ -0,0 +1,31 @@
module.exports = Queue
function Queue(initial = []) {
let xs = initial.slice()
let index = 0
return {
get length() {
return xs.length - index
},
remove: (x) => {
const index = xs.indexOf(x)
return index === -1
? null
: (xs.splice(index, 1), x)
},
push: (x) => (xs.push(x), x),
shift: () => {
const out = xs[index++]
if (index === xs.length) {
index = 0
xs = []
} else {
xs[index - 1] = undefined
}
return out
}
}
}
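A tiny sketch of the queue above: shift() advances an index instead of splicing, so dequeueing is O(1) amortized, and consumed slots are set to undefined so references can be collected.

const q = Queue([1, 2, 3])
q.push(4)   // 4
q.shift()   // 1
q.remove(3) // 3, or null if absent
q.length    // 2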

16
node_modules/postgres/cjs/src/result.js generated vendored Normal file

@@ -0,0 +1,16 @@
module.exports = class Result extends Array {
constructor() {
super()
Object.defineProperties(this, {
count: { value: null, writable: true },
state: { value: null, writable: true },
command: { value: null, writable: true },
columns: { value: null, writable: true },
statement: { value: null, writable: true }
})
}
static get [Symbol.species]() {
return Array
}
}
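Result is an Array subclass, so rows index normally while the metadata above rides along as non-enumerable properties (table and values below are illustrative).

const rows = await sql`update users set active = true`
rows.count   // e.g. 42 - affected row count from the command tag
rows.command // 'UPDATE'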

277
node_modules/postgres/cjs/src/subscribe.js generated vendored Normal file

@@ -0,0 +1,277 @@
const noop = () => { /* noop */ }
module.exports = Subscribe
function Subscribe(postgres, options) {
const subscribers = new Map()
, slot = 'postgresjs_' + Math.random().toString(36).slice(2)
, state = {}
let connection
, stream
, ended = false
const sql = subscribe.sql = postgres({
...options,
transform: { column: {}, value: {}, row: {} },
max: 1,
fetch_types: false,
idle_timeout: null,
max_lifetime: null,
connection: {
...options.connection,
replication: 'database'
},
onclose: async function() {
if (ended)
return
stream = null
state.pid = state.secret = undefined
connected(await init(sql, slot, options.publications))
subscribers.forEach(event => event.forEach(({ onsubscribe }) => onsubscribe()))
},
no_subscribe: true
})
const end = sql.end
, close = sql.close
sql.end = async() => {
ended = true
stream && (await new Promise(r => (stream.once('close', r), stream.end())))
return end()
}
sql.close = async() => {
stream && (await new Promise(r => (stream.once('close', r), stream.end())))
return close()
}
return subscribe
async function subscribe(event, fn, onsubscribe = noop, onerror = noop) {
event = parseEvent(event)
if (!connection)
connection = init(sql, slot, options.publications)
const subscriber = { fn, onsubscribe }
const fns = subscribers.has(event)
? subscribers.get(event).add(subscriber)
: subscribers.set(event, new Set([subscriber])).get(event)
const unsubscribe = () => {
fns.delete(subscriber)
fns.size === 0 && subscribers.delete(event)
}
return connection.then(x => {
connected(x)
onsubscribe()
stream && stream.on('error', onerror)
return { unsubscribe, state, sql }
})
}
function connected(x) {
stream = x.stream
state.pid = x.state.pid
state.secret = x.state.secret
}
async function init(sql, slot, publications) {
if (!publications)
throw new Error('Missing publication names')
const xs = await sql.unsafe(
`CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
)
const [x] = xs
const stream = await sql.unsafe(
`START_REPLICATION SLOT ${ slot } LOGICAL ${
x.consistent_point
} (proto_version '1', publication_names '${ publications }')`
).writable()
const state = {
lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex')))
}
stream.on('data', data)
stream.on('error', error)
stream.on('close', sql.close)
return { stream, state: xs.state }
function error(e) {
console.error('Unexpected error during logical streaming - reconnecting', e) // eslint-disable-line
}
function data(x) {
if (x[0] === 0x77) {
parse(x.subarray(25), state, sql.options.parsers, handle, options.transform)
} else if (x[0] === 0x6b && x[17]) {
state.lsn = x.subarray(1, 9)
pong()
}
}
function handle(a, b) {
const path = b.relation.schema + '.' + b.relation.table
call('*', a, b)
call('*:' + path, a, b)
b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
call(b.command, a, b)
call(b.command + ':' + path, a, b)
b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
}
function pong() {
const x = Buffer.alloc(34)
x[0] = 'r'.charCodeAt(0)
x.fill(state.lsn, 1)
x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25)
stream.write(x)
}
}
function call(x, a, b) {
subscribers.has(x) && subscribers.get(x).forEach(({ fn }) => fn(a, b, x))
}
}
function Time(x) {
return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000)))
}
function parse(x, state, parsers, handle, transform) {
const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
Object.entries({
R: x => { // Relation
let i = 1
const r = state[x.readUInt32BE(i)] = {
schema: x.toString('utf8', i += 4, i = x.indexOf(0, i)) || 'pg_catalog',
table: x.toString('utf8', i + 1, i = x.indexOf(0, i + 1)),
columns: Array(x.readUInt16BE(i += 2)),
keys: []
}
i += 2
let columnIndex = 0
, column
while (i < x.length) {
column = r.columns[columnIndex++] = {
key: x[i++],
name: transform.column.from
? transform.column.from(x.toString('utf8', i, i = x.indexOf(0, i)))
: x.toString('utf8', i, i = x.indexOf(0, i)),
type: x.readUInt32BE(i += 1),
parser: parsers[x.readUInt32BE(i)],
atttypmod: x.readUInt32BE(i += 4)
}
column.key && r.keys.push(column)
i += 4
}
},
Y: () => { /* noop */ }, // Type
O: () => { /* noop */ }, // Origin
B: x => { // Begin
state.date = Time(x.readBigInt64BE(9))
state.lsn = x.subarray(1, 9)
},
I: x => { // Insert
let i = 1
const relation = state[x.readUInt32BE(i)]
const { row } = tuples(x, relation.columns, i += 7, transform)
handle(row, {
command: 'insert',
relation
})
},
D: x => { // Delete
let i = 1
const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
handle(key || x[i] === 79
? tuples(x, relation.columns, i += 3, transform).row
: null
, {
command: 'delete',
relation,
key
})
},
U: x => { // Update
let i = 1
const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
const xs = key || x[i] === 79
? tuples(x, relation.columns, i += 3, transform)
: null
xs && (i = xs.i)
const { row } = tuples(x, relation.columns, i + 3, transform)
handle(row, {
command: 'update',
relation,
key,
old: xs && xs.row
})
},
T: () => { /* noop */ }, // Truncate,
C: () => { /* noop */ } // Commit
}).reduce(char, {})[x[0]](x)
}
function tuples(x, columns, xi, transform) {
let type
, column
, value
const row = transform.raw ? new Array(columns.length) : {}
for (let i = 0; i < columns.length; i++) {
type = x[xi++]
column = columns[i]
value = type === 110 // n
? null
: type === 117 // u
? undefined
: column.parser === undefined
? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
: column.parser.array === true
? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
: column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
transform.raw
? (row[i] = transform.raw === true
? value
: transform.value.from ? transform.value.from(value, column) : value)
: (row[column.name] = transform.value.from
? transform.value.from(value, column)
: value
)
}
return { i: xi, row: transform.row.from ? transform.row.from(row) : row }
}
function parseEvent(x) {
const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i)
if (!xs)
  throw new Error('Malformed subscribe pattern: ' + x)
const [, command, path, key] = xs
return (command || '*')
+ (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '')
+ (key ? '=' + key : '')
}
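A usage sketch of the logical-replication subscriber above. Event patterns follow parseEvent's command:schema.table=key grammar; the table is hypothetical, and the database is assumed to have a publication matching the publications option (default 'alltables').

const { unsubscribe, state } = await sql.subscribe(
  'insert:public.users',
  (row, { command, relation }) => {
    // row is the decoded tuple; updates additionally pass key/old when the
    // relation has a replica identity
    console.log(command, relation.table, row)
  },
  () => console.log('subscribed') // onsubscribe fires once the slot is live
)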

367
node_modules/postgres/cjs/src/types.js generated vendored Normal file

@@ -0,0 +1,367 @@
const { Query } = require('./query.js')
const { Errors } = require('./errors.js')
const types = module.exports.types = {
string: {
to: 25,
from: null, // defaults to string
serialize: x => '' + x
},
number: {
to: 0,
from: [21, 23, 26, 700, 701],
serialize: x => '' + x,
parse: x => +x
},
json: {
to: 114,
from: [114, 3802],
serialize: x => JSON.stringify(x),
parse: x => JSON.parse(x)
},
boolean: {
to: 16,
from: 16,
serialize: x => x === true ? 't' : 'f',
parse: x => x === 't'
},
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
parse: x => new Date(x)
},
bytea: {
to: 17,
from: 17,
serialize: x => '\\x' + Buffer.from(x).toString('hex'),
parse: x => Buffer.from(x.slice(2), 'hex')
}
}
class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }}
const Identifier = module.exports.Identifier = class Identifier extends NotTagged {
constructor(value) {
super()
this.value = escapeIdentifier(value)
}
}
const Parameter = module.exports.Parameter = class Parameter extends NotTagged {
constructor(value, type, array) {
super()
this.value = value
this.type = type
this.array = array
}
}
const Builder = module.exports.Builder = class Builder extends NotTagged {
constructor(first, rest) {
super()
this.first = first
this.rest = rest
}
build(before, parameters, types, options) {
const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
return keyword.i === -1
? escapeIdentifiers(this.first, options)
: keyword.fn(this.first, this.rest, parameters, types, options)
}
}
module.exports.handleValue = handleValue
function handleValue(x, parameters, types, options) {
let value = x instanceof Parameter ? x.value : x
if (value === undefined) {
x instanceof Parameter
? x.value = options.transform.undefined
: value = x = options.transform.undefined
if (value === undefined)
throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
}
return '$' + (types.push(
x instanceof Parameter
? (parameters.push(x.value), x.array
? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
: x.type
)
: (parameters.push(x), inferType(x))
))
}
const defaultHandlers = typeHandlers(types)
module.exports.stringify = stringify
function stringify(q, string, value, parameters, types, options) { // eslint-disable-line
for (let i = 1; i < q.strings.length; i++) {
string += (stringifyValue(string, value, parameters, types, options)) + q.strings[i]
value = q.args[i]
}
return string
}
function stringifyValue(string, value, parameters, types, o) {
return (
value instanceof Builder ? value.build(string, parameters, types, o) :
value instanceof Query ? fragment(value, parameters, types, o) :
value instanceof Identifier ? value.value :
value && value[0] instanceof Query ? value.reduce((acc, x) => acc + ' ' + fragment(x, parameters, types, o), '') :
handleValue(value, parameters, types, o)
)
}
function fragment(q, parameters, types, options) {
q.fragment = true
return stringify(q, q.strings[0], q.args[0], parameters, types, options)
}
function valuesBuilder(first, parameters, types, columns, options) {
return first.map(row =>
'(' + columns.map(column =>
stringifyValue('values', row[column], parameters, types, options)
).join(',') + ')'
).join(',')
}
function values(first, rest, parameters, types, options) {
const multi = Array.isArray(first[0])
const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
return valuesBuilder(multi ? first : [first], parameters, types, columns, options)
}
function select(first, rest, parameters, types, options) {
typeof first === 'string' && (first = [first].concat(rest))
if (Array.isArray(first))
return escapeIdentifiers(first, options)
let value
const columns = rest.length ? rest.flat() : Object.keys(first)
return columns.map(x => {
value = first[x]
return (
value instanceof Query ? fragment(value, parameters, types, options) :
value instanceof Identifier ? value.value :
handleValue(value, parameters, types, options)
) + ' as ' + escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x)
}).join(',')
}
const builders = Object.entries({
values,
in: (...xs) => {
const x = values(...xs)
return x === '()' ? '(null)' : x
},
select,
as: select,
returning: select,
'\\(': select,
update(first, rest, parameters, types, options) {
return (rest.length ? rest.flat() : Object.keys(first)).map(x =>
escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x) +
'=' + stringifyValue('values', first[x], parameters, types, options)
)
},
insert(first, rest, parameters, types, options) {
const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first)
return '(' + escapeIdentifiers(columns, options) + ')values' +
valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, columns, options)
}
}).map(([x, fn]) => ([new RegExp('((?:^|[\\s(])' + x + '(?:$|[\\s(]))(?![\\s\\S]*\\1)', 'i'), fn]))
function notTagged() {
throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
}
const serializers = module.exports.serializers = defaultHandlers.serializers
const parsers = module.exports.parsers = defaultHandlers.parsers
const END = module.exports.END = {}
function firstIsString(x) {
if (Array.isArray(x))
return firstIsString(x[0])
return typeof x === 'string' ? 1009 : 0
}
const mergeUserTypes = module.exports.mergeUserTypes = function(types) {
const user = typeHandlers(types || {})
return {
serializers: Object.assign({}, serializers, user.serializers),
parsers: Object.assign({}, parsers, user.parsers)
}
}
function typeHandlers(types) {
return Object.keys(types).reduce((acc, k) => {
types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
if (types[k].serialize) {
acc.serializers[types[k].to] = types[k].serialize
types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize)
}
return acc
}, { parsers: {}, serializers: {} })
}
function escapeIdentifiers(xs, { transform: { column } }) {
return xs.map(x => escapeIdentifier(column.to ? column.to(x) : x)).join(',')
}
const escapeIdentifier = module.exports.escapeIdentifier = function escape(str) {
return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
}
const inferType = module.exports.inferType = function inferType(x) {
return (
x instanceof Parameter ? x.type :
x instanceof Date ? 1184 :
x instanceof Uint8Array ? 17 :
(x === true || x === false) ? 16 :
typeof x === 'bigint' ? 20 :
Array.isArray(x) ? inferType(x[0]) :
0
)
}
const escapeBackslash = /\\/g
const escapeQuote = /"/g
function arrayEscape(x) {
return x
.replace(escapeBackslash, '\\\\')
.replace(escapeQuote, '\\"')
}
const arraySerializer = module.exports.arraySerializer = function arraySerializer(xs, serializer, options, typarray) {
if (Array.isArray(xs) === false)
return xs
if (!xs.length)
return '{}'
const first = xs[0]
// Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
const delimiter = typarray === 1020 ? ';' : ','
if (Array.isArray(first) && !first.type)
return '{' + xs.map(x => arraySerializer(x, serializer, options, typarray)).join(delimiter) + '}'
return '{' + xs.map(x => {
if (x === undefined) {
x = options.transform.undefined
if (x === undefined)
throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
}
return x === null
? 'null'
: '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
}).join(delimiter) + '}'
}
const arrayParserState = {
i: 0,
char: null,
str: '',
quoted: false,
last: 0
}
const arrayParser = module.exports.arrayParser = function arrayParser(x, parser, typarray) {
arrayParserState.i = arrayParserState.last = 0
return arrayParserLoop(arrayParserState, x, parser, typarray)
}
function arrayParserLoop(s, x, parser, typarray) {
const xs = []
// Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
const delimiter = typarray === 1020 ? ';' : ','
for (; s.i < x.length; s.i++) {
s.char = x[s.i]
if (s.quoted) {
if (s.char === '\\') {
s.str += x[++s.i]
} else if (s.char === '"') {
xs.push(parser ? parser(s.str) : s.str)
s.str = ''
s.quoted = x[s.i + 1] === '"'
s.last = s.i + 2
} else {
s.str += s.char
}
} else if (s.char === '"') {
s.quoted = true
} else if (s.char === '{') {
s.last = ++s.i
xs.push(arrayParserLoop(s, x, parser, typarray))
} else if (s.char === '}') {
s.quoted = false
s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
s.last = s.i + 1
break
} else if (s.char === delimiter && s.p !== '}' && s.p !== '"') {
xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
s.last = s.i + 1
}
s.p = s.char
}
s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
return xs
}
const toCamel = module.exports.toCamel = x => {
let str = x[0]
for (let i = 1; i < x.length; i++)
str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
return str
}
const toPascal = module.exports.toPascal = x => {
let str = x[0].toUpperCase()
for (let i = 1; i < x.length; i++)
str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
return str
}
const toKebab = module.exports.toKebab = x => x.replace(/_/g, '-')
const fromCamel = module.exports.fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
const fromPascal = module.exports.fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
const fromKebab = module.exports.fromKebab = x => x.replace(/-/g, '_')
function createJsonTransform(fn) {
return function jsonTransform(x, column) {
return typeof x === 'object' && x !== null && (column.type === 114 || column.type === 3802)
? Array.isArray(x)
? x.map(x => jsonTransform(x, column))
: Object.entries(x).reduce((acc, [k, v]) => Object.assign(acc, { [fn(k)]: jsonTransform(v, column) }), {})
: x
}
}
toCamel.column = { from: toCamel }
toCamel.value = { from: createJsonTransform(toCamel) }
fromCamel.column = { to: fromCamel }
const camel = module.exports.camel = { ...toCamel }
camel.column.to = fromCamel
toPascal.column = { from: toPascal }
toPascal.value = { from: createJsonTransform(toPascal) }
fromPascal.column = { to: fromPascal }
const pascal = module.exports.pascal = { ...toPascal }
pascal.column.to = fromPascal
toKebab.column = { from: toKebab }
toKebab.value = { from: createJsonTransform(toKebab) }
fromKebab.column = { to: fromKebab }
const kebab = module.exports.kebab = { ...toKebab }
kebab.column.to = fromKebab
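A sketch of wiring a custom type and a name transform through the helpers above. The OID 1700 (numeric) is real; the rest is illustrative. mergeUserTypes overlays the user's parse/serialize onto the defaults, and postgres.camel maps column_name to columnName via toCamel/fromCamel.

const postgres = require('postgres')
const sql = postgres({
  types: {
    numeric: {
      to: 1700,
      from: [1700],
      serialize: x => x.toString(), // JS value -> wire text
      parse: x => Number(x)         // wire text -> JS value
    }
  },
  transform: postgres.camel
})
// sql.types.numeric(x) is now available as a typed parameter helper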

63
node_modules/postgres/package.json generated vendored Normal file

@@ -0,0 +1,63 @@
{
"name": "postgres",
"version": "3.4.7",
"description": "Fastest full featured PostgreSQL client for Node.js",
"type": "module",
"module": "src/index.js",
"main": "cjs/src/index.js",
"exports": {
"types": "./types/index.d.ts",
"bun": "./src/index.js",
"workerd": "./cf/src/index.js",
"import": "./src/index.js",
"default": "./cjs/src/index.js"
},
"types": "types/index.d.ts",
"typings": "types/index.d.ts",
"engines": {
"node": ">=12"
},
"scripts": {
"build": "npm run build:cjs && npm run build:deno && npm run build:cf",
"build:cjs": "node transpile.cjs",
"build:deno": "node transpile.deno.js",
"build:cf": "node transpile.cf.js",
"test": "npm run test:esm && npm run test:cjs && npm run test:deno",
"test:esm": "node tests/index.js",
"test:cjs": "npm run build:cjs && cd cjs/tests && node index.js && cd ../../",
"test:deno": "npm run build:deno && cd deno/tests && deno run --no-lock --allow-all --unsafely-ignore-certificate-errors index.js && cd ../../",
"lint": "eslint src && eslint tests",
"prepare": "npm run build",
"prepublishOnly": "npm run lint"
},
"files": [
"/cf/src",
"/cf/polyfills.js",
"/cjs/src",
"/cjs/package.json",
"/src",
"/types"
],
"author": "Rasmus Porsager <rasmus@porsager.com> (https://www.porsager.com)",
"funding": {
"type": "individual",
"url": "https://github.com/sponsors/porsager"
},
"license": "Unlicense",
"repository": "porsager/postgres",
"homepage": "https://github.com/porsager/postgres",
"bugs": "https://github.com/porsager/postgres/issues",
"keywords": [
"driver",
"postgresql",
"postgres.js",
"postgres",
"postrges",
"postgre",
"client",
"sql",
"db",
"pg",
"database"
]
}
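The "exports" map above selects a build per environment; a rough resolution guide (conditions are matched in object order):

// import postgres from 'postgres' -> ./src/index.js     ("import", also "bun")
// require('postgres')             -> ./cjs/src/index.js ("default")
// Cloudflare Workers              -> ./cf/src/index.js  ("workerd", with cf/polyfills.js)
import postgres from 'postgres'
const sql = postgres({ /* options */ })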

78
node_modules/postgres/src/bytes.js generated vendored Normal file

@@ -0,0 +1,78 @@
const size = 256
let buffer = Buffer.allocUnsafe(size)
const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
const v = x.charCodeAt(0)
acc[x] = () => {
buffer[0] = v
b.i = 5
return b
}
return acc
}, {})
const b = Object.assign(reset, messages, {
N: String.fromCharCode(0),
i: 0,
inc(x) {
b.i += x
return b
},
str(x) {
const length = Buffer.byteLength(x)
fit(length)
b.i += buffer.write(x, b.i, length, 'utf8')
return b
},
i16(x) {
fit(2)
buffer.writeUInt16BE(x, b.i)
b.i += 2
return b
},
i32(x, i) {
if (i || i === 0) {
buffer.writeUInt32BE(x, i)
return b
}
fit(4)
buffer.writeUInt32BE(x, b.i)
b.i += 4
return b
},
z(x) {
fit(x)
buffer.fill(0, b.i, b.i + x)
b.i += x
return b
},
raw(x) {
buffer = Buffer.concat([buffer.subarray(0, b.i), x])
b.i = buffer.length
return b
},
end(at = 1) {
buffer.writeUInt32BE(b.i - at, at)
const out = buffer.subarray(0, b.i)
b.i = 0
buffer = Buffer.allocUnsafe(size)
return out
}
})
export default b
function fit(x) {
if (buffer.length - b.i < x) {
const prev = buffer
, length = prev.length
buffer = Buffer.allocUnsafe(length + (length >> 1) + x)
prev.copy(buffer)
}
}
function reset() {
b.i = 0
return b
}
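A sketch of composing a wire message with the builder above: b() resets the write index, .Q() writes the 'Q' type byte and skips 4 length bytes, and end() backfills the int32 length and returns the finished packet. The import path is this file within the package.

import b from './bytes.js'

// A complete simple-query packet: 'Q' + int32 length + null-terminated string
const packet = b().Q().str('select 1' + b.N).end()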

1042
node_modules/postgres/src/connection.js generated vendored Normal file

File diff suppressed because it is too large

53
node_modules/postgres/src/errors.js generated vendored Normal file

@@ -0,0 +1,53 @@
export class PostgresError extends Error {
constructor(x) {
super(x.message)
this.name = this.constructor.name
Object.assign(this, x)
}
}
export const Errors = {
connection,
postgres,
generic,
notSupported
}
function connection(x, options, socket) {
const { host, port } = socket || options
const error = Object.assign(
new Error(('write ' + x + ' ' + (options.path || (host + ':' + port)))),
{
code: x,
errno: x,
address: options.path || host
}, options.path ? {} : { port: port }
)
Error.captureStackTrace(error, connection)
return error
}
function postgres(x) {
const error = new PostgresError(x)
Error.captureStackTrace(error, postgres)
return error
}
function generic(code, message) {
const error = Object.assign(new Error(code + ': ' + message), { code })
Error.captureStackTrace(error, generic)
return error
}
/* c8 ignore next 10 */
function notSupported(x) {
const error = Object.assign(
new Error(x + ' (B) is not supported'),
{
code: 'MESSAGE_NOT_SUPPORTED',
name: x
}
)
Error.captureStackTrace(error, notSupported)
return error
}
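A sketch of consuming these factories from calling code (table name hypothetical, `sql` assumed connected). PostgresError carries the raw server error fields via Object.assign, so code, severity, and friends are available directly.

try {
  await sql`select * from missing_table`
} catch (e) {
  if (e instanceof postgres.PostgresError)
    console.log(e.code, e.message) // e.g. '42P01', relation does not exist
}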

566
node_modules/postgres/src/index.js generated vendored Normal file

@@ -0,0 +1,566 @@
import os from 'os'
import fs from 'fs'
import {
mergeUserTypes,
inferType,
Parameter,
Identifier,
Builder,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab
} from './types.js'
import Connection from './connection.js'
import { Query, CLOSE } from './query.js'
import Queue from './queue.js'
import { Errors, PostgresError } from './errors.js'
import Subscribe from './subscribe.js'
import largeObject from './large.js'
Object.assign(Postgres, {
PostgresError,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab,
BigInt: {
to: 20,
from: [20],
parse: x => BigInt(x), // eslint-disable-line
serialize: x => x.toString()
}
})
export default Postgres
function Postgres(a, b) {
const options = parseOptions(a, b)
, subscribe = options.no_subscribe || Subscribe(Postgres, { ...options })
let ending = false
const queries = Queue()
, connecting = Queue()
, reserved = Queue()
, closed = Queue()
, ended = Queue()
, open = Queue()
, busy = Queue()
, full = Queue()
, queues = { connecting, reserved, closed, ended, open, busy, full }
const connections = [...Array(options.max)].map(() => Connection(options, queues, { onopen, onend, onclose }))
const sql = Sql(handler)
Object.assign(sql, {
get parameters() { return options.parameters },
largeObject: largeObject.bind(null, sql),
subscribe,
CLOSE,
END: CLOSE,
PostgresError,
options,
reserve,
listen,
begin,
close,
end
})
return sql
function Sql(handler) {
handler.debug = options.debug
Object.entries(options.types).reduce((acc, [name, type]) => {
acc[name] = (x) => new Parameter(x, type.to)
return acc
}, typed)
Object.assign(sql, {
types: typed,
typed,
unsafe,
notify,
array,
json,
file
})
return sql
function typed(value, type) {
return new Parameter(value, type)
}
function sql(strings, ...args) {
const query = strings && Array.isArray(strings.raw)
? new Query(strings, args, handler, cancel)
: typeof strings === 'string' && !args.length
? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
: new Builder(strings, args)
return query
}
function unsafe(string, args = [], options = {}) {
arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
const query = new Query([string], args, handler, cancel, {
prepare: false,
...options,
simple: 'simple' in options ? options.simple : args.length === 0
})
return query
}
function file(path, args = [], options = {}) {
arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
const query = new Query([], args, (query) => {
fs.readFile(path, 'utf8', (err, string) => {
if (err)
return query.reject(err)
query.strings = [string]
handler(query)
})
}, cancel, {
...options,
simple: 'simple' in options ? options.simple : args.length === 0
})
return query
}
}
async function listen(name, fn, onlisten) {
const listener = { fn, onlisten }
const sql = listen.sql || (listen.sql = Postgres({
...options,
max: 1,
idle_timeout: null,
max_lifetime: null,
fetch_types: false,
onclose() {
Object.entries(listen.channels).forEach(([name, { listeners }]) => {
delete listen.channels[name]
Promise.all(listeners.map(l => listen(name, l.fn, l.onlisten).catch(() => { /* noop */ })))
})
},
onnotify(c, x) {
c in listen.channels && listen.channels[c].listeners.forEach(l => l.fn(x))
}
}))
const channels = listen.channels || (listen.channels = {})
, exists = name in channels
if (exists) {
channels[name].listeners.push(listener)
const result = await channels[name].result
listener.onlisten && listener.onlisten()
return { state: result.state, unlisten }
}
channels[name] = { result: sql`listen ${
sql.unsafe('"' + name.replace(/"/g, '""') + '"')
}`, listeners: [listener] }
const result = await channels[name].result
listener.onlisten && listener.onlisten()
return { state: result.state, unlisten }
async function unlisten() {
if (name in channels === false)
return
channels[name].listeners = channels[name].listeners.filter(x => x !== listener)
if (channels[name].listeners.length)
return
delete channels[name]
return sql`unlisten ${
sql.unsafe('"' + name.replace(/"/g, '""') + '"')
}`
}
}
async function notify(channel, payload) {
return await sql`select pg_notify(${ channel }, ${ '' + payload })`
}
async function reserve() {
const queue = Queue()
const c = open.length
? open.shift()
: await new Promise((resolve, reject) => {
const query = { reserve: resolve, reject }
queries.push(query)
closed.length && connect(closed.shift(), query)
})
move(c, reserved)
c.reserved = () => queue.length
? c.execute(queue.shift())
: move(c, reserved)
c.reserved.release = true
const sql = Sql(handler)
sql.release = () => {
c.reserved = null
onopen(c)
}
return sql
function handler(q) {
c.queue === full
? queue.push(q)
: c.execute(q) || move(c, full)
}
}
async function begin(options, fn) {
!fn && (fn = options, options = '')
const queries = Queue()
let savepoints = 0
, connection
, prepare = null
await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).execute()
return await Promise.race([
scope(connection, fn),
new Promise((_, reject) => connection.onclose = reject)
])
async function scope(c, fn, name) {
const sql = Sql(handler)
sql.savepoint = savepoint
sql.prepare = x => prepare = x.replace(/[^a-z0-9$-_. ]/gi, '')
let uncaughtError
, result
name && await sql`savepoint ${ sql(name) }`
try {
result = await new Promise((resolve, reject) => {
const x = fn(sql)
Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
})
if (uncaughtError)
throw uncaughtError
} catch (e) {
await (name
? sql`rollback to ${ sql(name) }`
: sql`rollback`
)
throw e instanceof PostgresError && e.code === '25P02' && uncaughtError || e
}
if (!name) {
prepare
? await sql`prepare transaction '${ sql.unsafe(prepare) }'`
: await sql`commit`
}
return result
function savepoint(name, fn) {
if (name && Array.isArray(name.raw))
return savepoint(sql => sql.apply(sql, arguments))
arguments.length === 1 && (fn = name, name = null)
return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
}
function handler(q) {
q.catch(e => uncaughtError || (uncaughtError = e))
c.queue === full
? queries.push(q)
: c.execute(q) || move(c, full)
}
}
function onexecute(c) {
connection = c
move(c, reserved)
c.reserved = () => queries.length
? c.execute(queries.shift())
: move(c, reserved)
}
}
function move(c, queue) {
c.queue.remove(c)
queue.push(c)
c.queue = queue
queue === open
? c.idleTimer.start()
: c.idleTimer.cancel()
return c
}
function json(x) {
return new Parameter(x, 3802)
}
function array(x, type) {
if (!Array.isArray(x))
return array(Array.from(arguments))
return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
}
function handler(query) {
if (ending)
return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
if (open.length)
return go(open.shift(), query)
if (closed.length)
return connect(closed.shift(), query)
busy.length
? go(busy.shift(), query)
: queries.push(query)
}
function go(c, query) {
return c.execute(query)
? move(c, busy)
: move(c, full)
}
function cancel(query) {
return new Promise((resolve, reject) => {
query.state
? query.active
? Connection(options).cancel(query.state, resolve, reject)
: query.cancelled = { resolve, reject }
: (
queries.remove(query),
query.cancelled = true,
query.reject(Errors.generic('57014', 'canceling statement due to user request')),
resolve()
)
})
}
async function end({ timeout = null } = {}) {
if (ending)
return ending
await 1
let timer
return ending = Promise.race([
new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
Promise.all(connections.map(c => c.end()).concat(
listen.sql ? listen.sql.end({ timeout: 0 }) : [],
subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
))
]).then(() => clearTimeout(timer))
}
async function close() {
await Promise.all(connections.map(c => c.end()))
}
async function destroy(resolve) {
await Promise.all(connections.map(c => c.terminate()))
while (queries.length)
queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
resolve()
}
function connect(c, query) {
move(c, connecting)
c.connect(query)
return c
}
function onend(c) {
move(c, ended)
}
function onopen(c) {
if (queries.length === 0)
return move(c, open)
let max = Math.ceil(queries.length / (connecting.length + 1))
, ready = true
while (ready && queries.length && max-- > 0) {
const query = queries.shift()
if (query.reserve)
return query.reserve(c)
ready = c.execute(query)
}
ready
? move(c, busy)
: move(c, full)
}
function onclose(c, e) {
move(c, closed)
c.reserved = null
c.onclose && (c.onclose(e), c.onclose = null)
options.onclose && options.onclose(c.id)
queries.length && connect(c, queries.shift())
}
}
function parseOptions(a, b) {
if (a && a.shared)
return a
const env = process.env // eslint-disable-line
, o = (!a || typeof a === 'string' ? b : a) || {}
, { url, multihost } = parseUrl(a)
, query = [...url.searchParams].reduce((a, [b, c]) => (a[b] = c, a), {})
, host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
, port = o.port || url.port || env.PGPORT || 5432
, user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
o.no_prepare && (o.prepare = false)
query.sslmode && (query.ssl = query.sslmode, delete query.sslmode)
'timeout' in o && (console.log('The timeout option is deprecated, use idle_timeout instead'), o.idle_timeout = o.timeout) // eslint-disable-line
query.sslrootcert === 'system' && (query.ssl = 'verify-full')
const ints = ['idle_timeout', 'connect_timeout', 'max_lifetime', 'max_pipeline', 'backoff', 'keep_alive']
const defaults = {
max : 10,
ssl : false,
idle_timeout : null,
connect_timeout : 30,
max_lifetime : max_lifetime,
max_pipeline : 100,
backoff : backoff,
keep_alive : 60,
prepare : true,
debug : false,
fetch_types : true,
publications : 'alltables',
target_session_attrs: null
}
return {
host : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
port : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
path : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
user : user,
pass : o.pass || o.password || url.password || env.PGPASSWORD || '',
...Object.entries(defaults).reduce(
(acc, [k, d]) => {
const value = k in o ? o[k] : k in query
? (query[k] === 'disable' || query[k] === 'false' ? false : query[k])
: env['PG' + k.toUpperCase()] || d
acc[k] = typeof value === 'string' && ints.includes(k)
? +value
: value
return acc
},
{}
),
connection : {
application_name: env.PGAPPNAME || 'postgres.js',
...o.connection,
...Object.entries(query).reduce((acc, [k, v]) => (k in defaults || (acc[k] = v), acc), {})
},
types : o.types || {},
target_session_attrs: tsa(o, url, env),
onnotice : o.onnotice,
onnotify : o.onnotify,
onclose : o.onclose,
onparameter : o.onparameter,
socket : o.socket,
transform : parseTransform(o.transform || { undefined: undefined }),
parameters : {},
shared : { retries: 0, typeArrayMap: {} },
...mergeUserTypes(o.types)
}
}
function tsa(o, url, env) {
const x = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
if (!x || ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby'].includes(x))
return x
throw new Error('target_session_attrs ' + x + ' is not supported')
}
function backoff(retries) {
return (0.5 + Math.random() / 2) * Math.min(3 ** retries / 100, 20)
}
function max_lifetime() {
return 60 * (30 + Math.random() * 30)
}
function parseTransform(x) {
return {
undefined: x.undefined,
column: {
from: typeof x.column === 'function' ? x.column : x.column && x.column.from,
to: x.column && x.column.to
},
value: {
from: typeof x.value === 'function' ? x.value : x.value && x.value.from,
to: x.value && x.value.to
},
row: {
from: typeof x.row === 'function' ? x.row : x.row && x.row.from,
to: x.row && x.row.to
}
}
}
function parseUrl(url) {
if (!url || typeof url !== 'string')
return { url: { searchParams: new Map() } }
let host = url
host = host.slice(host.indexOf('://') + 3).split(/[?/]/)[0]
host = decodeURIComponent(host.slice(host.indexOf('@') + 1))
const urlObj = new URL(url.replace(host, host.split(',')[0]))
return {
url: {
username: decodeURIComponent(urlObj.username),
password: decodeURIComponent(urlObj.password),
host: urlObj.host,
hostname: urlObj.hostname,
port: urlObj.port,
pathname: urlObj.pathname,
searchParams: urlObj.searchParams
},
multihost: host.indexOf(',') > -1 && host
}
}
function osUsername() {
try {
return os.userInfo().username // eslint-disable-line
} catch (_) {
return process.env.USERNAME || process.env.USER || process.env.LOGNAME // eslint-disable-line
}
}

70
node_modules/postgres/src/large.js generated vendored Normal file

@@ -0,0 +1,70 @@
import Stream from 'stream'
export default function largeObject(sql, oid, mode = 0x00020000 | 0x00040000) {
return new Promise(async(resolve, reject) => {
await sql.begin(async sql => {
let finish
!oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
const lo = {
writable,
readable,
close : () => sql`select lo_close(${ fd })`.then(finish),
tell : () => sql`select lo_tell64(${ fd })`,
read : (x) => sql`select loread(${ fd }, ${ x }) as data`,
write : (x) => sql`select lowrite(${ fd }, ${ x })`,
truncate : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
seek : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
size : () => sql`
select
lo_lseek64(${ fd }, location, 0) as position,
seek.size
from (
select
lo_lseek64($1, 0, 2) as size,
tell.location
from (select lo_tell64($1) as location) tell
) seek
`
}
resolve(lo)
return new Promise(async r => finish = r)
async function readable({
highWaterMark = 2048 * 8,
start = 0,
end = Infinity
} = {}) {
let max = end - start
start && await lo.seek(start)
return new Stream.Readable({
highWaterMark,
async read(size) {
const l = size > max ? size - max : size
max -= size
const [{ data }] = await lo.read(l)
this.push(data)
if (data.length < size)
this.push(null)
}
})
}
async function writable({
highWaterMark = 2048 * 8,
start = 0
} = {}) {
start && await lo.seek(start)
return new Stream.Writable({
highWaterMark,
write(chunk, encoding, callback) {
lo.write(chunk).then(() => callback(), callback)
}
})
}
}).catch(reject)
})
}

173
node_modules/postgres/src/query.js generated vendored Normal file

@@ -0,0 +1,173 @@
const originCache = new Map()
, originStackCache = new Map()
, originError = Symbol('OriginError')
export const CLOSE = {}
export class Query extends Promise {
constructor(strings, args, handler, canceller, options = {}) {
let resolve
, reject
super((a, b) => {
resolve = a
reject = b
})
this.tagged = Array.isArray(strings.raw)
this.strings = strings
this.args = args
this.handler = handler
this.canceller = canceller
this.options = options
this.state = null
this.statement = null
this.resolve = x => (this.active = false, resolve(x))
this.reject = x => (this.active = false, reject(x))
this.active = false
this.cancelled = null
this.executed = false
this.signature = ''
this[originError] = this.handler.debug
? new Error()
: this.tagged && cachedError(this.strings)
}
get origin() {
return (this.handler.debug
? this[originError].stack
: this.tagged && originStackCache.has(this.strings)
? originStackCache.get(this.strings)
: originStackCache.set(this.strings, this[originError].stack).get(this.strings)
) || ''
}
static get [Symbol.species]() {
return Promise
}
cancel() {
return this.canceller && (this.canceller(this), this.canceller = null)
}
simple() {
this.options.simple = true
this.options.prepare = false
return this
}
async readable() {
this.simple()
this.streaming = true
return this
}
async writable() {
this.simple()
this.streaming = true
return this
}
cursor(rows = 1, fn) {
this.options.simple = false
if (typeof rows === 'function') {
fn = rows
rows = 1
}
this.cursorRows = rows
if (typeof fn === 'function')
return (this.cursorFn = fn, this)
let prev
return {
[Symbol.asyncIterator]: () => ({
next: () => {
if (this.executed && !this.active)
return { done: true }
prev && prev()
const promise = new Promise((resolve, reject) => {
this.cursorFn = value => {
resolve({ value, done: false })
return new Promise(r => prev = r)
}
this.resolve = () => (this.active = false, resolve({ done: true }))
this.reject = x => (this.active = false, reject(x))
})
this.execute()
return promise
},
return() {
prev && prev(CLOSE)
return { done: true }
}
})
}
}
describe() {
this.options.simple = false
this.onlyDescribe = this.options.prepare = true
return this
}
stream() {
throw new Error('.stream has been renamed to .forEach')
}
forEach(fn) {
this.forEachFn = fn
this.handle()
return this
}
raw() {
this.isRaw = true
return this
}
values() {
this.isRaw = 'values'
return this
}
async handle() {
!this.executed && (this.executed = true) && await 1 && this.handler(this)
}
execute() {
this.handle()
return this
}
then() {
this.handle()
return super.then.apply(this, arguments)
}
catch() {
this.handle()
return super.catch.apply(this, arguments)
}
finally() {
this.handle()
return super.finally.apply(this, arguments)
}
}
function cachedError(xs) {
if (originCache.has(xs))
return originCache.get(xs)
const x = Error.stackTraceLimit
Error.stackTraceLimit = 4
originCache.set(xs, new Error())
Error.stackTraceLimit = x
return originCache.get(xs)
}

31
node_modules/postgres/src/queue.js generated vendored Normal file

@@ -0,0 +1,31 @@
export default Queue
function Queue(initial = []) {
let xs = initial.slice()
let index = 0
return {
get length() {
return xs.length - index
},
remove: (x) => {
const index = xs.indexOf(x)
return index === -1
? null
: (xs.splice(index, 1), x)
},
push: (x) => (xs.push(x), x),
shift: () => {
const out = xs[index++]
if (index === xs.length) {
index = 0
xs = []
} else {
xs[index - 1] = undefined
}
return out
}
}
}

16
node_modules/postgres/src/result.js generated vendored Normal file

@@ -0,0 +1,16 @@
export default class Result extends Array {
constructor() {
super()
Object.defineProperties(this, {
count: { value: null, writable: true },
state: { value: null, writable: true },
command: { value: null, writable: true },
columns: { value: null, writable: true },
statement: { value: null, writable: true }
})
}
static get [Symbol.species]() {
return Array
}
}

277
node_modules/postgres/src/subscribe.js generated vendored Normal file

@@ -0,0 +1,277 @@
const noop = () => { /* noop */ }
export default function Subscribe(postgres, options) {
const subscribers = new Map()
, slot = 'postgresjs_' + Math.random().toString(36).slice(2)
, state = {}
let connection
, stream
, ended = false
const sql = subscribe.sql = postgres({
...options,
transform: { column: {}, value: {}, row: {} },
max: 1,
fetch_types: false,
idle_timeout: null,
max_lifetime: null,
connection: {
...options.connection,
replication: 'database'
},
onclose: async function() {
if (ended)
return
stream = null
state.pid = state.secret = undefined
connected(await init(sql, slot, options.publications))
subscribers.forEach(event => event.forEach(({ onsubscribe }) => onsubscribe()))
},
no_subscribe: true
})
const end = sql.end
, close = sql.close
sql.end = async() => {
ended = true
stream && (await new Promise(r => (stream.once('close', r), stream.end())))
return end()
}
sql.close = async() => {
stream && (await new Promise(r => (stream.once('close', r), stream.end())))
return close()
}
return subscribe
async function subscribe(event, fn, onsubscribe = noop, onerror = noop) {
event = parseEvent(event)
if (!connection)
connection = init(sql, slot, options.publications)
const subscriber = { fn, onsubscribe }
const fns = subscribers.has(event)
? subscribers.get(event).add(subscriber)
: subscribers.set(event, new Set([subscriber])).get(event)
const unsubscribe = () => {
fns.delete(subscriber)
fns.size === 0 && subscribers.delete(event)
}
return connection.then(x => {
connected(x)
onsubscribe()
stream && stream.on('error', onerror)
return { unsubscribe, state, sql }
})
}
function connected(x) {
stream = x.stream
state.pid = x.state.pid
state.secret = x.state.secret
}
async function init(sql, slot, publications) {
if (!publications)
throw new Error('Missing publication names')
const xs = await sql.unsafe(
`CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
)
const [x] = xs
const stream = await sql.unsafe(
`START_REPLICATION SLOT ${ slot } LOGICAL ${
x.consistent_point
} (proto_version '1', publication_names '${ publications }')`
).writable()
const state = {
lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex')))
}
stream.on('data', data)
stream.on('error', error)
stream.on('close', sql.close)
return { stream, state: xs.state }
function error(e) {
console.error('Unexpected error during logical streaming - reconnecting', e) // eslint-disable-line
}
function data(x) {
if (x[0] === 0x77) {
parse(x.subarray(25), state, sql.options.parsers, handle, options.transform)
} else if (x[0] === 0x6b && x[17]) {
state.lsn = x.subarray(1, 9)
pong()
}
}
function handle(a, b) {
const path = b.relation.schema + '.' + b.relation.table
call('*', a, b)
call('*:' + path, a, b)
b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
call(b.command, a, b)
call(b.command + ':' + path, a, b)
b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
}
function pong() {
const x = Buffer.alloc(34)
x[0] = 'r'.charCodeAt(0)
x.fill(state.lsn, 1)
x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25)
stream.write(x)
}
}
function call(x, a, b) {
subscribers.has(x) && subscribers.get(x).forEach(({ fn }) => fn(a, b, x))
}
}
function Time(x) {
return new Date(Date.UTC(2000, 0, 1) + Number(x / BigInt(1000)))
}
function parse(x, state, parsers, handle, transform) {
const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
Object.entries({
R: x => { // Relation
let i = 1
const r = state[x.readUInt32BE(i)] = {
schema: x.toString('utf8', i += 4, i = x.indexOf(0, i)) || 'pg_catalog',
table: x.toString('utf8', i + 1, i = x.indexOf(0, i + 1)),
columns: Array(x.readUInt16BE(i += 2)),
keys: []
}
i += 2
let columnIndex = 0
, column
while (i < x.length) {
column = r.columns[columnIndex++] = {
key: x[i++],
name: transform.column.from
? transform.column.from(x.toString('utf8', i, i = x.indexOf(0, i)))
: x.toString('utf8', i, i = x.indexOf(0, i)),
type: x.readUInt32BE(i += 1),
parser: parsers[x.readUInt32BE(i)],
atttypmod: x.readUInt32BE(i += 4)
}
column.key && r.keys.push(column)
i += 4
}
},
Y: () => { /* noop */ }, // Type
O: () => { /* noop */ }, // Origin
B: x => { // Begin
state.date = Time(x.readBigInt64BE(9))
state.lsn = x.subarray(1, 9)
},
I: x => { // Insert
let i = 1
const relation = state[x.readUInt32BE(i)]
const { row } = tuples(x, relation.columns, i += 7, transform)
handle(row, {
command: 'insert',
relation
})
},
D: x => { // Delete
let i = 1
const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
handle(key || x[i] === 79
? tuples(x, relation.columns, i += 3, transform).row
: null
, {
command: 'delete',
relation,
key
})
},
U: x => { // Update
let i = 1
const relation = state[x.readUInt32BE(i)]
i += 4
const key = x[i] === 75
const xs = key || x[i] === 79
? tuples(x, relation.columns, i += 3, transform)
: null
xs && (i = xs.i)
const { row } = tuples(x, relation.columns, i + 3, transform)
handle(row, {
command: 'update',
relation,
key,
old: xs && xs.row
})
},
T: () => { /* noop */ }, // Truncate,
C: () => { /* noop */ } // Commit
}).reduce(char, {})[x[0]](x)
}
function tuples(x, columns, xi, transform) {
let type
, column
, value
const row = transform.raw ? new Array(columns.length) : {}
for (let i = 0; i < columns.length; i++) {
type = x[xi++]
column = columns[i]
value = type === 110 // n
? null
: type === 117 // u
? undefined
: column.parser === undefined
? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
: column.parser.array === true
? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
: column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
transform.raw
? (row[i] = transform.raw === true
? value
: transform.value.from ? transform.value.from(value, column) : value)
: (row[column.name] = transform.value.from
? transform.value.from(value, column)
: value
)
}
return { i: xi, row: transform.row.from ? transform.row.from(row) : row }
}
function parseEvent(x) {
const xs = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i)
if (!xs)
  throw new Error('Malformed subscribe pattern: ' + x)
const [, command, path, key] = xs
return (command || '*')
+ (path ? ':' + (path.indexOf('.') === -1 ? 'public.' + path : path) : '')
+ (key ? '=' + key : '')
}

367
node_modules/postgres/src/types.js generated vendored Normal file

@@ -0,0 +1,367 @@
import { Query } from './query.js'
import { Errors } from './errors.js'
export const types = {
string: {
to: 25,
from: null, // defaults to string
serialize: x => '' + x
},
number: {
to: 0,
from: [21, 23, 26, 700, 701],
serialize: x => '' + x,
parse: x => +x
},
json: {
to: 114,
from: [114, 3802],
serialize: x => JSON.stringify(x),
parse: x => JSON.parse(x)
},
boolean: {
to: 16,
from: 16,
serialize: x => x === true ? 't' : 'f',
parse: x => x === 't'
},
date: {
to: 1184,
from: [1082, 1114, 1184],
serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
parse: x => new Date(x)
},
bytea: {
to: 17,
from: 17,
serialize: x => '\\x' + Buffer.from(x).toString('hex'),
parse: x => Buffer.from(x.slice(2), 'hex')
}
}
class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }}
export class Identifier extends NotTagged {
constructor(value) {
super()
this.value = escapeIdentifier(value)
}
}
export class Parameter extends NotTagged {
constructor(value, type, array) {
super()
this.value = value
this.type = type
this.array = array
}
}
export class Builder extends NotTagged {
constructor(first, rest) {
super()
this.first = first
this.rest = rest
}
build(before, parameters, types, options) {
const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
return keyword.i === -1
? escapeIdentifiers(this.first, options)
: keyword.fn(this.first, this.rest, parameters, types, options)
}
}
export function handleValue(x, parameters, types, options) {
let value = x instanceof Parameter ? x.value : x
if (value === undefined) {
x instanceof Parameter
? x.value = options.transform.undefined
: value = x = options.transform.undefined
if (value === undefined)
throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
}
return '$' + (types.push(
x instanceof Parameter
? (parameters.push(x.value), x.array
? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
: x.type
)
: (parameters.push(x), inferType(x))
))
}
const defaultHandlers = typeHandlers(types)
export function stringify(q, string, value, parameters, types, options) { // eslint-disable-line
for (let i = 1; i < q.strings.length; i++) {
string += (stringifyValue(string, value, parameters, types, options)) + q.strings[i]
value = q.args[i]
}
return string
}
function stringifyValue(string, value, parameters, types, o) {
return (
value instanceof Builder ? value.build(string, parameters, types, o) :
value instanceof Query ? fragment(value, parameters, types, o) :
value instanceof Identifier ? value.value :
value && value[0] instanceof Query ? value.reduce((acc, x) => acc + ' ' + fragment(x, parameters, types, o), '') :
handleValue(value, parameters, types, o)
)
}
function fragment(q, parameters, types, options) {
q.fragment = true
return stringify(q, q.strings[0], q.args[0], parameters, types, options)
}
function valuesBuilder(first, parameters, types, columns, options) {
return first.map(row =>
'(' + columns.map(column =>
stringifyValue('values', row[column], parameters, types, options)
).join(',') + ')'
).join(',')
}
function values(first, rest, parameters, types, options) {
const multi = Array.isArray(first[0])
const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
return valuesBuilder(multi ? first : [first], parameters, types, columns, options)
}
function select(first, rest, parameters, types, options) {
typeof first === 'string' && (first = [first].concat(rest))
if (Array.isArray(first))
return escapeIdentifiers(first, options)
let value
const columns = rest.length ? rest.flat() : Object.keys(first)
return columns.map(x => {
value = first[x]
return (
value instanceof Query ? fragment(value, parameters, types, options) :
value instanceof Identifier ? value.value :
handleValue(value, parameters, types, options)
) + ' as ' + escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x)
}).join(',')
}
const builders = Object.entries({
values,
in: (...xs) => {
const x = values(...xs)
return x === '()' ? '(null)' : x
},
select,
as: select,
returning: select,
'\\(': select,
update(first, rest, parameters, types, options) {
return (rest.length ? rest.flat() : Object.keys(first)).map(x =>
escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x) +
'=' + stringifyValue('values', first[x], parameters, types, options)
)
},
insert(first, rest, parameters, types, options) {
const columns = rest.length ? rest.flat() : Object.keys(Array.isArray(first) ? first[0] : first)
return '(' + escapeIdentifiers(columns, options) + ')values' +
valuesBuilder(Array.isArray(first) ? first : [first], parameters, types, columns, options)
}
}).map(([x, fn]) => ([new RegExp('((?:^|[\\s(])' + x + '(?:$|[\\s(]))(?![\\s\\S]*\\1)', 'i'), fn]))
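// Sketch of the dispatch above: build() picks the keyword occurring last before the
// helper, so a (hypothetical, untransformed) sql`insert into users ${ sql(user) }`
// with user = { name, age } would expand to '("name","age")values($1,$2)'.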
function notTagged() {
throw Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
}
export const serializers = defaultHandlers.serializers
export const parsers = defaultHandlers.parsers
export const END = {}
function firstIsString(x) {
if (Array.isArray(x))
return firstIsString(x[0])
return typeof x === 'string' ? 1009 : 0
}
export const mergeUserTypes = function(types) {
const user = typeHandlers(types || {})
return {
serializers: Object.assign({}, serializers, user.serializers),
parsers: Object.assign({}, parsers, user.parsers)
}
}
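// Sketch: merging a user-defined type; oid 1700 (numeric) is an assumption to be
// checked against pg_type:
//   const { parsers } = mergeUserTypes({ numeric: { to: 1700, from: [1700], serialize: String, parse: Number } })
//   parsers[1700]('1.5')  ->  1.5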
function typeHandlers(types) {
return Object.keys(types).reduce((acc, k) => {
types[k].from && [].concat(types[k].from).forEach(x => acc.parsers[x] = types[k].parse)
if (types[k].serialize) {
acc.serializers[types[k].to] = types[k].serialize
types[k].from && [].concat(types[k].from).forEach(x => acc.serializers[x] = types[k].serialize)
}
return acc
}, { parsers: {}, serializers: {} })
}
function escapeIdentifiers(xs, { transform: { column } }) {
return xs.map(x => escapeIdentifier(column.to ? column.to(x) : x)).join(',')
}
export const escapeIdentifier = function escape(str) {
return '"' + str.replace(/"/g, '""').replace(/\./g, '"."') + '"'
}
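// e.g. escapeIdentifier('my.table')  ->  '"my"."table"'
//      escapeIdentifier('a"b')       ->  '"a""b"'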
export const inferType = function inferType(x) {
return (
x instanceof Parameter ? x.type :
x instanceof Date ? 1184 :
x instanceof Uint8Array ? 17 :
(x === true || x === false) ? 16 :
typeof x === 'bigint' ? 20 :
Array.isArray(x) ? inferType(x[0]) :
0
)
}
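// e.g. inferType(new Date())  ->  1184 (timestamptz)     inferType(10n)  ->  20 (int8)
//      inferType([true])      ->  16 (first element)     inferType(42)   ->  0 (server decides)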
const escapeBackslash = /\\/g
const escapeQuote = /"/g
function arrayEscape(x) {
return x
.replace(escapeBackslash, '\\\\')
.replace(escapeQuote, '\\"')
}
export const arraySerializer = function arraySerializer(xs, serializer, options, typarray) {
if (Array.isArray(xs) === false)
return xs
if (!xs.length)
return '{}'
const first = xs[0]
// Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
const delimiter = typarray === 1020 ? ';' : ','
if (Array.isArray(first) && !first.type)
return '{' + xs.map(x => arraySerializer(x, serializer, options, typarray)).join(delimiter) + '}'
return '{' + xs.map(x => {
if (x === undefined) {
x = options.transform.undefined
if (x === undefined)
throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
}
return x === null
? 'null'
: '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
}).join(delimiter) + '}'
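// Sketch: every element is quoted and escaped, e.g.
//   arraySerializer([1, 2], x => '' + x, options, 1007)  ->  '{"1","2"}'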
}
const arrayParserState = {
i: 0,
char: null,
str: '',
quoted: false,
last: 0
}
export const arrayParser = function arrayParser(x, parser, typarray) {
arrayParserState.i = arrayParserState.last = 0
return arrayParserLoop(arrayParserState, x, parser, typarray)
}
function arrayParserLoop(s, x, parser, typarray) {
const xs = []
// Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
const delimiter = typarray === 1020 ? ';' : ','
for (; s.i < x.length; s.i++) {
s.char = x[s.i]
if (s.quoted) {
if (s.char === '\\') {
s.str += x[++s.i]
} else if (s.char === '"') {
xs.push(parser ? parser(s.str) : s.str)
s.str = ''
s.quoted = x[s.i + 1] === '"'
s.last = s.i + 2
} else {
s.str += s.char
}
} else if (s.char === '"') {
s.quoted = true
} else if (s.char === '{') {
s.last = ++s.i
xs.push(arrayParserLoop(s, x, parser, typarray))
} else if (s.char === '}') {
s.quoted = false
s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
s.last = s.i + 1
break
} else if (s.char === delimiter && s.p !== '}' && s.p !== '"') {
xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
s.last = s.i + 1
}
s.p = s.char
}
s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
return xs
}
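// Note the shared parser state: the caller is expected to have consumed the opening
// '{' (each '{' met here opens one extra nesting level), e.g.
//   arrayParser('1,2}', x => +x, 1007)          ->  [1, 2]
//   arrayParser('{1,2},{3,4}}', x => +x, 1007)  ->  [[1, 2], [3, 4]]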
export const toCamel = x => {
let str = x[0]
for (let i = 1; i < x.length; i++)
str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
return str
}
export const toPascal = x => {
let str = x[0].toUpperCase()
for (let i = 1; i < x.length; i++)
str += x[i] === '_' ? x[++i].toUpperCase() : x[i]
return str
}
export const toKebab = x => x.replace(/_/g, '-')
export const fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
export const fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
export const fromKebab = x => x.replace(/-/g, '_')
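// e.g. toCamel('created_at')   ->  'createdAt'    fromCamel('createdAt')   ->  'created_at'
//      toPascal('created_at')  ->  'CreatedAt'    fromKebab('created-at')  ->  'created_at'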
function createJsonTransform(fn) {
return function jsonTransform(x, column) {
return typeof x === 'object' && x !== null && (column.type === 114 || column.type === 3802)
? Array.isArray(x)
? x.map(x => jsonTransform(x, column))
: Object.entries(x).reduce((acc, [k, v]) => Object.assign(acc, { [fn(k)]: jsonTransform(v, column) }), {})
: x
}
}
toCamel.column = { from: toCamel }
toCamel.value = { from: createJsonTransform(toCamel) }
fromCamel.column = { to: fromCamel }
export const camel = { ...toCamel }
camel.column.to = fromCamel
toPascal.column = { from: toPascal }
toPascal.value = { from: createJsonTransform(toPascal) }
fromPascal.column = { to: fromPascal }
export const pascal = { ...toPascal }
pascal.column.to = fromPascal
toKebab.column = { from: toKebab }
toKebab.value = { from: createJsonTransform(toKebab) }
fromKebab.column = { to: fromKebab }
export const kebab = { ...toKebab }
kebab.column.to = fromKebab
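// Intended wiring (per the README): pass one of these objects as the `transform`
// option, e.g. postgres({ transform: postgres.camel }), to get camelCase column
// names in results and snake_case on the way back in.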

730
node_modules/postgres/types/index.d.ts generated vendored Normal file

@@ -0,0 +1,730 @@
import { Readable, Writable } from 'node:stream'
/**
* Establish a connection to a PostgreSQL server.
* @param options Connection options - default to the same as psql
* @returns A utility function to make queries to the server
*/
declare function postgres<T extends Record<string, postgres.PostgresType> = {}>(options?: postgres.Options<T> | undefined): postgres.Sql<Record<string, postgres.PostgresType> extends T ? {} : { [type in keyof T]: T[type] extends {
serialize: (value: infer R) => any,
parse: (raw: any) => infer R
} ? R : never }>
/**
* Establish a connection to a PostgreSQL server.
* @param url Connection string used for authentication
* @param options Connection options - default to the same as psql
* @returns A utility function to make queries to the server
*/
declare function postgres<T extends Record<string, postgres.PostgresType> = {}>(url: string, options?: postgres.Options<T> | undefined): postgres.Sql<Record<string, postgres.PostgresType> extends T ? {} : { [type in keyof T]: T[type] extends {
serialize: (value: infer R) => any,
parse: (raw: any) => infer R
} ? R : never }>
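// Usage sketch of the overloads above (connection string and options are placeholders):
//   import postgres = require('postgres')
//   const sql = postgres('postgres://user:pass@localhost:5432/mydb', { max: 10 })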
/**
* Connection options of Postgres.
*/
interface BaseOptions<T extends Record<string, postgres.PostgresType>> {
/** Postgres IP address(es) or domain name(s) */
host: string | string[] | undefined;
/** Postgres server port(s) */
port: number | number[] | undefined;
/** unix socket path (usually '/tmp') */
path: string | undefined;
/**
* Name of database to connect to
* @default process.env['PGDATABASE'] || options.user
*/
database: string;
/**
* Username of database user
* @default process.env['PGUSERNAME'] || process.env['PGUSER'] || require('os').userInfo().username
*/
user: string;
/**
* How to deal with ssl (can be a tls.connect option object)
* @default false
*/
ssl: 'require' | 'allow' | 'prefer' | 'verify-full' | boolean | object;
/**
* Max number of connections
* @default 10
*/
max: number;
/**
* Idle connection timeout in seconds
* @default process.env['PGIDLE_TIMEOUT']
*/
idle_timeout: number | undefined;
/**
* Connect timeout in seconds
* @default process.env['PGCONNECT_TIMEOUT']
*/
connect_timeout: number;
/** Array of custom types; see more in the README */
types: T;
/**
* Enables prepare mode.
* @default true
*/
prepare: boolean;
/**
* Called when a notice is received
* @default console.log
*/
onnotice: (notice: postgres.Notice) => void;
/** Called with (key, value) when a server parameter changes */
onparameter: (key: string, value: any) => void;
/** Called with (connection, query, parameters, paramTypes) */
debug: boolean | ((connection: number, query: string, parameters: any[], paramTypes: any[]) => void);
/** Transform hooks */
transform: {
/** Transforms outgoing undefined values */
undefined?: any
/** Transforms incoming and outgoing column names */
column?: ((column: string) => string) | {
/** Transform function for column names in result rows */
from?: ((column: string) => string) | undefined;
/** Transform function for column names in interpolated values passed to tagged template literal */
to?: ((column: string) => string) | undefined;
} | undefined;
/** Transforms incoming and outgoing row values */
value?: ((value: any) => any) | {
/** Transform function for values in result rows */
from?: ((value: unknown, column: postgres.Column<string>) => any) | undefined;
// to?: ((value: unknown) => any) | undefined; // unused
} | undefined;
/** Transforms entire rows */
row?: ((row: postgres.Row) => any) | {
/** Transform function for entire result rows */
from?: ((row: postgres.Row) => any) | undefined;
// to?: ((row: postgres.Row) => any) | undefined; // unused
} | undefined;
};
/** Connection parameters */
connection: Partial<postgres.ConnectionParameters>;
/**
* Use 'read-write' with multiple hosts to ensure only connecting to primary
* @default process.env['PGTARGETSESSIONATTRS']
*/
target_session_attrs: undefined | 'read-write' | 'read-only' | 'primary' | 'standby' | 'prefer-standby';
/**
* Automatically fetches types on connect
* @default true
*/
fetch_types: boolean;
/**
* Publications to subscribe to (only relevant when calling `sql.subscribe()`)
* @default 'alltables'
*/
publications: string;
onclose: (connId: number) => void;
backoff: boolean | ((attemptNum: number) => number);
max_lifetime: number | null;
keep_alive: number | null;
}
declare const PRIVATE: unique symbol;
declare class NotAPromise {
private [PRIVATE]: never; // prevent user-side interface implementation
/**
* @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
* @throws NOT_TAGGED_CALL
*/
private then(): never;
/**
* @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
* @throws NOT_TAGGED_CALL
*/
private catch(): never;
/**
* @deprecated This object isn't an SQL query, and therefore not a Promise; use the tagged template string syntax instead: ```await sql\`...\`;```
* @throws NOT_TAGGED_CALL
*/
private finally(): never;
}
type UnwrapPromiseArray<T> = T extends any[] ? {
[k in keyof T]: T[k] extends Promise<infer R> ? R : T[k]
} : T;
type Keys = string
type SerializableObject<T, K extends readonly any[], TT> =
number extends K['length'] ? {} :
Partial<(Record<Keys & (keyof T) & (K['length'] extends 0 ? string : K[number]), postgres.ParameterOrJSON<TT> | undefined> & Record<string, any>)>
type First<T, K extends readonly any[], TT> =
// Tagged template string call
T extends TemplateStringsArray ? TemplateStringsArray :
// Identifiers helper
T extends string ? string :
// Dynamic values helper (depth 2)
T extends readonly any[][] ? readonly postgres.EscapableArray[] :
// Insert/update helper (depth 2)
T extends readonly (object & infer R)[] ? (R extends postgres.SerializableParameter<TT> ? readonly postgres.SerializableParameter<TT>[] : readonly SerializableObject<R, K, TT>[]) :
// Dynamic values/ANY helper (depth 1)
T extends readonly any[] ? (readonly postgres.SerializableParameter<TT>[]) :
// Insert/update helper (depth 1)
T extends object ? SerializableObject<T, K, TT> :
// Unexpected type
never
type Rest<T> =
T extends TemplateStringsArray ? never : // force fallback to the tagged template function overload
T extends string ? readonly string[] :
T extends readonly any[][] ? readonly [] :
T extends readonly (object & infer R)[] ? (
readonly (Keys & keyof R)[] // sql(data, "prop", "prop2") syntax
|
[readonly (Keys & keyof R)[]] // sql(data, ["prop", "prop2"]) syntax
) :
T extends readonly any[] ? readonly [] :
T extends object ? (
readonly (Keys & keyof T)[] // sql(data, "prop", "prop2") syntax
|
[readonly (Keys & keyof T)[]] // sql(data, ["prop", "prop2"]) syntax
) :
any
type Return<T, K extends readonly any[]> =
[T] extends [TemplateStringsArray] ?
[unknown] extends [T] ? postgres.Helper<T, K> : // ensure no `PendingQuery` with `any` types
[TemplateStringsArray] extends [T] ? postgres.PendingQuery<postgres.Row[]> :
postgres.Helper<T, K> :
postgres.Helper<T, K>
declare namespace postgres {
class PostgresError extends Error {
name: 'PostgresError';
severity_local: string;
severity: string;
code: string;
position: string;
file: string;
line: string;
routine: string;
detail?: string | undefined;
hint?: string | undefined;
internal_position?: string | undefined;
internal_query?: string | undefined;
where?: string | undefined;
schema_name?: string | undefined;
table_name?: string | undefined;
column_name?: string | undefined;
data?: string | undefined;
type_name?: string | undefined;
constraint_name?: string | undefined;
/** Only set when debug is enabled */
query: string;
/** Only set when debug is enabled */
parameters: any[];
}
/**
* Convert a snake_case string to PascalCase.
* @param str The snake_case string to convert
* @returns The new string in PascalCase
*/
function toPascal(str: string): string;
namespace toPascal {
namespace column { function from(str: string): string; }
namespace value { function from(str: unknown, column: Column<string>): string }
}
/**
* Convert a PascalCase string to snake_case.
* @param str The PascalCase string to convert
* @returns The new string in snake_case
*/
function fromPascal(str: string): string;
namespace fromPascal {
namespace column { function to(str: string): string }
}
/**
* Convert snake_case to and from PascalCase.
*/
namespace pascal {
namespace column {
function from(str: string): string;
function to(str: string): string;
}
namespace value { function from(str: unknown, column: Column<string>): string }
}
/**
* Convert a snake_case string to camelCase.
* @param str The snake_case string to convert
* @returns The new string in camelCase
*/
function toCamel(str: string): string;
namespace toCamel {
namespace column { function from(str: string): string; }
namespace value { function from(str: unknown, column: Column<string>): string }
}
/**
* Convert a camelCase string to snake_case.
* @param str The camelCase string to convert
* @returns The new string in snake_case
*/
function fromCamel(str: string): string;
namespace fromCamel {
namespace column { function to(str: string): string }
}
/**
* Convert snake_case to and from camelCase.
*/
namespace camel {
namespace column {
function from(str: string): string;
function to(str: string): string;
}
namespace value { function from(str: unknown, column: Column<string>): string }
}
/**
* Convert a snake_case string to kebab-case.
* @param str The snake_case string to convert
* @returns The new string in kebab-case
*/
function toKebab(str: string): string;
namespace toKebab {
namespace column { function from(str: string): string; }
namespace value { function from(str: unknown, column: Column<string>): string }
}
/**
* Convert a kebab-case string to snake_case.
* @param str The kebab-case string to convert
* @returns The new string in snake_case
*/
function fromKebab(str: string): string;
namespace fromKebab {
namespace column { function to(str: string): string }
}
/**
* Convert snake_case to and from kebab-case.
*/
namespace kebab {
namespace column {
function from(str: string): string;
function to(str: string): string;
}
namespace value { function from(str: unknown, column: Column<string>): string }
}
const BigInt: PostgresType<bigint>;
interface PostgresType<T = any> {
to: number;
from: number[];
serialize: (value: T) => unknown;
parse: (raw: any) => T;
}
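// Sketch: custom types are passed via the `types` option; `postgres.BigInt` above is
// the one predefined instance:
//   const sql = postgres({ types: { bigint: postgres.BigInt } })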
interface ConnectionParameters {
/**
* Default application_name
* @default 'postgres.js'
*/
application_name: string;
default_transaction_isolation: 'read uncommitted' | 'read committed' | 'repeatable read' | 'serializable',
default_transaction_read_only: boolean,
default_transaction_deferrable: boolean,
statement_timeout: number,
lock_timeout: number,
idle_in_transaction_session_timeout: number,
idle_session_timeout: number,
DateStyle: string,
IntervalStyle: string,
TimeZone: string,
/** Other connection parameters */
[name: string]: string | number | boolean;
}
interface Options<T extends Record<string, postgres.PostgresType>> extends Partial<BaseOptions<T>> {
/** @inheritdoc */
host?: string | undefined;
/** @inheritdoc */
port?: number | undefined;
/** @inheritdoc */
path?: string | undefined;
/** Password of database user (an alias for `password`) */
pass?: Options<T>['password'] | undefined;
/**
* Password of database user
* @default process.env['PGPASSWORD']
*/
password?: string | (() => string | Promise<string>) | undefined;
/** Name of database to connect to (an alias for `database`) */
db?: Options<T>['database'] | undefined;
/** Username of database user (an alias for `user`) */
username?: Options<T>['user'] | undefined;
/** Postgres ip address or domain name (an alias for `host`) */
hostname?: Options<T>['host'] | undefined;
/**
* Disable prepared mode
* @deprecated use "prepare" option instead
*/
no_prepare?: boolean | undefined;
/**
* Idle connection timeout in seconds
* @deprecated use "idle_timeout" option instead
*/
timeout?: Options<T>['idle_timeout'] | undefined;
}
interface ParsedOptions<T extends Record<string, unknown> = {}> extends BaseOptions<{ [name in keyof T]: PostgresType<T[name]> }> {
/** @inheritdoc */
host: string[];
/** @inheritdoc */
port: number[];
/** @inheritdoc */
pass: null;
/** @inheritdoc */
transform: Transform;
serializers: Record<number, (value: any) => unknown>;
parsers: Record<number, (value: any) => unknown>;
}
interface Transform {
/** Transforms outgoing undefined values */
undefined: any
column: {
/** Transform function for column names in result rows */
from: ((column: string) => string) | undefined;
/** Transform function for column names in interpolated values passed to tagged template literal */
to: ((column: string) => string) | undefined;
};
value: {
/** Transform function for values in result rows */
from: ((value: any, column?: Column<string>) => any) | undefined;
/** Transform function for interpolated values passed to tagged template literal */
to: undefined; // (value: any) => any
};
row: {
/** Transform function for entire result rows */
from: ((row: postgres.Row) => any) | undefined;
to: undefined; // (row: postgres.Row) => any
};
}
interface Notice {
[field: string]: string;
}
interface Parameter<T = SerializableParameter> extends NotAPromise {
/**
* PostgreSQL OID of the type
*/
type: number;
/**
* Serialized value
*/
value: string | null;
/**
* Raw value to serialize
*/
raw: T | null;
}
interface ArrayParameter<T extends readonly any[] = readonly any[]> extends Parameter<T | T[]> {
array: true;
}
interface ConnectionError extends globalThis.Error {
code:
| 'CONNECTION_DESTROYED'
| 'CONNECT_TIMEOUT'
| 'CONNECTION_CLOSED'
| 'CONNECTION_ENDED';
errno: this['code'];
address: string;
port?: number | undefined;
}
interface NotSupportedError extends globalThis.Error {
code: 'MESSAGE_NOT_SUPPORTED';
name: string;
}
interface GenericError extends globalThis.Error {
code:
| '57014' // canceling statement due to user request
| 'NOT_TAGGED_CALL'
| 'UNDEFINED_VALUE'
| 'MAX_PARAMETERS_EXCEEDED'
| 'SASL_SIGNATURE_MISMATCH'
| 'UNSAFE_TRANSACTION';
message: string;
}
interface AuthNotImplementedError extends globalThis.Error {
code: 'AUTH_TYPE_NOT_IMPLEMENTED';
type: number | string;
message: string;
}
type Error = never
| PostgresError
| ConnectionError
| NotSupportedError
| GenericError
| AuthNotImplementedError;
interface ColumnInfo {
key: number;
name: string;
type: number;
parser?(raw: string): unknown;
atttypmod: number;
}
interface RelationInfo {
schema: string;
table: string;
columns: ColumnInfo[];
keys: ColumnInfo[];
}
type ReplicationEvent =
| { command: 'insert', relation: RelationInfo }
| { command: 'delete', relation: RelationInfo, key: boolean }
| { command: 'update', relation: RelationInfo, key: boolean, old: Row | null };
interface SubscriptionHandle {
unsubscribe(): void;
}
interface LargeObject {
writable(options?: {
highWaterMark?: number | undefined,
start?: number | undefined
} | undefined): Promise<Writable>;
readable(options?: {
highWaterMark?: number | undefined,
start?: number | undefined,
end?: number | undefined
} | undefined): Promise<Readable>;
close(): Promise<void>;
tell(): Promise<void>;
read(size: number): Promise<void>;
write(buffer: Uint8Array): Promise<[{ data: Uint8Array }]>;
truncate(size: number): Promise<void>;
seek(offset: number, whence?: number | undefined): Promise<void>;
size(): Promise<[{ position: bigint, size: bigint }]>;
}
type EscapableArray = (string | number)[]
type Serializable = never
| null
| boolean
| number
| string
| Date
| Uint8Array;
type SerializableParameter<T = never> = never
| T
| Serializable
| Helper<any>
| Parameter<any>
| ArrayParameter
| readonly SerializableParameter<T>[];
type JSONValue = // using a dedicated type to detect symbols, bigints, and other non serializable types
| null
| string
| number
| boolean
| Date // serialized as `string`
| readonly JSONValue[]
| { toJSON(): any } // `toJSON` called by `JSON.stringify`; not typing the return type, types definition is strict enough anyway
| {
readonly [prop: string | number]:
| undefined
| JSONValue
| ((...args: any) => any) // serialized as `undefined`
};
interface Row {
[column: string]: any;
}
type MaybeRow = Row | undefined;
interface Column<T extends string> {
name: T;
type: number;
table: number;
number: number;
parser?: ((raw: string) => unknown) | undefined;
}
type ColumnList<T> = (T extends string ? Column<T> : never)[];
interface State {
status: string;
pid: number;
secret: number;
}
interface Statement {
/** statement unique name */
name: string;
/** sql query */
string: string;
/** parameters types */
types: number[];
columns: ColumnList<string>;
}
interface ResultMeta<T extends number | null> {
count: T; // For tuples
command: string;
statement: Statement;
state: State;
}
interface ResultQueryMeta<T extends number | null, U> extends ResultMeta<T> {
columns: ColumnList<U>;
}
type ExecutionResult<T> = [] & ResultQueryMeta<number, keyof NonNullable<T>>;
type ValuesRowList<T extends readonly any[]> = T[number][keyof T[number]][][] & ResultQueryMeta<T['length'], keyof T[number]>;
type RawRowList<T extends readonly any[]> = Buffer[][] & Iterable<Buffer[][]> & ResultQueryMeta<T['length'], keyof T[number]>;
type RowList<T extends readonly any[]> = T & Iterable<NonNullable<T[number]>> & ResultQueryMeta<T['length'], keyof T[number]>;
interface PendingQueryModifiers<TRow extends readonly any[]> {
simple(): this;
readable(): Promise<Readable>;
writable(): Promise<Writable>;
execute(): this;
cancel(): void;
/**
* @deprecated `.stream` has been renamed to `.forEach`
* @throws
*/
stream(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): never;
forEach(cb: (row: NonNullable<TRow[number]>, result: ExecutionResult<TRow[number]>) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(rows?: number | undefined): AsyncIterable<NonNullable<TRow[number]>[]>;
cursor(cb: (row: [NonNullable<TRow[number]>]) => void): Promise<ExecutionResult<TRow[number]>>;
cursor(rows: number, cb: (rows: NonNullable<TRow[number]>[]) => void): Promise<ExecutionResult<TRow[number]>>;
}
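// Cursor sketch: stream rows in batches instead of buffering the whole result:
//   for await (const rows of sql`select * from generate_series(1, 1000)`.cursor(100)) {
//     // rows is an array of up to 100 rows
//   }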
interface PendingDescribeQuery extends Promise<Statement> {
}
interface PendingValuesQuery<TRow extends readonly MaybeRow[]> extends Promise<ValuesRowList<TRow>>, PendingQueryModifiers<TRow[number][keyof TRow[number]][][]> {
describe(): PendingDescribeQuery;
}
interface PendingRawQuery<TRow extends readonly MaybeRow[]> extends Promise<RawRowList<TRow>>, PendingQueryModifiers<Buffer[][]> {
}
interface PendingQuery<TRow extends readonly MaybeRow[]> extends Promise<RowList<TRow>>, PendingQueryModifiers<TRow> {
describe(): PendingDescribeQuery;
values(): PendingValuesQuery<TRow>;
raw(): PendingRawQuery<TRow>;
}
interface PendingRequest extends Promise<[] & ResultMeta<null>> { }
interface ListenRequest extends Promise<ListenMeta> { }
interface ListenMeta extends ResultMeta<null> {
unlisten(): Promise<void>
}
interface Helper<T, U extends readonly any[] = T[]> extends NotAPromise {
first: T;
rest: U;
}
type Fragment = PendingQuery<any>
type ParameterOrJSON<T> =
| SerializableParameter<T>
| JSONValue
type ParameterOrFragment<T> =
| SerializableParameter<T>
| Fragment
| Fragment[]
interface Sql<TTypes extends Record<string, unknown> = {}> {
/**
* Query helper
* @param first Defines how the helper behaves
* @param rest Other optional arguments, depending on the helper type
* @returns A helper object usable as a tagged template parameter in sql queries
*/
<T, K extends Rest<T>>(first: T & First<T, K, TTypes[keyof TTypes]>, ...rest: K): Return<T, K>;
/**
* Execute the SQL query passed as a template string. Can only be used as template string tag.
* @param template The template generated from the template string
* @param parameters Interpolated values of the template string
* @returns A promise resolving to the result of your query
*/
<T extends readonly (object | undefined)[] = Row[]>(template: TemplateStringsArray, ...parameters: readonly (ParameterOrFragment<TTypes[keyof TTypes]>)[]): PendingQuery<T>;
CLOSE: {};
END: this['CLOSE'];
PostgresError: typeof PostgresError;
options: ParsedOptions<TTypes>;
parameters: ConnectionParameters;
types: this['typed'];
typed: (<T>(value: T, oid: number) => Parameter<T>) & {
[name in keyof TTypes]: (value: TTypes[name]) => postgres.Parameter<TTypes[name]>
};
unsafe<T extends any[] = (Row & Iterable<Row>)[]>(query: string, parameters?: (ParameterOrJSON<TTypes[keyof TTypes]>)[] | undefined, queryOptions?: UnsafeQueryOptions | undefined): PendingQuery<T>;
end(options?: { timeout?: number | undefined } | undefined): Promise<void>;
listen(channel: string, onnotify: (value: string) => void, onlisten?: (() => void) | undefined): ListenRequest;
notify(channel: string, payload: string): PendingRequest;
subscribe(event: string, cb: (row: Row | null, info: ReplicationEvent) => void, onsubscribe?: (() => void), onerror?: (() => any)): Promise<SubscriptionHandle>;
largeObject(oid?: number | undefined, /** @default 0x00020000 | 0x00040000 */ mode?: number | undefined): Promise<LargeObject>;
begin<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
begin<T>(options: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
array<T extends SerializableParameter<TTypes[keyof TTypes]>[] = SerializableParameter<TTypes[keyof TTypes]>[]>(value: T, type?: number | undefined): ArrayParameter<T>;
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, options?: { cache?: boolean | undefined } | undefined): PendingQuery<T>;
file<T extends readonly any[] = Row[]>(path: string | Buffer | URL | number, args: (ParameterOrJSON<TTypes[keyof TTypes]>)[], options?: { cache?: boolean | undefined } | undefined): PendingQuery<T>;
json(value: JSONValue): Parameter;
reserve(): Promise<ReservedSql<TTypes>>
}
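// Usage sketch of the two call shapes declared above:
//   await sql`select * from users where id = ${ 1 }`   // tagged template -> PendingQuery
//   sql`select ${ sql('name') } from users`            // string first argument -> identifier helper
//   await sql.begin(sql => sql`select 1`)              // transaction callback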
interface UnsafeQueryOptions {
/**
* When true, executes the query as a prepared statement.
* @default false
*/
prepare?: boolean | undefined;
}
interface TransactionSql<TTypes extends Record<string, unknown> = {}> extends Sql<TTypes> {
savepoint<T>(cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
savepoint<T>(name: string, cb: (sql: TransactionSql<TTypes>) => T | Promise<T>): Promise<UnwrapPromiseArray<T>>;
prepare<T>(name: string): Promise<UnwrapPromiseArray<T>>;
}
interface ReservedSql<TTypes extends Record<string, unknown> = {}> extends Sql<TTypes> {
release(): void;
}
}
export = postgres;

5
node_modules/postgres/types/package.json generated vendored Normal file

@@ -0,0 +1,5 @@
{
"devDependencies": {
"@types/node": "^16"
}
}

14
node_modules/postgres/types/tsconfig.json generated vendored Normal file

@@ -0,0 +1,14 @@
{
"compilerOptions": {
"lib": [
"ES2015"
],
"types": [
"node"
],
"esModuleInterop": true,
"strict": true,
"noImplicitAny": true,
"exactOptionalPropertyTypes": true
}
}