first commit

This commit is contained in:
Myk
2025-07-31 23:47:20 +03:00
commit 2186b278a0
5149 changed files with 537218 additions and 0 deletions

1
node_modules/postgres/cjs/package.json generated vendored Normal file
View File

@@ -0,0 +1 @@
{"type":"commonjs"}

78
node_modules/postgres/cjs/src/bytes.js generated vendored Normal file
View File

@@ -0,0 +1,78 @@
// Binary wire-protocol message writer. One shared, growable Buffer is used to
// build a single frontend message at a time; `b` is both the builder object
// and (as `reset`) a callable that restarts the write offset.
// NOTE(review): the shared buffer means only one message can be under
// construction at a time — presumably callers serialize access; verify in
// connection.js.
const size = 256
let buffer = Buffer.allocUnsafe(size)

// One starter per frontend message type char ('B', 'C', 'c', …): writes the
// type byte at offset 0 and skips the 4-byte length field backfilled by end().
const messages = 'BCcDdEFfHPpQSX'.split('').reduce((acc, x) => {
  const v = x.charCodeAt(0)
  acc[x] = () => {
    buffer[0] = v
    b.i = 5
    return b
  }
  return acc
}, {})

const b = Object.assign(reset, messages, {
  N: String.fromCharCode(0), // nul terminator convenience
  i: 0, // current write offset into `buffer`
  // Advances the offset without writing (reserve space).
  inc(x) {
    b.i += x
    return b
  },
  // Appends a utf8-encoded string.
  str(x) {
    const length = Buffer.byteLength(x)
    fit(length)
    b.i += buffer.write(x, b.i, length, 'utf8')
    return b
  },
  // Appends an unsigned 16-bit big-endian integer.
  i16(x) {
    fit(2)
    buffer.writeUInt16BE(x, b.i)
    b.i += 2
    return b
  },
  // Appends an unsigned 32-bit big-endian integer, or — when an absolute
  // offset `i` is given — overwrites in place (used to backfill lengths).
  i32(x, i) {
    if (i || i === 0) {
      buffer.writeUInt32BE(x, i)
      return b
    }
    fit(4)
    buffer.writeUInt32BE(x, b.i)
    b.i += 4
    return b
  },
  // Appends `x` zero bytes.
  z(x) {
    fit(x)
    buffer.fill(0, b.i, b.i + x)
    b.i += x
    return b
  },
  // Appends a raw Buffer verbatim.
  raw(x) {
    buffer = Buffer.concat([buffer.subarray(0, b.i), x])
    b.i = buffer.length
    return b
  },
  // Finalizes the message: backfills the length at offset `at` (the length
  // excludes the type byte) and returns the finished slice. A fresh internal
  // buffer is allocated so the returned view stays valid.
  end(at = 1) {
    buffer.writeUInt32BE(b.i - at, at)
    const out = buffer.subarray(0, b.i)
    b.i = 0
    buffer = Buffer.allocUnsafe(size)
    return out
  }
})

module.exports = b

// Grows the buffer (old size * 1.5 + requested bytes) when `x` more bytes
// would not fit at the current offset.
function fit(x) {
  if (buffer.length - b.i < x) {
    const prev = buffer
      , length = prev.length
    buffer = Buffer.allocUnsafe(length + (length >> 1) + x)
    prev.copy(buffer)
  }
}

// Calling b() itself resets the write offset for an untyped message.
function reset() {
  b.i = 0
  return b
}

1042
node_modules/postgres/cjs/src/connection.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

53
node_modules/postgres/cjs/src/errors.js generated vendored Normal file
View File

@@ -0,0 +1,53 @@
// Error subclass for errors reported by the PostgreSQL backend. Every field
// of the wire-level error payload (code, severity, detail, …) is copied onto
// the instance.
const PostgresError = module.exports.PostgresError = class PostgresError extends Error {
  constructor(x) {
    super(x.message)
    // Set name before Object.assign so an explicit x.name wins.
    this.name = this.constructor.name
    Object.assign(this, x)
  }
}
// Factory functions for the error kinds produced by this client.
const Errors = module.exports.Errors = {
  connection,
  postgres,
  generic,
  notSupported
}
// Builds a Node-style network error (code/errno/address and, for TCP
// targets, port) describing a failed connection attempt. When a `socket`
// object is given its host/port take precedence over `options`.
function connection(x, options, socket) {
  const source = socket || options
  const target = options.path || (source.host + ':' + source.port)
  const error = new Error('write ' + x + ' ' + target)
  error.code = x
  error.errno = x
  error.address = options.path || source.host
  if (!options.path)
    error.port = source.port
  Error.captureStackTrace(error, connection)
  return error
}
// Wraps a raw backend error payload in a PostgresError with a trimmed stack.
function postgres(x) {
  const error = new PostgresError(x)
  Error.captureStackTrace(error, postgres)
  return error
}
// Creates a plain Error tagged with a machine-readable `code` property,
// with its message formatted as "CODE: message".
function generic(code, message) {
  const error = new Error(code + ': ' + message)
  error.code = code
  Error.captureStackTrace(error, generic)
  return error
}
/* c8 ignore next 10 */
// Signals that an incoming backend ('B') message type has no handler in this
// client; the offending message type is exposed as `name`.
function notSupported(x) {
  const error = new Error(x + ' (B) is not supported')
  error.code = 'MESSAGE_NOT_SUPPORTED'
  error.name = x
  Error.captureStackTrace(error, notSupported)
  return error
}

566
node_modules/postgres/cjs/src/index.js generated vendored Normal file
View File

@@ -0,0 +1,566 @@
const os = require('os')
const fs = require('fs')
const {
mergeUserTypes,
inferType,
Parameter,
Identifier,
Builder,
toPascal,
pascal,
toCamel,
camel,
toKebab,
kebab,
fromPascal,
fromCamel,
fromKebab
} = require('./types.js')
const Connection = require('./connection.js')
const { Query, CLOSE } = require('./query.js')
const Queue = require('./queue.js')
const { Errors, PostgresError } = require('./errors.js')
const Subscribe = require('./subscribe.js')
const largeObject = require('./large.js')
// Static helpers and naming transforms exposed on the Postgres factory.
Object.assign(Postgres, {
  PostgresError,
  toPascal,
  pascal,
  toCamel,
  camel,
  toKebab,
  kebab,
  fromPascal,
  fromCamel,
  fromKebab,
  // Opt-in custom type mapping int8 (oid 20) <-> native BigInt.
  BigInt: {
    to: 20,
    from: [20],
    parse: x => BigInt(x), // eslint-disable-line
    serialize: x => x.toString()
  }
})

module.exports = Postgres
// Factory producing the `sql` tag function plus its connection pool. All pool
// state (the queues below) lives in this closure; connections migrate between
// queues via move() as their status changes.
function Postgres(a, b) {
  const options = parseOptions(a, b)
    , subscribe = options.no_subscribe || Subscribe(Postgres, { ...options })
  let ending = false
  // Pool bookkeeping: `queries` holds waiting queries, the rest hold
  // connections keyed by state (closed = not connected, open = idle,
  // busy = pipelining, full = pipeline saturated, reserved = leased out).
  const queries = Queue()
    , connecting = Queue()
    , reserved = Queue()
    , closed = Queue()
    , ended = Queue()
    , open = Queue()
    , busy = Queue()
    , full = Queue()
    , queues = { connecting, reserved, closed, ended, open, busy, full }
  const connections = [...Array(options.max)].map(() => Connection(options, queues, { onopen, onend, onclose }))
  const sql = Sql(handler)
  Object.assign(sql, {
    get parameters() { return options.parameters },
    largeObject: largeObject.bind(null, sql),
    subscribe,
    CLOSE,
    END: CLOSE,
    PostgresError,
    options,
    reserve,
    listen,
    begin,
    close,
    end
  })
  return sql
  // Builds a `sql` tag function bound to a specific query handler (the pool
  // handler, a reserved connection, or a transaction scope).
  function Sql(handler) {
    handler.debug = options.debug
    Object.entries(options.types).reduce((acc, [name, type]) => {
      acc[name] = (x) => new Parameter(x, type.to)
      return acc
    }, typed)
    Object.assign(sql, {
      types: typed,
      typed,
      unsafe,
      notify,
      array,
      json,
      file
    })
    return sql
    function typed(value, type) {
      return new Parameter(value, type)
    }
    // Tagged template → Query; plain string → Identifier; object → Builder.
    function sql(strings, ...args) {
      const query = strings && Array.isArray(strings.raw)
        ? new Query(strings, args, handler, cancel)
        : typeof strings === 'string' && !args.length
          ? new Identifier(options.transform.column.to ? options.transform.column.to(strings) : strings)
          : new Builder(strings, args)
      return query
    }
    // Runs a raw (non-tagged) query string; unprepared by default.
    function unsafe(string, args = [], options = {}) {
      arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
      const query = new Query([string], args, handler, cancel, {
        prepare: false,
        ...options,
        simple: 'simple' in options ? options.simple : args.length === 0
      })
      return query
    }
    // Runs the contents of a .sql file; the file is read lazily on execute.
    function file(path, args = [], options = {}) {
      arguments.length === 2 && !Array.isArray(args) && (options = args, args = [])
      const query = new Query([], args, (query) => {
        fs.readFile(path, 'utf8', (err, string) => {
          if (err)
            return query.reject(err)
          query.strings = [string]
          handler(query)
        })
      }, cancel, {
        ...options,
        simple: 'simple' in options ? options.simple : args.length === 0
      })
      return query
    }
  }
  // LISTEN support: lazily creates one dedicated single-connection instance
  // (listen.sql) shared by all channels, and re-subscribes every channel if
  // that connection closes.
  async function listen(name, fn, onlisten) {
    const listener = { fn, onlisten }
    const sql = listen.sql || (listen.sql = Postgres({
      ...options,
      max: 1,
      idle_timeout: null,
      max_lifetime: null,
      fetch_types: false,
      onclose() {
        Object.entries(listen.channels).forEach(([name, { listeners }]) => {
          delete listen.channels[name]
          Promise.all(listeners.map(l => listen(name, l.fn, l.onlisten).catch(() => { /* noop */ })))
        })
      },
      onnotify(c, x) {
        c in listen.channels && listen.channels[c].listeners.forEach(l => l.fn(x))
      }
    }))
    const channels = listen.channels || (listen.channels = {})
      , exists = name in channels
    if (exists) {
      channels[name].listeners.push(listener)
      const result = await channels[name].result
      listener.onlisten && listener.onlisten()
      return { state: result.state, unlisten }
    }
    // Channel identifier is double-quote escaped before interpolation.
    channels[name] = { result: sql`listen ${
      sql.unsafe('"' + name.replace(/"/g, '""') + '"')
    }`, listeners: [listener] }
    const result = await channels[name].result
    listener.onlisten && listener.onlisten()
    return { state: result.state, unlisten }
    // Removes this listener; issues UNLISTEN once the channel is empty.
    async function unlisten() {
      if (name in channels === false)
        return
      channels[name].listeners = channels[name].listeners.filter(x => x !== listener)
      if (channels[name].listeners.length)
        return
      delete channels[name]
      return sql`unlisten ${
        sql.unsafe('"' + name.replace(/"/g, '""') + '"')
      }`
    }
  }
  // NOTIFY convenience; payload is coerced to a string.
  async function notify(channel, payload) {
    return await sql`select pg_notify(${ channel }, ${ '' + payload })`
  }
  // Leases one connection out of the pool, returning a `sql` bound to it
  // plus a release() that hands the connection back.
  async function reserve() {
    const queue = Queue()
    const c = open.length
      ? open.shift()
      : await new Promise((resolve, reject) => {
        const query = { reserve: resolve, reject }
        queries.push(query)
        closed.length && connect(closed.shift(), query)
      })
    move(c, reserved)
    c.reserved = () => queue.length
      ? c.execute(queue.shift())
      : move(c, reserved)
    c.reserved.release = true
    const sql = Sql(handler)
    sql.release = () => {
      c.reserved = null
      onopen(c)
    }
    return sql
    // Queries on a reserved connection queue locally while it is saturated.
    function handler(q) {
      c.queue === full
        ? queue.push(q)
        : c.execute(q) || move(c, full)
    }
  }
  // Transaction scope: BEGIN on a dedicated connection, run `fn`, then
  // COMMIT/ROLLBACK (or PREPARE TRANSACTION when sql.prepare was called).
  async function begin(options, fn) {
    !fn && (fn = options, options = '')
    const queries = Queue()
    let savepoints = 0
      , connection
      , prepare = null
    try {
      await sql.unsafe('begin ' + options.replace(/[^a-z ]/ig, ''), [], { onexecute }).execute()
      // Racing against connection.onclose rejects the transaction promise if
      // the connection dies mid-transaction.
      return await Promise.race([
        scope(connection, fn),
        new Promise((_, reject) => connection.onclose = reject)
      ])
    } catch (error) {
      // NOTE(review): this catch only rethrows — it looks redundant; confirm
      // against upstream before simplifying.
      throw error
    }
    async function scope(c, fn, name) {
      const sql = Sql(handler)
      sql.savepoint = savepoint
      // NOTE(review): .replace() is called without a replacement argument
      // here, which substitutes the string "undefined" for matches — verify
      // intended sanitization behavior.
      sql.prepare = x => prepare = x.replace(/[^a-z0-9$-_. ]/gi)
      let uncaughtError
        , result
      name && await sql`savepoint ${ sql(name) }`
      try {
        result = await new Promise((resolve, reject) => {
          const x = fn(sql)
          Promise.resolve(Array.isArray(x) ? Promise.all(x) : x).then(resolve, reject)
        })
        if (uncaughtError)
          throw uncaughtError
      } catch (e) {
        await (name
          ? sql`rollback to ${ sql(name) }`
          : sql`rollback`
        )
        // 25P02 = in_failed_sql_transaction: surface the original error.
        throw e instanceof PostgresError && e.code === '25P02' && uncaughtError || e
      }
      if (!name) {
        prepare
          ? await sql`prepare transaction '${ sql.unsafe(prepare) }'`
          : await sql`commit`
      }
      return result
      // Nested transaction via savepoints, named s0, s1_label, …
      function savepoint(name, fn) {
        if (name && Array.isArray(name.raw))
          return savepoint(sql => sql.apply(sql, arguments))
        arguments.length === 1 && (fn = name, name = null)
        return scope(c, fn, 's' + savepoints++ + (name ? '_' + name : ''))
      }
      // Captures the first query error so the transaction can rollback with it.
      function handler(q) {
        q.catch(e => uncaughtError || (uncaughtError = e))
        c.queue === full
          ? queries.push(q)
          : c.execute(q) || move(c, full)
      }
    }
    // Pins the connection chosen for BEGIN and keeps it reserved for the
    // lifetime of the transaction.
    function onexecute(c) {
      connection = c
      move(c, reserved)
      c.reserved = () => queries.length
        ? c.execute(queries.shift())
        : move(c, reserved)
    }
  }
  // Moves a connection between state queues; the idle timer only runs while
  // a connection sits in `open`.
  function move(c, queue) {
    c.queue.remove(c)
    queue.push(c)
    c.queue = queue
    queue === open
      ? c.idleTimer.start()
      : c.idleTimer.cancel()
    return c
  }
  // Explicitly typed json parameter (oid 3802 = jsonb).
  function json(x) {
    return new Parameter(x, 3802)
  }
  // Array parameter; infers the element type when not provided (25 = text).
  function array(x, type) {
    if (!Array.isArray(x))
      return array(Array.from(arguments))
    return new Parameter(x, type || (x.length ? inferType(x) || 25 : 0), options.shared.typeArrayMap)
  }
  // Pool-level query dispatch: prefer an idle connection, then dial a closed
  // one, then pipeline onto a busy one, else queue.
  function handler(query) {
    if (ending)
      return query.reject(Errors.connection('CONNECTION_ENDED', options, options))
    if (open.length)
      return go(open.shift(), query)
    if (closed.length)
      return connect(closed.shift(), query)
    busy.length
      ? go(busy.shift(), query)
      : queries.push(query)
  }
  // execute() returning false means the pipeline is saturated.
  function go(c, query) {
    return c.execute(query)
      ? move(c, busy)
      : move(c, full)
  }
  // Cancels a query: in-flight queries get a backend cancel request on a
  // fresh connection; queued ones are rejected locally with 57014.
  function cancel(query) {
    return new Promise((resolve, reject) => {
      query.state
        ? query.active
          ? Connection(options).cancel(query.state, resolve, reject)
          : query.cancelled = { resolve, reject }
        : (
          queries.remove(query),
          query.cancelled = true,
          query.reject(Errors.generic('57014', 'canceling statement due to user request')),
          resolve()
        )
    })
  }
  // Graceful shutdown; with a timeout, connections are terminated after
  // `timeout` seconds. `await 1` defers one microtask so pending handler
  // calls observe `ending`.
  async function end({ timeout = null } = {}) {
    if (ending)
      return ending
    await 1
    let timer
    return ending = Promise.race([
      new Promise(r => timeout !== null && (timer = setTimeout(destroy, timeout * 1000, r))),
      Promise.all(connections.map(c => c.end()).concat(
        listen.sql ? listen.sql.end({ timeout: 0 }) : [],
        subscribe.sql ? subscribe.sql.end({ timeout: 0 }) : []
      ))
    ]).then(() => clearTimeout(timer))
  }
  // Ends all pool connections without marking the instance as ending.
  async function close() {
    await Promise.all(connections.map(c => c.end()))
  }
  // Hard shutdown: terminate sockets and reject everything still queued.
  async function destroy(resolve) {
    await Promise.all(connections.map(c => c.terminate()))
    while (queries.length)
      queries.shift().reject(Errors.connection('CONNECTION_DESTROYED', options))
    resolve()
  }
  function connect(c, query) {
    move(c, connecting)
    c.connect(query)
    return c
  }
  function onend(c) {
    move(c, ended)
  }
  // A connection became available: drain a fair share of the queued queries
  // (split across connections still connecting), honoring reserve requests.
  function onopen(c) {
    if (queries.length === 0)
      return move(c, open)
    let max = Math.ceil(queries.length / (connecting.length + 1))
      , ready = true
    while (ready && queries.length && max-- > 0) {
      const query = queries.shift()
      if (query.reserve)
        return query.reserve(c)
      ready = c.execute(query)
    }
    ready
      ? move(c, busy)
      : move(c, full)
  }
  // A connection closed: notify interested parties and immediately redial if
  // work is still queued.
  function onclose(c, e) {
    move(c, closed)
    c.reserved = null
    c.onclose && (c.onclose(e), c.onclose = null)
    options.onclose && options.onclose(c.id)
    queries.length && connect(c, queries.shift())
  }
}
// Normalizes the (url, options) constructor arguments into the internal
// options object. Precedence for most settings: explicit option > url query
// parameter > PG* environment variable > default. An already-parsed options
// object (detected via .shared) is passed through untouched.
function parseOptions(a, b) {
  if (a && a.shared)
    return a
  const env = process.env // eslint-disable-line
    , o = (!a || typeof a === 'string' ? b : a) || {}
    , { url, multihost } = parseUrl(a)
    , query = [...url.searchParams].reduce((a, [b, c]) => (a[b] = c, a), {})
    , host = o.hostname || o.host || multihost || url.hostname || env.PGHOST || 'localhost'
    , port = o.port || url.port || env.PGPORT || 5432
    , user = o.user || o.username || url.username || env.PGUSERNAME || env.PGUSER || osUsername()
  o.no_prepare && (o.prepare = false)
  query.sslmode && (query.ssl = query.sslmode, delete query.sslmode)
  'timeout' in o && (console.log('The timeout option is deprecated, use idle_timeout instead'), o.idle_timeout = o.timeout) // eslint-disable-line
  query.sslrootcert === 'system' && (query.ssl = 'verify-full')
  // Options coerced to numbers when supplied as strings (url/env values).
  const ints = ['idle_timeout', 'connect_timeout', 'max_lifetime', 'max_pipeline', 'backoff', 'keep_alive']
  const defaults = {
    max             : 10,
    ssl             : false,
    idle_timeout    : null,
    connect_timeout : 30,
    max_lifetime    : max_lifetime,
    max_pipeline    : 100,
    backoff         : backoff,
    keep_alive      : 60,
    prepare         : true,
    debug           : false,
    fetch_types     : true,
    publications    : 'alltables',
    target_session_attrs: null
  }
  return {
    // host/port support comma-separated multihost values ('h1:p1,h2:p2').
    host  : Array.isArray(host) ? host : host.split(',').map(x => x.split(':')[0]),
    port  : Array.isArray(port) ? port : host.split(',').map(x => parseInt(x.split(':')[1] || port)),
    // A host containing '/' is treated as a unix socket directory.
    path  : o.path || host.indexOf('/') > -1 && host + '/.s.PGSQL.' + port,
    database : o.database || o.db || (url.pathname || '').slice(1) || env.PGDATABASE || user,
    user  : user,
    pass  : o.pass || o.password || url.password || env.PGPASSWORD || '',
    ...Object.entries(defaults).reduce(
      (acc, [k, d]) => {
        // 'disable'/'false' in the url mean the boolean false (e.g. ssl).
        const value = k in o ? o[k] : k in query
          ? (query[k] === 'disable' || query[k] === 'false' ? false : query[k])
          : env['PG' + k.toUpperCase()] || d
        acc[k] = typeof value === 'string' && ints.includes(k)
          ? +value
          : value
        return acc
      },
      {}
    ),
    // Unrecognized query params become startup-packet connection parameters.
    connection  : {
      application_name: env.PGAPPNAME || 'postgres.js',
      ...o.connection,
      ...Object.entries(query).reduce((acc, [k, v]) => (k in defaults || (acc[k] = v), acc), {})
    },
    types : o.types || {},
    target_session_attrs: tsa(o, url, env),
    onnotice : o.onnotice,
    onnotify : o.onnotify,
    onclose  : o.onclose,
    onparameter : o.onparameter,
    socket : o.socket,
    transform : parseTransform(o.transform || { undefined: undefined }),
    parameters : {},
    shared : { retries: 0, typeArrayMap: {} },
    ...mergeUserTypes(o.types)
  }
}
// Resolves and validates target_session_attrs from explicit options, the
// connection url, or the PGTARGETSESSIONATTRS environment variable.
// Returns the (possibly undefined) value, throwing on unsupported settings.
function tsa(o, url, env) {
  const value = o.target_session_attrs || url.searchParams.get('target_session_attrs') || env.PGTARGETSESSIONATTRS
  const supported = ['read-write', 'read-only', 'primary', 'standby', 'prefer-standby']
  if (value && !supported.includes(value))
    throw new Error('target_session_attrs ' + value + ' is not supported')
  return value
}
// Default reconnect backoff in seconds: exponential (3^retries / 100) with
// random jitter in [0.5, 1), capped at 20 seconds.
function backoff(retries) {
  const jitter = 0.5 + Math.random() / 2
  const cappedDelay = Math.min(3 ** retries / 100, 20)
  return jitter * cappedDelay
}
// Default max_lifetime in seconds: a random duration between 30 and 60
// minutes, spreading the reconnects of pooled connections over time.
function max_lifetime() {
  const minutes = 30 + Math.random() * 30
  return minutes * 60
}
// Normalizes the user-supplied transform option into a uniform shape with
// explicit from/to entries for column, value and row. A bare function is
// shorthand for { from: fn }.
function parseTransform(x) {
  const directions = (t) => ({
    from: typeof t === 'function' ? t : t && t.from,
    to: t && t.to
  })
  return {
    undefined: x.undefined,
    column: directions(x.column),
    value: directions(x.value),
    row: directions(x.row)
  }
}
// Parses a postgres:// connection string into url-like parts. A multi-host
// authority ("h1,h2:5433") is not a valid URL, so the host list is swapped
// for its first entry before parsing and returned separately as `multihost`.
// Non-string input yields an empty stand-in with a Map for searchParams.
function parseUrl(url) {
  if (!url || typeof url !== 'string')
    return { url: { searchParams: new Map() } }
  let host = url
  host = host.slice(host.indexOf('://') + 3).split(/[?/]/)[0]
  host = decodeURIComponent(host.slice(host.indexOf('@') + 1))
  const parsed = new URL(url.replace(host, host.split(',')[0]))
  const { username, password, host: h, hostname, port, pathname, searchParams } = parsed
  return {
    url: {
      username: decodeURIComponent(username),
      password: decodeURIComponent(password),
      host: h,
      hostname,
      port,
      pathname,
      searchParams
    },
    multihost: host.indexOf(',') > -1 && host
  }
}
// Best-effort lookup of the operating system user name, used as the default
// database user. os.userInfo() can throw (e.g. when the process has no entry
// in the passwd database), in which case common env variables are consulted.
function osUsername() {
  try {
    return os.userInfo().username // eslint-disable-line
  } catch (_) {
    const env = process.env // eslint-disable-line
    return env.USERNAME || env.USER || env.LOGNAME
  }
}

70
node_modules/postgres/cjs/src/large.js generated vendored Normal file
View File

@@ -0,0 +1,70 @@
const Stream = require('stream')

// Large-object helper. Opens (or creates, when no oid is given) a large
// object inside a dedicated transaction and resolves with a handle exposing
// read/write/seek/stream operations. The transaction is held open until
// close() is called, which settles the internal `finish` promise.
module.exports = largeObject;function largeObject(sql, oid, mode = 0x00020000 | 0x00040000) {
  return new Promise(async(resolve, reject) => {
    await sql.begin(async sql => {
      let finish
      // No oid supplied: create a new large object first.
      !oid && ([{ oid }] = await sql`select lo_creat(-1) as oid`)
      const [{ fd }] = await sql`select lo_open(${ oid }, ${ mode }) as fd`
      const lo = {
        writable,
        readable,
        close     : () => sql`select lo_close(${ fd })`.then(finish),
        tell      : () => sql`select lo_tell64(${ fd })`,
        read      : (x) => sql`select loread(${ fd }, ${ x }) as data`,
        write     : (x) => sql`select lowrite(${ fd }, ${ x })`,
        truncate  : (x) => sql`select lo_truncate64(${ fd }, ${ x })`,
        seek      : (x, whence = 0) => sql`select lo_lseek64(${ fd }, ${ x }, ${ whence })`,
        size      : () => sql`
          select
            lo_lseek64(${ fd }, location, 0) as position,
            seek.size
          from (
            select
              lo_lseek64($1, 0, 2) as size,
              tell.location
            from (select lo_tell64($1) as location) tell
          ) seek
        `
      }
      resolve(lo)
      // Keeps the transaction callback pending until close() runs `finish`.
      return new Promise(async r => finish = r)
      // Readable stream over the object contents from `start` to `end`.
      async function readable({
        highWaterMark = 2048 * 8,
        start = 0,
        end = Infinity
      } = {}) {
        let max = end - start
        start && await lo.seek(start)
        return new Stream.Readable({
          highWaterMark,
          async read(size) {
            const l = size > max ? size - max : size
            max -= size
            const [{ data }] = await lo.read(l)
            this.push(data)
            // A short read signals the end of the object.
            if (data.length < size)
              this.push(null)
          }
        })
      }
      // Writable stream appending at `start` (default: current position 0).
      async function writable({
        highWaterMark = 2048 * 8,
        start = 0
      } = {}) {
        start && await lo.seek(start)
        return new Stream.Writable({
          highWaterMark,
          write(chunk, encoding, callback) {
            lo.write(chunk).then(() => callback(), callback)
          }
        })
      }
    }).catch(reject)
  })
}

173
node_modules/postgres/cjs/src/query.js generated vendored Normal file
View File

@@ -0,0 +1,173 @@
// Per-template-literal caches so capturing a call-site stack for
// `query.origin` costs only once per unique tagged template.
const originCache = new Map()
  , originStackCache = new Map()
  , originError = Symbol('OriginError')

// Sentinel used to terminate a cursor iteration early.
const CLOSE = module.exports.CLOSE = {}
// Lazy query object. Extends Promise, but execution is deferred one
// microtask (see handle()) so chained modifiers (.simple(), .raw(), …) can
// run before the query is handed to its handler.
const Query = module.exports.Query = class Query extends Promise {
  constructor(strings, args, handler, canceller, options = {}) {
    let resolve
      , reject
    super((a, b) => {
      resolve = a
      reject = b
    })
    this.tagged = Array.isArray(strings.raw)
    this.strings = strings
    this.args = args
    this.handler = handler
    this.canceller = canceller
    this.options = options
    this.state = null
    this.statement = null
    // Wrap resolve/reject so `active` is cleared when the query settles.
    this.resolve = x => (this.active = false, resolve(x))
    this.reject = x => (this.active = false, reject(x))
    this.active = false
    this.cancelled = null
    this.executed = false
    this.signature = ''
    // Full stack in debug mode, otherwise a cached shallow Error per literal.
    this[originError] = this.handler.debug
      ? new Error()
      : this.tagged && cachedError(this.strings)
  }
  // Human-readable call-site stack for error reporting.
  get origin() {
    return (this.handler.debug
      ? this[originError].stack
      : this.tagged && originStackCache.has(this.strings)
        ? originStackCache.get(this.strings)
        : originStackCache.set(this.strings, this[originError].stack).get(this.strings)
    ) || ''
  }
  // then/catch/finally produce plain Promises, not new Querys.
  static get [Symbol.species]() {
    return Promise
  }
  cancel() {
    return this.canceller && (this.canceller(this), this.canceller = null)
  }
  // Force the simple (unprepared, non-parameterized) protocol.
  simple() {
    this.options.simple = true
    this.options.prepare = false
    return this
  }
  async readable() {
    this.simple()
    this.streaming = true
    return this
  }
  async writable() {
    this.simple()
    this.streaming = true
    return this
  }
  // Fetch `rows` at a time; with a callback it is invoked per batch,
  // otherwise an async iterator over batches is returned.
  cursor(rows = 1, fn) {
    this.options.simple = false
    if (typeof rows === 'function') {
      fn = rows
      rows = 1
    }
    this.cursorRows = rows
    if (typeof fn === 'function')
      return (this.cursorFn = fn, this)
    let prev
    return {
      [Symbol.asyncIterator]: () => ({
        next: () => {
          if (this.executed && !this.active)
            return { done: true }
          // Release the previous batch before requesting the next.
          prev && prev()
          const promise = new Promise((resolve, reject) => {
            this.cursorFn = value => {
              resolve({ value, done: false })
              return new Promise(r => prev = r)
            }
            this.resolve = () => (this.active = false, resolve({ done: true }))
            this.reject = x => (this.active = false, reject(x))
          })
          this.execute()
          return promise
        },
        return() {
          // Early break: signal CLOSE so the portal is closed server-side.
          prev && prev(CLOSE)
          return { done: true }
        }
      })
    }
  }
  // Only describe the statement (parameter/result metadata), don't run it.
  describe() {
    this.options.simple = false
    this.onlyDescribe = this.options.prepare = true
    return this
  }
  stream() {
    throw new Error('.stream has been renamed to .forEach')
  }
  forEach(fn) {
    this.forEachFn = fn
    this.handle()
    return this
  }
  // Return rows as arrays of raw Buffers instead of objects.
  raw() {
    this.isRaw = true
    return this
  }
  // Return rows as arrays of parsed values instead of objects.
  values() {
    this.isRaw = 'values'
    return this
  }
  // Hand the query to its handler exactly once, deferred one microtask via
  // `await 1` so synchronous modifier chains complete first.
  async handle() {
    !this.executed && (this.executed = true) && await 1 && this.handler(this)
  }
  execute() {
    this.handle()
    return this
  }
  // Awaiting (or attaching callbacks) triggers execution.
  then() {
    this.handle()
    return super.then.apply(this, arguments)
  }
  catch() {
    this.handle()
    return super.catch.apply(this, arguments)
  }
  finally() {
    this.handle()
    return super.finally.apply(this, arguments)
  }
}
// Returns a memoized Error for a given tagged-template strings array,
// captured with a shortened stack (4 frames) so `query.origin` can point at
// the original call site without paying full stack capture per query.
function cachedError(xs) {
  if (!originCache.has(xs)) {
    const limit = Error.stackTraceLimit
    Error.stackTraceLimit = 4
    originCache.set(xs, new Error())
    Error.stackTraceLimit = limit
  }
  return originCache.get(xs)
}

31
node_modules/postgres/cjs/src/queue.js generated vendored Normal file
View File

@@ -0,0 +1,31 @@
module.exports = Queue
// FIFO queue with O(1) amortized shift: instead of Array#shift (O(n)) a read
// cursor advances over the backing array, and consumed slots are cleared so
// their contents can be garbage collected.
function Queue(initial = []) {
  let items = initial.slice()
  let head = 0
  return {
    // Number of elements still enqueued.
    get length() {
      return items.length - head
    },
    // Removes a specific element by identity; returns it, or null if absent.
    remove: (x) => {
      const at = items.indexOf(x)
      if (at === -1)
        return null
      items.splice(at, 1)
      return x
    },
    push: (x) => (items.push(x), x),
    // Pops the oldest element; resets the backing array once fully drained.
    shift: () => {
      const out = items[head++]
      if (head === items.length) {
        head = 0
        items = []
      } else {
        items[head - 1] = undefined
      }
      return out
    }
  }
}

16
node_modules/postgres/cjs/src/result.js generated vendored Normal file
View File

@@ -0,0 +1,16 @@
module.exports = class Result extends Array {
constructor() {
super()
Object.defineProperties(this, {
count: { value: null, writable: true },
state: { value: null, writable: true },
command: { value: null, writable: true },
columns: { value: null, writable: true },
statement: { value: null, writable: true }
})
}
static get [Symbol.species]() {
return Array
}
}

277
node_modules/postgres/cjs/src/subscribe.js generated vendored Normal file
View File

@@ -0,0 +1,277 @@
const noop = () => { /* noop */ }

// Logical-replication based realtime subscriptions. Creates a dedicated
// single replication connection with a temporary slot and fans decoded
// change events out to pattern subscribers ('insert:schema.table=key' etc.).
module.exports = Subscribe;function Subscribe(postgres, options) {
  const subscribers = new Map()
    , slot = 'postgresjs_' + Math.random().toString(36).slice(2)
    , state = {}
  let connection
    , stream
    , ended = false
  const sql = subscribe.sql = postgres({
    ...options,
    // Raw row data: user transforms must not apply to replication rows.
    transform: { column: {}, value: {}, row: {} },
    max: 1,
    fetch_types: false,
    idle_timeout: null,
    max_lifetime: null,
    connection: {
      ...options.connection,
      replication: 'database'
    },
    // Unless we are shutting down, transparently reconnect and re-announce
    // all existing subscriptions.
    onclose: async function() {
      if (ended)
        return
      stream = null
      state.pid = state.secret = undefined
      connected(await init(sql, slot, options.publications))
      subscribers.forEach(event => event.forEach(({ onsubscribe }) => onsubscribe()))
    },
    no_subscribe: true
  })
  const end = sql.end
    , close = sql.close
  // Shut down: close the replication stream first, then end the connection.
  sql.end = async() => {
    ended = true
    stream && (await new Promise(r => (stream.once('close', r), stream.end())))
    return end()
  }
  sql.close = async() => {
    stream && (await new Promise(r => (stream.once('close', r), stream.end())))
    return close()
  }
  return subscribe
  // Registers `fn` for a pattern; lazily starts the replication connection.
  async function subscribe(event, fn, onsubscribe = noop, onerror = noop) {
    event = parseEvent(event)
    if (!connection)
      connection = init(sql, slot, options.publications)
    const subscriber = { fn, onsubscribe }
    const fns = subscribers.has(event)
      ? subscribers.get(event).add(subscriber)
      : subscribers.set(event, new Set([subscriber])).get(event)
    const unsubscribe = () => {
      fns.delete(subscriber)
      fns.size === 0 && subscribers.delete(event)
    }
    return connection.then(x => {
      connected(x)
      onsubscribe()
      stream && stream.on('error', onerror)
      return { unsubscribe, state, sql }
    })
  }
  function connected(x) {
    stream = x.stream
    state.pid = x.state.pid
    state.secret = x.state.secret
  }
  // Creates the temporary replication slot and starts streaming pgoutput.
  async function init(sql, slot, publications) {
    if (!publications)
      throw new Error('Missing publication names')
    const xs = await sql.unsafe(
      `CREATE_REPLICATION_SLOT ${ slot } TEMPORARY LOGICAL pgoutput NOEXPORT_SNAPSHOT`
    )
    const [x] = xs
    const stream = await sql.unsafe(
      `START_REPLICATION SLOT ${ slot } LOGICAL ${
        x.consistent_point
      } (proto_version '1', publication_names '${ publications }')`
    ).writable()
    const state = {
      lsn: Buffer.concat(x.consistent_point.split('/').map(x => Buffer.from(('00000000' + x).slice(-8), 'hex')))
    }
    stream.on('data', data)
    stream.on('error', error)
    stream.on('close', sql.close)
    return { stream, state: xs.state }
    function error(e) {
      console.error('Unexpected error during logical streaming - reconnecting', e) // eslint-disable-line
    }
    // 0x77 'w' = XLogData; 0x6b 'k' = keepalive (byte 17 set = reply asked).
    function data(x) {
      if (x[0] === 0x77) {
        parse(x.subarray(25), state, sql.options.parsers, handle, options.transform)
      } else if (x[0] === 0x6b && x[17]) {
        state.lsn = x.subarray(1, 9)
        pong()
      }
    }
    // Dispatches one decoded row change to every matching pattern.
    function handle(a, b) {
      const path = b.relation.schema + '.' + b.relation.table
      call('*', a, b)
      call('*:' + path, a, b)
      b.relation.keys.length && call('*:' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
      call(b.command, a, b)
      call(b.command + ':' + path, a, b)
      b.relation.keys.length && call(b.command + ':' + path + '=' + b.relation.keys.map(x => a[x.name]), a, b)
    }
    // Sends a standby status update ('r') acknowledging the current lsn.
    function pong() {
      const x = Buffer.alloc(34)
      x[0] = 'r'.charCodeAt(0)
      x.fill(state.lsn, 1)
      x.writeBigInt64BE(BigInt(Date.now() - Date.UTC(2000, 0, 1)) * BigInt(1000), 25)
      stream.write(x)
    }
  }
  function call(x, a, b) {
    subscribers.has(x) && subscribers.get(x).forEach(({ fn }) => fn(a, b, x))
  }
}
// Converts a Postgres timestamp (a BigInt of microseconds since the Postgres
// epoch, 2000-01-01 UTC) into a JavaScript Date.
function Time(x) {
  const postgresEpoch = Date.UTC(2000, 0, 1)
  const millis = Number(x / BigInt(1000))
  return new Date(postgresEpoch + millis)
}
// Decodes one pgoutput logical replication message (x[0] is the message type
// char) and forwards row changes to `handle`. `state` doubles as a cache of
// Relation metadata keyed by relation oid. Offsets are advanced inline via
// comma/assignment expressions — order of evaluation is significant.
function parse(x, state, parsers, handle, transform) {
  const char = (acc, [k, v]) => (acc[k.charCodeAt(0)] = v, acc)
  Object.entries({
    R: x => { // Relation: cache schema/table/column metadata for later rows
      let i = 1
      const r = state[x.readUInt32BE(i)] = {
        schema: x.toString('utf8', i += 4, i = x.indexOf(0, i)) || 'pg_catalog',
        table: x.toString('utf8', i + 1, i = x.indexOf(0, i + 1)),
        columns: Array(x.readUInt16BE(i += 2)),
        keys: []
      }
      i += 2
      let columnIndex = 0
        , column
      while (i < x.length) {
        column = r.columns[columnIndex++] = {
          key: x[i++], // non-zero = part of the replica identity key
          name: transform.column.from
            ? transform.column.from(x.toString('utf8', i, i = x.indexOf(0, i)))
            : x.toString('utf8', i, i = x.indexOf(0, i)),
          type: x.readUInt32BE(i += 1),
          parser: parsers[x.readUInt32BE(i)],
          atttypmod: x.readUInt32BE(i += 4)
        }
        column.key && r.keys.push(column)
        i += 4
      }
    },
    Y: () => { /* noop */ }, // Type
    O: () => { /* noop */ }, // Origin
    B: x => { // Begin: remember transaction commit time and lsn
      state.date = Time(x.readBigInt64BE(9))
      state.lsn = x.subarray(1, 9)
    },
    I: x => { // Insert
      let i = 1
      const relation = state[x.readUInt32BE(i)]
      const { row } = tuples(x, relation.columns, i += 7, transform)
      handle(row, {
        command: 'insert',
        relation
      })
    },
    D: x => { // Delete: 75 'K' = key columns only, 79 'O' = old row
      let i = 1
      const relation = state[x.readUInt32BE(i)]
      i += 4
      const key = x[i] === 75
      handle(key || x[i] === 79
        ? tuples(x, relation.columns, i += 3, transform).row
        : null
      , {
        command: 'delete',
        relation,
        key
      })
    },
    U: x => { // Update: optional old tuple ('K'/'O') followed by the new one
      let i = 1
      const relation = state[x.readUInt32BE(i)]
      i += 4
      const key = x[i] === 75
      const xs = key || x[i] === 79
        ? tuples(x, relation.columns, i += 3, transform)
        : null
      xs && (i = xs.i)
      const { row } = tuples(x, relation.columns, i + 3, transform)
      handle(row, {
        command: 'update',
        relation,
        key,
        old: xs && xs.row
      })
    },
    T: () => { /* noop */ }, // Truncate,
    C: () => { /* noop */ } // Commit
  }).reduce(char, {})[x[0]](x)
}
// Decodes one TupleData section starting at offset `xi`. Each column is
// prefixed by a kind byte: 'n' (110) null, 'u' (117) unchanged/toasted,
// otherwise a 4-byte length followed by the text value. Returns the decoded
// row plus the offset just past the tuple so callers can keep parsing.
function tuples(x, columns, xi, transform) {
  let type
    , column
    , value
  const row = transform.raw ? new Array(columns.length) : {}
  for (let i = 0; i < columns.length; i++) {
    type = x[xi++]
    column = columns[i]
    value = type === 110 // n
      ? null
      : type === 117 // u
        ? undefined
        : column.parser === undefined
          ? x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi))
          // Array parsers skip the leading '{' themselves (offset + 5).
          : column.parser.array === true
            ? column.parser(x.toString('utf8', xi + 5, xi += 4 + x.readUInt32BE(xi)))
            : column.parser(x.toString('utf8', xi + 4, xi += 4 + x.readUInt32BE(xi)))
    transform.raw
      ? (row[i] = transform.raw === true
        ? value
        : transform.value.from ? transform.value.from(value, column) : value)
      : (row[column.name] = transform.value.from
        ? transform.value.from(value, column)
        : value
      )
  }
  return { i: xi, row: transform.row.from ? transform.row.from(row) : row }
}
// Normalizes a subscribe pattern into its canonical
// 'command:schema.table=key' form: the command defaults to '*' and a bare
// table name gets the 'public' schema prepended.
function parseEvent(x) {
  const match = x.match(/^(\*|insert|update|delete)?:?([^.]+?\.?[^=]+)?=?(.+)?/i) || []
  if (!match)
    throw new Error('Malformed subscribe pattern: ' + x)
  const [, command, path, key] = match
  let event = command || '*'
  if (path)
    event += ':' + (path.indexOf('.') === -1 ? 'public.' + path : path)
  if (key)
    event += '=' + key
  return event
}

367
node_modules/postgres/cjs/src/types.js generated vendored Normal file
View File

@@ -0,0 +1,367 @@
const { Query } = require('./query.js')
const { Errors } = require('./errors.js')
// Built-in type mappings: `to` is the oid used when serializing, `from` the
// oid(s) whose wire text is fed to `parse` when reading rows.
const types = module.exports.types = {
  string: { // text (25)
    to: 25,
    from: null, // defaults to string
    serialize: x => '' + x
  },
  number: { // int2/int4/oid/float4/float8
    to: 0,
    from: [21, 23, 26, 700, 701],
    serialize: x => '' + x,
    parse: x => +x
  },
  json: { // json (114) / jsonb (3802)
    to: 114,
    from: [114, 3802],
    serialize: x => JSON.stringify(x),
    parse: x => JSON.parse(x)
  },
  boolean: { // bool (16), wire format 't'/'f'
    to: 16,
    from: 16,
    serialize: x => x === true ? 't' : 'f',
    parse: x => x === 't'
  },
  date: { // date/timestamp/timestamptz
    to: 1184,
    from: [1082, 1114, 1184],
    serialize: x => (x instanceof Date ? x : new Date(x)).toISOString(),
    parse: x => new Date(x)
  },
  bytea: { // bytea (17), hex wire format '\x…'
    to: 17,
    from: 17,
    serialize: x => '\\x' + Buffer.from(x).toString('hex'),
    parse: x => Buffer.from(x.slice(2), 'hex')
  }
}
// Base class for sql fragment helpers; awaiting one of these directly
// (instead of using it inside a tagged template) raises NOT_TAGGED_CALL.
class NotTagged { then() { notTagged() } catch() { notTagged() } finally() { notTagged() }}
// A quoted/escaped identifier produced by sql('name') for safe interpolation.
const Identifier = module.exports.Identifier = class Identifier extends NotTagged {
  constructor(value) {
    super()
    this.value = escapeIdentifier(value)
  }
}
// A value with an explicit type oid (and optional array-type map), as
// produced by sql.typed() / sql.json() / sql.array().
const Parameter = module.exports.Parameter = class Parameter extends NotTagged {
  constructor(value, type, array) {
    super()
    this.value = value
    this.type = type
    this.array = array
  }
}
// Dynamic fragment builder created when sql() is called with a plain object
// or array; build() picks a strategy from the keyword preceding the fragment.
const Builder = module.exports.Builder = class Builder extends NotTagged {
  constructor(first, rest) {
    super()
    this.first = first
    this.rest = rest
  }
  // Selects the builder whose keyword occurs last in the preceding sql text;
  // falls back to identifier-list escaping when no keyword matches.
  build(before, parameters, types, options) {
    const keyword = builders.map(([x, fn]) => ({ fn, i: before.search(x) })).sort((a, b) => a.i - b.i).pop()
    return keyword.i === -1
      ? escapeIdentifiers(this.first, options)
      : keyword.fn(this.first, this.rest, parameters, types, options)
  }
}
// Registers one value as a bind parameter: pushes it onto `parameters`, its
// type oid onto `types`, and returns the '$n' placeholder text. Undefined
// values are only allowed when options.transform.undefined supplies a stand-in.
module.exports.handleValue = handleValue;function handleValue(x, parameters, types, options) {
  let value = x instanceof Parameter ? x.value : x
  if (value === undefined) {
    x instanceof Parameter
      ? x.value = options.transform.undefined
      : value = x = options.transform.undefined
    if (value === undefined)
      throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
  }
  // types.push returns the new length, which equals the placeholder index.
  return '$' + (types.push(
    x instanceof Parameter
      ? (parameters.push(x.value), x.array
        ? x.array[x.type || inferType(x.value)] || x.type || firstIsString(x.value)
        : x.type
      )
      : (parameters.push(x), inferType(x))
  ))
}
// Serializers/parsers derived from the built-in type table above.
const defaultHandlers = typeHandlers(types)
// Renders a tagged query to sql text, interpolating each argument in between
// the template string parts while collecting bind parameters and their types.
module.exports.stringify = stringify;function stringify(q, string, value, parameters, types, options) { // eslint-disable-line
  for (let i = 1; i < q.strings.length; i++) {
    string += (stringifyValue(string, value, parameters, types, options)) + q.strings[i]
    value = q.args[i]
  }
  return string
}
// Renders one interpolated value: dynamic Builder, nested query fragment,
// escaped identifier, array of fragments, or a plain bind parameter.
function stringifyValue(string, value, parameters, types, o) {
  return (
    value instanceof Builder ? value.build(string, parameters, types, o) :
    value instanceof Query ? fragment(value, parameters, types, o) :
    value instanceof Identifier ? value.value :
    value && value[0] instanceof Query ? value.reduce((acc, x) => acc + ' ' + fragment(x, parameters, types, o), '') :
    handleValue(value, parameters, types, o)
  )
}
// Inlines a nested query as a fragment, sharing the outer parameter list.
function fragment(q, parameters, types, options) {
  q.fragment = true
  return stringify(q, q.strings[0], q.args[0], parameters, types, options)
}
// Renders rows as '(v1,v2),(v3,v4)' tuples for VALUES/INSERT, binding each
// cell through stringifyValue.
function valuesBuilder(first, parameters, types, columns, options) {
  return first.map(row =>
    '(' + columns.map(column =>
      stringifyValue('values', row[column], parameters, types, options)
    ).join(',') + ')'
  ).join(',')
}
// `values` builder: accepts one object or an array of objects; the column
// set comes from the rest arguments or the (first) object's keys.
function values(first, rest, parameters, types, options) {
  const multi = Array.isArray(first[0])
  const columns = rest.length ? rest.flat() : Object.keys(multi ? first[0] : first)
  return valuesBuilder(multi ? first : [first], parameters, types, columns, options)
}
// `select`/`as`/`returning` builder: a string or array becomes an escaped
// identifier list; an object becomes 'value as "key"' pairs with the column
// name run through the configured column transform.
function select(first, rest, parameters, types, options) {
  typeof first === 'string' && (first = [first].concat(rest))
  if (Array.isArray(first))
    return escapeIdentifiers(first, options)
  let value
  const columns = rest.length ? rest.flat() : Object.keys(first)
  return columns.map(x => {
    value = first[x]
    return (
      value instanceof Query ? fragment(value, parameters, types, options) :
      value instanceof Identifier ? value.value :
      handleValue(value, parameters, types, options)
    ) + ' as ' + escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x)
  }).join(',')
}
// Keyword → builder table. Each entry is compiled to a [RegExp, fn] pair;
// the trailing negative lookahead makes the regex match only the LAST
// occurrence of the keyword (bounded by whitespace or parentheses).
const builders = Object.entries({
  values,
  in(...xs) {
    const x = values(...xs)
    return x === '()' ? '(null)' : x  // empty IN list would be invalid SQL
  },
  select,
  as: select,
  returning: select,
  '\\(': select,
  update(first, rest, parameters, types, options) {
    // Returns an array of '"col"=$n' assignments (joined by the caller).
    const columns = rest.length ? rest.flat() : Object.keys(first)
    return columns.map(x =>
      escapeIdentifier(options.transform.column.to ? options.transform.column.to(x) : x) +
      '=' + stringifyValue('values', first[x], parameters, types, options)
    )
  },
  insert(first, rest, parameters, types, options) {
    const rows = Array.isArray(first) ? first : [first]
    const columns = rest.length ? rest.flat() : Object.keys(rows[0])
    return '(' + escapeIdentifiers(columns, options) + ')values' +
      valuesBuilder(rows, parameters, types, columns, options)
  }
}).map(([keyword, fn]) =>
  [new RegExp(`((?:^|[\\s(])${keyword}(?:$|[\\s(]))(?![\\s\\S]*\\1)`, 'i'), fn]
)
// Raised when sql(...) is invoked as a plain function call instead of as a
// tagged template literal.
function notTagged() {
  const err = Errors.generic('NOT_TAGGED_CALL', 'Query not called as a tagged template literal')
  throw err
}
// Base serializer/parser maps for the built-in types; mergeUserTypes layers
// user-defined types on top of these.
const serializers = module.exports.serializers = defaultHandlers.serializers
const parsers = module.exports.parsers = defaultHandlers.parsers
// Unique sentinel object, compared by identity at its call sites —
// NOTE(review): presumably marks end-of-stream/iteration; confirm usage.
const END = module.exports.END = {}
// Descend to the first leaf of a (possibly nested) array and report whether
// it is a string: returns 1009 (presumably the text[] oid — confirm against
// the type table) if so, otherwise 0.
function firstIsString(x) {
  while (Array.isArray(x))
    x = x[0]
  return typeof x === 'string' ? 1009 : 0
}
// Merge user-supplied type definitions over the built-in serializers and
// parsers; user entries win on conflicting oids.
const mergeUserTypes = module.exports.mergeUserTypes = function(types) {
  const user = typeHandlers(types || {})
  return {
    serializers: { ...serializers, ...user.serializers },
    parsers: { ...parsers, ...user.parsers }
  }
}
// Flatten a { name: { to, from, parse, serialize } } type table into
// oid-keyed parser and serializer maps. `from` may be a single oid or an
// array; `serialize` is registered under both `to` and every `from` oid.
function typeHandlers(types) {
  const acc = { parsers: {}, serializers: {} }
  for (const k of Object.keys(types)) {
    const t = types[k]
    const from = t.from ? [].concat(t.from) : []
    from.forEach(oid => acc.parsers[oid] = t.parse)
    if (t.serialize) {
      acc.serializers[t.to] = t.serialize
      from.forEach(oid => acc.serializers[oid] = t.serialize)
    }
  }
  return acc
}
// Escape a list of identifiers, applying the configured column-name
// transform first, and join them into a comma-separated list.
function escapeIdentifiers(xs, { transform: { column } }) {
  const out = []
  for (const x of xs)
    out.push(escapeIdentifier(column.to ? column.to(x) : x))
  return out.join(',')
}
// Double-quote an identifier, doubling embedded quotes and treating '.' as
// a schema/table separator (a.b -> "a"."b").
const escapeIdentifier = module.exports.escapeIdentifier = function escape(str) {
  const quoted = str.replace(/"/g, '""').replace(/\./g, '"."')
  return `"${quoted}"`
}
// Infer a PostgreSQL type oid for a JS value; 0 means "unknown, let the
// server decide". Arrays are typed by their first element.
const inferType = module.exports.inferType = function inferType(x) {
  if (x instanceof Parameter)
    return x.type
  if (x instanceof Date)
    return 1184
  if (x instanceof Uint8Array)
    return 17
  if (x === true || x === false)
    return 16
  if (typeof x === 'bigint')
    return 20
  if (Array.isArray(x))
    return inferType(x[0])
  return 0
}
const escapeBackslash = /\\/g
const escapeQuote = /"/g
// Escape backslashes and double quotes inside a quoted array-literal element.
function arrayEscape(x) {
  return x.replace(escapeBackslash, '\\\\').replace(escapeQuote, '\\"')
}
// Serialize a JS array (possibly nested) into a PostgreSQL array literal.
// Non-arrays pass through untouched; null stays unquoted; every other
// element is serialized, escaped and double-quoted.
const arraySerializer = module.exports.arraySerializer = function arraySerializer(xs, serializer, options, typarray) {
  if (!Array.isArray(xs))
    return xs
  if (xs.length === 0)
    return '{}'
  // Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
  const delimiter = typarray === 1020 ? ';' : ','
  const [first] = xs
  if (Array.isArray(first) && !first.type) {
    const inner = xs.map(x => arraySerializer(x, serializer, options, typarray))
    return '{' + inner.join(delimiter) + '}'
  }
  const parts = xs.map(x => {
    if (x === undefined) {
      // undefined is only allowed when transform.undefined supplies a substitute
      x = options.transform.undefined
      if (x === undefined)
        throw Errors.generic('UNDEFINED_VALUE', 'Undefined values are not allowed')
    }
    if (x === null)
      return 'null'
    return '"' + arrayEscape(serializer ? serializer(x.type ? x.value : x) : '' + x) + '"'
  })
  return '{' + parts.join(delimiter) + '}'
}
// Module-level scratch state shared by every arrayParser call; the recursive
// arrayParserLoop deliberately mutates this same object so nested levels
// share one cursor. Not safe for interleaved top-level parses.
const arrayParserState = {
  i: 0,           // current cursor index into the input string
  char: null,     // character under the cursor
  str: '',        // accumulator for the current quoted element
  quoted: false,  // true while inside a double-quoted element
  last: 0         // start index of the current unquoted element
}
// Parse a PostgreSQL array literal into a JS array, resetting the shared
// cursor state before delegating to the recursive loop.
const arrayParser = module.exports.arrayParser = function arrayParser(x, parser, typarray) {
  arrayParserState.i = 0
  arrayParserState.last = 0
  return arrayParserLoop(arrayParserState, x, parser, typarray)
}
// Recursive worker for arrayParser: walks a PostgreSQL array literal such as
// '{a,"b,c",{d}}' and returns a (possibly nested) JS array, running `parser`
// (when given) on each element. Mutates the shared state `s`
// (arrayParserState): `i` cursor, `last` start of the current unquoted
// element, `str` accumulator for quoted elements, `quoted` in-quotes flag,
// `p` previous character (set at the bottom of the loop; undefined on the
// first iteration of a fresh parse).
function arrayParserLoop(s, x, parser, typarray) {
  const xs = []
  // Only _box (1020) has the ';' delimiter for arrays, all other types use the ',' delimiter
  const delimiter = typarray === 1020 ? ';' : ','
  for (; s.i < x.length; s.i++) {
    s.char = x[s.i]
    if (s.quoted) {
      if (s.char === '\\') {
        s.str += x[++s.i]  // backslash escape: take the next character verbatim
      } else if (s.char === '"') {
        // Closing quote: emit the accumulated element.
        xs.push(parser ? parser(s.str) : s.str)
        s.str = ''
        s.quoted = x[s.i + 1] === '"'  // an immediately adjacent quote reopens
        s.last = s.i + 2
      } else {
        s.str += s.char
      }
    } else if (s.char === '"') {
      s.quoted = true
    } else if (s.char === '{') {
      // Nested array: recurse, sharing the same cursor state.
      s.last = ++s.i
      xs.push(arrayParserLoop(s, x, parser, typarray))
    } else if (s.char === '}') {
      s.quoted = false
      // Flush any pending unquoted element before closing this level.
      s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
      s.last = s.i + 1
      break
    } else if (s.char === delimiter && s.p !== '}' && s.p !== '"') {
      // Delimiter terminates an unquoted element (skip when the previous char
      // closed a nested array or a quoted element — those already emitted).
      xs.push(parser ? parser(x.slice(s.last, s.i)) : x.slice(s.last, s.i))
      s.last = s.i + 1
    }
    s.p = s.char
  }
  // Trailing unquoted element when input ended without a closing '}'.
  s.last < s.i && xs.push(parser ? parser(x.slice(s.last, s.i + 1)) : x.slice(s.last, s.i + 1))
  return xs
}
// snake_case -> camelCase: drop each underscore and uppercase the character
// that follows it. A trailing underscore would throw (reads past the end),
// same as the behavior callers already rely on.
const toCamel = module.exports.toCamel = x => {
  let out = x[0]
  let i = 1
  while (i < x.length) {
    out += x[i] === '_' ? x[++i].toUpperCase() : x[i]
    i++
  }
  return out
}
// snake_case -> PascalCase: uppercase the first character, then drop each
// underscore and uppercase the character that follows it.
const toPascal = module.exports.toPascal = x => {
  let out = x[0].toUpperCase()
  let i = 1
  while (i < x.length) {
    out += x[i] === '_' ? x[++i].toUpperCase() : x[i]
    i++
  }
  return out
}
// Simple case-mapping helpers: snake_case <-> kebab-case is a straight
// character swap; fromCamel/fromPascal insert '_' before each uppercase
// letter and lowercase the result.
const toKebab = module.exports.toKebab = x => x.replace(/_/g, '-')
const fromCamel = module.exports.fromCamel = x => x.replace(/([A-Z])/g, '_$1').toLowerCase()
const fromPascal = module.exports.fromPascal = x => (x.slice(0, 1) + x.slice(1).replace(/([A-Z])/g, '_$1')).toLowerCase()
const fromKebab = module.exports.fromKebab = x => x.replace(/-/g, '_')
// Build a transform that recursively rewrites every object key of a decoded
// value with `fn`, but only for columns of type 114 or 3802 (json/jsonb).
// Non-objects, null, and other column types pass through untouched.
function createJsonTransform(fn) {
  return function jsonTransform(x, column) {
    const isJsonColumn = column.type === 114 || column.type === 3802
    if (typeof x !== 'object' || x === null || !isJsonColumn)
      return x
    if (Array.isArray(x))
      return x.map(item => jsonTransform(item, column))
    const out = {}
    for (const [k, v] of Object.entries(x))
      out[fn(k)] = jsonTransform(v, column)
    return out
  }
}
// Wire the case helpers into the shape the `transform` option expects:
// `.column.from` converts incoming column names, `.column.to` outgoing ones,
// and `.value.from` rewrites keys inside json/jsonb values.
// NOTE: spreading the function (`{ ...toCamel }`) copies its `column`/`value`
// properties BY REFERENCE, so `camel.column` is the same object as
// `toCamel.column` — the `.to` assignment below also lands on toCamel.column.
toCamel.column = { from: toCamel }
toCamel.value = { from: createJsonTransform(toCamel) }
fromCamel.column = { to: fromCamel }
const camel = module.exports.camel = { ...toCamel }
camel.column.to = fromCamel
toPascal.column = { from: toPascal }
toPascal.value = { from: createJsonTransform(toPascal) }
fromPascal.column = { to: fromPascal }
const pascal = module.exports.pascal = { ...toPascal }
pascal.column.to = fromPascal
toKebab.column = { from: toKebab }
toKebab.value = { from: createJsonTransform(toKebab) }
fromKebab.column = { to: fromKebab }
const kebab = module.exports.kebab = { ...toKebab }
kebab.column.to = fromKebab