diff --git a/api/__pycache__/scan-erp-gaps-rss.cpython-312.pyc b/api/__pycache__/scan-erp-gaps-rss.cpython-312.pyc
new file mode 100644
index 000000000..fa3779f86
Binary files /dev/null and b/api/__pycache__/scan-erp-gaps-rss.cpython-312.pyc differ
diff --git a/api/blade-actions-surfaced.json b/api/blade-actions-surfaced.json
index 22391b64b..2408c49dd 100644
--- a/api/blade-actions-surfaced.json
+++ b/api/blade-actions-surfaced.json
@@ -1,15 +1,15 @@
{
- "generated_at": "2026-04-19T20:20:01.395783",
+ "generated_at": "2026-04-19T20:25:01.795036",
"stats": {
- "total": 479,
- "pending": 919,
+ "total": 480,
+ "pending": 921,
"kaouther_surfaced": 29,
"chrome_surfaced": 10,
"notif_only_done": 0,
"autofix_archived": 0,
"cerebras_archived": 0,
"older_3d_archived": 0,
- "unknown": 440,
+ "unknown": 441,
"errors": 0
},
"actions": [
diff --git a/api/erp-gap-scans.php b/api/erp-gap-scans.php
new file mode 100644
index 000000000..24e7b2041
--- /dev/null
+++ b/api/erp-gap-scans.php
@@ -0,0 +1,112 @@
+<?php
+// ERP Gap Scans API — aggregated scan data for pain-points-atlas integration.
+// Usage: /api/erp-gap-scans.php[?erp=X&min_conf=0.5&limit=20&source=llm|rss|playwright]
+// Returns: JSON with global stats, per-source freshness, per-ERP breakdown and
+// a filtered detail list of gaps, ordered by confidence then recency.
+
+header('Content-Type: application/json');
+header('Cache-Control: public, max-age=60');
+// NOTE(review): wildcard CORS on a read-only endpoint — confirm this is intentional.
+header('Access-Control-Allow-Origin: *');
+
+$erp      = $_GET['erp'] ?? null;
+// Clamp min_conf to a sane lower bound; default 0.3.
+$min_conf = isset($_GET['min_conf']) ? max(0.0, (float)$_GET['min_conf']) : 0.3;
+// Clamp limit to [1, 100]: (int)'' or negative input would otherwise yield
+// "LIMIT 0" / "LIMIT -n", the latter being a PostgreSQL syntax error.
+$limit    = isset($_GET['limit']) ? max(1, min(100, (int)$_GET['limit'])) : 50;
+
+// Whitelist the source filter and map it to its LIKE pattern. This (a) stops
+// user-supplied '%'/'_' from acting as LIKE wildcards and (b) keeps the filter
+// pattern consistent with the CASE expressions below, which match
+// 'playwright%' (no underscore) while llm/rss use an '<src>_' prefix.
+$source_patterns = [
+    'llm'        => 'llm_%',
+    'rss'        => 'rss_%',
+    'playwright' => 'playwright%',
+];
+$source = $_GET['source'] ?? null; // llm|rss|playwright
+if ($source !== null && !isset($source_patterns[$source])) {
+    $source = null; // unknown value: ignore the filter rather than error out
+}
+
+try {
+    // Credentials come from the environment when available; the literals are a
+    // backward-compatible fallback. NOTE(review): hard-coded DB credentials
+    // should be removed from source entirely once env vars are deployed.
+    $pdo = new PDO(
+        getenv('ADX_PG_DSN') ?: 'pgsql:host=10.1.0.3;port=5432;dbname=adx_system',
+        getenv('ADX_PG_USER') ?: 'admin',
+        getenv('ADX_PG_PASS') ?: 'admin123',
+        [
+            PDO::ATTR_ERRMODE => PDO::ERRMODE_EXCEPTION,
+            PDO::ATTR_TIMEOUT => 3,
+        ]
+    );
+
+    // Optional source filter, shared by the stats and per-ERP queries.
+    $source_clause = '';
+    $params_stats  = [];
+    if ($source) {
+        $source_clause = ' WHERE query LIKE :source_pattern';
+        $params_stats[':source_pattern'] = $source_patterns[$source];
+    }
+
+    // Global stats: totals, coverage, freshness, and per-source gap counts.
+    $stmt = $pdo->prepare("SELECT
+        COUNT(*) as total_gaps,
+        COUNT(DISTINCT erp_id) as erps_covered,
+        ROUND(AVG(confidence_score)::NUMERIC, 3) as avg_confidence,
+        MAX(scanned_at) as last_scan_at,
+        COUNT(*) FILTER (WHERE query LIKE 'llm_%') as gaps_llm,
+        COUNT(*) FILTER (WHERE query LIKE 'rss_%') as gaps_rss,
+        COUNT(*) FILTER (WHERE query LIKE 'playwright%') as gaps_playwright
+        FROM erp_gap_scans" . $source_clause);
+    $stmt->execute($params_stats);
+    $stats = $stmt->fetch(PDO::FETCH_ASSOC);
+
+    // Per-ERP breakdown: gap count, avg confidence, last scan, sources seen.
+    $stmt = $pdo->prepare("SELECT
+        erp_id, erp_name,
+        COUNT(*) as gaps_count,
+        ROUND(AVG(confidence_score)::NUMERIC, 3) as avg_conf,
+        MAX(scanned_at) as last_scan,
+        array_agg(DISTINCT CASE
+            WHEN query LIKE 'llm_%' THEN 'LLM'
+            WHEN query LIKE 'rss_%' THEN 'RSS'
+            WHEN query LIKE 'playwright%' THEN 'Playwright'
+            ELSE 'Other'
+        END) as sources
+        FROM erp_gap_scans" . $source_clause . "
+        GROUP BY erp_id, erp_name
+        ORDER BY gaps_count DESC");
+    $stmt->execute($params_stats);
+    $per_erp = $stmt->fetchAll(PDO::FETCH_ASSOC);
+
+    // Detail gaps, filtered by confidence and optionally by ERP / source.
+    // $limit is interpolated but already clamped to an int in [1, 100].
+    $detail_sql = "SELECT id, erp_id, erp_name, title, snippet, source_url, confidence_score, keywords,
+        CASE
+            WHEN query LIKE 'llm_%' THEN 'LLM'
+            WHEN query LIKE 'rss_%' THEN 'RSS'
+            WHEN query LIKE 'playwright%' THEN 'Playwright'
+            ELSE 'Other'
+        END as source,
+        scanned_at
+        FROM erp_gap_scans
+        WHERE confidence_score >= :min_conf";
+    $params = [':min_conf' => $min_conf];
+    if ($erp) { $detail_sql .= " AND erp_id = :erp"; $params[':erp'] = $erp; }
+    if ($source) { $detail_sql .= " AND query LIKE :src"; $params[':src'] = $source_patterns[$source]; }
+    $detail_sql .= " ORDER BY confidence_score DESC, scanned_at DESC LIMIT " . (int)$limit;
+    $stmt = $pdo->prepare($detail_sql);
+    $stmt->execute($params);
+    $details = $stmt->fetchAll(PDO::FETCH_ASSOC);
+
+    // Latest scan timestamp and counts per detected source.
+    $stmt = $pdo->query("SELECT
+        CASE
+            WHEN query LIKE 'llm_%' THEN 'LLM'
+            WHEN query LIKE 'rss_%' THEN 'RSS'
+            WHEN query LIKE 'playwright%' THEN 'Playwright'
+            ELSE 'Other'
+        END as source,
+        MAX(scanned_at) as last_scan,
+        COUNT(*) as gaps,
+        COUNT(DISTINCT erp_id) as erps
+        FROM erp_gap_scans GROUP BY source ORDER BY last_scan DESC");
+    $sources = $stmt->fetchAll(PDO::FETCH_ASSOC);
+
+    echo json_encode([
+        'generated_at' => date('c'),
+        'version' => 'V96',
+        'module' => 'ERP Gap Scans — consolidated D+C+B',
+        'stats' => $stats,
+        'sources' => $sources,
+        'per_erp' => $per_erp,
+        'gaps' => $details,
+        'filters_applied' => [
+            'erp' => $erp,
+            'min_confidence' => $min_conf,
+            'limit' => $limit,
+            'source' => $source,
+        ],
+    ], JSON_PRETTY_PRINT | JSON_UNESCAPED_UNICODE);
+
+} catch (Throwable $e) {
+    // Log the real cause server-side; never leak driver/DSN details (which can
+    // include host/credential hints) to a CORS-open public endpoint.
+    error_log('erp-gap-scans.php: ' . $e->getMessage());
+    http_response_code(500);
+    echo json_encode(['error' => 'db_error', 'message' => 'internal database error']);
+}
diff --git a/api/node_modules/.package-lock.json b/api/node_modules/.package-lock.json
index 240d7bfeb..b49c6405a 100644
--- a/api/node_modules/.package-lock.json
+++ b/api/node_modules/.package-lock.json
@@ -588,6 +588,95 @@
"integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==",
"license": "MIT"
},
+ "node_modules/pg": {
+ "version": "8.20.0",
+ "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz",
+ "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==",
+ "license": "MIT",
+ "dependencies": {
+ "pg-connection-string": "^2.12.0",
+ "pg-pool": "^3.13.0",
+ "pg-protocol": "^1.13.0",
+ "pg-types": "2.2.0",
+ "pgpass": "1.0.5"
+ },
+ "engines": {
+ "node": ">= 16.0.0"
+ },
+ "optionalDependencies": {
+ "pg-cloudflare": "^1.3.0"
+ },
+ "peerDependencies": {
+ "pg-native": ">=3.0.1"
+ },
+ "peerDependenciesMeta": {
+ "pg-native": {
+ "optional": true
+ }
+ }
+ },
+ "node_modules/pg-cloudflare": {
+ "version": "1.3.0",
+ "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz",
+ "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==",
+ "license": "MIT",
+ "optional": true
+ },
+ "node_modules/pg-connection-string": {
+ "version": "2.12.0",
+ "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz",
+ "integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==",
+ "license": "MIT"
+ },
+ "node_modules/pg-int8": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz",
+ "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==",
+ "license": "ISC",
+ "engines": {
+ "node": ">=4.0.0"
+ }
+ },
+ "node_modules/pg-pool": {
+ "version": "3.13.0",
+ "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz",
+ "integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==",
+ "license": "MIT",
+ "peerDependencies": {
+ "pg": ">=8.0"
+ }
+ },
+ "node_modules/pg-protocol": {
+ "version": "1.13.0",
+ "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz",
+ "integrity": "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==",
+ "license": "MIT"
+ },
+ "node_modules/pg-types": {
+ "version": "2.2.0",
+ "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz",
+ "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==",
+ "license": "MIT",
+ "dependencies": {
+ "pg-int8": "1.0.1",
+ "postgres-array": "~2.0.0",
+ "postgres-bytea": "~1.0.0",
+ "postgres-date": "~1.0.4",
+ "postgres-interval": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/pgpass": {
+ "version": "1.0.5",
+ "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz",
+ "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==",
+ "license": "MIT",
+ "dependencies": {
+ "split2": "^4.1.0"
+ }
+ },
"node_modules/playwright": {
"version": "1.59.1",
"resolved": "https://registry.npmjs.org/playwright/-/playwright-1.59.1.tgz",
@@ -618,6 +707,45 @@
"node": ">=18"
}
},
+ "node_modules/postgres-array": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz",
+ "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=4"
+ }
+ },
+ "node_modules/postgres-bytea": {
+ "version": "1.0.1",
+ "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz",
+ "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/postgres-date": {
+ "version": "1.0.7",
+ "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz",
+ "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
+ "node_modules/postgres-interval": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz",
+ "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==",
+ "license": "MIT",
+ "dependencies": {
+ "xtend": "^4.0.0"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ }
+ },
"node_modules/progress": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz",
@@ -749,6 +877,15 @@
"node": ">=0.10.0"
}
},
+ "node_modules/split2": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
+ "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
+ "license": "ISC",
+ "engines": {
+ "node": ">= 10.x"
+ }
+ },
"node_modules/streamx": {
"version": "2.25.0",
"resolved": "https://registry.npmjs.org/streamx/-/streamx-2.25.0.tgz",
@@ -899,6 +1036,15 @@
}
}
},
+ "node_modules/xtend": {
+ "version": "4.0.2",
+ "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
+ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
+ "license": "MIT",
+ "engines": {
+ "node": ">=0.4"
+ }
+ },
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
diff --git a/api/node_modules/pg-cloudflare/LICENSE b/api/node_modules/pg-cloudflare/LICENSE
new file mode 100644
index 000000000..5c1405646
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2010 - 2021 Brian Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/api/node_modules/pg-cloudflare/README.md b/api/node_modules/pg-cloudflare/README.md
new file mode 100644
index 000000000..68663c45c
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/README.md
@@ -0,0 +1,112 @@
+# pg-cloudflare
+
+`pg-cloudflare` makes it easier to take an existing package that relies on `tls` and `net`, and make it work in environments where only `connect()` is supported, such as Cloudflare Workers.
+
+`pg-cloudflare` wraps `connect()`, the [TCP Socket API](https://github.com/wintercg/proposal-sockets-api) proposed within WinterCG, and implemented in [Cloudflare Workers](https://developers.cloudflare.com/workers/runtime-apis/tcp-sockets/), and exposes an interface with methods similar to what the `net` and `tls` modules in Node.js expose. (ex: `net.connect(path[, options][, callback])`). This minimizes the number of changes needed in order to make an existing package work across JavaScript runtimes.
+
+## Installation
+
+```
+npm i --save-dev pg-cloudflare
+```
+
+The package uses conditional exports to support bundlers that don't know about
+`cloudflare:sockets`, so the consumer code by default imports an empty file. To
+enable the package, resolve to the `workerd` condition in your bundler's
+config. For example:
+
+- `webpack.config.js`
+ ```js
+ export default {
+ ...,
+ resolve: { conditionNames: [..., "workerd"] },
+ plugins: [
+ // ignore cloudflare:sockets imports
+ new webpack.IgnorePlugin({
+ resourceRegExp: /^cloudflare:sockets$/,
+ }),
+ ],
+ }
+ ```
+- `vite.config.js`
+
+ > [!NOTE]
+ > If you are using the [Cloudflare Vite plugin](https://www.npmjs.com/package/@cloudflare/vite-plugin) then the following configuration is not necessary.
+
+ ```js
+ export default defineConfig({
+ ...,
+ resolve: {
+ conditions: [..., "workerd"],
+ },
+ build: {
+ ...,
+ // don't try to bundle cloudflare:sockets
+ rollupOptions: {
+ external: [..., 'cloudflare:sockets'],
+ },
+ },
+ })
+ ```
+
+- `rollup.config.js`
+ ```js
+ export default defineConfig({
+ ...,
+ plugins: [..., nodeResolve({ exportConditions: [..., 'workerd'] })],
+ // don't try to bundle cloudflare:sockets
+ external: [..., 'cloudflare:sockets'],
+ })
+ ```
+- `esbuild.config.js`
+ ```js
+ await esbuild.build({
+ ...,
+ conditions: [..., 'workerd'],
+ })
+ ```
+
+The concrete examples can be found in `packages/pg-bundler-test`.
+
+## How to use conditionally, in non-Node.js environments
+
+As implemented in `pg` [here](https://github.com/brianc/node-postgres/commit/07553428e9c0eacf761a5d4541a3300ff7859578#diff-34588ad868ebcb232660aba7ee6a99d1e02f4bc93f73497d2688c3f074e60533R5-R13), a typical use case might look as follows, where in a Node.js environment the `net` module is used, while in a non-Node.js environment, where `net` is unavailable, `pg-cloudflare` is used instead, providing an equivalent interface:
+
+```js
+module.exports.getStream = function getStream(ssl = false) {
+ const net = require('net')
+ if (typeof net.Socket === 'function') {
+ return net.Socket()
+ }
+ const { CloudflareSocket } = require('pg-cloudflare')
+ return new CloudflareSocket(ssl)
+}
+```
+
+## Node.js implementation of the Socket API proposal
+
+If you're looking for a way to rely on `connect()` as the interface you use to interact with raw sockets, but need this interface to be available in a Node.js environment, [`@arrowood.dev/socket`](https://github.com/Ethan-Arrowood/socket) provides a Node.js implementation of the Socket API.
+
+### license
+
+The MIT License (MIT)
+
+Copyright (c) 2023 Brian M. Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/pg-cloudflare/dist/empty.d.ts b/api/node_modules/pg-cloudflare/dist/empty.d.ts
new file mode 100644
index 000000000..d87c48512
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/dist/empty.d.ts
@@ -0,0 +1,2 @@
+declare const _default: {};
+export default _default;
diff --git a/api/node_modules/pg-cloudflare/dist/empty.js b/api/node_modules/pg-cloudflare/dist/empty.js
new file mode 100644
index 000000000..bd78fc976
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/dist/empty.js
@@ -0,0 +1,6 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+// This is an empty module that is served up when outside of a workerd environment
+// See the `exports` field in package.json
+exports.default = {};
+//# sourceMappingURL=empty.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-cloudflare/dist/empty.js.map b/api/node_modules/pg-cloudflare/dist/empty.js.map
new file mode 100644
index 000000000..3eaa3b1c2
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/dist/empty.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"empty.js","sourceRoot":"","sources":["../src/empty.ts"],"names":[],"mappings":";;AAAA,kFAAkF;AAClF,0CAA0C;AAC1C,kBAAe,EAAE,CAAA"}
\ No newline at end of file
diff --git a/api/node_modules/pg-cloudflare/dist/index.d.ts b/api/node_modules/pg-cloudflare/dist/index.d.ts
new file mode 100644
index 000000000..a779d149e
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/dist/index.d.ts
@@ -0,0 +1,31 @@
+///
+///
+///
+import { TlsOptions } from 'cloudflare:sockets';
+import { EventEmitter } from 'events';
+/**
+ * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
+ */
+export declare class CloudflareSocket extends EventEmitter {
+ readonly ssl: boolean;
+ writable: boolean;
+ destroyed: boolean;
+ private _upgrading;
+ private _upgraded;
+ private _cfSocket;
+ private _cfWriter;
+ private _cfReader;
+ constructor(ssl: boolean);
+ setNoDelay(): this;
+ setKeepAlive(): this;
+ ref(): this;
+ unref(): this;
+ connect(port: number, host: string, connectListener?: (...args: unknown[]) => void): Promise;
+ _listen(): Promise;
+ _listenOnce(): Promise;
+ write(data: Uint8Array | string, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): true | void;
+ end(data?: Buffer, encoding?: BufferEncoding, callback?: (...args: unknown[]) => void): this;
+ destroy(reason: string): this;
+ startTls(options: TlsOptions): void;
+ _addClosedHandler(): void;
+}
diff --git a/api/node_modules/pg-cloudflare/dist/index.js b/api/node_modules/pg-cloudflare/dist/index.js
new file mode 100644
index 000000000..032339873
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/dist/index.js
@@ -0,0 +1,152 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudflareSocket = void 0;
+const events_1 = require("events");
+/**
+ * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
+ */
+class CloudflareSocket extends events_1.EventEmitter {
+ constructor(ssl) {
+ super();
+ this.ssl = ssl;
+ this.writable = false;
+ this.destroyed = false;
+ this._upgrading = false;
+ this._upgraded = false;
+ this._cfSocket = null;
+ this._cfWriter = null;
+ this._cfReader = null;
+ }
+ setNoDelay() {
+ return this;
+ }
+ setKeepAlive() {
+ return this;
+ }
+ ref() {
+ return this;
+ }
+ unref() {
+ return this;
+ }
+ async connect(port, host, connectListener) {
+ try {
+ log('connecting');
+ if (connectListener)
+ this.once('connect', connectListener);
+ const options = this.ssl ? { secureTransport: 'starttls' } : {};
+ const mod = await import('cloudflare:sockets');
+ const connect = mod.connect;
+ this._cfSocket = connect(`${host}:${port}`, options);
+ this._cfWriter = this._cfSocket.writable.getWriter();
+ this._addClosedHandler();
+ this._cfReader = this._cfSocket.readable.getReader();
+ if (this.ssl) {
+ this._listenOnce().catch((e) => this.emit('error', e));
+ }
+ else {
+ this._listen().catch((e) => this.emit('error', e));
+ }
+ await this._cfWriter.ready;
+ log('socket ready');
+ this.writable = true;
+ this.emit('connect');
+ return this;
+ }
+ catch (e) {
+ this.emit('error', e);
+ }
+ }
+ async _listen() {
+ // eslint-disable-next-line no-constant-condition
+ while (true) {
+ log('awaiting receive from CF socket');
+ const { done, value } = await this._cfReader.read();
+ log('CF socket received:', done, value);
+ if (done) {
+ log('done');
+ break;
+ }
+ this.emit('data', Buffer.from(value));
+ }
+ }
+ async _listenOnce() {
+ log('awaiting first receive from CF socket');
+ const { done, value } = await this._cfReader.read();
+ log('First CF socket received:', done, value);
+ this.emit('data', Buffer.from(value));
+ }
+ write(data, encoding = 'utf8', callback = () => { }) {
+ if (data.length === 0)
+ return callback();
+ if (typeof data === 'string')
+ data = Buffer.from(data, encoding);
+ log('sending data direct:', data);
+ this._cfWriter.write(data).then(() => {
+ log('data sent');
+ callback();
+ }, (err) => {
+ log('send error', err);
+ callback(err);
+ });
+ return true;
+ }
+ end(data = Buffer.alloc(0), encoding = 'utf8', callback = () => { }) {
+ log('ending CF socket');
+ this.write(data, encoding, (err) => {
+ this._cfSocket.close();
+ if (callback)
+ callback(err);
+ });
+ return this;
+ }
+ destroy(reason) {
+ log('destroying CF socket', reason);
+ this.destroyed = true;
+ return this.end();
+ }
+ startTls(options) {
+ if (this._upgraded) {
+ // Don't try to upgrade again.
+ this.emit('error', 'Cannot call `startTls()` more than once on a socket');
+ return;
+ }
+ this._cfWriter.releaseLock();
+ this._cfReader.releaseLock();
+ this._upgrading = true;
+ this._cfSocket = this._cfSocket.startTls(options);
+ this._cfWriter = this._cfSocket.writable.getWriter();
+ this._cfReader = this._cfSocket.readable.getReader();
+ this._addClosedHandler();
+ this._listen().catch((e) => this.emit('error', e));
+ }
+ _addClosedHandler() {
+ this._cfSocket.closed.then(() => {
+ if (!this._upgrading) {
+ log('CF socket closed');
+ this._cfSocket = null;
+ this.emit('close');
+ }
+ else {
+ this._upgrading = false;
+ this._upgraded = true;
+ }
+ }).catch((e) => this.emit('error', e));
+ }
+}
+exports.CloudflareSocket = CloudflareSocket;
+const debug = false;
+function dump(data) {
+ if (data instanceof Uint8Array || data instanceof ArrayBuffer) {
+ const hex = Buffer.from(data).toString('hex');
+ const str = new TextDecoder().decode(data);
+ return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n`;
+ }
+ else {
+ return data;
+ }
+}
+function log(...args) {
+ debug && console.log(...args.map(dump));
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-cloudflare/dist/index.js.map b/api/node_modules/pg-cloudflare/dist/index.js.map
new file mode 100644
index 000000000..abfb848b6
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AACA,mCAAqC;AAErC;;GAEG;AACH,MAAa,gBAAiB,SAAQ,qBAAY;IAUhD,YAAqB,GAAY;QAC/B,KAAK,EAAE,CAAA;QADY,QAAG,GAAH,GAAG,CAAS;QATjC,aAAQ,GAAG,KAAK,CAAA;QAChB,cAAS,GAAG,KAAK,CAAA;QAET,eAAU,GAAG,KAAK,CAAA;QAClB,cAAS,GAAG,KAAK,CAAA;QACjB,cAAS,GAAkB,IAAI,CAAA;QAC/B,cAAS,GAAuC,IAAI,CAAA;QACpD,cAAS,GAAuC,IAAI,CAAA;IAI5D,CAAC;IAED,UAAU;QACR,OAAO,IAAI,CAAA;IACb,CAAC;IACD,YAAY;QACV,OAAO,IAAI,CAAA;IACb,CAAC;IACD,GAAG;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IACD,KAAK;QACH,OAAO,IAAI,CAAA;IACb,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,IAAY,EAAE,IAAY,EAAE,eAA8C;QACtF,IAAI;YACF,GAAG,CAAC,YAAY,CAAC,CAAA;YACjB,IAAI,eAAe;gBAAE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,eAAe,CAAC,CAAA;YAE1D,MAAM,OAAO,GAAkB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,eAAe,EAAE,UAAU,EAAE,CAAC,CAAC,CAAC,EAAE,CAAA;YAC9E,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,oBAAoB,CAAC,CAAA;YAC9C,MAAM,OAAO,GAAG,GAAG,CAAC,OAAO,CAAA;YAC3B,IAAI,CAAC,SAAS,GAAG,OAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE,EAAE,OAAO,CAAC,CAAA;YACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;YAExB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;YACpD,IAAI,IAAI,CAAC,GAAG,EAAE;gBACZ,IAAI,CAAC,WAAW,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACvD;iBAAM;gBACL,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;aACnD;YAED,MAAM,IAAI,CAAC,SAAU,CAAC,KAAK,CAAA;YAC3B,GAAG,CAAC,cAAc,CAAC,CAAA;YACnB,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAA;YACpB,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;YAEpB,OAAO,IAAI,CAAA;SACZ;QAAC,OAAO,CAAC,EAAE;YACV,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;SACtB;IACH,CAAC;IAED,KAAK,CAAC,OAAO;QACX,iDAAiD;QACjD,OAAO,IAAI,EAAE;YACX,GAAG,CAAC,iCAAiC,CAAC,CAAA;YACtC,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;YACpD,GAAG,CAAC,qBAAqB,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;YACvC,IAAI,IAAI,EAAE;gBACR,GAAG,CAAC,MAAM,CAAC,CAAA;gBA
CX,MAAK;aACN;YACD,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;SACtC;IACH,CAAC;IAED,KAAK,CAAC,WAAW;QACf,GAAG,CAAC,uCAAuC,CAAC,CAAA;QAC5C,MAAM,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,MAAM,IAAI,CAAC,SAAU,CAAC,IAAI,EAAE,CAAA;QACpD,GAAG,CAAC,2BAA2B,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;QAC7C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;IACvC,CAAC;IAED,KAAK,CACH,IAAyB,EACzB,WAA2B,MAAM,EACjC,WAAyC,GAAG,EAAE,GAAE,CAAC;QAEjD,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC;YAAE,OAAO,QAAQ,EAAE,CAAA;QACxC,IAAI,OAAO,IAAI,KAAK,QAAQ;YAAE,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;QAEhE,GAAG,CAAC,sBAAsB,EAAE,IAAI,CAAC,CAAA;QACjC,IAAI,CAAC,SAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,GAAG,EAAE;YACH,GAAG,CAAC,WAAW,CAAC,CAAA;YAChB,QAAQ,EAAE,CAAA;QACZ,CAAC,EACD,CAAC,GAAG,EAAE,EAAE;YACN,GAAG,CAAC,YAAY,EAAE,GAAG,CAAC,CAAA;YACtB,QAAQ,CAAC,GAAG,CAAC,CAAA;QACf,CAAC,CACF,CAAA;QACD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,GAAG,CAAC,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,WAA2B,MAAM,EAAE,WAAyC,GAAG,EAAE,GAAE,CAAC;QAC9G,GAAG,CAAC,kBAAkB,CAAC,CAAA;QACvB,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,QAAQ,EAAE,CAAC,GAAG,EAAE,EAAE;YACjC,IAAI,CAAC,SAAU,CAAC,KAAK,EAAE,CAAA;YACvB,IAAI,QAAQ;gBAAE,QAAQ,CAAC,GAAG,CAAC,CAAA;QAC7B,CAAC,CAAC,CAAA;QACF,OAAO,IAAI,CAAA;IACb,CAAC;IAED,OAAO,CAAC,MAAc;QACpB,GAAG,CAAC,sBAAsB,EAAE,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;QACrB,OAAO,IAAI,CAAC,GAAG,EAAE,CAAA;IACnB,CAAC;IAED,QAAQ,CAAC,OAAmB;QAC1B,IAAI,IAAI,CAAC,SAAS,EAAE;YAClB,8BAA8B;YAC9B,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,qDAAqD,CAAC,CAAA;YACzE,OAAM;SACP;QACD,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,SAAU,CAAC,WAAW,EAAE,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,IAAI,CAAA;QACtB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAU,CAAC,QAAQ,CAAC,OAAO,CAAC,CAAA;QAClD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAA;QACpD,IAAI,CAAC,iBAAiB,EAAE,CAAA;QACxB,IAAI,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,
CAAC,CAAC,CAAC,CAAA;IACpD,CAAC;IAED,iBAAiB;QACf,IAAI,CAAC,SAAU,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE;YAC/B,IAAI,CAAC,IAAI,CAAC,UAAU,EAAE;gBACpB,GAAG,CAAC,kBAAkB,CAAC,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;gBACrB,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,CAAA;aACnB;iBAAM;gBACL,IAAI,CAAC,UAAU,GAAG,KAAK,CAAA;gBACvB,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;aACtB;QACH,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAA;IACxC,CAAC;CACF;AA/ID,4CA+IC;AAED,MAAM,KAAK,GAAG,KAAK,CAAA;AAEnB,SAAS,IAAI,CAAC,IAAa;IACzB,IAAI,IAAI,YAAY,UAAU,IAAI,IAAI,YAAY,WAAW,EAAE;QAC7D,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAA;QAC7C,MAAM,GAAG,GAAG,IAAI,WAAW,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;QAC1C,OAAO,eAAe,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,KAAK,CAAC,eAAe,GAAG,IAAI,CAAA;KACtE;SAAM;QACL,OAAO,IAAI,CAAA;KACZ;AACH,CAAC;AAED,SAAS,GAAG,CAAC,GAAG,IAAe;IAC7B,KAAK,IAAI,OAAO,CAAC,GAAG,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAA;AACzC,CAAC"}
\ No newline at end of file
diff --git a/api/node_modules/pg-cloudflare/esm/index.mjs b/api/node_modules/pg-cloudflare/esm/index.mjs
new file mode 100644
index 000000000..6384216f5
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/esm/index.mjs
@@ -0,0 +1,3 @@
+import cf from '../dist/index.js'
+
+export const CloudflareSocket = cf.CloudflareSocket
diff --git a/api/node_modules/pg-cloudflare/package.json b/api/node_modules/pg-cloudflare/package.json
new file mode 100644
index 000000000..7eebd46db
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/package.json
@@ -0,0 +1,39 @@
+{
+ "name": "pg-cloudflare",
+ "version": "1.3.0",
+ "description": "A socket implementation that can run on Cloudflare Workers using native TCP connections.",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "license": "MIT",
+ "devDependencies": {
+ "ts-node": "^8.5.4",
+ "typescript": "^4.0.3"
+ },
+ "exports": {
+ ".": {
+ "workerd": {
+ "import": "./esm/index.mjs",
+ "require": "./dist/index.js"
+ },
+ "default": "./dist/empty.js"
+ },
+ "./package.json": "./package.json"
+ },
+ "scripts": {
+ "build": "tsc",
+ "build:watch": "tsc --watch",
+ "prepublish": "yarn build",
+ "test": "echo e2e test in pg package"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianc/node-postgres.git",
+ "directory": "packages/pg-cloudflare"
+ },
+ "files": [
+ "/dist/*{js,ts,map}",
+ "/src",
+ "/esm"
+ ],
+ "gitHead": "d10e09c888f94abf77382aba6f353ca665a1cf09"
+}
diff --git a/api/node_modules/pg-cloudflare/src/empty.ts b/api/node_modules/pg-cloudflare/src/empty.ts
new file mode 100644
index 000000000..f1e6740db
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/src/empty.ts
@@ -0,0 +1,3 @@
+// This is an empty module that is served up when outside of a workerd environment
+// See the `exports` field in package.json
+export default {}
diff --git a/api/node_modules/pg-cloudflare/src/index.ts b/api/node_modules/pg-cloudflare/src/index.ts
new file mode 100644
index 000000000..d83882efe
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/src/index.ts
@@ -0,0 +1,166 @@
+import { SocketOptions, Socket, TlsOptions } from 'cloudflare:sockets'
+import { EventEmitter } from 'events'
+
+/**
+ * Wrapper around the Cloudflare built-in socket that can be used by the `Connection`.
+ */
+export class CloudflareSocket extends EventEmitter {
+ writable = false
+ destroyed = false
+
+ private _upgrading = false
+ private _upgraded = false
+ private _cfSocket: Socket | null = null
+ private _cfWriter: WritableStreamDefaultWriter | null = null
+ private _cfReader: ReadableStreamDefaultReader | null = null
+
+ constructor(readonly ssl: boolean) {
+ super()
+ }
+
+ setNoDelay() {
+ return this
+ }
+ setKeepAlive() {
+ return this
+ }
+ ref() {
+ return this
+ }
+ unref() {
+ return this
+ }
+
+ async connect(port: number, host: string, connectListener?: (...args: unknown[]) => void) {
+ try {
+ log('connecting')
+ if (connectListener) this.once('connect', connectListener)
+
+ const options: SocketOptions = this.ssl ? { secureTransport: 'starttls' } : {}
+ const mod = await import('cloudflare:sockets')
+ const connect = mod.connect
+ this._cfSocket = connect(`${host}:${port}`, options)
+ this._cfWriter = this._cfSocket.writable.getWriter()
+ this._addClosedHandler()
+
+ this._cfReader = this._cfSocket.readable.getReader()
+ if (this.ssl) {
+ this._listenOnce().catch((e) => this.emit('error', e))
+ } else {
+ this._listen().catch((e) => this.emit('error', e))
+ }
+
+ await this._cfWriter!.ready
+ log('socket ready')
+ this.writable = true
+ this.emit('connect')
+
+ return this
+ } catch (e) {
+ this.emit('error', e)
+ }
+ }
+
+ async _listen() {
+ // eslint-disable-next-line no-constant-condition
+ while (true) {
+ log('awaiting receive from CF socket')
+ const { done, value } = await this._cfReader!.read()
+ log('CF socket received:', done, value)
+ if (done) {
+ log('done')
+ break
+ }
+ this.emit('data', Buffer.from(value))
+ }
+ }
+
+ async _listenOnce() {
+ log('awaiting first receive from CF socket')
+ const { done, value } = await this._cfReader!.read()
+ log('First CF socket received:', done, value)
+ this.emit('data', Buffer.from(value))
+ }
+
+ write(
+ data: Uint8Array | string,
+ encoding: BufferEncoding = 'utf8',
+ callback: (...args: unknown[]) => void = () => {}
+ ) {
+ if (data.length === 0) return callback()
+ if (typeof data === 'string') data = Buffer.from(data, encoding)
+
+ log('sending data direct:', data)
+ this._cfWriter!.write(data).then(
+ () => {
+ log('data sent')
+ callback()
+ },
+ (err) => {
+ log('send error', err)
+ callback(err)
+ }
+ )
+ return true
+ }
+
+ end(data = Buffer.alloc(0), encoding: BufferEncoding = 'utf8', callback: (...args: unknown[]) => void = () => {}) {
+ log('ending CF socket')
+ this.write(data, encoding, (err) => {
+ this._cfSocket!.close()
+ if (callback) callback(err)
+ })
+ return this
+ }
+
+ destroy(reason: string) {
+ log('destroying CF socket', reason)
+ this.destroyed = true
+ return this.end()
+ }
+
+ startTls(options: TlsOptions) {
+ if (this._upgraded) {
+ // Don't try to upgrade again.
+ this.emit('error', 'Cannot call `startTls()` more than once on a socket')
+ return
+ }
+ this._cfWriter!.releaseLock()
+ this._cfReader!.releaseLock()
+ this._upgrading = true
+ this._cfSocket = this._cfSocket!.startTls(options)
+ this._cfWriter = this._cfSocket.writable.getWriter()
+ this._cfReader = this._cfSocket.readable.getReader()
+ this._addClosedHandler()
+ this._listen().catch((e) => this.emit('error', e))
+ }
+
+ _addClosedHandler() {
+ this._cfSocket!.closed.then(() => {
+ if (!this._upgrading) {
+ log('CF socket closed')
+ this._cfSocket = null
+ this.emit('close')
+ } else {
+ this._upgrading = false
+ this._upgraded = true
+ }
+ }).catch((e) => this.emit('error', e))
+ }
+}
+
+const debug = false
+
+function dump(data: unknown) {
+ if (data instanceof Uint8Array || data instanceof ArrayBuffer) {
+ const hex = Buffer.from(data).toString('hex')
+ const str = new TextDecoder().decode(data)
+ return `\n>>> STR: "${str.replace(/\n/g, '\\n')}"\n>>> HEX: ${hex}\n`
+ } else {
+ return data
+ }
+}
+
+function log(...args: unknown[]) {
+ debug && console.log(...args.map(dump))
+}
diff --git a/api/node_modules/pg-cloudflare/src/types.d.ts b/api/node_modules/pg-cloudflare/src/types.d.ts
new file mode 100644
index 000000000..f6f1c3f2f
--- /dev/null
+++ b/api/node_modules/pg-cloudflare/src/types.d.ts
@@ -0,0 +1,25 @@
+declare module 'cloudflare:sockets' {
+ export class Socket {
+ public readonly readable: any
+ public readonly writable: any
+ public readonly closed: Promise
+ public close(): Promise
+ public startTls(options: TlsOptions): Socket
+ }
+
+ export type TlsOptions = {
+ expectedServerHostname?: string
+ }
+
+ export type SocketAddress = {
+ hostname: string
+ port: number
+ }
+
+ export type SocketOptions = {
+ secureTransport?: 'off' | 'on' | 'starttls'
+ allowHalfOpen?: boolean
+ }
+
+ export function connect(address: string | SocketAddress, options?: SocketOptions): Socket
+}
diff --git a/api/node_modules/pg-connection-string/LICENSE b/api/node_modules/pg-connection-string/LICENSE
new file mode 100644
index 000000000..b068a6cb2
--- /dev/null
+++ b/api/node_modules/pg-connection-string/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2014 Iced Development
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/api/node_modules/pg-connection-string/README.md b/api/node_modules/pg-connection-string/README.md
new file mode 100644
index 000000000..e47adc816
--- /dev/null
+++ b/api/node_modules/pg-connection-string/README.md
@@ -0,0 +1,105 @@
+pg-connection-string
+====================
+
+[](https://nodei.co/npm/pg-connection-string/)
+
+Functions for dealing with a PostgresSQL connection string
+
+`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git)
+Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
+MIT License
+
+## Usage
+
+```js
+const parse = require('pg-connection-string').parse;
+
+const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
+```
+
+The resulting config contains a subset of the following properties:
+
+* `user` - User with which to authenticate to the server
+* `password` - Corresponding password
+* `host` - Postgres server hostname or, for UNIX domain sockets, the socket filename
+* `port` - port on which to connect
+* `database` - Database name within the server
+* `client_encoding` - string encoding the client will use
+* `ssl`, either a boolean or an object with properties
+ * `rejectUnauthorized`
+ * `cert`
+ * `key`
+ * `ca`
+* any other query parameters (for example, `application_name`) are preserved intact.
+
+### ClientConfig Compatibility for TypeScript
+
+The pg-connection-string `ConnectionOptions` interface is not compatible with the `ClientConfig` interface that [pg.Client](https://node-postgres.com/apis/client) expects. To remedy this, use the `parseIntoClientConfig` function instead of `parse`:
+
+```ts
+import { ClientConfig } from 'pg';
+import { parseIntoClientConfig } from 'pg-connection-string';
+
+const config: ClientConfig = parseIntoClientConfig('postgres://someuser:somepassword@somehost:381/somedatabase')
+```
+
+You can also use `toClientConfig` to convert an existing `ConnectionOptions` interface into a `ClientConfig` interface:
+
+```ts
+import { ClientConfig } from 'pg';
+import { parse, toClientConfig } from 'pg-connection-string';
+
+const config = parse('postgres://someuser:somepassword@somehost:381/somedatabase')
+const clientConfig: ClientConfig = toClientConfig(config)
+```
+
+## Connection Strings
+
+The short summary of acceptable URLs is:
+
+ * `socket:?` - UNIX domain socket
+ * `postgres://:@:/?` - TCP connection
+
+But see below for more details.
+
+### UNIX Domain Sockets
+
+When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`.
+This form can be shortened to just a path: `/var/run/pgsql`.
+
+When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`.
+
+Query parameters follow a `?` character, including the following special query parameters:
+
+ * `db=` - sets the database name (urlencoded)
+ * `encoding=` - sets the `client_encoding` property
+
+### TCP Connections
+
+TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted).
+If username and password are included, they should be urlencoded.
+The database name, however, should *not* be urlencoded.
+
+Query parameters follow a `?` character, including the following special query parameters:
+ * `host=` - sets `host` property, overriding the URL's host
+ * `encoding=` - sets the `client_encoding` property
+ * `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly
+ * `uselibpqcompat=true` - use libpq semantics
+ * `sslmode=` when `uselibpqcompat=true` is not set
+ * `sslmode=disable` - sets `ssl` to false
+ * `sslmode=no-verify` - sets `ssl` to `{ rejectUnauthorized: false }`
+ * `sslmode=prefer`, `sslmode=require`, `sslmode=verify-ca`, `sslmode=verify-full` - sets `ssl` to true
+ * `sslmode=` when `uselibpqcompat=true`
+ * `sslmode=disable` - sets `ssl` to false
+ * `sslmode=prefer` - sets `ssl` to `{ rejectUnauthorized: false }`
+ * `sslmode=require` - sets `ssl` to `{ rejectUnauthorized: false }` unless `sslrootcert` is specified, in which case it behaves like `verify-ca`
+ * `sslmode=verify-ca` - sets `ssl` to `{ checkServerIdentity: no-op }` (verify CA, but not server identity). This verifies the presented certificate against the effective CA specified in sslrootcert.
+ * `sslmode=verify-full` - sets `ssl` to `{}` (verify CA and server identity)
+ * `sslcert=` - reads data from the given file and includes the result as `ssl.cert`
+ * `sslkey=` - reads data from the given file and includes the result as `ssl.key`
+ * `sslrootcert=` - reads data from the given file and includes the result as `ssl.ca`
+
+A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty.
+
+> [!CAUTION]
+> Choosing an sslmode other than verify-full has serious security implications. Please read https://www.postgresql.org/docs/current/libpq-ssl.html#LIBPQ-SSL-SSLMODE-STATEMENTS to understand the trade-offs.
diff --git a/api/node_modules/pg-connection-string/esm/index.mjs b/api/node_modules/pg-connection-string/esm/index.mjs
new file mode 100644
index 000000000..7b390c514
--- /dev/null
+++ b/api/node_modules/pg-connection-string/esm/index.mjs
@@ -0,0 +1,8 @@
+// ESM wrapper for pg-connection-string
+import connectionString from '../index.js'
+
+// Re-export the parse function
+export default connectionString.parse
+export const parse = connectionString.parse
+export const toClientConfig = connectionString.toClientConfig
+export const parseIntoClientConfig = connectionString.parseIntoClientConfig
diff --git a/api/node_modules/pg-connection-string/index.d.ts b/api/node_modules/pg-connection-string/index.d.ts
new file mode 100644
index 000000000..2ebe67534
--- /dev/null
+++ b/api/node_modules/pg-connection-string/index.d.ts
@@ -0,0 +1,36 @@
+import { ClientConfig } from 'pg'
+
+export function parse(connectionString: string, options?: Options): ConnectionOptions
+
+export interface Options {
+ // Use libpq semantics when interpreting the connection string
+ useLibpqCompat?: boolean
+}
+
+interface SSLConfig {
+ ca?: string
+ cert?: string | null
+ key?: string
+ rejectUnauthorized?: boolean
+}
+
+export interface ConnectionOptions {
+ host: string | null
+ password?: string
+ user?: string
+ port?: string | null
+ database: string | null | undefined
+ client_encoding?: string
+ ssl?: boolean | string | SSLConfig
+
+ application_name?: string
+ fallback_application_name?: string
+ options?: string
+ keepalives?: number
+
+ // We allow any other options to be passed through
+ [key: string]: unknown
+}
+
+export function toClientConfig(config: ConnectionOptions): ClientConfig
+export function parseIntoClientConfig(connectionString: string): ClientConfig
diff --git a/api/node_modules/pg-connection-string/index.js b/api/node_modules/pg-connection-string/index.js
new file mode 100644
index 000000000..29ffeafd7
--- /dev/null
+++ b/api/node_modules/pg-connection-string/index.js
@@ -0,0 +1,231 @@
+'use strict'
+
+//Parse method copied from https://github.com/brianc/node-postgres
+//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com)
+//MIT License
+
+//parses a connection string
+function parse(str, options = {}) {
+ //unix socket
+ if (str.charAt(0) === '/') {
+ const config = str.split(' ')
+ return { host: config[0], database: config[1] }
+ }
+
+ // Check for empty host in URL
+
+ const config = {}
+ let result
+ let dummyHost = false
+ if (/ |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str)) {
+ // Ensure spaces are encoded as %20
+ str = encodeURI(str).replace(/%25(\d\d)/g, '%$1')
+ }
+
+ try {
+ try {
+ result = new URL(str, 'postgres://base')
+ } catch (e) {
+ // The URL is invalid so try again with a dummy host
+ result = new URL(str.replace('@/', '@___DUMMY___/'), 'postgres://base')
+ dummyHost = true
+ }
+ } catch (err) {
+ // Remove the input from the error message to avoid leaking sensitive information
+ err.input && (err.input = '*****REDACTED*****')
+ throw err
+ }
+
+ // We'd like to use Object.fromEntries() here but Node.js 10 does not support it
+ for (const entry of result.searchParams.entries()) {
+ config[entry[0]] = entry[1]
+ }
+
+ config.user = config.user || decodeURIComponent(result.username)
+ config.password = config.password || decodeURIComponent(result.password)
+
+ if (result.protocol == 'socket:') {
+ config.host = decodeURI(result.pathname)
+ config.database = result.searchParams.get('db')
+ config.client_encoding = result.searchParams.get('encoding')
+ return config
+ }
+ const hostname = dummyHost ? '' : result.hostname
+ if (!config.host) {
+ // Only set the host if there is no equivalent query param.
+ config.host = decodeURIComponent(hostname)
+ } else if (hostname && /^%2f/i.test(hostname)) {
+ // Only prepend the hostname to the pathname if it is not a URL encoded Unix socket host.
+ result.pathname = hostname + result.pathname
+ }
+ if (!config.port) {
+ // Only set the port if there is no equivalent query param.
+ config.port = result.port
+ }
+
+ const pathname = result.pathname.slice(1) || null
+ config.database = pathname ? decodeURI(pathname) : null
+
+ if (config.ssl === 'true' || config.ssl === '1') {
+ config.ssl = true
+ }
+
+ if (config.ssl === '0') {
+ config.ssl = false
+ }
+
+ if (config.sslcert || config.sslkey || config.sslrootcert || config.sslmode) {
+ config.ssl = {}
+ }
+
+ // Only try to load fs if we expect to read from the disk
+ const fs = config.sslcert || config.sslkey || config.sslrootcert ? require('fs') : null
+
+ if (config.sslcert) {
+ config.ssl.cert = fs.readFileSync(config.sslcert).toString()
+ }
+
+ if (config.sslkey) {
+ config.ssl.key = fs.readFileSync(config.sslkey).toString()
+ }
+
+ if (config.sslrootcert) {
+ config.ssl.ca = fs.readFileSync(config.sslrootcert).toString()
+ }
+
+ if (options.useLibpqCompat && config.uselibpqcompat) {
+ throw new Error('Both useLibpqCompat and uselibpqcompat are set. Please use only one of them.')
+ }
+
+ if (config.uselibpqcompat === 'true' || options.useLibpqCompat) {
+ switch (config.sslmode) {
+ case 'disable': {
+ config.ssl = false
+ break
+ }
+ case 'prefer': {
+ config.ssl.rejectUnauthorized = false
+ break
+ }
+ case 'require': {
+ if (config.sslrootcert) {
+ // If a root CA is specified, behavior of `sslmode=require` will be the same as that of `verify-ca`
+ config.ssl.checkServerIdentity = function () {}
+ } else {
+ config.ssl.rejectUnauthorized = false
+ }
+ break
+ }
+ case 'verify-ca': {
+ if (!config.ssl.ca) {
+ throw new Error(
+ 'SECURITY WARNING: Using sslmode=verify-ca requires specifying a CA with sslrootcert. If a public CA is used, verify-ca allows connections to a server that somebody else may have registered with the CA, making you vulnerable to Man-in-the-Middle attacks. Either specify a custom CA certificate with sslrootcert parameter or use sslmode=verify-full for proper security.'
+ )
+ }
+ config.ssl.checkServerIdentity = function () {}
+ break
+ }
+ case 'verify-full': {
+ break
+ }
+ }
+ } else {
+ switch (config.sslmode) {
+ case 'disable': {
+ config.ssl = false
+ break
+ }
+ case 'prefer':
+ case 'require':
+ case 'verify-ca':
+ case 'verify-full': {
+ if (config.sslmode !== 'verify-full') {
+ deprecatedSslModeWarning(config.sslmode)
+ }
+ break
+ }
+ case 'no-verify': {
+ config.ssl.rejectUnauthorized = false
+ break
+ }
+ }
+ }
+
+ return config
+}
+
+// convert pg-connection-string ssl config to a ClientConfig.ConnectionOptions
+function toConnectionOptions(sslConfig) {
+ const connectionOptions = Object.entries(sslConfig).reduce((c, [key, value]) => {
+ // we explicitly check for undefined and null instead of `if (value)` because some
+ // options accept falsy values. Example: `ssl.rejectUnauthorized = false`
+ if (value !== undefined && value !== null) {
+ c[key] = value
+ }
+
+ return c
+ }, {})
+
+ return connectionOptions
+}
+
+// convert pg-connection-string config to a ClientConfig
+function toClientConfig(config) {
+ const poolConfig = Object.entries(config).reduce((c, [key, value]) => {
+ if (key === 'ssl') {
+ const sslConfig = value
+
+ if (typeof sslConfig === 'boolean') {
+ c[key] = sslConfig
+ }
+
+ if (typeof sslConfig === 'object') {
+ c[key] = toConnectionOptions(sslConfig)
+ }
+ } else if (value !== undefined && value !== null) {
+ if (key === 'port') {
+ // when port is not specified, it is converted into an empty string
+ // we want to avoid NaN or empty string as a values in ClientConfig
+ if (value !== '') {
+ const v = parseInt(value, 10)
+ if (isNaN(v)) {
+ throw new Error(`Invalid ${key}: ${value}`)
+ }
+
+ c[key] = v
+ }
+ } else {
+ c[key] = value
+ }
+ }
+
+ return c
+ }, {})
+
+ return poolConfig
+}
+
+// parses a connection string into ClientConfig
+function parseIntoClientConfig(str) {
+ return toClientConfig(parse(str))
+}
+
+function deprecatedSslModeWarning(sslmode) {
+ if (!deprecatedSslModeWarning.warned && typeof process !== 'undefined' && process.emitWarning) {
+ deprecatedSslModeWarning.warned = true
+ process.emitWarning(`SECURITY WARNING: The SSL modes 'prefer', 'require', and 'verify-ca' are treated as aliases for 'verify-full'.
+In the next major version (pg-connection-string v3.0.0 and pg v9.0.0), these modes will adopt standard libpq semantics, which have weaker security guarantees.
+
+To prepare for this change:
+- If you want the current behavior, explicitly use 'sslmode=verify-full'
+- If you want libpq compatibility now, use 'uselibpqcompat=true&sslmode=${sslmode}'
+
+See https://www.postgresql.org/docs/current/libpq-ssl.html for libpq SSL mode definitions.`)
+ }
+}
+
+module.exports = parse
+
+parse.parse = parse
+parse.toClientConfig = toClientConfig
+parse.parseIntoClientConfig = parseIntoClientConfig
diff --git a/api/node_modules/pg-connection-string/package.json b/api/node_modules/pg-connection-string/package.json
new file mode 100644
index 000000000..083f44be1
--- /dev/null
+++ b/api/node_modules/pg-connection-string/package.json
@@ -0,0 +1,52 @@
+{
+ "name": "pg-connection-string",
+ "version": "2.12.0",
+ "description": "Functions for dealing with a PostgresSQL connection string",
+ "main": "./index.js",
+ "types": "./index.d.ts",
+ "exports": {
+ ".": {
+ "types": "./index.d.ts",
+ "import": "./esm/index.mjs",
+ "require": "./index.js",
+ "default": "./index.js"
+ }
+ },
+ "scripts": {
+ "test": "nyc --reporter=lcov mocha && npm run check-coverage",
+ "check-coverage": "nyc check-coverage --statements 100 --branches 100 --lines 100 --functions 100"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianc/node-postgres.git",
+ "directory": "packages/pg-connection-string"
+ },
+ "keywords": [
+ "pg",
+ "connection",
+ "string",
+ "parse"
+ ],
+ "author": "Blaine Bublitz (http://iceddev.com/)",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/brianc/node-postgres/issues"
+ },
+ "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string",
+ "devDependencies": {
+ "@types/pg": "^8.12.0",
+ "chai": "^4.1.1",
+ "coveralls": "^3.0.4",
+ "istanbul": "^0.4.5",
+ "mocha": "^11.7.5",
+ "nyc": "^15",
+ "tsx": "^4.19.4",
+ "typescript": "^4.0.3"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts",
+ "esm"
+ ],
+ "gitHead": "c9070cc8d526fca65780cedc25c1966b57cf7532"
+}
diff --git a/api/node_modules/pg-int8/LICENSE b/api/node_modules/pg-int8/LICENSE
new file mode 100644
index 000000000..c56c9731c
--- /dev/null
+++ b/api/node_modules/pg-int8/LICENSE
@@ -0,0 +1,13 @@
+Copyright © 2017, Charmander <~@charmander.me>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/api/node_modules/pg-int8/README.md b/api/node_modules/pg-int8/README.md
new file mode 100644
index 000000000..ef2e60843
--- /dev/null
+++ b/api/node_modules/pg-int8/README.md
@@ -0,0 +1,16 @@
+[![Build status][ci image]][ci]
+
+64-bit big-endian signed integer-to-string conversion designed for [pg][].
+
+```js
+const readInt8 = require('pg-int8');
+
+readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7]))
+// '283686952306183'
+```
+
+
+ [pg]: https://github.com/brianc/node-postgres
+
+ [ci]: https://travis-ci.org/charmander/pg-int8
+ [ci image]: https://api.travis-ci.org/charmander/pg-int8.svg
diff --git a/api/node_modules/pg-int8/index.js b/api/node_modules/pg-int8/index.js
new file mode 100644
index 000000000..db7797503
--- /dev/null
+++ b/api/node_modules/pg-int8/index.js
@@ -0,0 +1,100 @@
+'use strict';
+
+// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer
+var BASE = 1000000;
+
+function readInt8(buffer) {
+ var high = buffer.readInt32BE(0);
+ var low = buffer.readUInt32BE(4);
+ var sign = '';
+
+ if (high < 0) {
+ high = ~high + (low === 0);
+ low = (~low + 1) >>> 0;
+ sign = '-';
+ }
+
+ var result = '';
+ var carry;
+ var t;
+ var digits;
+ var pad;
+ var l;
+ var i;
+
+ {
+ carry = high % BASE;
+ high = high / BASE >>> 0;
+
+ t = 0x100000000 * carry + low;
+ low = t / BASE >>> 0;
+ digits = '' + (t - BASE * low);
+
+ if (low === 0 && high === 0) {
+ return sign + digits + result;
+ }
+
+ pad = '';
+ l = 6 - digits.length;
+
+ for (i = 0; i < l; i++) {
+ pad += '0';
+ }
+
+ result = pad + digits + result;
+ }
+
+ {
+ carry = high % BASE;
+ high = high / BASE >>> 0;
+
+ t = 0x100000000 * carry + low;
+ low = t / BASE >>> 0;
+ digits = '' + (t - BASE * low);
+
+ if (low === 0 && high === 0) {
+ return sign + digits + result;
+ }
+
+ pad = '';
+ l = 6 - digits.length;
+
+ for (i = 0; i < l; i++) {
+ pad += '0';
+ }
+
+ result = pad + digits + result;
+ }
+
+ {
+ carry = high % BASE;
+ high = high / BASE >>> 0;
+
+ t = 0x100000000 * carry + low;
+ low = t / BASE >>> 0;
+ digits = '' + (t - BASE * low);
+
+ if (low === 0 && high === 0) {
+ return sign + digits + result;
+ }
+
+ pad = '';
+ l = 6 - digits.length;
+
+ for (i = 0; i < l; i++) {
+ pad += '0';
+ }
+
+ result = pad + digits + result;
+ }
+
+ {
+ carry = high % BASE;
+ t = 0x100000000 * carry + low;
+ digits = '' + t % BASE;
+
+ return sign + digits + result;
+ }
+}
+
+module.exports = readInt8;
diff --git a/api/node_modules/pg-int8/package.json b/api/node_modules/pg-int8/package.json
new file mode 100644
index 000000000..4b937e1b2
--- /dev/null
+++ b/api/node_modules/pg-int8/package.json
@@ -0,0 +1,24 @@
+{
+ "name": "pg-int8",
+ "version": "1.0.1",
+ "description": "64-bit big-endian signed integer-to-string conversion",
+ "bugs": "https://github.com/charmander/pg-int8/issues",
+ "license": "ISC",
+ "files": [
+ "index.js"
+ ],
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/charmander/pg-int8"
+ },
+ "scripts": {
+ "test": "tap test"
+ },
+ "devDependencies": {
+ "@charmander/eslint-config-base": "1.0.2",
+ "tap": "10.7.3"
+ },
+ "engines": {
+ "node": ">=4.0.0"
+ }
+}
diff --git a/api/node_modules/pg-pool/LICENSE b/api/node_modules/pg-pool/LICENSE
new file mode 100644
index 000000000..4e9058148
--- /dev/null
+++ b/api/node_modules/pg-pool/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Brian M. Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/api/node_modules/pg-pool/README.md b/api/node_modules/pg-pool/README.md
new file mode 100644
index 000000000..80c644788
--- /dev/null
+++ b/api/node_modules/pg-pool/README.md
@@ -0,0 +1,357 @@
+# pg-pool
+
+[](https://travis-ci.org/brianc/node-pg-pool)
+
+A connection pool for node-postgres
+
+## install
+
+```sh
+npm i pg-pool pg
+```
+
+## use
+
+### create
+
+to use pg-pool you must first create an instance of a pool
+
+```js
+const Pool = require('pg-pool')
+
+// by default the pool uses the same
+// configuration as whatever `pg` version you have installed
+const pool = new Pool()
+
+// you can pass properties to the pool
+// these properties are passed unchanged to both the node-postgres Client constructor
+// and the pool constructor, allowing you to fully configure the behavior of both
+const pool2 = new Pool({
+ database: 'postgres',
+ user: 'brianc',
+ password: 'secret!',
+ port: 5432,
+ ssl: true,
+ max: 20, // set pool max size to 20
+ idleTimeoutMillis: 1000, // close idle clients after 1 second
+ connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established
+ maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion)
+})
+
+// you can supply a custom client constructor
+// if you want to use the native postgres client
+const NativeClient = require('pg').native.Client
+const nativePool = new Pool({ Client: NativeClient })
+
+// you can even pool pg-native clients directly
+const PgNativeClient = require('pg-native')
+const pgNativePool = new Pool({ Client: PgNativeClient })
+```
+
+##### Note:
+
+The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL.
+
+```js
+const Pool = require('pg-pool')
+const url = require('url')
+
+const params = url.parse(process.env.DATABASE_URL)
+const auth = params.auth.split(':')
+
+const config = {
+ user: auth[0],
+ password: auth[1],
+ host: params.hostname,
+ port: params.port,
+ database: params.pathname.split('/')[1],
+ ssl: true,
+}
+
+const pool = new Pool(config)
+
+/*
+ Transforms, 'postgres://DBuser:secret@DBHost:#####/myDB', into
+ config = {
+ user: 'DBuser',
+ password: 'secret',
+ host: 'DBHost',
+ port: '#####',
+ database: 'myDB',
+ ssl: true
+ }
+*/
+```
+
+### acquire clients with a promise
+
+pg-pool supports a fully promise-based api for acquiring clients
+
+```js
+const pool = new Pool()
+pool.connect().then((client) => {
+ client
+ .query('select $1::text as name', ['pg-pool'])
+ .then((res) => {
+ client.release()
+ console.log('hello from', res.rows[0].name)
+ })
+ .catch((e) => {
+ client.release()
+ console.error('query error', e.message, e.stack)
+ })
+})
+```
+
+### plays nice with async/await
+
+this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await:
+
+```js
+// with async/await
+;(async () => {
+ const pool = new Pool()
+ const client = await pool.connect()
+ try {
+ const result = await client.query('select $1::text as name', ['brianc'])
+ console.log('hello from', result.rows[0])
+ } finally {
+ client.release()
+ }
+})().catch((e) => console.error(e.message, e.stack))
+
+// with co
+co(function* () {
+ const client = yield pool.connect()
+ try {
+ const result = yield client.query('select $1::text as name', ['brianc'])
+ console.log('hello from', result.rows[0])
+ } finally {
+ client.release()
+ }
+}).catch((e) => console.error(e.message, e.stack))
+```
+
+### your new favorite helper method
+
+because its so common to just run a query and return the client to the pool afterward pg-pool has this built-in:
+
+```js
+const pool = new Pool()
+const time = await pool.query('SELECT NOW()')
+const name = await pool.query('select $1::text as name', ['brianc'])
+console.log(name.rows[0].name, 'says hello at', time.rows[0].now)
+```
+
+you can also use a callback here if you'd like:
+
+```js
+const pool = new Pool()
+pool.query('SELECT $1::text as name', ['brianc'], function (err, res) {
+ console.log(res.rows[0].name) // brianc
+})
+```
+
+**pro tip:** unless you need to run a transaction (which requires a single client for multiple queries) or you
+have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor)
+you should almost always just use `pool.query`. Its easy, it does the right thing :tm:, and wont ever forget to return
+clients back to the pool after the query is done.
+
+### drop-in backwards compatible
+
+pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years:
+
+```js
+const pool = new Pool()
+pool.connect((err, client, done) => {
+ if (err) return done(err)
+
+ client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => {
+ done()
+ if (err) {
+ return console.error('query error', err.message, err.stack)
+ }
+ console.log('hello from', res.rows[0].name)
+ })
+})
+```
+
+### shut it down
+
+When you are finished with the pool if all the clients are idle the pool will close them after `config.idleTimeoutMillis` and your app
+will shutdown gracefully. If you don't want to wait for the timeout you can end the pool as follows:
+
+```js
+const pool = new Pool()
+const client = await pool.connect()
+console.log(await client.query('select now()'))
+client.release()
+await pool.end()
+```
+
+### a note on instances
+
+The pool should be a **long-lived object** in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example:
+
+```js
+// assume this is a file in your program at ./your-app/lib/db.js
+
+// correct usage: create the pool and let it live
+// 'globally' here, controlling access to it through exported methods
+const pool = new pg.Pool()
+
+// this is the right way to export the query method
+module.exports.query = (text, values) => {
+ console.log('query:', text, values)
+ return pool.query(text, values)
+}
+
+// this would be the WRONG way to export the connect method
+module.exports.connect = () => {
+ // notice how we would be creating a pool instance here
+ // every time we called 'connect' to get a new client?
+ // that's a bad thing & results in creating an unbounded
+ // number of pools & therefore connections
+ const aPool = new pg.Pool()
+ return aPool.connect()
+}
+```
+
+### events
+
+Every instance of a `Pool` is an event emitter. These instances emit the following events:
+
+#### error
+
+Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients.
+
+Example:
+
+```js
+const Pool = require('pg-pool')
+const pool = new Pool()
+
+// attach an error handler to the pool for when a connected, idle client
+// receives an error by being disconnected, etc
+pool.on('error', function (error, client) {
+ // handle this in the same way you would treat process.on('uncaughtException')
+ // it is supplied the error as well as the idle client which received the error
+})
+```
+
+#### connect
+
+Fired whenever the pool creates a **new** `pg.Client` instance and successfully connects it to the backend.
+
+Example:
+
+```js
+const Pool = require('pg-pool')
+const pool = new Pool()
+
+const count = 0
+
+pool.on('connect', (client) => {
+ client.count = count++
+})
+
+pool
+ .connect()
+ .then((client) => {
+ return client
+ .query('SELECT $1::int AS "clientCount"', [client.count])
+ .then((res) => console.log(res.rows[0].clientCount)) // outputs 0
+ .then(() => client)
+ })
+ .then((client) => client.release())
+```
+
+#### acquire
+
+Fired whenever a client is acquired from the pool
+
+Example:
+
+This allows you to count the number of clients which have ever been acquired from the pool.
+
+```js
+const Pool = require('pg-pool')
+const pool = new Pool()
+
+const acquireCount = 0
+pool.on('acquire', function (client) {
+ acquireCount++
+})
+
+const connectCount = 0
+pool.on('connect', function () {
+ connectCount++
+})
+
+for (let i = 0; i < 200; i++) {
+ pool.query('SELECT NOW()')
+}
+
+setTimeout(function () {
+ console.log('connect count:', connectCount) // output: connect count: 10
+ console.log('acquire count:', acquireCount) // output: acquire count: 200
+}, 100)
+```
+
+### environment variables
+
+pg-pool & node-postgres support some of the same environment variables as `psql` supports. The most common are:
+
+```
+PGDATABASE=my_db
+PGUSER=username
+PGPASSWORD="my awesome password"
+PGPORT=5432
+PGSSLMODE=require
+```
+
+Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box.
+
+## maxUses and read-replica autoscaling (e.g. AWS Aurora)
+
+The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections.
+
+The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing.
+
+Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections.
+
+If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas.
+
+This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance.
+
+You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value:
+
+```
+maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
+```
+
+In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window:
+
+```
+maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize
+ 7200 = 1800 * 1000 / 10 / 25
+```
+
+## tests
+
+To run tests clone the repo, `npm i` in the working dir, and then run `npm test`
+
+## contributions
+
+I love contributions. Please make sure they have tests, and submit a PR. If you're not sure if the issue is worth it or will be accepted it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres releated stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there.
+
+## license
+
+The MIT License (MIT)
+Copyright (c) 2016 Brian M. Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/api/node_modules/pg-pool/esm/index.mjs b/api/node_modules/pg-pool/esm/index.mjs
new file mode 100644
index 000000000..a97fb624d
--- /dev/null
+++ b/api/node_modules/pg-pool/esm/index.mjs
@@ -0,0 +1,5 @@
+// ESM wrapper for pg-pool
+import Pool from '../index.js'
+
+// Export as default only to match CJS module
+export default Pool
diff --git a/api/node_modules/pg-pool/index.js b/api/node_modules/pg-pool/index.js
new file mode 100644
index 000000000..2fbdb78d5
--- /dev/null
+++ b/api/node_modules/pg-pool/index.js
@@ -0,0 +1,517 @@
+'use strict'
+const EventEmitter = require('events').EventEmitter
+
+const NOOP = function () {}
+
+const removeWhere = (list, predicate) => {
+ const i = list.findIndex(predicate)
+
+ return i === -1 ? undefined : list.splice(i, 1)[0]
+}
+
+class IdleItem {
+ constructor(client, idleListener, timeoutId) {
+ this.client = client
+ this.idleListener = idleListener
+ this.timeoutId = timeoutId
+ }
+}
+
+class PendingItem {
+ constructor(callback) {
+ this.callback = callback
+ }
+}
+
+function throwOnDoubleRelease() {
+ throw new Error('Release called on client which has already been released to the pool.')
+}
+
+function promisify(Promise, callback) {
+ if (callback) {
+ return { callback: callback, result: undefined }
+ }
+ let rej
+ let res
+ const cb = function (err, client) {
+ err ? rej(err) : res(client)
+ }
+ const result = new Promise(function (resolve, reject) {
+ res = resolve
+ rej = reject
+ }).catch((err) => {
+ // replace the stack trace that leads to `TCP.onStreamRead` with one that leads back to the
+ // application that created the query
+ Error.captureStackTrace(err)
+ throw err
+ })
+ return { callback: cb, result: result }
+}
+
+function makeIdleListener(pool, client) {
+ return function idleListener(err) {
+ err.client = client
+
+ client.removeListener('error', idleListener)
+ client.on('error', () => {
+ pool.log('additional client error after disconnection due to error', err)
+ })
+ pool._remove(client)
+ // TODO - document that once the pool emits an error
+ // the client has already been closed & purged and is unusable
+ pool.emit('error', err, client)
+ }
+}
+
+class Pool extends EventEmitter {
+ constructor(options, Client) {
+ super()
+ this.options = Object.assign({}, options)
+
+ if (options != null && 'password' in options) {
+ // "hiding" the password so it doesn't show up in stack traces
+ // or if the client is console.logged
+ Object.defineProperty(this.options, 'password', {
+ configurable: true,
+ enumerable: false,
+ writable: true,
+ value: options.password,
+ })
+ }
+ if (options != null && options.ssl && options.ssl.key) {
+ // "hiding" the ssl->key so it doesn't show up in stack traces
+ // or if the client is console.logged
+ Object.defineProperty(this.options.ssl, 'key', {
+ enumerable: false,
+ })
+ }
+
+ this.options.max = this.options.max || this.options.poolSize || 10
+ this.options.min = this.options.min || 0
+ this.options.maxUses = this.options.maxUses || Infinity
+ this.options.allowExitOnIdle = this.options.allowExitOnIdle || false
+ this.options.maxLifetimeSeconds = this.options.maxLifetimeSeconds || 0
+ this.log = this.options.log || function () {}
+ this.Client = this.options.Client || Client || require('pg').Client
+ this.Promise = this.options.Promise || global.Promise
+
+ if (typeof this.options.idleTimeoutMillis === 'undefined') {
+ this.options.idleTimeoutMillis = 10000
+ }
+
+ this._clients = []
+ this._idle = []
+ this._expired = new WeakSet()
+ this._pendingQueue = []
+ this._endCallback = undefined
+ this.ending = false
+ this.ended = false
+ }
+
+ _promiseTry(f) {
+ const Promise = this.Promise
+ if (typeof Promise.try === 'function') {
+ return Promise.try(f)
+ }
+ return new Promise((resolve) => resolve(f()))
+ }
+
+ _isFull() {
+ return this._clients.length >= this.options.max
+ }
+
+ _isAboveMin() {
+ return this._clients.length > this.options.min
+ }
+
+ _pulseQueue() {
+ this.log('pulse queue')
+ if (this.ended) {
+ this.log('pulse queue ended')
+ return
+ }
+ if (this.ending) {
+ this.log('pulse queue on ending')
+ if (this._idle.length) {
+ this._idle.slice().map((item) => {
+ this._remove(item.client)
+ })
+ }
+ if (!this._clients.length) {
+ this.ended = true
+ this._endCallback()
+ }
+ return
+ }
+
+ // if we don't have any waiting, do nothing
+ if (!this._pendingQueue.length) {
+ this.log('no queued requests')
+ return
+ }
+ // if we don't have any idle clients and we have no more room do nothing
+ if (!this._idle.length && this._isFull()) {
+ return
+ }
+ const pendingItem = this._pendingQueue.shift()
+ if (this._idle.length) {
+ const idleItem = this._idle.pop()
+ clearTimeout(idleItem.timeoutId)
+ const client = idleItem.client
+ client.ref && client.ref()
+ const idleListener = idleItem.idleListener
+
+ return this._acquireClient(client, pendingItem, idleListener, false)
+ }
+ if (!this._isFull()) {
+ return this.newClient(pendingItem)
+ }
+ throw new Error('unexpected condition')
+ }
+
+ _remove(client, callback) {
+ const removed = removeWhere(this._idle, (item) => item.client === client)
+
+ if (removed !== undefined) {
+ clearTimeout(removed.timeoutId)
+ }
+
+ this._clients = this._clients.filter((c) => c !== client)
+ const context = this
+ client.end(() => {
+ context.emit('remove', client)
+
+ if (typeof callback === 'function') {
+ callback()
+ }
+ })
+ }
+
+ connect(cb) {
+ if (this.ending) {
+ const err = new Error('Cannot use a pool after calling end on the pool')
+ return cb ? cb(err) : this.Promise.reject(err)
+ }
+
+ const response = promisify(this.Promise, cb)
+ const result = response.result
+
+ // if we don't have to connect a new client, don't do so
+ if (this._isFull() || this._idle.length) {
+ // if we have idle clients schedule a pulse immediately
+ if (this._idle.length) {
+ process.nextTick(() => this._pulseQueue())
+ }
+
+ if (!this.options.connectionTimeoutMillis) {
+ this._pendingQueue.push(new PendingItem(response.callback))
+ return result
+ }
+
+ const queueCallback = (err, res, done) => {
+ clearTimeout(tid)
+ response.callback(err, res, done)
+ }
+
+ const pendingItem = new PendingItem(queueCallback)
+
+ // set connection timeout on checking out an existing client
+ const tid = setTimeout(() => {
+ // remove the callback from pending waiters because
+ // we're going to call it with a timeout error
+ removeWhere(this._pendingQueue, (i) => i.callback === queueCallback)
+ pendingItem.timedOut = true
+ response.callback(new Error('timeout exceeded when trying to connect'))
+ }, this.options.connectionTimeoutMillis)
+
+ if (tid.unref) {
+ tid.unref()
+ }
+
+ this._pendingQueue.push(pendingItem)
+ return result
+ }
+
+ this.newClient(new PendingItem(response.callback))
+
+ return result
+ }
+
+ newClient(pendingItem) {
+ const client = new this.Client(this.options)
+ this._clients.push(client)
+ const idleListener = makeIdleListener(this, client)
+
+ this.log('checking client timeout')
+
+ // connection timeout logic
+ let tid
+ let timeoutHit = false
+ if (this.options.connectionTimeoutMillis) {
+ tid = setTimeout(() => {
+ if (client.connection) {
+ this.log('ending client due to timeout')
+ timeoutHit = true
+ client.connection.stream.destroy()
+ } else if (!client.isConnected()) {
+ this.log('ending client due to timeout')
+ timeoutHit = true
+ // force kill the node driver, and let libpq do its teardown
+ client.end()
+ }
+ }, this.options.connectionTimeoutMillis)
+ }
+
+ this.log('connecting new client')
+ client.connect((err) => {
+ if (tid) {
+ clearTimeout(tid)
+ }
+ client.on('error', idleListener)
+ if (err) {
+ this.log('client failed to connect', err)
+ // remove the dead client from our list of clients
+ this._clients = this._clients.filter((c) => c !== client)
+ if (timeoutHit) {
+ err = new Error('Connection terminated due to connection timeout', { cause: err })
+ }
+
+ // this client won’t be released, so move on immediately
+ this._pulseQueue()
+
+ if (!pendingItem.timedOut) {
+ pendingItem.callback(err, undefined, NOOP)
+ }
+ } else {
+ this.log('new client connected')
+
+ if (this.options.onConnect) {
+ this._promiseTry(() => this.options.onConnect(client)).then(
+ () => {
+ this._afterConnect(client, pendingItem, idleListener)
+ },
+ (hookErr) => {
+ this._clients = this._clients.filter((c) => c !== client)
+ client.end(() => {
+ this._pulseQueue()
+ if (!pendingItem.timedOut) {
+ pendingItem.callback(hookErr, undefined, NOOP)
+ }
+ })
+ }
+ )
+ return
+ }
+
+ return this._afterConnect(client, pendingItem, idleListener)
+ }
+ })
+ }
+
+ _afterConnect(client, pendingItem, idleListener) {
+ if (this.options.maxLifetimeSeconds !== 0) {
+ const maxLifetimeTimeout = setTimeout(() => {
+ this.log('ending client due to expired lifetime')
+ this._expired.add(client)
+ const idleIndex = this._idle.findIndex((idleItem) => idleItem.client === client)
+ if (idleIndex !== -1) {
+ this._acquireClient(
+ client,
+ new PendingItem((err, client, clientRelease) => clientRelease()),
+ idleListener,
+ false
+ )
+ }
+ }, this.options.maxLifetimeSeconds * 1000)
+
+ maxLifetimeTimeout.unref()
+ client.once('end', () => clearTimeout(maxLifetimeTimeout))
+ }
+
+ return this._acquireClient(client, pendingItem, idleListener, true)
+ }
+
+ // acquire a client for a pending work item
+ _acquireClient(client, pendingItem, idleListener, isNew) {
+ if (isNew) {
+ this.emit('connect', client)
+ }
+
+ this.emit('acquire', client)
+
+ client.release = this._releaseOnce(client, idleListener)
+
+ client.removeListener('error', idleListener)
+
+ if (!pendingItem.timedOut) {
+ if (isNew && this.options.verify) {
+ this.options.verify(client, (err) => {
+ if (err) {
+ client.release(err)
+ return pendingItem.callback(err, undefined, NOOP)
+ }
+
+ pendingItem.callback(undefined, client, client.release)
+ })
+ } else {
+ pendingItem.callback(undefined, client, client.release)
+ }
+ } else {
+ if (isNew && this.options.verify) {
+ this.options.verify(client, client.release)
+ } else {
+ client.release()
+ }
+ }
+ }
+
+ // returns a function that wraps _release and throws if called more than once
+ _releaseOnce(client, idleListener) {
+ let released = false
+
+ return (err) => {
+ if (released) {
+ throwOnDoubleRelease()
+ }
+
+ released = true
+ this._release(client, idleListener, err)
+ }
+ }
+
+ // release a client back to the poll, include an error
+ // to remove it from the pool
+ _release(client, idleListener, err) {
+ client.on('error', idleListener)
+
+ client._poolUseCount = (client._poolUseCount || 0) + 1
+
+ this.emit('release', err, client)
+
+ // TODO(bmc): expose a proper, public interface _queryable and _ending
+ if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) {
+ if (client._poolUseCount >= this.options.maxUses) {
+ this.log('remove expended client')
+ }
+
+ return this._remove(client, this._pulseQueue.bind(this))
+ }
+
+ const isExpired = this._expired.has(client)
+ if (isExpired) {
+ this.log('remove expired client')
+ this._expired.delete(client)
+ return this._remove(client, this._pulseQueue.bind(this))
+ }
+
+ // idle timeout
+ let tid
+ if (this.options.idleTimeoutMillis && this._isAboveMin()) {
+ tid = setTimeout(() => {
+ if (this._isAboveMin()) {
+ this.log('remove idle client')
+ this._remove(client, this._pulseQueue.bind(this))
+ }
+ }, this.options.idleTimeoutMillis)
+
+ if (this.options.allowExitOnIdle) {
+ // allow Node to exit if this is all that's left
+ tid.unref()
+ }
+ }
+
+ if (this.options.allowExitOnIdle) {
+ client.unref()
+ }
+
+ this._idle.push(new IdleItem(client, idleListener, tid))
+ this._pulseQueue()
+ }
+
+ query(text, values, cb) {
+ // guard clause against passing a function as the first parameter
+ if (typeof text === 'function') {
+ const response = promisify(this.Promise, text)
+ setImmediate(function () {
+ return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported'))
+ })
+ return response.result
+ }
+
+ // allow plain text query without values
+ if (typeof values === 'function') {
+ cb = values
+ values = undefined
+ }
+ const response = promisify(this.Promise, cb)
+ cb = response.callback
+
+ this.connect((err, client) => {
+ if (err) {
+ return cb(err)
+ }
+
+ let clientReleased = false
+ const onError = (err) => {
+ if (clientReleased) {
+ return
+ }
+ clientReleased = true
+ client.release(err)
+ cb(err)
+ }
+
+ client.once('error', onError)
+ this.log('dispatching query')
+ try {
+ client.query(text, values, (err, res) => {
+ this.log('query dispatched')
+ client.removeListener('error', onError)
+ if (clientReleased) {
+ return
+ }
+ clientReleased = true
+ client.release(err)
+ if (err) {
+ return cb(err)
+ }
+ return cb(undefined, res)
+ })
+ } catch (err) {
+ client.release(err)
+ return cb(err)
+ }
+ })
+ return response.result
+ }
+
+ end(cb) {
+ this.log('ending')
+ if (this.ending) {
+ const err = new Error('Called end on pool more than once')
+ return cb ? cb(err) : this.Promise.reject(err)
+ }
+ this.ending = true
+ const promised = promisify(this.Promise, cb)
+ this._endCallback = promised.callback
+ this._pulseQueue()
+ return promised.result
+ }
+
+ get waitingCount() {
+ return this._pendingQueue.length
+ }
+
+ get idleCount() {
+ return this._idle.length
+ }
+
+ get expiredCount() {
+ return this._clients.reduce((acc, client) => acc + (this._expired.has(client) ? 1 : 0), 0)
+ }
+
+ get totalCount() {
+ return this._clients.length
+ }
+}
+module.exports = Pool
diff --git a/api/node_modules/pg-pool/package.json b/api/node_modules/pg-pool/package.json
new file mode 100644
index 000000000..a7fe5873d
--- /dev/null
+++ b/api/node_modules/pg-pool/package.json
@@ -0,0 +1,51 @@
+{
+ "name": "pg-pool",
+ "version": "3.13.0",
+ "description": "Connection pool for node-postgres",
+ "main": "index.js",
+ "exports": {
+ ".": {
+ "import": "./esm/index.mjs",
+ "require": "./index.js",
+ "default": "./index.js"
+ }
+ },
+ "directories": {
+ "test": "test"
+ },
+ "scripts": {
+ "test": " node_modules/.bin/mocha"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianc/node-postgres.git",
+ "directory": "packages/pg-pool"
+ },
+ "keywords": [
+ "pg",
+ "postgres",
+ "pool",
+ "database"
+ ],
+ "author": "Brian M. Carlson",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/brianc/node-postgres/issues"
+ },
+ "homepage": "https://github.com/brianc/node-postgres/tree/master/packages/pg-pool#readme",
+ "devDependencies": {
+ "bluebird": "3.7.2",
+ "co": "4.6.0",
+ "expect.js": "0.3.1",
+ "lodash": "^4.17.11",
+ "mocha": "^11.7.5"
+ },
+ "peerDependencies": {
+ "pg": ">=8.0"
+ },
+ "files": [
+ "index.js",
+ "esm"
+ ],
+ "gitHead": "c9070cc8d526fca65780cedc25c1966b57cf7532"
+}
diff --git a/api/node_modules/pg-protocol/LICENSE b/api/node_modules/pg-protocol/LICENSE
new file mode 100644
index 000000000..5c1405646
--- /dev/null
+++ b/api/node_modules/pg-protocol/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2010 - 2021 Brian Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/api/node_modules/pg-protocol/README.md b/api/node_modules/pg-protocol/README.md
new file mode 100644
index 000000000..8c52e40ec
--- /dev/null
+++ b/api/node_modules/pg-protocol/README.md
@@ -0,0 +1,3 @@
+# pg-protocol
+
+Low level postgres wire protocol parser and serializer written in Typescript. Used by node-postgres. Needs more documentation. :smile:
diff --git a/api/node_modules/pg-protocol/dist/b.d.ts b/api/node_modules/pg-protocol/dist/b.d.ts
new file mode 100644
index 000000000..cb0ff5c3b
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/b.d.ts
@@ -0,0 +1 @@
+export {};
diff --git a/api/node_modules/pg-protocol/dist/b.js b/api/node_modules/pg-protocol/dist/b.js
new file mode 100644
index 000000000..6c47c1058
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/b.js
@@ -0,0 +1,23 @@
+"use strict";
+// file for microbenchmarking
+Object.defineProperty(exports, "__esModule", { value: true });
+const buffer_reader_1 = require("./buffer-reader");
+const LOOPS = 1000;
+let count = 0;
+const start = performance.now();
+const reader = new buffer_reader_1.BufferReader();
+const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]);
+const run = () => {
+ if (count > LOOPS) {
+ console.log(performance.now() - start);
+ return;
+ }
+ count++;
+ for (let i = 0; i < LOOPS; i++) {
+ reader.setBuffer(0, buffer);
+ reader.cstring();
+ }
+ setImmediate(run);
+};
+run();
+//# sourceMappingURL=b.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/b.js.map b/api/node_modules/pg-protocol/dist/b.js.map
new file mode 100644
index 000000000..ccc8d6f33
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/b.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,MAAM,KAAK,GAAG,WAAW,CAAC,GAAG,EAAE,CAAA;AAE/B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QACtC,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/buffer-reader.d.ts b/api/node_modules/pg-protocol/dist/buffer-reader.d.ts
new file mode 100644
index 000000000..41f753ec8
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/buffer-reader.d.ts
@@ -0,0 +1,15 @@
+///
+export declare class BufferReader {
+ private offset;
+ private buffer;
+ private encoding;
+ constructor(offset?: number);
+ setBuffer(offset: number, buffer: Buffer): void;
+ int16(): number;
+ byte(): number;
+ int32(): number;
+ uint32(): number;
+ string(length: number): string;
+ cstring(): string;
+ bytes(length: number): Buffer;
+}
diff --git a/api/node_modules/pg-protocol/dist/buffer-reader.js b/api/node_modules/pg-protocol/dist/buffer-reader.js
new file mode 100644
index 000000000..1679fa871
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/buffer-reader.js
@@ -0,0 +1,55 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BufferReader = void 0;
+class BufferReader {
+ constructor(offset = 0) {
+ this.offset = offset;
+ this.buffer = Buffer.allocUnsafe(0);
+ // TODO(bmc): support non-utf8 encoding?
+ this.encoding = 'utf-8';
+ }
+ setBuffer(offset, buffer) {
+ this.offset = offset;
+ this.buffer = buffer;
+ }
+ int16() {
+ const result = this.buffer.readInt16BE(this.offset);
+ this.offset += 2;
+ return result;
+ }
+ byte() {
+ const result = this.buffer[this.offset];
+ this.offset++;
+ return result;
+ }
+ int32() {
+ const result = this.buffer.readInt32BE(this.offset);
+ this.offset += 4;
+ return result;
+ }
+ uint32() {
+ const result = this.buffer.readUInt32BE(this.offset);
+ this.offset += 4;
+ return result;
+ }
+ string(length) {
+ const result = this.buffer.toString(this.encoding, this.offset, this.offset + length);
+ this.offset += length;
+ return result;
+ }
+ cstring() {
+ const start = this.offset;
+ let end = start;
+ // eslint-disable-next-line no-empty
+ while (this.buffer[end++] !== 0) { }
+ this.offset = end;
+ return this.buffer.toString(this.encoding, start, end - 1);
+ }
+ bytes(length) {
+ const result = this.buffer.slice(this.offset, this.offset + length);
+ this.offset += length;
+ return result;
+ }
+}
+exports.BufferReader = BufferReader;
+//# sourceMappingURL=buffer-reader.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/buffer-reader.js.map b/api/node_modules/pg-protocol/dist/buffer-reader.js.map
new file mode 100644
index 000000000..c8459d044
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/buffer-reader.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;;AAAA,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;QAE9C,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM;QACX,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACpD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,oCAAoC;QACpC,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAzDD,oCAyDC"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/buffer-writer.d.ts b/api/node_modules/pg-protocol/dist/buffer-writer.d.ts
new file mode 100644
index 000000000..4ac41e690
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/buffer-writer.d.ts
@@ -0,0 +1,16 @@
+///
+export declare class Writer {
+ private size;
+ private buffer;
+ private offset;
+ private headerPosition;
+ constructor(size?: number);
+ private ensure;
+ addInt32(num: number): Writer;
+ addInt16(num: number): Writer;
+ addCString(string: string): Writer;
+ addString(string?: string): Writer;
+ add(otherBuffer: Buffer): Writer;
+ private join;
+ flush(code?: number): Buffer;
+}
diff --git a/api/node_modules/pg-protocol/dist/buffer-writer.js b/api/node_modules/pg-protocol/dist/buffer-writer.js
new file mode 100644
index 000000000..76b3f8c69
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/buffer-writer.js
@@ -0,0 +1,81 @@
+"use strict";
+//binary data writer tuned for encoding binary specific to the postgres binary protocol
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Writer = void 0;
+class Writer {
+ constructor(size = 256) {
+ this.size = size;
+ this.offset = 5;
+ this.headerPosition = 0;
+ this.buffer = Buffer.allocUnsafe(size);
+ }
+ ensure(size) {
+ const remaining = this.buffer.length - this.offset;
+ if (remaining < size) {
+ const oldBuffer = this.buffer;
+ // exponential growth factor of around ~ 1.5
+ // https://stackoverflow.com/questions/2269063/buffer-growth-strategy
+ const newSize = oldBuffer.length + (oldBuffer.length >> 1) + size;
+ this.buffer = Buffer.allocUnsafe(newSize);
+ oldBuffer.copy(this.buffer);
+ }
+ }
+ addInt32(num) {
+ this.ensure(4);
+ this.buffer[this.offset++] = (num >>> 24) & 0xff;
+ this.buffer[this.offset++] = (num >>> 16) & 0xff;
+ this.buffer[this.offset++] = (num >>> 8) & 0xff;
+ this.buffer[this.offset++] = (num >>> 0) & 0xff;
+ return this;
+ }
+ addInt16(num) {
+ this.ensure(2);
+ this.buffer[this.offset++] = (num >>> 8) & 0xff;
+ this.buffer[this.offset++] = (num >>> 0) & 0xff;
+ return this;
+ }
+ addCString(string) {
+ if (!string) {
+ this.ensure(1);
+ }
+ else {
+ const len = Buffer.byteLength(string);
+ this.ensure(len + 1); // +1 for null terminator
+ this.buffer.write(string, this.offset, 'utf-8');
+ this.offset += len;
+ }
+ this.buffer[this.offset++] = 0; // null terminator
+ return this;
+ }
+ addString(string = '') {
+ const len = Buffer.byteLength(string);
+ this.ensure(len);
+ this.buffer.write(string, this.offset);
+ this.offset += len;
+ return this;
+ }
+ add(otherBuffer) {
+ this.ensure(otherBuffer.length);
+ otherBuffer.copy(this.buffer, this.offset);
+ this.offset += otherBuffer.length;
+ return this;
+ }
+ join(code) {
+ if (code) {
+ this.buffer[this.headerPosition] = code;
+ //length is everything in this packet minus the code
+ const length = this.offset - (this.headerPosition + 1);
+ this.buffer.writeInt32BE(length, this.headerPosition + 1);
+ }
+ return this.buffer.slice(code ? 0 : 5, this.offset);
+ }
+ flush(code) {
+ const result = this.join(code);
+ this.offset = 5;
+ this.headerPosition = 0;
+ this.buffer = Buffer.allocUnsafe(this.size);
+ return result;
+ }
+}
+exports.Writer = Writer;
+//# sourceMappingURL=buffer-writer.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/buffer-writer.js.map b/api/node_modules/pg-protocol/dist/buffer-writer.js.map
new file mode 100644
index 000000000..5aa1ba05f
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/buffer-writer.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,CAAA;IACxC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAClD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC7B,4CAA4C;YAC5C,qEAAqE;YACrE,MAAM,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YACjE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,OAAO,CAAC,CAAA;YACzC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,MAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACrC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,MAAM,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MA
AM,CAAC,CAAA;QACrC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,MAAM,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC9B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/inbound-parser.test.d.ts b/api/node_modules/pg-protocol/dist/inbound-parser.test.d.ts
new file mode 100644
index 000000000..cb0ff5c3b
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/inbound-parser.test.d.ts
@@ -0,0 +1 @@
+export {};
diff --git a/api/node_modules/pg-protocol/dist/inbound-parser.test.js b/api/node_modules/pg-protocol/dist/inbound-parser.test.js
new file mode 100644
index 000000000..f64fa7466
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/inbound-parser.test.js
@@ -0,0 +1,530 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const test_buffers_1 = __importDefault(require("./testing/test-buffers"));
+const buffer_list_1 = __importDefault(require("./testing/buffer-list"));
+const _1 = require(".");
+const assert_1 = __importDefault(require("assert"));
+const stream_1 = require("stream");
+const parser_1 = require("./parser");
+const authOkBuffer = test_buffers_1.default.authenticationOk();
+const paramStatusBuffer = test_buffers_1.default.parameterStatus('client_encoding', 'UTF8');
+const readyForQueryBuffer = test_buffers_1.default.readyForQuery();
+const backendKeyDataBuffer = test_buffers_1.default.backendKeyData(1, 2);
+const commandCompleteBuffer = test_buffers_1.default.commandComplete('SELECT 3');
+const parseCompleteBuffer = test_buffers_1.default.parseComplete();
+const bindCompleteBuffer = test_buffers_1.default.bindComplete();
+const portalSuspendedBuffer = test_buffers_1.default.portalSuspended();
+const row1 = {
+ name: 'id',
+ tableID: 1,
+ attributeNumber: 2,
+ dataTypeID: 3,
+ dataTypeSize: 4,
+ typeModifier: 5,
+ formatCode: 0,
+};
+const oneRowDescBuff = test_buffers_1.default.rowDescription([row1]);
+row1.name = 'bang';
+const twoRowBuf = test_buffers_1.default.rowDescription([
+ row1,
+ {
+ name: 'whoah',
+ tableID: 10,
+ attributeNumber: 11,
+ dataTypeID: 12,
+ dataTypeSize: 13,
+ typeModifier: 14,
+ formatCode: 0,
+ },
+]);
+const rowWithBigOids = {
+ name: 'bigoid',
+ tableID: 3000000001,
+ attributeNumber: 2,
+ dataTypeID: 3000000003,
+ dataTypeSize: 4,
+ typeModifier: 5,
+ formatCode: 0,
+};
+const bigOidDescBuff = test_buffers_1.default.rowDescription([rowWithBigOids]);
+const emptyRowFieldBuf = test_buffers_1.default.dataRow([]);
+const oneFieldBuf = test_buffers_1.default.dataRow(['test']);
+const expectedAuthenticationOkayMessage = {
+ name: 'authenticationOk',
+ length: 8,
+};
+const expectedParameterStatusMessage = {
+ name: 'parameterStatus',
+ parameterName: 'client_encoding',
+ parameterValue: 'UTF8',
+ length: 25,
+};
+const expectedBackendKeyDataMessage = {
+ name: 'backendKeyData',
+ processID: 1,
+ secretKey: 2,
+};
+const expectedReadyForQueryMessage = {
+ name: 'readyForQuery',
+ length: 5,
+ status: 'I',
+};
+const expectedCommandCompleteMessage = {
+ name: 'commandComplete',
+ length: 13,
+ text: 'SELECT 3',
+};
+const emptyRowDescriptionBuffer = new buffer_list_1.default()
+ .addInt16(0) // number of fields
+ .join(true, 'T');
+const expectedEmptyRowDescriptionMessage = {
+ name: 'rowDescription',
+ length: 6,
+ fieldCount: 0,
+ fields: [],
+};
+const expectedOneRowMessage = {
+ name: 'rowDescription',
+ length: 27,
+ fieldCount: 1,
+ fields: [
+ {
+ name: 'id',
+ tableID: 1,
+ columnID: 2,
+ dataTypeID: 3,
+ dataTypeSize: 4,
+ dataTypeModifier: 5,
+ format: 'text',
+ },
+ ],
+};
+const expectedTwoRowMessage = {
+ name: 'rowDescription',
+ length: 53,
+ fieldCount: 2,
+ fields: [
+ {
+ name: 'bang',
+ tableID: 1,
+ columnID: 2,
+ dataTypeID: 3,
+ dataTypeSize: 4,
+ dataTypeModifier: 5,
+ format: 'text',
+ },
+ {
+ name: 'whoah',
+ tableID: 10,
+ columnID: 11,
+ dataTypeID: 12,
+ dataTypeSize: 13,
+ dataTypeModifier: 14,
+ format: 'text',
+ },
+ ],
+};
+const expectedBigOidMessage = {
+ name: 'rowDescription',
+ length: 31,
+ fieldCount: 1,
+ fields: [
+ {
+ name: 'bigoid',
+ tableID: 3000000001,
+ columnID: 2,
+ dataTypeID: 3000000003,
+ dataTypeSize: 4,
+ dataTypeModifier: 5,
+ format: 'text',
+ },
+ ],
+};
+const emptyParameterDescriptionBuffer = new buffer_list_1.default()
+ .addInt16(0) // number of parameters
+ .join(true, 't');
+const oneParameterDescBuf = test_buffers_1.default.parameterDescription([1111]);
+const twoParameterDescBuf = test_buffers_1.default.parameterDescription([2222, 3333]);
+const expectedEmptyParameterDescriptionMessage = {
+ name: 'parameterDescription',
+ length: 6,
+ parameterCount: 0,
+ dataTypeIDs: [],
+};
+const expectedOneParameterMessage = {
+ name: 'parameterDescription',
+ length: 10,
+ parameterCount: 1,
+ dataTypeIDs: [1111],
+};
+const expectedTwoParameterMessage = {
+ name: 'parameterDescription',
+ length: 14,
+ parameterCount: 2,
+ dataTypeIDs: [2222, 3333],
+};
+const testForMessage = function (buffer, expectedMessage) {
+ it('receives and parses ' + expectedMessage.name, () => __awaiter(this, void 0, void 0, function* () {
+ const messages = yield parseBuffers([buffer]);
+ const [lastMessage] = messages;
+ for (const key in expectedMessage) {
+ assert_1.default.deepEqual(lastMessage[key], expectedMessage[key]);
+ }
+ }));
+};
+const plainPasswordBuffer = test_buffers_1.default.authenticationCleartextPassword();
+const md5PasswordBuffer = test_buffers_1.default.authenticationMD5Password();
+const SASLBuffer = test_buffers_1.default.authenticationSASL();
+const SASLContinueBuffer = test_buffers_1.default.authenticationSASLContinue();
+const SASLFinalBuffer = test_buffers_1.default.authenticationSASLFinal();
+const expectedPlainPasswordMessage = {
+ name: 'authenticationCleartextPassword',
+};
+const expectedMD5PasswordMessage = {
+ name: 'authenticationMD5Password',
+ salt: Buffer.from([1, 2, 3, 4]),
+};
+const expectedSASLMessage = {
+ name: 'authenticationSASL',
+ mechanisms: ['SCRAM-SHA-256'],
+};
+const expectedSASLContinueMessage = {
+ name: 'authenticationSASLContinue',
+ data: 'data',
+};
+const expectedSASLFinalMessage = {
+ name: 'authenticationSASLFinal',
+ data: 'data',
+};
+const notificationResponseBuffer = test_buffers_1.default.notification(4, 'hi', 'boom');
+const expectedNotificationResponseMessage = {
+ name: 'notification',
+ processId: 4,
+ channel: 'hi',
+ payload: 'boom',
+};
+const parseBuffers = (buffers) => __awaiter(void 0, void 0, void 0, function* () {
+ const stream = new stream_1.PassThrough();
+ for (const buffer of buffers) {
+ stream.write(buffer);
+ }
+ stream.end();
+ const msgs = [];
+ yield (0, _1.parse)(stream, (msg) => msgs.push(msg));
+ return msgs;
+});
+describe('PgPacketStream', function () {
+ testForMessage(authOkBuffer, expectedAuthenticationOkayMessage);
+ testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage);
+ testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage);
+ testForMessage(SASLBuffer, expectedSASLMessage);
+ testForMessage(SASLContinueBuffer, expectedSASLContinueMessage);
+ // this exercises a found bug in the parser:
+ // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
+ // and adds a test which is deterministic, rather than relying on network packet chunking
+ const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]);
+ testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage);
+ testForMessage(SASLFinalBuffer, expectedSASLFinalMessage);
+ // this exercises a found bug in the parser:
+ // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
+ // and adds a test which is deterministic, rather than relying on network packet chunking
+ const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]);
+ testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage);
+ testForMessage(paramStatusBuffer, expectedParameterStatusMessage);
+ testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage);
+ testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage);
+ testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage);
+ testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage);
+ testForMessage(test_buffers_1.default.emptyQuery(), {
+ name: 'emptyQuery',
+ length: 4,
+ });
+ testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), {
+ name: 'noData',
+ });
+ describe('rowDescription messages', function () {
+ testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage);
+ testForMessage(oneRowDescBuff, expectedOneRowMessage);
+ testForMessage(twoRowBuf, expectedTwoRowMessage);
+ testForMessage(bigOidDescBuff, expectedBigOidMessage);
+ });
+ describe('parameterDescription messages', function () {
+ testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage);
+ testForMessage(oneParameterDescBuf, expectedOneParameterMessage);
+ testForMessage(twoParameterDescBuf, expectedTwoParameterMessage);
+ });
+ describe('parsing rows', function () {
+ describe('parsing empty row', function () {
+ testForMessage(emptyRowFieldBuf, {
+ name: 'dataRow',
+ fieldCount: 0,
+ });
+ });
+ describe('parsing data row with fields', function () {
+ testForMessage(oneFieldBuf, {
+ name: 'dataRow',
+ fieldCount: 1,
+ fields: ['test'],
+ });
+ });
+ });
+ describe('notice message', function () {
+ // this uses the same logic as error message
+ const buff = test_buffers_1.default.notice([{ type: 'C', value: 'code' }]);
+ testForMessage(buff, {
+ name: 'notice',
+ code: 'code',
+ });
+ });
+ testForMessage(test_buffers_1.default.error([]), {
+ name: 'error',
+ });
+ describe('with all the fields', function () {
+ const buffer = test_buffers_1.default.error([
+ {
+ type: 'S',
+ value: 'ERROR',
+ },
+ {
+ type: 'C',
+ value: 'code',
+ },
+ {
+ type: 'M',
+ value: 'message',
+ },
+ {
+ type: 'D',
+ value: 'details',
+ },
+ {
+ type: 'H',
+ value: 'hint',
+ },
+ {
+ type: 'P',
+ value: '100',
+ },
+ {
+ type: 'p',
+ value: '101',
+ },
+ {
+ type: 'q',
+ value: 'query',
+ },
+ {
+ type: 'W',
+ value: 'where',
+ },
+ {
+ type: 'F',
+ value: 'file',
+ },
+ {
+ type: 'L',
+ value: 'line',
+ },
+ {
+ type: 'R',
+ value: 'routine',
+ },
+ {
+ type: 'Z',
+ value: 'alsdkf',
+ },
+ ]);
+ testForMessage(buffer, {
+ name: 'error',
+ severity: 'ERROR',
+ code: 'code',
+ message: 'message',
+ detail: 'details',
+ hint: 'hint',
+ position: '100',
+ internalPosition: '101',
+ internalQuery: 'query',
+ where: 'where',
+ file: 'file',
+ line: 'line',
+ routine: 'routine',
+ });
+ });
+ testForMessage(parseCompleteBuffer, {
+ name: 'parseComplete',
+ });
+ testForMessage(bindCompleteBuffer, {
+ name: 'bindComplete',
+ });
+ testForMessage(bindCompleteBuffer, {
+ name: 'bindComplete',
+ });
+ testForMessage(test_buffers_1.default.closeComplete(), {
+ name: 'closeComplete',
+ });
+ describe('parses portal suspended message', function () {
+ testForMessage(portalSuspendedBuffer, {
+ name: 'portalSuspended',
+ });
+ });
+ describe('parses replication start message', function () {
+ testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), {
+ name: 'replicationStart',
+ length: 4,
+ });
+ });
+ describe('copy', () => {
+ testForMessage(test_buffers_1.default.copyIn(0), {
+ name: 'copyInResponse',
+ length: 7,
+ binary: false,
+ columnTypes: [],
+ });
+ testForMessage(test_buffers_1.default.copyIn(2), {
+ name: 'copyInResponse',
+ length: 11,
+ binary: false,
+ columnTypes: [0, 1],
+ });
+ testForMessage(test_buffers_1.default.copyOut(0), {
+ name: 'copyOutResponse',
+ length: 7,
+ binary: false,
+ columnTypes: [],
+ });
+ testForMessage(test_buffers_1.default.copyOut(3), {
+ name: 'copyOutResponse',
+ length: 13,
+ binary: false,
+ columnTypes: [0, 1, 2],
+ });
+ testForMessage(test_buffers_1.default.copyDone(), {
+ name: 'copyDone',
+ length: 4,
+ });
+ testForMessage(test_buffers_1.default.copyData(Buffer.from([5, 6, 7])), {
+ name: 'copyData',
+ length: 7,
+ chunk: Buffer.from([5, 6, 7]),
+ });
+ });
+ // since the data message on a stream can randomly divide the incomming
+ // tcp packets anywhere, we need to make sure we can parse every single
+ // split on a tcp message
+ describe('split buffer, single message parsing', function () {
+ const fullBuffer = test_buffers_1.default.dataRow([null, 'bang', 'zug zug', null, '!']);
+ it('parses when full buffer comes in', function () {
+ return __awaiter(this, void 0, void 0, function* () {
+ const messages = yield parseBuffers([fullBuffer]);
+ const message = messages[0];
+ assert_1.default.equal(message.fields.length, 5);
+ assert_1.default.equal(message.fields[0], null);
+ assert_1.default.equal(message.fields[1], 'bang');
+ assert_1.default.equal(message.fields[2], 'zug zug');
+ assert_1.default.equal(message.fields[3], null);
+ assert_1.default.equal(message.fields[4], '!');
+ });
+ });
+ const testMessageReceivedAfterSplitAt = function (split) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const firstBuffer = Buffer.alloc(fullBuffer.length - split);
+ const secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length);
+ fullBuffer.copy(firstBuffer, 0, 0);
+ fullBuffer.copy(secondBuffer, 0, firstBuffer.length);
+ const messages = yield parseBuffers([firstBuffer, secondBuffer]);
+ const message = messages[0];
+ assert_1.default.equal(message.fields.length, 5);
+ assert_1.default.equal(message.fields[0], null);
+ assert_1.default.equal(message.fields[1], 'bang');
+ assert_1.default.equal(message.fields[2], 'zug zug');
+ assert_1.default.equal(message.fields[3], null);
+ assert_1.default.equal(message.fields[4], '!');
+ });
+ };
+ it('parses when split in the middle', function () {
+ return testMessageReceivedAfterSplitAt(6);
+ });
+ it('parses when split at end', function () {
+ return testMessageReceivedAfterSplitAt(2);
+ });
+ it('parses when split at beginning', function () {
+ return Promise.all([
+ testMessageReceivedAfterSplitAt(fullBuffer.length - 2),
+ testMessageReceivedAfterSplitAt(fullBuffer.length - 1),
+ testMessageReceivedAfterSplitAt(fullBuffer.length - 5),
+ ]);
+ });
+ });
+ describe('split buffer, multiple message parsing', function () {
+ const dataRowBuffer = test_buffers_1.default.dataRow(['!']);
+ const readyForQueryBuffer = test_buffers_1.default.readyForQuery();
+ const fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length);
+ dataRowBuffer.copy(fullBuffer, 0, 0);
+ readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0);
+ const verifyMessages = function (messages) {
+ assert_1.default.strictEqual(messages.length, 2);
+ assert_1.default.deepEqual(messages[0], {
+ name: 'dataRow',
+ fieldCount: 1,
+ length: 11,
+ fields: ['!'],
+ });
+ assert_1.default.equal(messages[0].fields[0], '!');
+ assert_1.default.deepEqual(messages[1], {
+ name: 'readyForQuery',
+ length: 5,
+ status: 'I',
+ });
+ };
+ // sanity check
+ it('receives both messages when packet is not split', function () {
+ return __awaiter(this, void 0, void 0, function* () {
+ const messages = yield parseBuffers([fullBuffer]);
+ verifyMessages(messages);
+ });
+ });
+ const splitAndVerifyTwoMessages = function (split) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const firstBuffer = Buffer.alloc(fullBuffer.length - split);
+ const secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length);
+ fullBuffer.copy(firstBuffer, 0, 0);
+ fullBuffer.copy(secondBuffer, 0, firstBuffer.length);
+ const messages = yield parseBuffers([firstBuffer, secondBuffer]);
+ verifyMessages(messages);
+ });
+ };
+ describe('receives both messages when packet is split', function () {
+ it('in the middle', function () {
+ return splitAndVerifyTwoMessages(11);
+ });
+ it('at the front', function () {
+ return Promise.all([
+ splitAndVerifyTwoMessages(fullBuffer.length - 1),
+ splitAndVerifyTwoMessages(fullBuffer.length - 4),
+ splitAndVerifyTwoMessages(fullBuffer.length - 6),
+ ]);
+ });
+ it('at the end', function () {
+ return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]);
+ });
+ });
+ });
+ it('cleans up the reader after handling a packet', function () {
+ const parser = new parser_1.Parser();
+ parser.parse(oneFieldBuf, () => { });
+ assert_1.default.strictEqual(parser.reader.buffer.byteLength, 0);
+ });
+});
+//# sourceMappingURL=inbound-parser.test.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/inbound-parser.test.js.map b/api/node_modules/pg-protocol/dist/inbound-parser.test.js.map
new file mode 100644
index 000000000..aaa00e64b
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/inbound-parser.test.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"inbound-parser.test.js","sourceRoot":"","sources":["../src/inbound-parser.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,0EAA4C;AAC5C,wEAA8C;AAC9C,wBAAyB;AACzB,oDAA2B;AAC3B,mCAAoC;AAEpC,qCAAiC;AAEjC,MAAM,YAAY,GAAG,sBAAO,CAAC,gBAAgB,EAAE,CAAA;AAC/C,MAAM,iBAAiB,GAAG,sBAAO,CAAC,eAAe,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;AAC5E,MAAM,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACnD,MAAM,oBAAoB,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;AACzD,MAAM,qBAAqB,GAAG,sBAAO,CAAC,eAAe,CAAC,UAAU,CAAC,CAAA;AACjE,MAAM,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACnD,MAAM,kBAAkB,GAAG,sBAAO,CAAC,YAAY,EAAE,CAAA;AACjD,MAAM,qBAAqB,GAAG,sBAAO,CAAC,eAAe,EAAE,CAAA;AAEvD,MAAM,IAAI,GAAG;IACX,IAAI,EAAE,IAAI;IACV,OAAO,EAAE,CAAC;IACV,eAAe,EAAE,CAAC;IAClB,UAAU,EAAE,CAAC;IACb,YAAY,EAAE,CAAC;IACf,YAAY,EAAE,CAAC;IACf,UAAU,EAAE,CAAC;CACd,CAAA;AACD,MAAM,cAAc,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AACrD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAA;AAElB,MAAM,SAAS,GAAG,sBAAO,CAAC,cAAc,CAAC;IACvC,IAAI;IACJ;QACE,IAAI,EAAE,OAAO;QACb,OAAO,EAAE,EAAE;QACX,eAAe,EAAE,EAAE;QACnB,UAAU,EAAE,EAAE;QACd,YAAY,EAAE,EAAE;QAChB,YAAY,EAAE,EAAE;QAChB,UAAU,EAAE,CAAC;KACd;CACF,CAAC,CAAA;AAEF,MAAM,cAAc,GAAG;IACrB,IAAI,EAAE,QAAQ;IACd,OAAO,EAAE,UAAU;IACnB,eAAe,EAAE,CAAC;IAClB,UAAU,EAAE,UAAU;IACtB,YAAY,EAAE,CAAC;IACf,YAAY,EAAE,CAAC;IACf,UAAU,EAAE,CAAC;CACd,CAAA;AACD,MAAM,cAAc,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,cAAc,CAAC,CAAC,CAAA;AAE/D,MAAM,gBAAgB,GAAG,sBAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;AAE5C,MAAM,WAAW,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;AAE7C,MAAM,iCAAiC,GAAG;IACxC,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAED,MAAM,8BAA8B,GAAG;IACrC,IAAI,EAAE,iBAAiB;IACvB,aAAa,EAAE,iBAAiB;IAChC,cAAc,EAAE,MAAM;IACtB,MAAM,EAAE,EAAE;CACX,CAAA;AAED,MAAM,6BAA6B,GAAG;IACpC,IAAI,EAAE,gBAAgB;IACtB,SAAS,EAAE,CAAC;IACZ,SAAS,EAAE,CAAC;CACb,CAAA;AAED,MAAM,4BAA4B,GAAG;IACnC,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;IACT,MAAM,EAAE,GAAG;CACZ,CAAA;AAED,MAAM,8BAA8B,GAAG;IACrC,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,EAAE;IACV,IAAI,EAAE,UAAU;CA
CjB,CAAA;AACD,MAAM,yBAAyB,GAAG,IAAI,qBAAU,EAAE;KAC/C,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,MAAM,kCAAkC,GAAG;IACzC,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,CAAC;IACT,UAAU,EAAE,CAAC;IACb,MAAM,EAAE,EAAE;CACX,CAAA;AACD,MAAM,qBAAqB,GAAG;IAC5B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,IAAI;YACV,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,MAAM,qBAAqB,GAAG;IAC5B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;QACD;YACE,IAAI,EAAE,OAAO;YACb,OAAO,EAAE,EAAE;YACX,QAAQ,EAAE,EAAE;YACZ,UAAU,EAAE,EAAE;YACd,YAAY,EAAE,EAAE;YAChB,gBAAgB,EAAE,EAAE;YACpB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AACD,MAAM,qBAAqB,GAAG;IAC5B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,QAAQ;YACd,OAAO,EAAE,UAAU;YACnB,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,UAAU;YACtB,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,MAAM,+BAA+B,GAAG,IAAI,qBAAU,EAAE;KACrD,QAAQ,CAAC,CAAC,CAAC,CAAC,uBAAuB;KACnC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,MAAM,mBAAmB,GAAG,sBAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AAEhE,MAAM,mBAAmB,GAAG,sBAAO,CAAC,oBAAoB,CAAC,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAEtE,MAAM,wCAAwC,GAAG;IAC/C,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,CAAC;IACT,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,EAAE;CAChB,CAAA;AAED,MAAM,2BAA2B,GAAG;IAClC,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,EAAE;IACV,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,CAAC,IAAI,CAAC;CACpB,CAAA;AAED,MAAM,2BAA2B,GAAG;IAClC,IAAI,EAAE,sBAAsB;IAC5B,MAAM,EAAE,EAAE;IACV,cAAc,EAAE,CAAC;IACjB,WAAW,EAAE,CAAC,IAAI,EAAE,IAAI,CAAC;CAC1B,CAAA;AAED,MAAM,cAAc,GAAG,UAAU,MAAc,EAAE,eAAoB;IACnE,EAAE,CAAC,sBAAsB,GAAG,eAAe,CAAC,IAAI,EAAE,GAAS,EAAE;QAC3D,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,
MAAM,CAAC,CAAC,CAAA;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAA;QAE9B,KAAK,MAAM,GAAG,IAAI,eAAe,EAAE;YACjC,gBAAM,CAAC,SAAS,CAAE,WAAmB,CAAC,GAAG,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,CAAA;SAClE;IACH,CAAC,CAAA,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,sBAAO,CAAC,+BAA+B,EAAE,CAAA;AACrE,MAAM,iBAAiB,GAAG,sBAAO,CAAC,yBAAyB,EAAE,CAAA;AAC7D,MAAM,UAAU,GAAG,sBAAO,CAAC,kBAAkB,EAAE,CAAA;AAC/C,MAAM,kBAAkB,GAAG,sBAAO,CAAC,0BAA0B,EAAE,CAAA;AAC/D,MAAM,eAAe,GAAG,sBAAO,CAAC,uBAAuB,EAAE,CAAA;AAEzD,MAAM,4BAA4B,GAAG;IACnC,IAAI,EAAE,iCAAiC;CACxC,CAAA;AAED,MAAM,0BAA0B,GAAG;IACjC,IAAI,EAAE,2BAA2B;IACjC,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;CAChC,CAAA;AAED,MAAM,mBAAmB,GAAG;IAC1B,IAAI,EAAE,oBAAoB;IAC1B,UAAU,EAAE,CAAC,eAAe,CAAC;CAC9B,CAAA;AAED,MAAM,2BAA2B,GAAG;IAClC,IAAI,EAAE,4BAA4B;IAClC,IAAI,EAAE,MAAM;CACb,CAAA;AAED,MAAM,wBAAwB,GAAG;IAC/B,IAAI,EAAE,yBAAyB;IAC/B,IAAI,EAAE,MAAM;CACb,CAAA;AAED,MAAM,0BAA0B,GAAG,sBAAO,CAAC,YAAY,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;AACxE,MAAM,mCAAmC,GAAG;IAC1C,IAAI,EAAE,cAAc;IACpB,SAAS,EAAE,CAAC;IACZ,OAAO,EAAE,IAAI;IACb,OAAO,EAAE,MAAM;CAChB,CAAA;AAED,MAAM,YAAY,GAAG,CAAO,OAAiB,EAA6B,EAAE;IAC1E,MAAM,MAAM,GAAG,IAAI,oBAAW,EAAE,CAAA;IAChC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;QAC5B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;KACrB;IACD,MAAM,CAAC,GAAG,EAAE,CAAA;IACZ,MAAM,IAAI,GAAqB,EAAE,CAAA;IACjC,MAAM,IAAA,QAAK,EAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;IAC5C,OAAO,IAAI,CAAA;AACb,CAAC,CAAA,CAAA;AAED,QAAQ,CAAC,gBAAgB,EAAE;IACzB,cAAc,CAAC,YAAY,EAAE,iCAAiC,CAAC,CAAA;IAC/D,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,iBAAiB,EAAE,0BAA0B,CAAC,CAAA;IAC7D,cAAc,CAAC,UAAU,EAAE,mBAAmB,CAAC,CAAA;IAC/C,cAAc,CAAC,kBAAkB,EAAE,2BAA2B,CAAC,CAAA;IAE/D,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,0BAA0B,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,kBAAkB,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACjG,cAAc,CAAC,0BAA0B,EAAE,2BAA2B,CAAC,CAAA;IAEvE,cAAc,CAAC,eAAe,EAAE,wBAAwB,CAAC,CAAA;IA
EzD,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,uBAAuB,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,eAAe,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC3F,cAAc,CAAC,uBAAuB,EAAE,wBAAwB,CAAC,CAAA;IAEjE,cAAc,CAAC,iBAAiB,EAAE,8BAA8B,CAAC,CAAA;IACjE,cAAc,CAAC,oBAAoB,EAAE,6BAA6B,CAAC,CAAA;IACnE,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,qBAAqB,EAAE,8BAA8B,CAAC,CAAA;IACrE,cAAc,CAAC,0BAA0B,EAAE,mCAAmC,CAAC,CAAA;IAC/E,cAAc,CAAC,sBAAO,CAAC,UAAU,EAAE,EAAE;QACnC,IAAI,EAAE,YAAY;QAClB,MAAM,EAAE,CAAC;KACV,CAAC,CAAA;IAEF,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QAC9C,IAAI,EAAE,QAAQ;KACf,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,cAAc,CAAC,yBAAyB,EAAE,kCAAkC,CAAC,CAAA;QAC7E,cAAc,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAA;QACrD,cAAc,CAAC,SAAS,EAAE,qBAAqB,CAAC,CAAA;QAChD,cAAc,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAA;IACvD,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,+BAA+B,EAAE;QACxC,cAAc,CAAC,+BAA+B,EAAE,wCAAwC,CAAC,CAAA;QACzF,cAAc,CAAC,mBAAmB,EAAE,2BAA2B,CAAC,CAAA;QAChE,cAAc,CAAC,mBAAmB,EAAE,2BAA2B,CAAC,CAAA;IAClE,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,cAAc,EAAE;QACvB,QAAQ,CAAC,mBAAmB,EAAE;YAC5B,cAAc,CAAC,gBAAgB,EAAE;gBAC/B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;aACd,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QAEF,QAAQ,CAAC,8BAA8B,EAAE;YACvC,cAAc,CAAC,WAAW,EAAE;gBAC1B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,CAAC,MAAM,CAAC;aACjB,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,gBAAgB,EAAE;QACzB,4CAA4C;QAC5C,MAAM,IAAI,GAAG,sBAAO,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,CAAA;QAC3D,cAAc,CAAC,IAAI,EAAE;YACnB,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,MAAM;SACb,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;QAChC,IAAI,EAAE,OAAO;KACd,CAAC,CAAA;IAEF,QAAQ,CAAC,qBAAqB,EAAE;QAC9B,MAAM,MAAM,GAAG,sBAAO,CAAC,KAAK,CAAC;YAC3B;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;
gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,QAAQ;aAChB;SACF,CAAC,CAAA;QAEF,cAAc,CAAC,MAAM,EAAE;YACrB,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,OAAO;YACjB,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;YAClB,MAAM,EAAE,SAAS;YACjB,IAAI,EAAE,MAAM;YACZ,QAAQ,EAAE,KAAK;YACf,gBAAgB,EAAE,KAAK;YACvB,aAAa,EAAE,OAAO;YACtB,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;SACnB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,mBAAmB,EAAE;QAClC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,aAAa,EAAE,EAAE;QACtC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,QAAQ,CAAC,iCAAiC,EAAE;QAC1C,cAAc,CAAC,qBAAqB,EAAE;YACpC,IAAI,EAAE,iBAAiB;SACxB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,kCAAkC,EAAE;QAC3C,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE;YAC1D,IAAI,EAAE,kBAAkB;YACxB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE;QACpB,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC;SACpB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CA
AC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;SACvB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,EAAE,EAAE;YACjC,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;YACvD,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;YACT,KAAK,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC9B,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,uEAAuE;IACvE,uEAAuE;IACvE,yBAAyB;IACzB,QAAQ,CAAC,sCAAsC,EAAE;QAC/C,MAAM,UAAU,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAExE,EAAE,CAAC,kCAAkC,EAAE;;gBACrC,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAC,CAAA;QAEF,MAAM,+BAA+B,GAAG,UAAgB,KAAa;;gBACnE,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBAC3D,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACzE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,WAAW,EAAE,YAAY,CAAC,CAAC,CAAA;gBAChE,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KA
AK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAA;QAED,EAAE,CAAC,iCAAiC,EAAE;YACpC,OAAO,+BAA+B,CAAC,CAAC,CAAC,CAAA;QAC3C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,OAAO,+BAA+B,CAAC,CAAC,CAAC,CAAA;QAC3C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,gCAAgC,EAAE;YACnC,OAAO,OAAO,CAAC,GAAG,CAAC;gBACjB,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;gBACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;gBACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;aACvD,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wCAAwC,EAAE;QACjD,MAAM,aAAa,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;QAC5C,MAAM,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;QACnD,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,MAAM,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QAClF,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QACpC,mBAAmB,CAAC,IAAI,CAAC,UAAU,EAAE,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;QAE7D,MAAM,cAAc,GAAG,UAAU,QAAe;YAC9C,gBAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;YACtC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,EAAE;gBACV,MAAM,EAAE,CAAC,GAAG,CAAC;aACd,CAAC,CAAA;YACF,gBAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACxC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,eAAe;gBACrB,MAAM,EAAE,CAAC;gBACT,MAAM,EAAE,GAAG;aACZ,CAAC,CAAA;QACJ,CAAC,CAAA;QACD,eAAe;QACf,EAAE,CAAC,iDAAiD,EAAE;;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAC,CAAA;QAEF,MAAM,yBAAyB,GAAG,UAAgB,KAAa;;gBAC7D,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBAC3D,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACzE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC
,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,WAAW,EAAE,YAAY,CAAC,CAAC,CAAA;gBAChE,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAA;QAED,QAAQ,CAAC,6CAA6C,EAAE;YACtD,EAAE,CAAC,eAAe,EAAE;gBAClB,OAAO,yBAAyB,CAAC,EAAE,CAAC,CAAA;YACtC,CAAC,CAAC,CAAA;YACF,EAAE,CAAC,cAAc,EAAE;gBACjB,OAAO,OAAO,CAAC,GAAG,CAAC;oBACjB,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;iBACjD,CAAC,CAAA;YACJ,CAAC,CAAC,CAAA;YAEF,EAAE,CAAC,YAAY,EAAE;gBACf,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAC,CAAC,EAAE,yBAAyB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;YAClF,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,8CAA8C,EAAE;QACjD,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;QAC3B,MAAM,CAAC,KAAK,CAAC,WAAW,EAAE,GAAG,EAAE,GAAE,CAAC,CAAC,CAAA;QACnC,gBAAM,CAAC,WAAW,CAAE,MAAc,CAAC,MAAM,CAAC,MAAM,CAAC,UAAU,EAAE,CAAC,CAAC,CAAA;IACjE,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/index.d.ts b/api/node_modules/pg-protocol/dist/index.d.ts
new file mode 100644
index 000000000..3961def0f
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/index.d.ts
@@ -0,0 +1,6 @@
+///
+import { DatabaseError } from './messages';
+import { serialize } from './serializer';
+import { MessageCallback } from './parser';
+export declare function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise;
+export { serialize, DatabaseError };
diff --git a/api/node_modules/pg-protocol/dist/index.js b/api/node_modules/pg-protocol/dist/index.js
new file mode 100644
index 000000000..7eca3bf1d
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/index.js
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DatabaseError = exports.serialize = exports.parse = void 0;
+const messages_1 = require("./messages");
+Object.defineProperty(exports, "DatabaseError", { enumerable: true, get: function () { return messages_1.DatabaseError; } });
+const serializer_1 = require("./serializer");
+Object.defineProperty(exports, "serialize", { enumerable: true, get: function () { return serializer_1.serialize; } });
+const parser_1 = require("./parser");
+function parse(stream, callback) {
+ const parser = new parser_1.Parser();
+ stream.on('data', (buffer) => parser.parse(buffer, callback));
+ return new Promise((resolve) => stream.on('end', () => resolve()));
+}
+exports.parse = parse;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/index.js.map b/api/node_modules/pg-protocol/dist/index.js.map
new file mode 100644
index 000000000..db9423fd9
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;AAAA,yCAA0C;AAUtB,8FAVX,wBAAa,OAUW;AATjC,6CAAwC;AAS/B,0FATA,sBAAS,OASA;AARlB,qCAAkD;AAElD,SAAgB,KAAK,CAAC,MAA6B,EAAE,QAAyB;IAC5E,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,MAAc,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;IACrE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AACpE,CAAC;AAJD,sBAIC"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/messages.d.ts b/api/node_modules/pg-protocol/dist/messages.d.ts
new file mode 100644
index 000000000..f8f2e63d6
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/messages.d.ts
@@ -0,0 +1,162 @@
+///
+export declare type Mode = 'text' | 'binary';
+export declare type MessageName = 'parseComplete' | 'bindComplete' | 'closeComplete' | 'noData' | 'portalSuspended' | 'replicationStart' | 'emptyQuery' | 'copyDone' | 'copyData' | 'rowDescription' | 'parameterDescription' | 'parameterStatus' | 'backendKeyData' | 'notification' | 'readyForQuery' | 'commandComplete' | 'dataRow' | 'copyInResponse' | 'copyOutResponse' | 'authenticationOk' | 'authenticationMD5Password' | 'authenticationCleartextPassword' | 'authenticationSASL' | 'authenticationSASLContinue' | 'authenticationSASLFinal' | 'error' | 'notice';
+export interface BackendMessage {
+ name: MessageName;
+ length: number;
+}
+export declare const parseComplete: BackendMessage;
+export declare const bindComplete: BackendMessage;
+export declare const closeComplete: BackendMessage;
+export declare const noData: BackendMessage;
+export declare const portalSuspended: BackendMessage;
+export declare const replicationStart: BackendMessage;
+export declare const emptyQuery: BackendMessage;
+export declare const copyDone: BackendMessage;
+interface NoticeOrError {
+ message: string | undefined;
+ severity: string | undefined;
+ code: string | undefined;
+ detail: string | undefined;
+ hint: string | undefined;
+ position: string | undefined;
+ internalPosition: string | undefined;
+ internalQuery: string | undefined;
+ where: string | undefined;
+ schema: string | undefined;
+ table: string | undefined;
+ column: string | undefined;
+ dataType: string | undefined;
+ constraint: string | undefined;
+ file: string | undefined;
+ line: string | undefined;
+ routine: string | undefined;
+}
+export declare class DatabaseError extends Error implements NoticeOrError {
+ readonly length: number;
+ readonly name: MessageName;
+ severity: string | undefined;
+ code: string | undefined;
+ detail: string | undefined;
+ hint: string | undefined;
+ position: string | undefined;
+ internalPosition: string | undefined;
+ internalQuery: string | undefined;
+ where: string | undefined;
+ schema: string | undefined;
+ table: string | undefined;
+ column: string | undefined;
+ dataType: string | undefined;
+ constraint: string | undefined;
+ file: string | undefined;
+ line: string | undefined;
+ routine: string | undefined;
+ constructor(message: string, length: number, name: MessageName);
+}
+export declare class CopyDataMessage {
+ readonly length: number;
+ readonly chunk: Buffer;
+ readonly name = "copyData";
+ constructor(length: number, chunk: Buffer);
+}
+export declare class CopyResponse {
+ readonly length: number;
+ readonly name: MessageName;
+ readonly binary: boolean;
+ readonly columnTypes: number[];
+ constructor(length: number, name: MessageName, binary: boolean, columnCount: number);
+}
+export declare class Field {
+ readonly name: string;
+ readonly tableID: number;
+ readonly columnID: number;
+ readonly dataTypeID: number;
+ readonly dataTypeSize: number;
+ readonly dataTypeModifier: number;
+ readonly format: Mode;
+ constructor(name: string, tableID: number, columnID: number, dataTypeID: number, dataTypeSize: number, dataTypeModifier: number, format: Mode);
+}
+export declare class RowDescriptionMessage {
+ readonly length: number;
+ readonly fieldCount: number;
+ readonly name: MessageName;
+ readonly fields: Field[];
+ constructor(length: number, fieldCount: number);
+}
+export declare class ParameterDescriptionMessage {
+ readonly length: number;
+ readonly parameterCount: number;
+ readonly name: MessageName;
+ readonly dataTypeIDs: number[];
+ constructor(length: number, parameterCount: number);
+}
+export declare class ParameterStatusMessage {
+ readonly length: number;
+ readonly parameterName: string;
+ readonly parameterValue: string;
+ readonly name: MessageName;
+ constructor(length: number, parameterName: string, parameterValue: string);
+}
+export declare class AuthenticationMD5Password implements BackendMessage {
+ readonly length: number;
+ readonly salt: Buffer;
+ readonly name: MessageName;
+ constructor(length: number, salt: Buffer);
+}
+export declare class BackendKeyDataMessage {
+ readonly length: number;
+ readonly processID: number;
+ readonly secretKey: number;
+ readonly name: MessageName;
+ constructor(length: number, processID: number, secretKey: number);
+}
+export declare class NotificationResponseMessage {
+ readonly length: number;
+ readonly processId: number;
+ readonly channel: string;
+ readonly payload: string;
+ readonly name: MessageName;
+ constructor(length: number, processId: number, channel: string, payload: string);
+}
+export declare class ReadyForQueryMessage {
+ readonly length: number;
+ readonly status: string;
+ readonly name: MessageName;
+ constructor(length: number, status: string);
+}
+export declare class CommandCompleteMessage {
+ readonly length: number;
+ readonly text: string;
+ readonly name: MessageName;
+ constructor(length: number, text: string);
+}
+export declare class DataRowMessage {
+ length: number;
+ fields: any[];
+ readonly fieldCount: number;
+ readonly name: MessageName;
+ constructor(length: number, fields: any[]);
+}
+export declare class NoticeMessage implements BackendMessage, NoticeOrError {
+ readonly length: number;
+ readonly message: string | undefined;
+ constructor(length: number, message: string | undefined);
+ readonly name = "notice";
+ severity: string | undefined;
+ code: string | undefined;
+ detail: string | undefined;
+ hint: string | undefined;
+ position: string | undefined;
+ internalPosition: string | undefined;
+ internalQuery: string | undefined;
+ where: string | undefined;
+ schema: string | undefined;
+ table: string | undefined;
+ column: string | undefined;
+ dataType: string | undefined;
+ constraint: string | undefined;
+ file: string | undefined;
+ line: string | undefined;
+ routine: string | undefined;
+}
+export {};
diff --git a/api/node_modules/pg-protocol/dist/messages.js b/api/node_modules/pg-protocol/dist/messages.js
new file mode 100644
index 000000000..b9f2c4482
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/messages.js
@@ -0,0 +1,160 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.NoticeMessage = exports.DataRowMessage = exports.CommandCompleteMessage = exports.ReadyForQueryMessage = exports.NotificationResponseMessage = exports.BackendKeyDataMessage = exports.AuthenticationMD5Password = exports.ParameterStatusMessage = exports.ParameterDescriptionMessage = exports.RowDescriptionMessage = exports.Field = exports.CopyResponse = exports.CopyDataMessage = exports.DatabaseError = exports.copyDone = exports.emptyQuery = exports.replicationStart = exports.portalSuspended = exports.noData = exports.closeComplete = exports.bindComplete = exports.parseComplete = void 0;
+exports.parseComplete = {
+ name: 'parseComplete',
+ length: 5,
+};
+exports.bindComplete = {
+ name: 'bindComplete',
+ length: 5,
+};
+exports.closeComplete = {
+ name: 'closeComplete',
+ length: 5,
+};
+exports.noData = {
+ name: 'noData',
+ length: 5,
+};
+exports.portalSuspended = {
+ name: 'portalSuspended',
+ length: 5,
+};
+exports.replicationStart = {
+ name: 'replicationStart',
+ length: 4,
+};
+exports.emptyQuery = {
+ name: 'emptyQuery',
+ length: 4,
+};
+exports.copyDone = {
+ name: 'copyDone',
+ length: 4,
+};
+class DatabaseError extends Error {
+ constructor(message, length, name) {
+ super(message);
+ this.length = length;
+ this.name = name;
+ }
+}
+exports.DatabaseError = DatabaseError;
+class CopyDataMessage {
+ constructor(length, chunk) {
+ this.length = length;
+ this.chunk = chunk;
+ this.name = 'copyData';
+ }
+}
+exports.CopyDataMessage = CopyDataMessage;
+class CopyResponse {
+ constructor(length, name, binary, columnCount) {
+ this.length = length;
+ this.name = name;
+ this.binary = binary;
+ this.columnTypes = new Array(columnCount);
+ }
+}
+exports.CopyResponse = CopyResponse;
+class Field {
+ constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) {
+ this.name = name;
+ this.tableID = tableID;
+ this.columnID = columnID;
+ this.dataTypeID = dataTypeID;
+ this.dataTypeSize = dataTypeSize;
+ this.dataTypeModifier = dataTypeModifier;
+ this.format = format;
+ }
+}
+exports.Field = Field;
+class RowDescriptionMessage {
+ constructor(length, fieldCount) {
+ this.length = length;
+ this.fieldCount = fieldCount;
+ this.name = 'rowDescription';
+ this.fields = new Array(this.fieldCount);
+ }
+}
+exports.RowDescriptionMessage = RowDescriptionMessage;
+class ParameterDescriptionMessage {
+ constructor(length, parameterCount) {
+ this.length = length;
+ this.parameterCount = parameterCount;
+ this.name = 'parameterDescription';
+ this.dataTypeIDs = new Array(this.parameterCount);
+ }
+}
+exports.ParameterDescriptionMessage = ParameterDescriptionMessage;
+class ParameterStatusMessage {
+ constructor(length, parameterName, parameterValue) {
+ this.length = length;
+ this.parameterName = parameterName;
+ this.parameterValue = parameterValue;
+ this.name = 'parameterStatus';
+ }
+}
+exports.ParameterStatusMessage = ParameterStatusMessage;
+class AuthenticationMD5Password {
+ constructor(length, salt) {
+ this.length = length;
+ this.salt = salt;
+ this.name = 'authenticationMD5Password';
+ }
+}
+exports.AuthenticationMD5Password = AuthenticationMD5Password;
+class BackendKeyDataMessage {
+ constructor(length, processID, secretKey) {
+ this.length = length;
+ this.processID = processID;
+ this.secretKey = secretKey;
+ this.name = 'backendKeyData';
+ }
+}
+exports.BackendKeyDataMessage = BackendKeyDataMessage;
+class NotificationResponseMessage {
+ constructor(length, processId, channel, payload) {
+ this.length = length;
+ this.processId = processId;
+ this.channel = channel;
+ this.payload = payload;
+ this.name = 'notification';
+ }
+}
+exports.NotificationResponseMessage = NotificationResponseMessage;
+class ReadyForQueryMessage {
+ constructor(length, status) {
+ this.length = length;
+ this.status = status;
+ this.name = 'readyForQuery';
+ }
+}
+exports.ReadyForQueryMessage = ReadyForQueryMessage;
+class CommandCompleteMessage {
+ constructor(length, text) {
+ this.length = length;
+ this.text = text;
+ this.name = 'commandComplete';
+ }
+}
+exports.CommandCompleteMessage = CommandCompleteMessage;
+class DataRowMessage {
+ constructor(length, fields) {
+ this.length = length;
+ this.fields = fields;
+ this.name = 'dataRow';
+ this.fieldCount = fields.length;
+ }
+}
+exports.DataRowMessage = DataRowMessage;
+class NoticeMessage {
+ constructor(length, message) {
+ this.length = length;
+ this.message = message;
+ this.name = 'notice';
+ }
+}
+exports.NoticeMessage = NoticeMessage;
+//# sourceMappingURL=messages.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/messages.js.map b/api/node_modules/pg-protocol/dist/messages.js.map
new file mode 100644
index 000000000..9cf2581b1
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/messages.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"messages.js","sourceRoot":"","sources":["../src/messages.ts"],"names":[],"mappings":";;;AAoCa,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,YAAY,GAAmB;IAC1C,IAAI,EAAE,cAAc;IACpB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,aAAa,GAAmB;IAC3C,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,MAAM,GAAmB;IACpC,IAAI,EAAE,QAAQ;IACd,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,eAAe,GAAmB;IAC7C,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,gBAAgB,GAAmB;IAC9C,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,UAAU,GAAmB;IACxC,IAAI,EAAE,YAAY;IAClB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,QAAQ,GAAmB;IACtC,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,CAAC;CACV,CAAA;AAsBD,MAAa,aAAc,SAAQ,KAAK;IAiBtC,YACE,OAAe,EACC,MAAc,EACd,IAAiB;QAEjC,KAAK,CAAC,OAAO,CAAC,CAAA;QAHE,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;IAGnC,CAAC;CACF;AAxBD,sCAwBC;AAED,MAAa,eAAe;IAE1B,YACkB,MAAc,EACd,KAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,UAAK,GAAL,KAAK,CAAQ;QAHf,SAAI,GAAG,UAAU,CAAA;IAI9B,CAAC;CACL;AAND,0CAMC;AAED,MAAa,YAAY;IAEvB,YACkB,MAAc,EACd,IAAiB,EACjB,MAAe,EAC/B,WAAmB;QAHH,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;QACjB,WAAM,GAAN,MAAM,CAAS;QAG/B,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAA;IAC3C,CAAC;CACF;AAVD,oCAUC;AAED,MAAa,KAAK;IAChB,YACkB,IAAY,EACZ,OAAe,EACf,QAAgB,EAChB,UAAkB,EAClB,YAAoB,EACpB,gBAAwB,EACxB,MAAY;QANZ,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,aAAQ,GAAR,QAAQ,CAAQ;QAChB,eAAU,GAAV,UAAU,CAAQ;QAClB,iBAAY,GAAZ,YAAY,CAAQ;QACpB,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAM;IAC3B,CAAC;CACL;AAVD,sBAUC;AAED,MAAa,qBAAqB;IAGhC,YACkB,MAAc,EACd,UAAkB;QADlB,WAAM,GAAN,MAAM,CAAQ;QACd,eAAU,GAAV,UAAU,CAAQ;QAJpB,SAAI,GAAgB,gBAAgB,CAAA;QAMlD,IAAI,CAAC,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC1C,CAAC;CACF;AATD,sDASC;AAED,MAAa,2BAA2B;IAGtC,YACkB,MAAc,EACd,cAAsB;QADtB,WAAM,GAAN,MAAM,CAAQ;QACd,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,sBAAsB,CAAA;QAMxD,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,CAAA;IACnD,CAAC;CACF;AATD,kEASC;AAED,MA
Aa,sBAAsB;IAEjC,YACkB,MAAc,EACd,aAAqB,EACrB,cAAsB;QAFtB,WAAM,GAAN,MAAM,CAAQ;QACd,kBAAa,GAAb,aAAa,CAAQ;QACrB,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,GAAgB,iBAAiB,CAAA;IAKlD,CAAC;CACL;AAPD,wDAOC;AAED,MAAa,yBAAyB;IAEpC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,2BAA2B,CAAA;IAI5D,CAAC;CACL;AAND,8DAMC;AAED,MAAa,qBAAqB;IAEhC,YACkB,MAAc,EACd,SAAiB,EACjB,SAAiB;QAFjB,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,cAAS,GAAT,SAAS,CAAQ;QAJnB,SAAI,GAAgB,gBAAgB,CAAA;IAKjD,CAAC;CACL;AAPD,sDAOC;AAED,MAAa,2BAA2B;IAEtC,YACkB,MAAc,EACd,SAAiB,EACjB,OAAe,EACf,OAAe;QAHf,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QALjB,SAAI,GAAgB,cAAc,CAAA;IAM/C,CAAC;CACL;AARD,kEAQC;AAED,MAAa,oBAAoB;IAE/B,YACkB,MAAc,EACd,MAAc;QADd,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAQ;QAHhB,SAAI,GAAgB,eAAe,CAAA;IAIhD,CAAC;CACL;AAND,oDAMC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,IAAY;QADZ,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAQ;QAHd,SAAI,GAAgB,iBAAiB,CAAA;IAIlD,CAAC;CACL;AAND,wDAMC;AAED,MAAa,cAAc;IAGzB,YACS,MAAc,EACd,MAAa;QADb,WAAM,GAAN,MAAM,CAAQ;QACd,WAAM,GAAN,MAAM,CAAO;QAHN,SAAI,GAAgB,SAAS,CAAA;QAK3C,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,MAAM,CAAA;IACjC,CAAC;CACF;AATD,wCASC;AAED,MAAa,aAAa;IACxB,YACkB,MAAc,EACd,OAA2B;QAD3B,WAAM,GAAN,MAAM,CAAQ;QACd,YAAO,GAAP,OAAO,CAAoB;QAE7B,SAAI,GAAG,QAAQ,CAAA;IAD5B,CAAC;CAkBL;AAtBD,sCAsBC"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts b/api/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts
new file mode 100644
index 000000000..cb0ff5c3b
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts
@@ -0,0 +1 @@
+export {};
diff --git a/api/node_modules/pg-protocol/dist/outbound-serializer.test.js b/api/node_modules/pg-protocol/dist/outbound-serializer.test.js
new file mode 100644
index 000000000..9644e1e5d
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/outbound-serializer.test.js
@@ -0,0 +1,252 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const assert_1 = __importDefault(require("assert"));
+const serializer_1 = require("./serializer");
+const buffer_list_1 = __importDefault(require("./testing/buffer-list"));
+describe('serializer', () => {
+ it('builds startup message', function () {
+ const actual = serializer_1.serialize.startup({
+ user: 'brian',
+ database: 'bang',
+ });
+ assert_1.default.deepEqual(actual, new buffer_list_1.default()
+ .addInt16(3)
+ .addInt16(0)
+ .addCString('user')
+ .addCString('brian')
+ .addCString('database')
+ .addCString('bang')
+ .addCString('client_encoding')
+ .addCString('UTF8')
+ .addCString('')
+ .join(true));
+ });
+ it('builds password message', function () {
+ const actual = serializer_1.serialize.password('!');
+ assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('!').join(true, 'p'));
+ });
+ it('builds request ssl message', function () {
+ const actual = serializer_1.serialize.requestSsl();
+ const expected = new buffer_list_1.default().addInt32(80877103).join(true);
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('builds SASLInitialResponseMessage message', function () {
+ const actual = serializer_1.serialize.sendSASLInitialResponseMessage('mech', 'data');
+ assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('mech').addInt32(4).addString('data').join(true, 'p'));
+ });
+ it('builds SCRAMClientFinalMessage message', function () {
+ const actual = serializer_1.serialize.sendSCRAMClientFinalMessage('data');
+ assert_1.default.deepEqual(actual, new buffer_list_1.default().addString('data').join(true, 'p'));
+ });
+ it('builds query message', function () {
+ const txt = 'select * from boom';
+ const actual = serializer_1.serialize.query(txt);
+ assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString(txt).join(true, 'Q'));
+ });
+ describe('parse message', () => {
+ it('builds parse message', function () {
+ const actual = serializer_1.serialize.parse({ text: '!' });
+ const expected = new buffer_list_1.default().addCString('').addCString('!').addInt16(0).join(true, 'P');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('builds parse message with named query', function () {
+ const actual = serializer_1.serialize.parse({
+ name: 'boom',
+ text: 'select * from boom',
+ types: [],
+ });
+ const expected = new buffer_list_1.default().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('with multiple parameters', function () {
+ const actual = serializer_1.serialize.parse({
+ name: 'force',
+ text: 'select * from bang where name = $1',
+ types: [1, 2, 3, 4],
+ });
+ const expected = new buffer_list_1.default()
+ .addCString('force')
+ .addCString('select * from bang where name = $1')
+ .addInt16(4)
+ .addInt32(1)
+ .addInt32(2)
+ .addInt32(3)
+ .addInt32(4)
+ .join(true, 'P');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ });
+ describe('bind messages', function () {
+ it('with no values', function () {
+ const actual = serializer_1.serialize.bind();
+ const expectedBuffer = new buffer_list_1.default()
+ .addCString('')
+ .addCString('')
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B');
+ assert_1.default.deepEqual(actual, expectedBuffer);
+ });
+ it('with named statement, portal, and values', function () {
+ const actual = serializer_1.serialize.bind({
+ portal: 'bang',
+ statement: 'woo',
+ values: ['1', 'hi', null, 'zing'],
+ });
+ const expectedBuffer = new buffer_list_1.default()
+ .addCString('bang') // portal name
+ .addCString('woo') // statement name
+ .addInt16(4)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(4)
+ .addInt32(1)
+ .add(Buffer.from('1'))
+ .addInt32(2)
+ .add(Buffer.from('hi'))
+ .addInt32(-1)
+ .addInt32(4)
+ .add(Buffer.from('zing'))
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B');
+ assert_1.default.deepEqual(actual, expectedBuffer);
+ });
+ });
+ it('with custom valueMapper', function () {
+ const actual = serializer_1.serialize.bind({
+ portal: 'bang',
+ statement: 'woo',
+ values: ['1', 'hi', null, 'zing'],
+ valueMapper: () => null,
+ });
+ const expectedBuffer = new buffer_list_1.default()
+ .addCString('bang') // portal name
+ .addCString('woo') // statement name
+ .addInt16(4)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(4)
+ .addInt32(-1)
+ .addInt32(-1)
+ .addInt32(-1)
+ .addInt32(-1)
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B');
+ assert_1.default.deepEqual(actual, expectedBuffer);
+ });
+ it('with named statement, portal, and buffer value', function () {
+ const actual = serializer_1.serialize.bind({
+ portal: 'bang',
+ statement: 'woo',
+ values: ['1', 'hi', null, Buffer.from('zing', 'utf8')],
+ });
+ const expectedBuffer = new buffer_list_1.default()
+ .addCString('bang') // portal name
+ .addCString('woo') // statement name
+ .addInt16(4) // value count
+ .addInt16(0) // string
+ .addInt16(0) // string
+ .addInt16(0) // string
+ .addInt16(1) // binary
+ .addInt16(4)
+ .addInt32(1)
+ .add(Buffer.from('1'))
+ .addInt32(2)
+ .add(Buffer.from('hi'))
+ .addInt32(-1)
+ .addInt32(4)
+ .add(Buffer.from('zing', 'utf-8'))
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B');
+ assert_1.default.deepEqual(actual, expectedBuffer);
+ });
+ describe('builds execute message', function () {
+ it('for unamed portal with no row limit', function () {
+ const actual = serializer_1.serialize.execute();
+ const expectedBuffer = new buffer_list_1.default().addCString('').addInt32(0).join(true, 'E');
+ assert_1.default.deepEqual(actual, expectedBuffer);
+ });
+ it('for named portal with row limit', function () {
+ const actual = serializer_1.serialize.execute({
+ portal: 'my favorite portal',
+ rows: 100,
+ });
+ const expectedBuffer = new buffer_list_1.default().addCString('my favorite portal').addInt32(100).join(true, 'E');
+ assert_1.default.deepEqual(actual, expectedBuffer);
+ });
+ });
+ it('builds flush command', function () {
+ const actual = serializer_1.serialize.flush();
+ const expected = new buffer_list_1.default().join(true, 'H');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('builds sync command', function () {
+ const actual = serializer_1.serialize.sync();
+ const expected = new buffer_list_1.default().join(true, 'S');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('builds end command', function () {
+ const actual = serializer_1.serialize.end();
+ const expected = Buffer.from([0x58, 0, 0, 0, 4]);
+ assert_1.default.deepEqual(actual, expected);
+ });
+ describe('builds describe command', function () {
+ it('describe statement', function () {
+ const actual = serializer_1.serialize.describe({ type: 'S', name: 'bang' });
+ const expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'D');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('describe unnamed portal', function () {
+ const actual = serializer_1.serialize.describe({ type: 'P' });
+ const expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'D');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ });
+ describe('builds close command', function () {
+ it('describe statement', function () {
+ const actual = serializer_1.serialize.close({ type: 'S', name: 'bang' });
+ const expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'C');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('describe unnamed portal', function () {
+ const actual = serializer_1.serialize.close({ type: 'P' });
+ const expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'C');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ });
+ describe('copy messages', function () {
+ it('builds copyFromChunk', () => {
+ const actual = serializer_1.serialize.copyData(Buffer.from([1, 2, 3]));
+ const expected = new buffer_list_1.default().add(Buffer.from([1, 2, 3])).join(true, 'd');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('builds copy fail', () => {
+ const actual = serializer_1.serialize.copyFail('err!');
+ const expected = new buffer_list_1.default().addCString('err!').join(true, 'f');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ it('builds copy done', () => {
+ const actual = serializer_1.serialize.copyDone();
+ const expected = new buffer_list_1.default().join(true, 'c');
+ assert_1.default.deepEqual(actual, expected);
+ });
+ });
+ it('builds cancel message', () => {
+ const actual = serializer_1.serialize.cancel(3, 4);
+ const expected = new buffer_list_1.default().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true);
+ assert_1.default.deepEqual(actual, expected);
+ });
+});
+//# sourceMappingURL=outbound-serializer.test.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/outbound-serializer.test.js.map b/api/node_modules/pg-protocol/dist/outbound-serializer.test.js.map
new file mode 100644
index 000000000..0a3cdce18
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/outbound-serializer.test.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"outbound-serializer.test.js","sourceRoot":"","sources":["../src/outbound-serializer.test.ts"],"names":[],"mappings":";;;;;AAAA,oDAA2B;AAC3B,6CAAwC;AACxC,wEAA8C;AAE9C,QAAQ,CAAC,YAAY,EAAE,GAAG,EAAE;IAC1B,EAAE,CAAC,wBAAwB,EAAE;QAC3B,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;YAC/B,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,MAAM;SACjB,CAAC,CAAA;QACF,gBAAM,CAAC,SAAS,CACd,MAAM,EACN,IAAI,qBAAU,EAAE;aACb,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,OAAO,CAAC;aACnB,UAAU,CAAC,UAAU,CAAC;aACtB,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,iBAAiB,CAAC;aAC7B,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,EAAE,CAAC;aACd,IAAI,CAAC,IAAI,CAAC,CACd,CAAA;IACH,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;QACtC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,4BAA4B,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,UAAU,EAAE,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC/D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,2CAA2C,EAAE;QAC9C,MAAM,MAAM,GAAG,sBAAS,CAAC,8BAA8B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QACvE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC7G,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,wCAAwC,EAAE;QAC3C,MAAM,MAAM,GAAG,sBAAS,CAAC,2BAA2B,CAAC,MAAM,CAAC,CAAA;QAC5D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC9E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,MAAM,GAAG,GAAG,oBAAoB,CAAA;QAChC,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACnC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE,GAAG,EAAE;QAC7B,EAAE,CAAC,sBAAsB,EAAE;YACz
B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC5F,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,uCAAuC,EAAE;YAC1C,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,oBAAoB;gBAC1B,KAAK,EAAE,EAAE;aACV,CAAC,CAAA;YACF,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjH,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,OAAO;gBACb,IAAI,EAAE,oCAAoC;gBAC1C,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;aACpB,CAAC,CAAA;YACF,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE;iBAC9B,UAAU,CAAC,OAAO,CAAC;iBACnB,UAAU,CAAC,oCAAoC,CAAC;iBAChD,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,gBAAgB,EAAE;YACnB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;YAE/B,MAAM,cAAc,GAAG,IAAI,qBAAU,EAAE;iBACpC,UAAU,CAAC,EAAE,CAAC;iBACd,UAAU,CAAC,EAAE,CAAC;iBACd,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0CAA0C,EAAE;YAC7C,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;gBAC5B,MAAM,EAAE,MAAM;gBACd,SAAS,EAAE,KAAK;gBAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;aAClC,CAAC,CAAA;YACF,MAAM,cAAc,GAAG,IAAI,qBAAU,EAAE;iBACpC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;iBACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;iBACnC,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CA
AC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACrB,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;iBACZ,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;iBACxB,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;YACjC,WAAW,EAAE,GAAG,EAAE,CAAC,IAAI;SACxB,CAAC,CAAA;QACF,MAAM,cAAc,GAAG,IAAI,qBAAU,EAAE;aACpC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,gDAAgD,EAAE;QACnD,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACvD,CAAC,CAAA;QACF,MAAM,cAAc,GAAG,IAAI,qBAAU,EAAE;aACpC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC,CAAC,cAAc;aAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aA
CtB,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aACjC,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wBAAwB,EAAE;QACjC,EAAE,CAAC,qCAAqC,EAAE;YACxC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,EAAE,CAAA;YAClC,MAAM,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClF,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,iCAAiC,EAAE;YACpC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;gBAC/B,MAAM,EAAE,oBAAoB;gBAC5B,IAAI,EAAE,GAAG;aACV,CAAC,CAAA;YACF,MAAM,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACtG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,EAAE,CAAA;QAChC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACjD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,qBAAqB,EAAE;QACxB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;QAC/B,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACjD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,oBAAoB,EAAE;QACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,GAAG,EAAE,CAAA;QAC9B,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;QAChD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC9D,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjF,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,y
BAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAChD,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC7E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,sBAAsB,EAAE;QAC/B,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC3D,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjF,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC7E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,sBAAsB,EAAE,GAAG,EAAE;YAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACzD,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC7E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YACzC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,EAAE,CAAA;YACnC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,uBAAuB,EAAE,GAAG,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,MAAM,CAAC,CAAC,EAAE
,CAAC,CAAC,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAClG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/parser.d.ts b/api/node_modules/pg-protocol/dist/parser.d.ts
new file mode 100644
index 000000000..f1e37f920
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/parser.d.ts
@@ -0,0 +1,24 @@
+///
+///
+import { TransformOptions } from 'stream';
+import { Mode, BackendMessage } from './messages';
+export declare type Packet = {
+ code: number;
+ packet: Buffer;
+};
+declare type StreamOptions = TransformOptions & {
+ mode: Mode;
+};
+export declare type MessageCallback = (msg: BackendMessage) => void;
+export declare class Parser {
+ private buffer;
+ private bufferLength;
+ private bufferOffset;
+ private reader;
+ private mode;
+ constructor(opts?: StreamOptions);
+ parse(buffer: Buffer, callback: MessageCallback): void;
+ private mergeBuffer;
+ private handlePacket;
+}
+export {};
diff --git a/api/node_modules/pg-protocol/dist/parser.js b/api/node_modules/pg-protocol/dist/parser.js
new file mode 100644
index 000000000..b05fe75a9
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/parser.js
@@ -0,0 +1,324 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Parser = void 0;
+const messages_1 = require("./messages");
+const buffer_reader_1 = require("./buffer-reader");
+// every message is prefixed with a single bye
+const CODE_LENGTH = 1;
+// every message has an int32 length which includes itself but does
+// NOT include the code in the length
+const LEN_LENGTH = 4;
+const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH;
+// A placeholder for a `BackendMessage`’s length value that will be set after construction.
+const LATEINIT_LENGTH = -1;
+const emptyBuffer = Buffer.allocUnsafe(0);
+class Parser {
+ constructor(opts) {
+ this.buffer = emptyBuffer;
+ this.bufferLength = 0;
+ this.bufferOffset = 0;
+ this.reader = new buffer_reader_1.BufferReader();
+ if ((opts === null || opts === void 0 ? void 0 : opts.mode) === 'binary') {
+ throw new Error('Binary mode not supported yet');
+ }
+ this.mode = (opts === null || opts === void 0 ? void 0 : opts.mode) || 'text';
+ }
+ parse(buffer, callback) {
+ this.mergeBuffer(buffer);
+ const bufferFullLength = this.bufferOffset + this.bufferLength;
+ let offset = this.bufferOffset;
+ while (offset + HEADER_LENGTH <= bufferFullLength) {
+ // code is 1 byte long - it identifies the message type
+ const code = this.buffer[offset];
+ // length is 1 Uint32BE - it is the length of the message EXCLUDING the code
+ const length = this.buffer.readUInt32BE(offset + CODE_LENGTH);
+ const fullMessageLength = CODE_LENGTH + length;
+ if (fullMessageLength + offset <= bufferFullLength) {
+ const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer);
+ callback(message);
+ offset += fullMessageLength;
+ }
+ else {
+ break;
+ }
+ }
+ if (offset === bufferFullLength) {
+ // No more use for the buffer
+ this.buffer = emptyBuffer;
+ this.bufferLength = 0;
+ this.bufferOffset = 0;
+ }
+ else {
+ // Adjust the cursors of remainingBuffer
+ this.bufferLength = bufferFullLength - offset;
+ this.bufferOffset = offset;
+ }
+ }
+ mergeBuffer(buffer) {
+ if (this.bufferLength > 0) {
+ const newLength = this.bufferLength + buffer.byteLength;
+ const newFullLength = newLength + this.bufferOffset;
+ if (newFullLength > this.buffer.byteLength) {
+ // We can't concat the new buffer with the remaining one
+ let newBuffer;
+ if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) {
+ // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer
+ newBuffer = this.buffer;
+ }
+ else {
+ // Allocate a new larger buffer
+ let newBufferLength = this.buffer.byteLength * 2;
+ while (newLength >= newBufferLength) {
+ newBufferLength *= 2;
+ }
+ newBuffer = Buffer.allocUnsafe(newBufferLength);
+ }
+ // Move the remaining buffer to the new one
+ this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength);
+ this.buffer = newBuffer;
+ this.bufferOffset = 0;
+ }
+ // Concat the new buffer with the remaining one
+ buffer.copy(this.buffer, this.bufferOffset + this.bufferLength);
+ this.bufferLength = newLength;
+ }
+ else {
+ this.buffer = buffer;
+ this.bufferOffset = 0;
+ this.bufferLength = buffer.byteLength;
+ }
+ }
+ handlePacket(offset, code, length, bytes) {
+ const { reader } = this;
+ // NOTE: This undesirably retains the buffer in `this.reader` if the `parse*Message` calls below throw. However, those should only throw in the case of a protocol error, which normally results in the reader being discarded.
+ reader.setBuffer(offset, bytes);
+ let message;
+ switch (code) {
+ case 50 /* MessageCodes.BindComplete */:
+ message = messages_1.bindComplete;
+ break;
+ case 49 /* MessageCodes.ParseComplete */:
+ message = messages_1.parseComplete;
+ break;
+ case 51 /* MessageCodes.CloseComplete */:
+ message = messages_1.closeComplete;
+ break;
+ case 110 /* MessageCodes.NoData */:
+ message = messages_1.noData;
+ break;
+ case 115 /* MessageCodes.PortalSuspended */:
+ message = messages_1.portalSuspended;
+ break;
+ case 99 /* MessageCodes.CopyDone */:
+ message = messages_1.copyDone;
+ break;
+ case 87 /* MessageCodes.ReplicationStart */:
+ message = messages_1.replicationStart;
+ break;
+ case 73 /* MessageCodes.EmptyQuery */:
+ message = messages_1.emptyQuery;
+ break;
+ case 68 /* MessageCodes.DataRow */:
+ message = parseDataRowMessage(reader);
+ break;
+ case 67 /* MessageCodes.CommandComplete */:
+ message = parseCommandCompleteMessage(reader);
+ break;
+ case 90 /* MessageCodes.ReadyForQuery */:
+ message = parseReadyForQueryMessage(reader);
+ break;
+ case 65 /* MessageCodes.NotificationResponse */:
+ message = parseNotificationMessage(reader);
+ break;
+ case 82 /* MessageCodes.AuthenticationResponse */:
+ message = parseAuthenticationResponse(reader, length);
+ break;
+ case 83 /* MessageCodes.ParameterStatus */:
+ message = parseParameterStatusMessage(reader);
+ break;
+ case 75 /* MessageCodes.BackendKeyData */:
+ message = parseBackendKeyData(reader);
+ break;
+ case 69 /* MessageCodes.ErrorMessage */:
+ message = parseErrorMessage(reader, 'error');
+ break;
+ case 78 /* MessageCodes.NoticeMessage */:
+ message = parseErrorMessage(reader, 'notice');
+ break;
+ case 84 /* MessageCodes.RowDescriptionMessage */:
+ message = parseRowDescriptionMessage(reader);
+ break;
+ case 116 /* MessageCodes.ParameterDescriptionMessage */:
+ message = parseParameterDescriptionMessage(reader);
+ break;
+ case 71 /* MessageCodes.CopyIn */:
+ message = parseCopyInMessage(reader);
+ break;
+ case 72 /* MessageCodes.CopyOut */:
+ message = parseCopyOutMessage(reader);
+ break;
+ case 100 /* MessageCodes.CopyData */:
+ message = parseCopyData(reader, length);
+ break;
+ default:
+ return new messages_1.DatabaseError('received invalid response: ' + code.toString(16), length, 'error');
+ }
+ reader.setBuffer(0, emptyBuffer);
+ message.length = length;
+ return message;
+ }
+}
+exports.Parser = Parser;
+const parseReadyForQueryMessage = (reader) => {
+ const status = reader.string(1);
+ return new messages_1.ReadyForQueryMessage(LATEINIT_LENGTH, status);
+};
+const parseCommandCompleteMessage = (reader) => {
+ const text = reader.cstring();
+ return new messages_1.CommandCompleteMessage(LATEINIT_LENGTH, text);
+};
+const parseCopyData = (reader, length) => {
+ const chunk = reader.bytes(length - 4);
+ return new messages_1.CopyDataMessage(LATEINIT_LENGTH, chunk);
+};
+const parseCopyInMessage = (reader) => parseCopyMessage(reader, 'copyInResponse');
+const parseCopyOutMessage = (reader) => parseCopyMessage(reader, 'copyOutResponse');
+const parseCopyMessage = (reader, messageName) => {
+ const isBinary = reader.byte() !== 0;
+ const columnCount = reader.int16();
+ const message = new messages_1.CopyResponse(LATEINIT_LENGTH, messageName, isBinary, columnCount);
+ for (let i = 0; i < columnCount; i++) {
+ message.columnTypes[i] = reader.int16();
+ }
+ return message;
+};
+const parseNotificationMessage = (reader) => {
+ const processId = reader.int32();
+ const channel = reader.cstring();
+ const payload = reader.cstring();
+ return new messages_1.NotificationResponseMessage(LATEINIT_LENGTH, processId, channel, payload);
+};
+const parseRowDescriptionMessage = (reader) => {
+ const fieldCount = reader.int16();
+ const message = new messages_1.RowDescriptionMessage(LATEINIT_LENGTH, fieldCount);
+ for (let i = 0; i < fieldCount; i++) {
+ message.fields[i] = parseField(reader);
+ }
+ return message;
+};
+const parseField = (reader) => {
+ const name = reader.cstring();
+ const tableID = reader.uint32();
+ const columnID = reader.int16();
+ const dataTypeID = reader.uint32();
+ const dataTypeSize = reader.int16();
+ const dataTypeModifier = reader.int32();
+ const mode = reader.int16() === 0 ? 'text' : 'binary';
+ return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode);
+};
+const parseParameterDescriptionMessage = (reader) => {
+ const parameterCount = reader.int16();
+ const message = new messages_1.ParameterDescriptionMessage(LATEINIT_LENGTH, parameterCount);
+ for (let i = 0; i < parameterCount; i++) {
+ message.dataTypeIDs[i] = reader.int32();
+ }
+ return message;
+};
+const parseDataRowMessage = (reader) => {
+ const fieldCount = reader.int16();
+ const fields = new Array(fieldCount);
+ for (let i = 0; i < fieldCount; i++) {
+ const len = reader.int32();
+ // a -1 for length means the value of the field is null
+ fields[i] = len === -1 ? null : reader.string(len);
+ }
+ return new messages_1.DataRowMessage(LATEINIT_LENGTH, fields);
+};
+const parseParameterStatusMessage = (reader) => {
+ const name = reader.cstring();
+ const value = reader.cstring();
+ return new messages_1.ParameterStatusMessage(LATEINIT_LENGTH, name, value);
+};
+const parseBackendKeyData = (reader) => {
+ const processID = reader.int32();
+ const secretKey = reader.int32();
+ return new messages_1.BackendKeyDataMessage(LATEINIT_LENGTH, processID, secretKey);
+};
+// Authentication ('R'): the leading int32 sub-code selects the concrete auth
+// message. Unknown sub-codes throw rather than being silently skipped.
+const parseAuthenticationResponse = (reader, length) => {
+    const code = reader.int32();
+    // TODO(bmc): maybe better types here
+    const message = {
+        name: 'authenticationOk',
+        length,
+    };
+    switch (code) {
+        case 0: // AuthenticationOk
+            break;
+        case 3: // AuthenticationCleartextPassword
+            // only relabel when the packet length matches the expected fixed size
+            if (message.length === 8) {
+                message.name = 'authenticationCleartextPassword';
+            }
+            break;
+        case 5: // AuthenticationMD5Password
+            if (message.length === 12) {
+                message.name = 'authenticationMD5Password';
+                const salt = reader.bytes(4);
+                // the one sub-code returning a dedicated message class
+                return new messages_1.AuthenticationMD5Password(LATEINIT_LENGTH, salt);
+            }
+            break;
+        case 10: // AuthenticationSASL
+            {
+                message.name = 'authenticationSASL';
+                message.mechanisms = [];
+                let mechanism;
+                // the mechanism list is terminated by an empty cstring
+                do {
+                    mechanism = reader.cstring();
+                    if (mechanism) {
+                        message.mechanisms.push(mechanism);
+                    }
+                } while (mechanism);
+            }
+            break;
+        case 11: // AuthenticationSASLContinue
+            message.name = 'authenticationSASLContinue';
+            // payload is everything after the length + sub-code fields (8 bytes)
+            message.data = reader.string(length - 8);
+            break;
+        case 12: // AuthenticationSASLFinal
+            message.name = 'authenticationSASLFinal';
+            message.data = reader.string(length - 8);
+            break;
+        default:
+            throw new Error('Unknown authenticationOk message type ' + code);
+    }
+    return message;
+};
+// ErrorResponse ('E') / NoticeResponse ('N'): a sequence of single-character
+// field codes, each followed by a cstring value, terminated by a NUL byte.
+const parseErrorMessage = (reader, name) => {
+    const fields = {};
+    let fieldType = reader.string(1);
+    while (fieldType !== '\0') {
+        fields[fieldType] = reader.cstring();
+        fieldType = reader.string(1);
+    }
+    // 'M' carries the human-readable message text
+    const messageValue = fields.M;
+    const message = name === 'notice'
+        ? new messages_1.NoticeMessage(LATEINIT_LENGTH, messageValue)
+        : new messages_1.DatabaseError(messageValue, LATEINIT_LENGTH, name);
+    // copy the remaining field codes onto named properties; codes absent from
+    // the packet leave the property undefined
+    message.severity = fields.S;
+    message.code = fields.C;
+    message.detail = fields.D;
+    message.hint = fields.H;
+    message.position = fields.P;
+    message.internalPosition = fields.p;
+    message.internalQuery = fields.q;
+    message.where = fields.W;
+    message.schema = fields.s;
+    message.table = fields.t;
+    message.column = fields.c;
+    message.dataType = fields.d;
+    message.constraint = fields.n;
+    message.file = fields.F;
+    message.line = fields.L;
+    message.routine = fields.R;
+    return message;
+};
+//# sourceMappingURL=parser.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/parser.js.map b/api/node_modules/pg-protocol/dist/parser.js.map
new file mode 100644
index 000000000..947ab29dc
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/parser.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"parser.js","sourceRoot":"","sources":["../src/parser.ts"],"names":[],"mappings":";;;AACA,yCA0BmB;AACnB,mDAA8C;AAE9C,8CAA8C;AAC9C,MAAM,WAAW,GAAG,CAAC,CAAA;AACrB,mEAAmE;AACnE,qCAAqC;AACrC,MAAM,UAAU,GAAG,CAAC,CAAA;AAEpB,MAAM,aAAa,GAAG,WAAW,GAAG,UAAU,CAAA;AAE9C,2FAA2F;AAC3F,MAAM,eAAe,GAAG,CAAC,CAAC,CAAA;AAO1B,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAiCzC,MAAa,MAAM;IAOjB,YAAY,IAAoB;QANxB,WAAM,GAAW,WAAW,CAAA;QAC5B,iBAAY,GAAW,CAAC,CAAA;QACxB,iBAAY,GAAW,CAAC,CAAA;QACxB,WAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;QAIjC,IAAI,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,MAAK,QAAQ,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAA;SACjD;QACD,IAAI,CAAC,IAAI,GAAG,CAAA,IAAI,aAAJ,IAAI,uBAAJ,IAAI,CAAE,IAAI,KAAI,MAAM,CAAA;IAClC,CAAC;IAEM,KAAK,CAAC,MAAc,EAAE,QAAyB;QACpD,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QACxB,MAAM,gBAAgB,GAAG,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9D,IAAI,MAAM,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9B,OAAO,MAAM,GAAG,aAAa,IAAI,gBAAgB,EAAE;YACjD,uDAAuD;YACvD,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;YAChC,4EAA4E;YAC5E,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,GAAG,WAAW,CAAC,CAAA;YAC7D,MAAM,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAA;YAC9C,IAAI,iBAAiB,GAAG,MAAM,IAAI,gBAAgB,EAAE;gBAClD,MAAM,OAAO,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,GAAG,aAAa,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;gBACpF,QAAQ,CAAC,OAAO,CAAC,CAAA;gBACjB,MAAM,IAAI,iBAAiB,CAAA;aAC5B;iBAAM;gBACL,MAAK;aACN;SACF;QACD,IAAI,MAAM,KAAK,gBAAgB,EAAE;YAC/B,6BAA6B;YAC7B,IAAI,CAAC,MAAM,GAAG,WAAW,CAAA;YACzB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;SACtB;aAAM;YACL,wCAAwC;YACxC,IAAI,CAAC,YAAY,GAAG,gBAAgB,GAAG,MAAM,CAAA;YAC7C,IAAI,CAAC,YAAY,GAAG,MAAM,CAAA;SAC3B;IACH,CAAC;IAEO,WAAW,CAAC,MAAc;QAChC,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YACzB,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;YACvD,MAAM,aAAa,GAAG,SAAS,GAAG,IAAI,CAAC,YAAY,CAAA;YACnD,IAAI,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE;gBAC1C,wDAAwD;gBACxD,IAAI,SAAiB,CAAA;gBACrB,IAAI,SAAS,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,IAAI,CA
AC,YAAY,IAAI,IAAI,CAAC,YAAY,EAAE;oBACjF,kGAAkG;oBAClG,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;iBACxB;qBAAM;oBACL,+BAA+B;oBAC/B,IAAI,eAAe,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,CAAC,CAAA;oBAChD,OAAO,SAAS,IAAI,eAAe,EAAE;wBACnC,eAAe,IAAI,CAAC,CAAA;qBACrB;oBACD,SAAS,GAAG,MAAM,CAAC,WAAW,CAAC,eAAe,CAAC,CAAA;iBAChD;gBACD,2CAA2C;gBAC3C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;gBACxF,IAAI,CAAC,MAAM,GAAG,SAAS,CAAA;gBACvB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;aACtB;YACD,+CAA+C;YAC/C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;YAC/D,IAAI,CAAC,YAAY,GAAG,SAAS,CAAA;SAC9B;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;YACpB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;SACtC;IACH,CAAC;IAEO,YAAY,CAAC,MAAc,EAAE,IAAY,EAAE,MAAc,EAAE,KAAa;QAC9E,MAAM,EAAE,MAAM,EAAE,GAAG,IAAI,CAAA;QAEvB,+NAA+N;QAC/N,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QAE/B,IAAI,OAAuB,CAAA;QAE3B,QAAQ,IAAI,EAAE;YACZ;gBACE,OAAO,GAAG,uBAAY,CAAA;gBACtB,MAAK;YACP;gBACE,OAAO,GAAG,wBAAa,CAAA;gBACvB,MAAK;YACP;gBACE,OAAO,GAAG,wBAAa,CAAA;gBACvB,MAAK;YACP;gBACE,OAAO,GAAG,iBAAM,CAAA;gBAChB,MAAK;YACP;gBACE,OAAO,GAAG,0BAAe,CAAA;gBACzB,MAAK;YACP;gBACE,OAAO,GAAG,mBAAQ,CAAA;gBAClB,MAAK;YACP;gBACE,OAAO,GAAG,2BAAgB,CAAA;gBAC1B,MAAK;YACP;gBACE,OAAO,GAAG,qBAAU,CAAA;gBACpB,MAAK;YACP;gBACE,OAAO,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;gBACrC,MAAK;YACP;gBACE,OAAO,GAAG,2BAA2B,CAAC,MAAM,CAAC,CAAA;gBAC7C,MAAK;YACP;gBACE,OAAO,GAAG,yBAAyB,CAAC,MAAM,CAAC,CAAA;gBAC3C,MAAK;YACP;gBACE,OAAO,GAAG,wBAAwB,CAAC,MAAM,CAAC,CAAA;gBAC1C,MAAK;YACP;gBACE,OAAO,GAAG,2BAA2B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;gBACrD,MAAK;YACP;gBACE,OAAO,GAAG,2BAA2B,CAAC,MAAM,CAAC,CAAA;gBAC7C,MAAK;YACP;gBACE,OAAO,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;gBACrC,MAAK;YACP;gBACE,OAAO,GAAG,iBAAiB,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;gBAC5C,MAAK;YACP;gBACE,OAAO,GAAG,iBAAiB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;gBAC7C,MAAK;YACP;gBACE,OAAO,GAAG,0BAA0B,CAAC,MAAM,CAAC,CAAA;gBAC5C,MAAK;YACP;gBACE,OAAO,GAAG,gCAA
gC,CAAC,MAAM,CAAC,CAAA;gBAClD,MAAK;YACP;gBACE,OAAO,GAAG,kBAAkB,CAAC,MAAM,CAAC,CAAA;gBACpC,MAAK;YACP;gBACE,OAAO,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;gBACrC,MAAK;YACP;gBACE,OAAO,GAAG,aAAa,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;gBACvC,MAAK;YACP;gBACE,OAAO,IAAI,wBAAa,CAAC,6BAA6B,GAAG,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,MAAM,EAAE,OAAO,CAAC,CAAA;SAC/F;QAED,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,WAAW,CAAC,CAAA;QAEhC,OAAO,CAAC,MAAM,GAAG,MAAM,CAAA;QACvB,OAAO,OAAO,CAAA;IAChB,CAAC;CACF;AAjKD,wBAiKC;AAED,MAAM,yBAAyB,GAAG,CAAC,MAAoB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,+BAAoB,CAAC,eAAe,EAAE,MAAM,CAAC,CAAA;AAC1D,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,CAAC,MAAoB,EAAE,EAAE;IAC3D,MAAM,IAAI,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;IAC7B,OAAO,IAAI,iCAAsB,CAAC,eAAe,EAAE,IAAI,CAAC,CAAA;AAC1D,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CAAC,MAAoB,EAAE,MAAc,EAAE,EAAE;IAC7D,MAAM,KAAK,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IACtC,OAAO,IAAI,0BAAe,CAAC,eAAe,EAAE,KAAK,CAAC,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CAAC,MAAoB,EAAE,EAAE,CAAC,gBAAgB,CAAC,MAAM,EAAE,gBAAgB,CAAC,CAAA;AAE/F,MAAM,mBAAmB,GAAG,CAAC,MAAoB,EAAE,EAAE,CAAC,gBAAgB,CAAC,MAAM,EAAE,iBAAiB,CAAC,CAAA;AAEjG,MAAM,gBAAgB,GAAG,CAAC,MAAoB,EAAE,WAAwB,EAAE,EAAE;IAC1E,MAAM,QAAQ,GAAG,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;IACpC,MAAM,WAAW,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IAClC,MAAM,OAAO,GAAG,IAAI,uBAAY,CAAC,eAAe,EAAE,WAAW,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAA;IACrF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE;QACpC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;KACxC;IACD,OAAO,OAAO,CAAA;AAChB,CAAC,CAAA;AAED,MAAM,wBAAwB,GAAG,CAAC,MAAoB,EAAE,EAAE;IACxD,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IAChC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;IAChC,MAAM,OAAO,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;IAChC,OAAO,IAAI,sCAA2B,CAAC,eAAe,EAAE,SAAS,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;AACtF,CAAC,CAAA;AAED,MAAM,0BAA0B,GAAG,CAAC,MAAoB,EAAE,EAAE;IAC1D,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IACjC,MAAM,OAAO,GAAG,IAAI,gCAAqB,CAAC,eAAe,EAAE,UAAU,CAAC,CAAA;IACtE
,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;QACnC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,UAAU,CAAC,MAAM,CAAC,CAAA;KACvC;IACD,OAAO,OAAO,CAAA;AAChB,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,CAAC,MAAoB,EAAE,EAAE;IAC1C,MAAM,IAAI,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;IAC7B,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,EAAE,CAAA;IAC/B,MAAM,QAAQ,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IAC/B,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,CAAA;IAClC,MAAM,YAAY,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IACnC,MAAM,gBAAgB,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IACvC,MAAM,IAAI,GAAG,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAA;IACrD,OAAO,IAAI,gBAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,YAAY,EAAE,gBAAgB,EAAE,IAAI,CAAC,CAAA;AAC7F,CAAC,CAAA;AAED,MAAM,gCAAgC,GAAG,CAAC,MAAoB,EAAE,EAAE;IAChE,MAAM,cAAc,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IACrC,MAAM,OAAO,GAAG,IAAI,sCAA2B,CAAC,eAAe,EAAE,cAAc,CAAC,CAAA;IAChF,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,cAAc,EAAE,CAAC,EAAE,EAAE;QACvC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;KACxC;IACD,OAAO,OAAO,CAAA;AAChB,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,MAAoB,EAAE,EAAE;IACnD,MAAM,UAAU,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IACjC,MAAM,MAAM,GAAU,IAAI,KAAK,CAAC,UAAU,CAAC,CAAA;IAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;QACnC,MAAM,GAAG,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;QAC1B,uDAAuD;QACvD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;KACnD;IACD,OAAO,IAAI,yBAAc,CAAC,eAAe,EAAE,MAAM,CAAC,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,CAAC,MAAoB,EAAE,EAAE;IAC3D,MAAM,IAAI,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;IAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;IAC9B,OAAO,IAAI,iCAAsB,CAAC,eAAe,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;AACjE,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,MAAoB,EAAE,EAAE;IACnD,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IAChC,MAAM,SAAS,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IAChC,OAAO,IAAI,gCAAqB,CAAC,eAAe,EAAE,SAAS,EAAE,SAAS,CAAC,CAAA;AACzE,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,CAAC,MAAoB,EAAE,MAAc,EAAE,EAAE;IAC3E,MAAM,IAAI
,GAAG,MAAM,CAAC,KAAK,EAAE,CAAA;IAC3B,qCAAqC;IACrC,MAAM,OAAO,GAAyB;QACpC,IAAI,EAAE,kBAAkB;QACxB,MAAM;KACP,CAAA;IAED,QAAQ,IAAI,EAAE;QACZ,KAAK,CAAC,EAAE,mBAAmB;YACzB,MAAK;QACP,KAAK,CAAC,EAAE,kCAAkC;YACxC,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;gBACxB,OAAO,CAAC,IAAI,GAAG,iCAAiC,CAAA;aACjD;YACD,MAAK;QACP,KAAK,CAAC,EAAE,4BAA4B;YAClC,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;gBACzB,OAAO,CAAC,IAAI,GAAG,2BAA2B,CAAA;gBAC1C,MAAM,IAAI,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;gBAC5B,OAAO,IAAI,oCAAyB,CAAC,eAAe,EAAE,IAAI,CAAC,CAAA;aAC5D;YACD,MAAK;QACP,KAAK,EAAE,EAAE,qBAAqB;YAC5B;gBACE,OAAO,CAAC,IAAI,GAAG,oBAAoB,CAAA;gBACnC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAA;gBACvB,IAAI,SAAiB,CAAA;gBACrB,GAAG;oBACD,SAAS,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;oBAC5B,IAAI,SAAS,EAAE;wBACb,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;qBACnC;iBACF,QAAQ,SAAS,EAAC;aACpB;YACD,MAAK;QACP,KAAK,EAAE,EAAE,6BAA6B;YACpC,OAAO,CAAC,IAAI,GAAG,4BAA4B,CAAA;YAC3C,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACxC,MAAK;QACP,KAAK,EAAE,EAAE,0BAA0B;YACjC,OAAO,CAAC,IAAI,GAAG,yBAAyB,CAAA;YACxC,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACxC,MAAK;QACP;YACE,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,IAAI,CAAC,CAAA;KACnE;IACD,OAAO,OAAO,CAAA;AAChB,CAAC,CAAA;AAED,MAAM,iBAAiB,GAAG,CAAC,MAAoB,EAAE,IAAiB,EAAE,EAAE;IACpE,MAAM,MAAM,GAA2B,EAAE,CAAA;IACzC,IAAI,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;IAChC,OAAO,SAAS,KAAK,IAAI,EAAE;QACzB,MAAM,CAAC,SAAS,CAAC,GAAG,MAAM,CAAC,OAAO,EAAE,CAAA;QACpC,SAAS,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;KAC7B;IAED,MAAM,YAAY,GAAG,MAAM,CAAC,CAAC,CAAA;IAE7B,MAAM,OAAO,GACX,IAAI,KAAK,QAAQ;QACf,CAAC,CAAC,IAAI,wBAAa,CAAC,eAAe,EAAE,YAAY,CAAC;QAClD,CAAC,CAAC,IAAI,wBAAa,CAAC,YAAY,EAAE,eAAe,EAAE,IAAI,CAAC,CAAA;IAE5D,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;IAC3B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;IACvB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;IACzB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;IACvB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;IAC3B,OAAO,CAAC,gBAAgB,GAAG,MAAM,CAAC,CAAC,CAAA;IACnC,OAAO,CAAC,aAAa,GA
AG,MAAM,CAAC,CAAC,CAAA;IAChC,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;IACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;IACzB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;IACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;IACzB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;IAC3B,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC,CAAC,CAAA;IAC7B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;IACvB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;IACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,CAAC,CAAA;IAC1B,OAAO,OAAO,CAAA;AAChB,CAAC,CAAA"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/serializer.d.ts b/api/node_modules/pg-protocol/dist/serializer.d.ts
new file mode 100644
index 000000000..a9ef64af6
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/serializer.d.ts
@@ -0,0 +1,42 @@
+// Options for the extended-protocol Parse message.
+declare type ParseOpts = {
+    name?: string;
+    types?: number[];
+    text: string;
+};
+// Per-value transform applied to each parameter before serialization.
+declare type ValueMapper = (param: any, index: number) => any;
+// Options for the Bind message.
+declare type BindOpts = {
+    portal?: string;
+    binary?: boolean;
+    statement?: string;
+    values?: any[];
+    valueMapper?: ValueMapper;
+};
+// Options for the Execute message.
+declare type ExecOpts = {
+    portal?: string;
+    rows?: number;
+};
+// Target of a Describe/Close message: 'S' = statement, 'P' = portal.
+declare type PortalOpts = {
+    type: 'S' | 'P';
+    name?: string;
+};
+// Type surface of the frontend-message serializer (see dist/serializer.js).
+// Fix: bare `Record` is invalid TypeScript — the Record utility type requires
+// two type arguments; startup options are flat string key/value pairs.
+declare const serialize: {
+    startup: (opts: Record<string, string>) => Buffer;
+    password: (password: string) => Buffer;
+    requestSsl: () => Buffer;
+    sendSASLInitialResponseMessage: (mechanism: string, initialResponse: string) => Buffer;
+    sendSCRAMClientFinalMessage: (additionalData: string) => Buffer;
+    query: (text: string) => Buffer;
+    parse: (query: ParseOpts) => Buffer;
+    bind: (config?: BindOpts) => Buffer;
+    execute: (config?: ExecOpts) => Buffer;
+    describe: (msg: PortalOpts) => Buffer;
+    close: (msg: PortalOpts) => Buffer;
+    flush: () => Buffer;
+    sync: () => Buffer;
+    end: () => Buffer;
+    copyData: (chunk: Buffer) => Buffer;
+    copyDone: () => Buffer;
+    copyFail: (message: string) => Buffer;
+    cancel: (processID: number, secretKey: number) => Buffer;
+};
+export { serialize };
diff --git a/api/node_modules/pg-protocol/dist/serializer.js b/api/node_modules/pg-protocol/dist/serializer.js
new file mode 100644
index 000000000..f3e5d28ab
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/serializer.js
@@ -0,0 +1,189 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.serialize = void 0;
+const buffer_writer_1 = require("./buffer-writer");
+const writer = new buffer_writer_1.Writer();
+const startup = (opts) => {
+ // protocol version
+ writer.addInt16(3).addInt16(0);
+ for (const key of Object.keys(opts)) {
+ writer.addCString(key).addCString(opts[key]);
+ }
+ writer.addCString('client_encoding').addCString('UTF8');
+ const bodyBuffer = writer.addCString('').flush();
+ // this message is sent without a code
+ const length = bodyBuffer.length + 4;
+ return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush();
+};
+const requestSsl = () => {
+ const response = Buffer.allocUnsafe(8);
+ response.writeInt32BE(8, 0);
+ response.writeInt32BE(80877103, 4);
+ return response;
+};
+const password = (password) => {
+ return writer.addCString(password).flush(112 /* code.startup */);
+};
+const sendSASLInitialResponseMessage = function (mechanism, initialResponse) {
+ // 0x70 = 'p'
+ writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse);
+ return writer.flush(112 /* code.startup */);
+};
+const sendSCRAMClientFinalMessage = function (additionalData) {
+ return writer.addString(additionalData).flush(112 /* code.startup */);
+};
+const query = (text) => {
+ return writer.addCString(text).flush(81 /* code.query */);
+};
+const emptyArray = [];
+const parse = (query) => {
+ // expect something like this:
+ // { name: 'queryName',
+ // text: 'select * from blah',
+ // types: ['int8', 'bool'] }
+ // normalize missing query names to allow for null
+ const name = query.name || '';
+ if (name.length > 63) {
+ console.error('Warning! Postgres only supports 63 characters for query names.');
+ console.error('You supplied %s (%s)', name, name.length);
+ console.error('This can cause conflicts and silent errors executing queries');
+ }
+ const types = query.types || emptyArray;
+ const len = types.length;
+ const buffer = writer
+ .addCString(name) // name of query
+ .addCString(query.text) // actual query text
+ .addInt16(len);
+ for (let i = 0; i < len; i++) {
+ buffer.addInt32(types[i]);
+ }
+ return writer.flush(80 /* code.parse */);
+};
+const paramWriter = new buffer_writer_1.Writer();
+const writeValues = function (values, valueMapper) {
+ for (let i = 0; i < values.length; i++) {
+ const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i];
+ if (mappedVal == null) {
+ // add the param type (string) to the writer
+ writer.addInt16(0 /* ParamType.STRING */);
+ // write -1 to the param writer to indicate null
+ paramWriter.addInt32(-1);
+ }
+ else if (mappedVal instanceof Buffer) {
+ // add the param type (binary) to the writer
+ writer.addInt16(1 /* ParamType.BINARY */);
+ // add the buffer to the param writer
+ paramWriter.addInt32(mappedVal.length);
+ paramWriter.add(mappedVal);
+ }
+ else {
+ // add the param type (string) to the writer
+ writer.addInt16(0 /* ParamType.STRING */);
+ paramWriter.addInt32(Buffer.byteLength(mappedVal));
+ paramWriter.addString(mappedVal);
+ }
+ }
+};
+// Bind ('B' = 66): bind values to a prepared statement, creating a portal.
+// Ordering contract with writeValues(): the per-parameter format codes are
+// written to `writer` inline, while the values accumulate in `paramWriter`
+// and are appended afterwards in a single add().
+const bind = (config = {}) => {
+    // normalize config
+    const portal = config.portal || '';
+    const statement = config.statement || '';
+    const binary = config.binary || false;
+    const values = config.values || emptyArray;
+    const len = values.length;
+    writer.addCString(portal).addCString(statement);
+    // count of parameter format codes
+    writer.addInt16(len);
+    writeValues(values, config.valueMapper);
+    // count of parameter values, then the accumulated values themselves
+    writer.addInt16(len);
+    writer.add(paramWriter.flush());
+    // all results use the same format code
+    writer.addInt16(1);
+    // format code
+    writer.addInt16(binary ? 1 /* ParamType.BINARY */ : 0 /* ParamType.STRING */);
+    return writer.flush(66 /* code.bind */);
+};
+const emptyExecute = Buffer.from([69 /* code.execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]);
+// Execute ('E' = 69): run a bound portal, optionally capping the row count.
+// Layout: code byte, int32 length, portal cstring, uint32 row limit.
+const execute = (config) => {
+    // this is the happy path for most queries: reuse the preallocated buffer
+    if (!config || (!config.portal && !config.rows)) {
+        return emptyExecute;
+    }
+    const portal = config.portal || '';
+    const rows = config.rows || 0;
+    const portalLength = Buffer.byteLength(portal);
+    // length field + portal string + its NUL terminator + int32 row limit
+    const len = 4 + portalLength + 1 + 4;
+    // one extra byte for the message code
+    const buff = Buffer.allocUnsafe(1 + len);
+    buff[0] = 69 /* code.execute */;
+    buff.writeInt32BE(len, 1);
+    buff.write(portal, 5, 'utf-8');
+    buff[portalLength + 5] = 0; // null terminate portal cString
+    buff.writeUInt32BE(rows, buff.length - 4);
+    return buff;
+};
+const cancel = (processID, secretKey) => {
+ const buffer = Buffer.allocUnsafe(16);
+ buffer.writeInt32BE(16, 0);
+ buffer.writeInt16BE(1234, 4);
+ buffer.writeInt16BE(5678, 6);
+ buffer.writeInt32BE(processID, 8);
+ buffer.writeInt32BE(secretKey, 12);
+ return buffer;
+};
+const cstringMessage = (code, string) => {
+ const stringLen = Buffer.byteLength(string);
+ const len = 4 + stringLen + 1;
+ // one extra bit for code
+ const buffer = Buffer.allocUnsafe(1 + len);
+ buffer[0] = code;
+ buffer.writeInt32BE(len, 1);
+ buffer.write(string, 5, 'utf-8');
+ buffer[len] = 0; // null terminate cString
+ return buffer;
+};
+// preallocated Describe messages for the unnamed portal / unnamed statement
+const emptyDescribePortal = writer.addCString('P').flush(68 /* code.describe */);
+const emptyDescribeStatement = writer.addCString('S').flush(68 /* code.describe */);
+const describe = (msg) => {
+ return msg.name
+ ? cstringMessage(68 /* code.describe */, `${msg.type}${msg.name || ''}`)
+ : msg.type === 'P'
+ ? emptyDescribePortal
+ : emptyDescribeStatement;
+};
+const close = (msg) => {
+ const text = `${msg.type}${msg.name || ''}`;
+ return cstringMessage(67 /* code.close */, text);
+};
+const copyData = (chunk) => {
+ return writer.add(chunk).flush(100 /* code.copyFromChunk */);
+};
+const copyFail = (message) => {
+ return cstringMessage(102 /* code.copyFail */, message);
+};
+// 5-byte message with no payload: code byte + int32 length (always 4)
+const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]);
+// shared singletons for the fixed, payload-free messages
+const flushBuffer = codeOnlyBuffer(72 /* code.flush */);
+const syncBuffer = codeOnlyBuffer(83 /* code.sync */);
+const endBuffer = codeOnlyBuffer(88 /* code.end */);
+const copyDoneBuffer = codeOnlyBuffer(99 /* code.copyDone */);
+// Public serializer API: one function per frontend protocol message.
+// Payload-free messages (flush/sync/end/copyDone) return shared preallocated
+// buffers rather than building a new one per call.
+const serialize = {
+    startup,
+    password,
+    requestSsl,
+    sendSASLInitialResponseMessage,
+    sendSCRAMClientFinalMessage,
+    query,
+    parse,
+    bind,
+    execute,
+    describe,
+    close,
+    flush: () => flushBuffer,
+    sync: () => syncBuffer,
+    end: () => endBuffer,
+    copyData,
+    copyDone: () => copyDoneBuffer,
+    copyFail,
+    cancel,
+};
+exports.serialize = serialize;
+//# sourceMappingURL=serializer.js.map
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/dist/serializer.js.map b/api/node_modules/pg-protocol/dist/serializer.js.map
new file mode 100644
index 000000000..513c2fa5a
--- /dev/null
+++ b/api/node_modules/pg-protocol/dist/serializer.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../src/serializer.ts"],"names":[],"mappings":";;;AAAA,mDAAwC;AAkBxC,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,OAAO,GAAG,CAAC,IAA4B,EAAU,EAAE;IACvD,mBAAmB;IACnB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;KAC7C;IAED,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAEvD,MAAM,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAChD,sCAAsC;IAEtC,MAAM,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAEpC,OAAO,IAAI,sBAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,EAAE,CAAA;AAC9D,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,GAAW,EAAE;IAC9B,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;IACtC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAC3B,QAAQ,CAAC,YAAY,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAA;IAClC,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,QAAgB,EAAU,EAAE;IAC5C,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,KAAK,wBAAc,CAAA;AACxD,CAAC,CAAA;AAED,MAAM,8BAA8B,GAAG,UAAU,SAAiB,EAAE,eAAuB;IACzF,aAAa;IACb,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;IAEpG,OAAO,MAAM,CAAC,KAAK,wBAAc,CAAA;AACnC,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,UAAU,cAAsB;IAClE,OAAO,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,wBAAc,CAAA;AAC7D,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAU,EAAE;IACrC,OAAO,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,KAAK,qBAAY,CAAA;AAClD,CAAC,CAAA;AAQD,MAAM,UAAU,GAAU,EAAE,CAAA;AAE5B,MAAM,KAAK,GAAG,CAAC,KAAgB,EAAU,EAAE;IACzC,8BAA8B;IAC9B,uBAAuB;IACvB,gCAAgC;IAChC,8BAA8B;IAE9B,kDAAkD;IAClD,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,EAAE,CAAA;IAC7B,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;QACpB,OAAO,CAAC,KAAK,CAAC,gEAAgE,CAAC,CAAA;QAC/E,OAAO,CAAC,KAAK,CAAC,sBAAsB,EAAE,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACxD,OAAO,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAA;KAC9E;IAED,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,IAAI,UAAU,CAAA;IAEvC,M
AAM,GAAG,GAAG,KAAK,CAAC,MAAM,CAAA;IAExB,MAAM,MAAM,GAAG,MAAM;SAClB,UAAU,CAAC,IAAI,CAAC,CAAC,gBAAgB;SACjC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,oBAAoB;SAC3C,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEhB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;KAC1B;IAED,OAAO,MAAM,CAAC,KAAK,qBAAY,CAAA;AACjC,CAAC,CAAA;AAaD,MAAM,WAAW,GAAG,IAAI,sBAAM,EAAE,CAAA;AAQhC,MAAM,WAAW,GAAG,UAAU,MAAa,EAAE,WAAyB;IACpE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,MAAM,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;QACtC,MAAM,SAAS,GAAG,WAAW,CAAC,CAAC,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACrE,IAAI,SAAS,IAAI,IAAI,EAAE;YACrB,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,0BAAkB,CAAA;YACjC,gDAAgD;YAChD,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;SACzB;aAAM,IAAI,SAAS,YAAY,MAAM,EAAE;YACtC,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,0BAAkB,CAAA;YACjC,qCAAqC;YACrC,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;YACtC,WAAW,CAAC,GAAG,CAAC,SAAS,CAAC,CAAA;SAC3B;aAAM;YACL,4CAA4C;YAC5C,MAAM,CAAC,QAAQ,0BAAkB,CAAA;YACjC,WAAW,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAA;YAClD,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,CAAA;SACjC;KACF;AACH,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,CAAC,SAAmB,EAAE,EAAU,EAAE;IAC7C,mBAAmB;IACnB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,EAAE,CAAA;IACxC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAA;IACrC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,UAAU,CAAA;IAC1C,MAAM,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;IAEzB,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAA;IAC/C,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEpB,WAAW,CAAC,MAAM,EAAE,MAAM,CAAC,WAAW,CAAC,CAAA;IAEvC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IACpB,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC,CAAA;IAE/B,uCAAuC;IACvC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;IAClB,cAAc;IACd,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,0BAAkB,CAAC,yBAAiB,CAAC,CAAA;IAC7D,OAAO,MAAM,CAAC,KAAK,oBAAW,CAAA;AAChC,CAAC,CAAA;AAOD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,wBAAe,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI
,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAEtG,MAAM,OAAO,GAAG,CAAC,MAAiB,EAAU,EAAE;IAC5C,0CAA0C;IAC1C,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC/C,OAAO,YAAY,CAAA;KACpB;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,CAAC,CAAA;IAE7B,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,CAAC,GAAG,YAAY,GAAG,CAAC,GAAG,CAAC,CAAA;IACpC,yBAAyB;IACzB,MAAM,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IACxC,IAAI,CAAC,CAAC,CAAC,wBAAe,CAAA;IACtB,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IACzB,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9B,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA,CAAC,gCAAgC;IAC3D,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IACzC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,MAAM,GAAG,CAAC,SAAiB,EAAE,SAAiB,EAAU,EAAE;IAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,CAAA;IACrC,MAAM,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;IAC1B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,CAAA;IACjC,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,EAAE,CAAC,CAAA;IAClC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAOD,MAAM,cAAc,GAAG,CAAC,IAAU,EAAE,MAAc,EAAU,EAAE;IAC5D,MAAM,SAAS,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC3C,MAAM,GAAG,GAAG,CAAC,GAAG,SAAS,GAAG,CAAC,CAAA;IAC7B,yBAAyB;IACzB,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IAC1C,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAA;IAChB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IAC3B,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAChC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA,CAAC,yBAAyB;IACzC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,wBAAe,CAAA;AACvE,MAAM,sBAAsB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,wBAAe,CAAA;AAE1E,MAAM,QAAQ,GAAG,CAAC,GAAe,EAAU,EAAE;IAC3C,OAAO,GAAG,CAAC,IAAI;QACb,CAAC,CAAC,cAAc,yBAAgB,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IA
AI,EAAE,EAAE,CAAC;QAC/D,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,GAAG;YAClB,CAAC,CAAC,mBAAmB;YACrB,CAAC,CAAC,sBAAsB,CAAA;AAC5B,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,GAAe,EAAU,EAAE;IACxC,MAAM,IAAI,GAAG,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAA;IAC3C,OAAO,cAAc,sBAAa,IAAI,CAAC,CAAA;AACzC,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,KAAa,EAAU,EAAE;IACzC,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,8BAAoB,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,OAAe,EAAU,EAAE;IAC3C,OAAO,cAAc,0BAAgB,OAAO,CAAC,CAAA;AAC/C,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,CAAC,IAAU,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAE1F,MAAM,WAAW,GAAG,cAAc,qBAAY,CAAA;AAC9C,MAAM,UAAU,GAAG,cAAc,oBAAW,CAAA;AAC5C,MAAM,SAAS,GAAG,cAAc,mBAAU,CAAA;AAC1C,MAAM,cAAc,GAAG,cAAc,wBAAe,CAAA;AAEpD,MAAM,SAAS,GAAG;IAChB,OAAO;IACP,QAAQ;IACR,UAAU;IACV,8BAA8B;IAC9B,2BAA2B;IAC3B,KAAK;IACL,KAAK;IACL,IAAI;IACJ,OAAO;IACP,QAAQ;IACR,KAAK;IACL,KAAK,EAAE,GAAG,EAAE,CAAC,WAAW;IACxB,IAAI,EAAE,GAAG,EAAE,CAAC,UAAU;IACtB,GAAG,EAAE,GAAG,EAAE,CAAC,SAAS;IACpB,QAAQ;IACR,QAAQ,EAAE,GAAG,EAAE,CAAC,cAAc;IAC9B,QAAQ;IACR,MAAM;CACP,CAAA;AAEQ,8BAAS"}
\ No newline at end of file
diff --git a/api/node_modules/pg-protocol/esm/index.js b/api/node_modules/pg-protocol/esm/index.js
new file mode 100644
index 000000000..c52807d63
--- /dev/null
+++ b/api/node_modules/pg-protocol/esm/index.js
@@ -0,0 +1,11 @@
+// ESM wrapper for pg-protocol: re-exports the CommonJS build so named imports
+// work from native ES modules.
+import * as protocol from '../dist/index.js'
+
+// Re-export the named API surface
+export const DatabaseError = protocol.DatabaseError
+export const SASL = protocol.SASL
+export const serialize = protocol.serialize
+export const parse = protocol.parse
+
+// Re-export the default
+export default protocol
diff --git a/api/node_modules/pg-protocol/package.json b/api/node_modules/pg-protocol/package.json
new file mode 100644
index 000000000..42a565cd0
--- /dev/null
+++ b/api/node_modules/pg-protocol/package.json
@@ -0,0 +1,45 @@
+{
+ "name": "pg-protocol",
+ "version": "1.13.0",
+ "description": "The postgres client/server binary protocol, implemented in TypeScript",
+ "main": "dist/index.js",
+ "types": "dist/index.d.ts",
+ "exports": {
+ ".": {
+ "import": "./esm/index.js",
+ "require": "./dist/index.js",
+ "default": "./dist/index.js"
+ },
+ "./dist/*": "./dist/*.js",
+ "./dist/*.js": "./dist/*.js"
+ },
+ "license": "MIT",
+ "devDependencies": {
+ "@types/chai": "^4.2.7",
+ "@types/mocha": "^10.0.10",
+ "@types/node": "^12.12.21",
+ "chai": "^4.2.0",
+ "chunky": "^0.0.0",
+ "mocha": "^11.7.5",
+ "ts-node": "^8.5.4",
+ "typescript": "^4.0.3"
+ },
+ "scripts": {
+ "test": "mocha dist/**/*.test.js",
+ "build": "tsc",
+ "build:watch": "tsc --watch",
+ "prepublish": "yarn build",
+ "pretest": "yarn build"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianc/node-postgres.git",
+ "directory": "packages/pg-protocol"
+ },
+ "files": [
+ "/dist/*{js,ts,map}",
+ "/src",
+ "/esm"
+ ],
+ "gitHead": "c9070cc8d526fca65780cedc25c1966b57cf7532"
+}
diff --git a/api/node_modules/pg-protocol/src/b.ts b/api/node_modules/pg-protocol/src/b.ts
new file mode 100644
index 000000000..c8a24113d
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/b.ts
@@ -0,0 +1,25 @@
+// file for microbenchmarking
+
+import { BufferReader } from './buffer-reader'
+
+const LOOPS = 1000
+let count = 0
+const start = performance.now()
+
+const reader = new BufferReader()
+const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0])
+
+const run = () => {
+ if (count > LOOPS) {
+ console.log(performance.now() - start)
+ return
+ }
+ count++
+ for (let i = 0; i < LOOPS; i++) {
+ reader.setBuffer(0, buffer)
+ reader.cstring()
+ }
+ setImmediate(run)
+}
+
+run()
diff --git a/api/node_modules/pg-protocol/src/buffer-reader.ts b/api/node_modules/pg-protocol/src/buffer-reader.ts
new file mode 100644
index 000000000..b89aceb89
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/buffer-reader.ts
@@ -0,0 +1,58 @@
+export class BufferReader {
+ private buffer: Buffer = Buffer.allocUnsafe(0)
+
+ // TODO(bmc): support non-utf8 encoding?
+ private encoding: string = 'utf-8'
+
+ constructor(private offset: number = 0) {}
+
+ public setBuffer(offset: number, buffer: Buffer): void {
+ this.offset = offset
+ this.buffer = buffer
+ }
+
+ public int16(): number {
+ const result = this.buffer.readInt16BE(this.offset)
+ this.offset += 2
+ return result
+ }
+
+ public byte(): number {
+ const result = this.buffer[this.offset]
+ this.offset++
+ return result
+ }
+
+ public int32(): number {
+ const result = this.buffer.readInt32BE(this.offset)
+ this.offset += 4
+ return result
+ }
+
+ public uint32(): number {
+ const result = this.buffer.readUInt32BE(this.offset)
+ this.offset += 4
+ return result
+ }
+
+ public string(length: number): string {
+ const result = this.buffer.toString(this.encoding, this.offset, this.offset + length)
+ this.offset += length
+ return result
+ }
+
+ public cstring(): string {
+ const start = this.offset
+ let end = start
+ // eslint-disable-next-line no-empty
+ while (this.buffer[end++] !== 0) {}
+ this.offset = end
+ return this.buffer.toString(this.encoding, start, end - 1)
+ }
+
+ public bytes(length: number): Buffer {
+ const result = this.buffer.slice(this.offset, this.offset + length)
+ this.offset += length
+ return result
+ }
+}
diff --git a/api/node_modules/pg-protocol/src/buffer-writer.ts b/api/node_modules/pg-protocol/src/buffer-writer.ts
new file mode 100644
index 000000000..cebb0d9ed
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/buffer-writer.ts
@@ -0,0 +1,85 @@
+//binary data writer tuned for encoding binary specific to the postgres binary protocol
+
+export class Writer {
+ private buffer: Buffer
+ private offset: number = 5
+ private headerPosition: number = 0
+ constructor(private size = 256) {
+ this.buffer = Buffer.allocUnsafe(size)
+ }
+
+ private ensure(size: number): void {
+ const remaining = this.buffer.length - this.offset
+ if (remaining < size) {
+ const oldBuffer = this.buffer
+ // exponential growth factor of around ~ 1.5
+ // https://stackoverflow.com/questions/2269063/buffer-growth-strategy
+ const newSize = oldBuffer.length + (oldBuffer.length >> 1) + size
+ this.buffer = Buffer.allocUnsafe(newSize)
+ oldBuffer.copy(this.buffer)
+ }
+ }
+
+ public addInt32(num: number): Writer {
+ this.ensure(4)
+ this.buffer[this.offset++] = (num >>> 24) & 0xff
+ this.buffer[this.offset++] = (num >>> 16) & 0xff
+ this.buffer[this.offset++] = (num >>> 8) & 0xff
+ this.buffer[this.offset++] = (num >>> 0) & 0xff
+ return this
+ }
+
+ public addInt16(num: number): Writer {
+ this.ensure(2)
+ this.buffer[this.offset++] = (num >>> 8) & 0xff
+ this.buffer[this.offset++] = (num >>> 0) & 0xff
+ return this
+ }
+
+ public addCString(string: string): Writer {
+ if (!string) {
+ this.ensure(1)
+ } else {
+ const len = Buffer.byteLength(string)
+ this.ensure(len + 1) // +1 for null terminator
+ this.buffer.write(string, this.offset, 'utf-8')
+ this.offset += len
+ }
+
+ this.buffer[this.offset++] = 0 // null terminator
+ return this
+ }
+
+ public addString(string: string = ''): Writer {
+ const len = Buffer.byteLength(string)
+ this.ensure(len)
+ this.buffer.write(string, this.offset)
+ this.offset += len
+ return this
+ }
+
+ public add(otherBuffer: Buffer): Writer {
+ this.ensure(otherBuffer.length)
+ otherBuffer.copy(this.buffer, this.offset)
+ this.offset += otherBuffer.length
+ return this
+ }
+
+ private join(code?: number): Buffer {
+ if (code) {
+ this.buffer[this.headerPosition] = code
+ //length is everything in this packet minus the code
+ const length = this.offset - (this.headerPosition + 1)
+ this.buffer.writeInt32BE(length, this.headerPosition + 1)
+ }
+ return this.buffer.slice(code ? 0 : 5, this.offset)
+ }
+
+ public flush(code?: number): Buffer {
+ const result = this.join(code)
+ this.offset = 5
+ this.headerPosition = 0
+ this.buffer = Buffer.allocUnsafe(this.size)
+ return result
+ }
+}
diff --git a/api/node_modules/pg-protocol/src/inbound-parser.test.ts b/api/node_modules/pg-protocol/src/inbound-parser.test.ts
new file mode 100644
index 000000000..285f4bf2b
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/inbound-parser.test.ts
@@ -0,0 +1,575 @@
+import buffers from './testing/test-buffers'
+import BufferList from './testing/buffer-list'
+import { parse } from '.'
+import assert from 'assert'
+import { PassThrough } from 'stream'
+import { BackendMessage } from './messages'
+import { Parser } from './parser'
+
+const authOkBuffer = buffers.authenticationOk()
+const paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8')
+const readyForQueryBuffer = buffers.readyForQuery()
+const backendKeyDataBuffer = buffers.backendKeyData(1, 2)
+const commandCompleteBuffer = buffers.commandComplete('SELECT 3')
+const parseCompleteBuffer = buffers.parseComplete()
+const bindCompleteBuffer = buffers.bindComplete()
+const portalSuspendedBuffer = buffers.portalSuspended()
+
+const row1 = {
+ name: 'id',
+ tableID: 1,
+ attributeNumber: 2,
+ dataTypeID: 3,
+ dataTypeSize: 4,
+ typeModifier: 5,
+ formatCode: 0,
+}
+const oneRowDescBuff = buffers.rowDescription([row1])
+row1.name = 'bang'
+
+const twoRowBuf = buffers.rowDescription([
+ row1,
+ {
+ name: 'whoah',
+ tableID: 10,
+ attributeNumber: 11,
+ dataTypeID: 12,
+ dataTypeSize: 13,
+ typeModifier: 14,
+ formatCode: 0,
+ },
+])
+
+const rowWithBigOids = {
+ name: 'bigoid',
+ tableID: 3000000001,
+ attributeNumber: 2,
+ dataTypeID: 3000000003,
+ dataTypeSize: 4,
+ typeModifier: 5,
+ formatCode: 0,
+}
+const bigOidDescBuff = buffers.rowDescription([rowWithBigOids])
+
+const emptyRowFieldBuf = buffers.dataRow([])
+
+const oneFieldBuf = buffers.dataRow(['test'])
+
+const expectedAuthenticationOkayMessage = {
+ name: 'authenticationOk',
+ length: 8,
+}
+
+const expectedParameterStatusMessage = {
+ name: 'parameterStatus',
+ parameterName: 'client_encoding',
+ parameterValue: 'UTF8',
+ length: 25,
+}
+
+const expectedBackendKeyDataMessage = {
+ name: 'backendKeyData',
+ processID: 1,
+ secretKey: 2,
+}
+
+const expectedReadyForQueryMessage = {
+ name: 'readyForQuery',
+ length: 5,
+ status: 'I',
+}
+
+const expectedCommandCompleteMessage = {
+ name: 'commandComplete',
+ length: 13,
+ text: 'SELECT 3',
+}
+const emptyRowDescriptionBuffer = new BufferList()
+ .addInt16(0) // number of fields
+ .join(true, 'T')
+
+const expectedEmptyRowDescriptionMessage = {
+ name: 'rowDescription',
+ length: 6,
+ fieldCount: 0,
+ fields: [],
+}
+const expectedOneRowMessage = {
+ name: 'rowDescription',
+ length: 27,
+ fieldCount: 1,
+ fields: [
+ {
+ name: 'id',
+ tableID: 1,
+ columnID: 2,
+ dataTypeID: 3,
+ dataTypeSize: 4,
+ dataTypeModifier: 5,
+ format: 'text',
+ },
+ ],
+}
+
+const expectedTwoRowMessage = {
+ name: 'rowDescription',
+ length: 53,
+ fieldCount: 2,
+ fields: [
+ {
+ name: 'bang',
+ tableID: 1,
+ columnID: 2,
+ dataTypeID: 3,
+ dataTypeSize: 4,
+ dataTypeModifier: 5,
+ format: 'text',
+ },
+ {
+ name: 'whoah',
+ tableID: 10,
+ columnID: 11,
+ dataTypeID: 12,
+ dataTypeSize: 13,
+ dataTypeModifier: 14,
+ format: 'text',
+ },
+ ],
+}
+const expectedBigOidMessage = {
+ name: 'rowDescription',
+ length: 31,
+ fieldCount: 1,
+ fields: [
+ {
+ name: 'bigoid',
+ tableID: 3000000001,
+ columnID: 2,
+ dataTypeID: 3000000003,
+ dataTypeSize: 4,
+ dataTypeModifier: 5,
+ format: 'text',
+ },
+ ],
+}
+
+const emptyParameterDescriptionBuffer = new BufferList()
+ .addInt16(0) // number of parameters
+ .join(true, 't')
+
+const oneParameterDescBuf = buffers.parameterDescription([1111])
+
+const twoParameterDescBuf = buffers.parameterDescription([2222, 3333])
+
+const expectedEmptyParameterDescriptionMessage = {
+ name: 'parameterDescription',
+ length: 6,
+ parameterCount: 0,
+ dataTypeIDs: [],
+}
+
+const expectedOneParameterMessage = {
+ name: 'parameterDescription',
+ length: 10,
+ parameterCount: 1,
+ dataTypeIDs: [1111],
+}
+
+const expectedTwoParameterMessage = {
+ name: 'parameterDescription',
+ length: 14,
+ parameterCount: 2,
+ dataTypeIDs: [2222, 3333],
+}
+
+const testForMessage = function (buffer: Buffer, expectedMessage: any) {
+ it('receives and parses ' + expectedMessage.name, async () => {
+ const messages = await parseBuffers([buffer])
+ const [lastMessage] = messages
+
+ for (const key in expectedMessage) {
+ assert.deepEqual((lastMessage as any)[key], expectedMessage[key])
+ }
+ })
+}
+
+const plainPasswordBuffer = buffers.authenticationCleartextPassword()
+const md5PasswordBuffer = buffers.authenticationMD5Password()
+const SASLBuffer = buffers.authenticationSASL()
+const SASLContinueBuffer = buffers.authenticationSASLContinue()
+const SASLFinalBuffer = buffers.authenticationSASLFinal()
+
+const expectedPlainPasswordMessage = {
+ name: 'authenticationCleartextPassword',
+}
+
+const expectedMD5PasswordMessage = {
+ name: 'authenticationMD5Password',
+ salt: Buffer.from([1, 2, 3, 4]),
+}
+
+const expectedSASLMessage = {
+ name: 'authenticationSASL',
+ mechanisms: ['SCRAM-SHA-256'],
+}
+
+const expectedSASLContinueMessage = {
+ name: 'authenticationSASLContinue',
+ data: 'data',
+}
+
+const expectedSASLFinalMessage = {
+ name: 'authenticationSASLFinal',
+ data: 'data',
+}
+
+const notificationResponseBuffer = buffers.notification(4, 'hi', 'boom')
+const expectedNotificationResponseMessage = {
+ name: 'notification',
+ processId: 4,
+ channel: 'hi',
+ payload: 'boom',
+}
+
+const parseBuffers = async (buffers: Buffer[]): Promise => {
+ const stream = new PassThrough()
+ for (const buffer of buffers) {
+ stream.write(buffer)
+ }
+ stream.end()
+ const msgs: BackendMessage[] = []
+ await parse(stream, (msg) => msgs.push(msg))
+ return msgs
+}
+
+describe('PgPacketStream', function () {
+ testForMessage(authOkBuffer, expectedAuthenticationOkayMessage)
+ testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage)
+ testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage)
+ testForMessage(SASLBuffer, expectedSASLMessage)
+ testForMessage(SASLContinueBuffer, expectedSASLContinueMessage)
+
+ // this exercises a found bug in the parser:
+ // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
+ // and adds a test which is deterministic, rather than relying on network packet chunking
+ const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])])
+ testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage)
+
+ testForMessage(SASLFinalBuffer, expectedSASLFinalMessage)
+
+ // this exercises a found bug in the parser:
+ // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084
+ // and adds a test which is deterministic, rather than relying on network packet chunking
+ const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])])
+ testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage)
+
+ testForMessage(paramStatusBuffer, expectedParameterStatusMessage)
+ testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage)
+ testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage)
+ testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage)
+ testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage)
+ testForMessage(buffers.emptyQuery(), {
+ name: 'emptyQuery',
+ length: 4,
+ })
+
+ testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), {
+ name: 'noData',
+ })
+
+ describe('rowDescription messages', function () {
+ testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage)
+ testForMessage(oneRowDescBuff, expectedOneRowMessage)
+ testForMessage(twoRowBuf, expectedTwoRowMessage)
+ testForMessage(bigOidDescBuff, expectedBigOidMessage)
+ })
+
+ describe('parameterDescription messages', function () {
+ testForMessage(emptyParameterDescriptionBuffer, expectedEmptyParameterDescriptionMessage)
+ testForMessage(oneParameterDescBuf, expectedOneParameterMessage)
+ testForMessage(twoParameterDescBuf, expectedTwoParameterMessage)
+ })
+
+ describe('parsing rows', function () {
+ describe('parsing empty row', function () {
+ testForMessage(emptyRowFieldBuf, {
+ name: 'dataRow',
+ fieldCount: 0,
+ })
+ })
+
+ describe('parsing data row with fields', function () {
+ testForMessage(oneFieldBuf, {
+ name: 'dataRow',
+ fieldCount: 1,
+ fields: ['test'],
+ })
+ })
+ })
+
+ describe('notice message', function () {
+ // this uses the same logic as error message
+ const buff = buffers.notice([{ type: 'C', value: 'code' }])
+ testForMessage(buff, {
+ name: 'notice',
+ code: 'code',
+ })
+ })
+
+ testForMessage(buffers.error([]), {
+ name: 'error',
+ })
+
+ describe('with all the fields', function () {
+ const buffer = buffers.error([
+ {
+ type: 'S',
+ value: 'ERROR',
+ },
+ {
+ type: 'C',
+ value: 'code',
+ },
+ {
+ type: 'M',
+ value: 'message',
+ },
+ {
+ type: 'D',
+ value: 'details',
+ },
+ {
+ type: 'H',
+ value: 'hint',
+ },
+ {
+ type: 'P',
+ value: '100',
+ },
+ {
+ type: 'p',
+ value: '101',
+ },
+ {
+ type: 'q',
+ value: 'query',
+ },
+ {
+ type: 'W',
+ value: 'where',
+ },
+ {
+ type: 'F',
+ value: 'file',
+ },
+ {
+ type: 'L',
+ value: 'line',
+ },
+ {
+ type: 'R',
+ value: 'routine',
+ },
+ {
+ type: 'Z', // ignored
+ value: 'alsdkf',
+ },
+ ])
+
+ testForMessage(buffer, {
+ name: 'error',
+ severity: 'ERROR',
+ code: 'code',
+ message: 'message',
+ detail: 'details',
+ hint: 'hint',
+ position: '100',
+ internalPosition: '101',
+ internalQuery: 'query',
+ where: 'where',
+ file: 'file',
+ line: 'line',
+ routine: 'routine',
+ })
+ })
+
+ testForMessage(parseCompleteBuffer, {
+ name: 'parseComplete',
+ })
+
+ testForMessage(bindCompleteBuffer, {
+ name: 'bindComplete',
+ })
+
+ testForMessage(bindCompleteBuffer, {
+ name: 'bindComplete',
+ })
+
+ testForMessage(buffers.closeComplete(), {
+ name: 'closeComplete',
+ })
+
+ describe('parses portal suspended message', function () {
+ testForMessage(portalSuspendedBuffer, {
+ name: 'portalSuspended',
+ })
+ })
+
+ describe('parses replication start message', function () {
+ testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), {
+ name: 'replicationStart',
+ length: 4,
+ })
+ })
+
+ describe('copy', () => {
+ testForMessage(buffers.copyIn(0), {
+ name: 'copyInResponse',
+ length: 7,
+ binary: false,
+ columnTypes: [],
+ })
+
+ testForMessage(buffers.copyIn(2), {
+ name: 'copyInResponse',
+ length: 11,
+ binary: false,
+ columnTypes: [0, 1],
+ })
+
+ testForMessage(buffers.copyOut(0), {
+ name: 'copyOutResponse',
+ length: 7,
+ binary: false,
+ columnTypes: [],
+ })
+
+ testForMessage(buffers.copyOut(3), {
+ name: 'copyOutResponse',
+ length: 13,
+ binary: false,
+ columnTypes: [0, 1, 2],
+ })
+
+ testForMessage(buffers.copyDone(), {
+ name: 'copyDone',
+ length: 4,
+ })
+
+ testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), {
+ name: 'copyData',
+ length: 7,
+ chunk: Buffer.from([5, 6, 7]),
+ })
+ })
+
+ // since the data message on a stream can randomly divide the incomming
+ // tcp packets anywhere, we need to make sure we can parse every single
+ // split on a tcp message
+ describe('split buffer, single message parsing', function () {
+ const fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!'])
+
+ it('parses when full buffer comes in', async function () {
+ const messages = await parseBuffers([fullBuffer])
+ const message = messages[0] as any
+ assert.equal(message.fields.length, 5)
+ assert.equal(message.fields[0], null)
+ assert.equal(message.fields[1], 'bang')
+ assert.equal(message.fields[2], 'zug zug')
+ assert.equal(message.fields[3], null)
+ assert.equal(message.fields[4], '!')
+ })
+
+ const testMessageReceivedAfterSplitAt = async function (split: number) {
+ const firstBuffer = Buffer.alloc(fullBuffer.length - split)
+ const secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length)
+ fullBuffer.copy(firstBuffer, 0, 0)
+ fullBuffer.copy(secondBuffer, 0, firstBuffer.length)
+ const messages = await parseBuffers([firstBuffer, secondBuffer])
+ const message = messages[0] as any
+ assert.equal(message.fields.length, 5)
+ assert.equal(message.fields[0], null)
+ assert.equal(message.fields[1], 'bang')
+ assert.equal(message.fields[2], 'zug zug')
+ assert.equal(message.fields[3], null)
+ assert.equal(message.fields[4], '!')
+ }
+
+ it('parses when split in the middle', function () {
+ return testMessageReceivedAfterSplitAt(6)
+ })
+
+ it('parses when split at end', function () {
+ return testMessageReceivedAfterSplitAt(2)
+ })
+
+ it('parses when split at beginning', function () {
+ return Promise.all([
+ testMessageReceivedAfterSplitAt(fullBuffer.length - 2),
+ testMessageReceivedAfterSplitAt(fullBuffer.length - 1),
+ testMessageReceivedAfterSplitAt(fullBuffer.length - 5),
+ ])
+ })
+ })
+
+ describe('split buffer, multiple message parsing', function () {
+ const dataRowBuffer = buffers.dataRow(['!'])
+ const readyForQueryBuffer = buffers.readyForQuery()
+ const fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length)
+ dataRowBuffer.copy(fullBuffer, 0, 0)
+ readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0)
+
+ const verifyMessages = function (messages: any[]) {
+ assert.strictEqual(messages.length, 2)
+ assert.deepEqual(messages[0], {
+ name: 'dataRow',
+ fieldCount: 1,
+ length: 11,
+ fields: ['!'],
+ })
+ assert.equal(messages[0].fields[0], '!')
+ assert.deepEqual(messages[1], {
+ name: 'readyForQuery',
+ length: 5,
+ status: 'I',
+ })
+ }
+ // sanity check
+ it('receives both messages when packet is not split', async function () {
+ const messages = await parseBuffers([fullBuffer])
+ verifyMessages(messages)
+ })
+
+ const splitAndVerifyTwoMessages = async function (split: number) {
+ const firstBuffer = Buffer.alloc(fullBuffer.length - split)
+ const secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length)
+ fullBuffer.copy(firstBuffer, 0, 0)
+ fullBuffer.copy(secondBuffer, 0, firstBuffer.length)
+ const messages = await parseBuffers([firstBuffer, secondBuffer])
+ verifyMessages(messages)
+ }
+
+ describe('receives both messages when packet is split', function () {
+ it('in the middle', function () {
+ return splitAndVerifyTwoMessages(11)
+ })
+ it('at the front', function () {
+ return Promise.all([
+ splitAndVerifyTwoMessages(fullBuffer.length - 1),
+ splitAndVerifyTwoMessages(fullBuffer.length - 4),
+ splitAndVerifyTwoMessages(fullBuffer.length - 6),
+ ])
+ })
+
+ it('at the end', function () {
+ return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)])
+ })
+ })
+ })
+
+ it('cleans up the reader after handling a packet', function () {
+ const parser = new Parser()
+ parser.parse(oneFieldBuf, () => {})
+ assert.strictEqual((parser as any).reader.buffer.byteLength, 0)
+ })
+})
diff --git a/api/node_modules/pg-protocol/src/index.ts b/api/node_modules/pg-protocol/src/index.ts
new file mode 100644
index 000000000..703ff2e49
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/index.ts
@@ -0,0 +1,11 @@
+import { DatabaseError } from './messages'
+import { serialize } from './serializer'
+import { Parser, MessageCallback } from './parser'
+
+export function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise {
+ const parser = new Parser()
+ stream.on('data', (buffer: Buffer) => parser.parse(buffer, callback))
+ return new Promise((resolve) => stream.on('end', () => resolve()))
+}
+
+export { serialize, DatabaseError }
diff --git a/api/node_modules/pg-protocol/src/messages.ts b/api/node_modules/pg-protocol/src/messages.ts
new file mode 100644
index 000000000..c3fbbdd9b
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/messages.ts
@@ -0,0 +1,262 @@
+export type Mode = 'text' | 'binary'
+
+export type MessageName =
+ | 'parseComplete'
+ | 'bindComplete'
+ | 'closeComplete'
+ | 'noData'
+ | 'portalSuspended'
+ | 'replicationStart'
+ | 'emptyQuery'
+ | 'copyDone'
+ | 'copyData'
+ | 'rowDescription'
+ | 'parameterDescription'
+ | 'parameterStatus'
+ | 'backendKeyData'
+ | 'notification'
+ | 'readyForQuery'
+ | 'commandComplete'
+ | 'dataRow'
+ | 'copyInResponse'
+ | 'copyOutResponse'
+ | 'authenticationOk'
+ | 'authenticationMD5Password'
+ | 'authenticationCleartextPassword'
+ | 'authenticationSASL'
+ | 'authenticationSASLContinue'
+ | 'authenticationSASLFinal'
+ | 'error'
+ | 'notice'
+
+export interface BackendMessage {
+ name: MessageName
+ length: number
+}
+
+export const parseComplete: BackendMessage = {
+ name: 'parseComplete',
+ length: 5,
+}
+
+export const bindComplete: BackendMessage = {
+ name: 'bindComplete',
+ length: 5,
+}
+
+export const closeComplete: BackendMessage = {
+ name: 'closeComplete',
+ length: 5,
+}
+
+export const noData: BackendMessage = {
+ name: 'noData',
+ length: 5,
+}
+
+export const portalSuspended: BackendMessage = {
+ name: 'portalSuspended',
+ length: 5,
+}
+
+export const replicationStart: BackendMessage = {
+ name: 'replicationStart',
+ length: 4,
+}
+
+export const emptyQuery: BackendMessage = {
+ name: 'emptyQuery',
+ length: 4,
+}
+
+export const copyDone: BackendMessage = {
+ name: 'copyDone',
+ length: 4,
+}
+
+interface NoticeOrError {
+ message: string | undefined
+ severity: string | undefined
+ code: string | undefined
+ detail: string | undefined
+ hint: string | undefined
+ position: string | undefined
+ internalPosition: string | undefined
+ internalQuery: string | undefined
+ where: string | undefined
+ schema: string | undefined
+ table: string | undefined
+ column: string | undefined
+ dataType: string | undefined
+ constraint: string | undefined
+ file: string | undefined
+ line: string | undefined
+ routine: string | undefined
+}
+
+export class DatabaseError extends Error implements NoticeOrError {
+ public severity: string | undefined
+ public code: string | undefined
+ public detail: string | undefined
+ public hint: string | undefined
+ public position: string | undefined
+ public internalPosition: string | undefined
+ public internalQuery: string | undefined
+ public where: string | undefined
+ public schema: string | undefined
+ public table: string | undefined
+ public column: string | undefined
+ public dataType: string | undefined
+ public constraint: string | undefined
+ public file: string | undefined
+ public line: string | undefined
+ public routine: string | undefined
+ constructor(
+ message: string,
+ public readonly length: number,
+ public readonly name: MessageName
+ ) {
+ super(message)
+ }
+}
+
+export class CopyDataMessage {
+ public readonly name = 'copyData'
+ constructor(
+ public readonly length: number,
+ public readonly chunk: Buffer
+ ) {}
+}
+
+export class CopyResponse {
+ public readonly columnTypes: number[]
+ constructor(
+ public readonly length: number,
+ public readonly name: MessageName,
+ public readonly binary: boolean,
+ columnCount: number
+ ) {
+ this.columnTypes = new Array(columnCount)
+ }
+}
+
+export class Field {
+ constructor(
+ public readonly name: string,
+ public readonly tableID: number,
+ public readonly columnID: number,
+ public readonly dataTypeID: number,
+ public readonly dataTypeSize: number,
+ public readonly dataTypeModifier: number,
+ public readonly format: Mode
+ ) {}
+}
+
+export class RowDescriptionMessage {
+ public readonly name: MessageName = 'rowDescription'
+ public readonly fields: Field[]
+ constructor(
+ public readonly length: number,
+ public readonly fieldCount: number
+ ) {
+ this.fields = new Array(this.fieldCount)
+ }
+}
+
+export class ParameterDescriptionMessage {
+ public readonly name: MessageName = 'parameterDescription'
+ public readonly dataTypeIDs: number[]
+ constructor(
+ public readonly length: number,
+ public readonly parameterCount: number
+ ) {
+ this.dataTypeIDs = new Array(this.parameterCount)
+ }
+}
+
+export class ParameterStatusMessage {
+ public readonly name: MessageName = 'parameterStatus'
+ constructor(
+ public readonly length: number,
+ public readonly parameterName: string,
+ public readonly parameterValue: string
+ ) {}
+}
+
+export class AuthenticationMD5Password implements BackendMessage {
+ public readonly name: MessageName = 'authenticationMD5Password'
+ constructor(
+ public readonly length: number,
+ public readonly salt: Buffer
+ ) {}
+}
+
+export class BackendKeyDataMessage {
+ public readonly name: MessageName = 'backendKeyData'
+ constructor(
+ public readonly length: number,
+ public readonly processID: number,
+ public readonly secretKey: number
+ ) {}
+}
+
+export class NotificationResponseMessage {
+ public readonly name: MessageName = 'notification'
+ constructor(
+ public readonly length: number,
+ public readonly processId: number,
+ public readonly channel: string,
+ public readonly payload: string
+ ) {}
+}
+
+export class ReadyForQueryMessage {
+ public readonly name: MessageName = 'readyForQuery'
+ constructor(
+ public readonly length: number,
+ public readonly status: string
+ ) {}
+}
+
+export class CommandCompleteMessage {
+ public readonly name: MessageName = 'commandComplete'
+ constructor(
+ public readonly length: number,
+ public readonly text: string
+ ) {}
+}
+
+export class DataRowMessage {
+ public readonly fieldCount: number
+ public readonly name: MessageName = 'dataRow'
+ constructor(
+ public length: number,
+ public fields: any[]
+ ) {
+ this.fieldCount = fields.length
+ }
+}
+
+export class NoticeMessage implements BackendMessage, NoticeOrError {
+ constructor(
+ public readonly length: number,
+ public readonly message: string | undefined
+ ) {}
+ public readonly name = 'notice'
+ public severity: string | undefined
+ public code: string | undefined
+ public detail: string | undefined
+ public hint: string | undefined
+ public position: string | undefined
+ public internalPosition: string | undefined
+ public internalQuery: string | undefined
+ public where: string | undefined
+ public schema: string | undefined
+ public table: string | undefined
+ public column: string | undefined
+ public dataType: string | undefined
+ public constraint: string | undefined
+ public file: string | undefined
+ public line: string | undefined
+ public routine: string | undefined
+}
diff --git a/api/node_modules/pg-protocol/src/outbound-serializer.test.ts b/api/node_modules/pg-protocol/src/outbound-serializer.test.ts
new file mode 100644
index 000000000..0d3e387e4
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/outbound-serializer.test.ts
@@ -0,0 +1,276 @@
+import assert from 'assert'
+import { serialize } from './serializer'
+import BufferList from './testing/buffer-list'
+
+describe('serializer', () => {
+ it('builds startup message', function () {
+ const actual = serialize.startup({
+ user: 'brian',
+ database: 'bang',
+ })
+ assert.deepEqual(
+ actual,
+ new BufferList()
+ .addInt16(3)
+ .addInt16(0)
+ .addCString('user')
+ .addCString('brian')
+ .addCString('database')
+ .addCString('bang')
+ .addCString('client_encoding')
+ .addCString('UTF8')
+ .addCString('')
+ .join(true)
+ )
+ })
+
+ it('builds password message', function () {
+ const actual = serialize.password('!')
+ assert.deepEqual(actual, new BufferList().addCString('!').join(true, 'p'))
+ })
+
+ it('builds request ssl message', function () {
+ const actual = serialize.requestSsl()
+ const expected = new BufferList().addInt32(80877103).join(true)
+ assert.deepEqual(actual, expected)
+ })
+
+ it('builds SASLInitialResponseMessage message', function () {
+ const actual = serialize.sendSASLInitialResponseMessage('mech', 'data')
+ assert.deepEqual(actual, new BufferList().addCString('mech').addInt32(4).addString('data').join(true, 'p'))
+ })
+
+ it('builds SCRAMClientFinalMessage message', function () {
+ const actual = serialize.sendSCRAMClientFinalMessage('data')
+ assert.deepEqual(actual, new BufferList().addString('data').join(true, 'p'))
+ })
+
+ it('builds query message', function () {
+ const txt = 'select * from boom'
+ const actual = serialize.query(txt)
+ assert.deepEqual(actual, new BufferList().addCString(txt).join(true, 'Q'))
+ })
+
+ describe('parse message', () => {
+ it('builds parse message', function () {
+ const actual = serialize.parse({ text: '!' })
+ const expected = new BufferList().addCString('').addCString('!').addInt16(0).join(true, 'P')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('builds parse message with named query', function () {
+ const actual = serialize.parse({
+ name: 'boom',
+ text: 'select * from boom',
+ types: [],
+ })
+ const expected = new BufferList().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('with multiple parameters', function () {
+ const actual = serialize.parse({
+ name: 'force',
+ text: 'select * from bang where name = $1',
+ types: [1, 2, 3, 4],
+ })
+ const expected = new BufferList()
+ .addCString('force')
+ .addCString('select * from bang where name = $1')
+ .addInt16(4)
+ .addInt32(1)
+ .addInt32(2)
+ .addInt32(3)
+ .addInt32(4)
+ .join(true, 'P')
+ assert.deepEqual(actual, expected)
+ })
+ })
+
+ describe('bind messages', function () {
+ it('with no values', function () {
+ const actual = serialize.bind()
+
+ const expectedBuffer = new BufferList()
+ .addCString('')
+ .addCString('')
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B')
+ assert.deepEqual(actual, expectedBuffer)
+ })
+
+ it('with named statement, portal, and values', function () {
+ const actual = serialize.bind({
+ portal: 'bang',
+ statement: 'woo',
+ values: ['1', 'hi', null, 'zing'],
+ })
+ const expectedBuffer = new BufferList()
+ .addCString('bang') // portal name
+ .addCString('woo') // statement name
+ .addInt16(4)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(4)
+ .addInt32(1)
+ .add(Buffer.from('1'))
+ .addInt32(2)
+ .add(Buffer.from('hi'))
+ .addInt32(-1)
+ .addInt32(4)
+ .add(Buffer.from('zing'))
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B')
+ assert.deepEqual(actual, expectedBuffer)
+ })
+ })
+
+ it('with custom valueMapper', function () {
+ const actual = serialize.bind({
+ portal: 'bang',
+ statement: 'woo',
+ values: ['1', 'hi', null, 'zing'],
+ valueMapper: () => null,
+ })
+ const expectedBuffer = new BufferList()
+ .addCString('bang') // portal name
+ .addCString('woo') // statement name
+ .addInt16(4)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(0)
+ .addInt16(4)
+ .addInt32(-1)
+ .addInt32(-1)
+ .addInt32(-1)
+ .addInt32(-1)
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B')
+ assert.deepEqual(actual, expectedBuffer)
+ })
+
+ it('with named statement, portal, and buffer value', function () {
+ const actual = serialize.bind({
+ portal: 'bang',
+ statement: 'woo',
+ values: ['1', 'hi', null, Buffer.from('zing', 'utf8')],
+ })
+ const expectedBuffer = new BufferList()
+ .addCString('bang') // portal name
+ .addCString('woo') // statement name
+ .addInt16(4) // value count
+ .addInt16(0) // string
+ .addInt16(0) // string
+ .addInt16(0) // string
+ .addInt16(1) // binary
+ .addInt16(4)
+ .addInt32(1)
+ .add(Buffer.from('1'))
+ .addInt32(2)
+ .add(Buffer.from('hi'))
+ .addInt32(-1)
+ .addInt32(4)
+ .add(Buffer.from('zing', 'utf-8'))
+ .addInt16(1)
+ .addInt16(0)
+ .join(true, 'B')
+ assert.deepEqual(actual, expectedBuffer)
+ })
+
+ describe('builds execute message', function () {
+ it('for unamed portal with no row limit', function () {
+ const actual = serialize.execute()
+ const expectedBuffer = new BufferList().addCString('').addInt32(0).join(true, 'E')
+ assert.deepEqual(actual, expectedBuffer)
+ })
+
+ it('for named portal with row limit', function () {
+ const actual = serialize.execute({
+ portal: 'my favorite portal',
+ rows: 100,
+ })
+ const expectedBuffer = new BufferList().addCString('my favorite portal').addInt32(100).join(true, 'E')
+ assert.deepEqual(actual, expectedBuffer)
+ })
+ })
+
+ it('builds flush command', function () {
+ const actual = serialize.flush()
+ const expected = new BufferList().join(true, 'H')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('builds sync command', function () {
+ const actual = serialize.sync()
+ const expected = new BufferList().join(true, 'S')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('builds end command', function () {
+ const actual = serialize.end()
+ const expected = Buffer.from([0x58, 0, 0, 0, 4])
+ assert.deepEqual(actual, expected)
+ })
+
+ describe('builds describe command', function () {
+ it('describe statement', function () {
+ const actual = serialize.describe({ type: 'S', name: 'bang' })
+ const expected = new BufferList().addChar('S').addCString('bang').join(true, 'D')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('describe unnamed portal', function () {
+ const actual = serialize.describe({ type: 'P' })
+ const expected = new BufferList().addChar('P').addCString('').join(true, 'D')
+ assert.deepEqual(actual, expected)
+ })
+ })
+
+ describe('builds close command', function () {
+ it('describe statement', function () {
+ const actual = serialize.close({ type: 'S', name: 'bang' })
+ const expected = new BufferList().addChar('S').addCString('bang').join(true, 'C')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('describe unnamed portal', function () {
+ const actual = serialize.close({ type: 'P' })
+ const expected = new BufferList().addChar('P').addCString('').join(true, 'C')
+ assert.deepEqual(actual, expected)
+ })
+ })
+
+ describe('copy messages', function () {
+ it('builds copyFromChunk', () => {
+ const actual = serialize.copyData(Buffer.from([1, 2, 3]))
+ const expected = new BufferList().add(Buffer.from([1, 2, 3])).join(true, 'd')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('builds copy fail', () => {
+ const actual = serialize.copyFail('err!')
+ const expected = new BufferList().addCString('err!').join(true, 'f')
+ assert.deepEqual(actual, expected)
+ })
+
+ it('builds copy done', () => {
+ const actual = serialize.copyDone()
+ const expected = new BufferList().join(true, 'c')
+ assert.deepEqual(actual, expected)
+ })
+ })
+
+ it('builds cancel message', () => {
+ const actual = serialize.cancel(3, 4)
+ const expected = new BufferList().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true)
+ assert.deepEqual(actual, expected)
+ })
+})
diff --git a/api/node_modules/pg-protocol/src/parser.ts b/api/node_modules/pg-protocol/src/parser.ts
new file mode 100644
index 000000000..998077a00
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/parser.ts
@@ -0,0 +1,413 @@
+import { TransformOptions } from 'stream'
+import {
+ Mode,
+ bindComplete,
+ parseComplete,
+ closeComplete,
+ noData,
+ portalSuspended,
+ copyDone,
+ replicationStart,
+ emptyQuery,
+ ReadyForQueryMessage,
+ CommandCompleteMessage,
+ CopyDataMessage,
+ CopyResponse,
+ NotificationResponseMessage,
+ RowDescriptionMessage,
+ ParameterDescriptionMessage,
+ Field,
+ DataRowMessage,
+ ParameterStatusMessage,
+ BackendKeyDataMessage,
+ DatabaseError,
+ BackendMessage,
+ MessageName,
+ AuthenticationMD5Password,
+ NoticeMessage,
+} from './messages'
+import { BufferReader } from './buffer-reader'
+
+// every message is prefixed with a single byte
+const CODE_LENGTH = 1
+// every message has an int32 length which includes itself but does
+// NOT include the code in the length
+const LEN_LENGTH = 4
+
+const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH
+
+// A placeholder for a `BackendMessage`’s length value that will be set after construction.
+const LATEINIT_LENGTH = -1
+
+export type Packet = {
+ code: number
+ packet: Buffer
+}
+
+const emptyBuffer = Buffer.allocUnsafe(0)
+
+type StreamOptions = TransformOptions & {
+ mode: Mode
+}
+
+const enum MessageCodes {
+ DataRow = 0x44, // D
+ ParseComplete = 0x31, // 1
+ BindComplete = 0x32, // 2
+ CloseComplete = 0x33, // 3
+ CommandComplete = 0x43, // C
+ ReadyForQuery = 0x5a, // Z
+ NoData = 0x6e, // n
+ NotificationResponse = 0x41, // A
+ AuthenticationResponse = 0x52, // R
+ ParameterStatus = 0x53, // S
+ BackendKeyData = 0x4b, // K
+ ErrorMessage = 0x45, // E
+ NoticeMessage = 0x4e, // N
+ RowDescriptionMessage = 0x54, // T
+ ParameterDescriptionMessage = 0x74, // t
+ PortalSuspended = 0x73, // s
+ ReplicationStart = 0x57, // W
+ EmptyQuery = 0x49, // I
+ CopyIn = 0x47, // G
+ CopyOut = 0x48, // H
+ CopyDone = 0x63, // c
+ CopyData = 0x64, // d
+}
+
+export type MessageCallback = (msg: BackendMessage) => void
+
+export class Parser {
+ private buffer: Buffer = emptyBuffer
+ private bufferLength: number = 0
+ private bufferOffset: number = 0
+ private reader = new BufferReader()
+ private mode: Mode
+
+ constructor(opts?: StreamOptions) {
+ if (opts?.mode === 'binary') {
+ throw new Error('Binary mode not supported yet')
+ }
+ this.mode = opts?.mode || 'text'
+ }
+
+ public parse(buffer: Buffer, callback: MessageCallback) {
+ this.mergeBuffer(buffer)
+ const bufferFullLength = this.bufferOffset + this.bufferLength
+ let offset = this.bufferOffset
+ while (offset + HEADER_LENGTH <= bufferFullLength) {
+ // code is 1 byte long - it identifies the message type
+ const code = this.buffer[offset]
+ // length is 1 Uint32BE - it is the length of the message EXCLUDING the code
+ const length = this.buffer.readUInt32BE(offset + CODE_LENGTH)
+ const fullMessageLength = CODE_LENGTH + length
+ if (fullMessageLength + offset <= bufferFullLength) {
+ const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer)
+ callback(message)
+ offset += fullMessageLength
+ } else {
+ break
+ }
+ }
+ if (offset === bufferFullLength) {
+ // No more use for the buffer
+ this.buffer = emptyBuffer
+ this.bufferLength = 0
+ this.bufferOffset = 0
+ } else {
+ // Adjust the cursors of remainingBuffer
+ this.bufferLength = bufferFullLength - offset
+ this.bufferOffset = offset
+ }
+ }
+
+ private mergeBuffer(buffer: Buffer): void {
+ if (this.bufferLength > 0) {
+ const newLength = this.bufferLength + buffer.byteLength
+ const newFullLength = newLength + this.bufferOffset
+ if (newFullLength > this.buffer.byteLength) {
+ // We can't concat the new buffer with the remaining one
+ let newBuffer: Buffer
+ if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) {
+ // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer
+ newBuffer = this.buffer
+ } else {
+ // Allocate a new larger buffer
+ let newBufferLength = this.buffer.byteLength * 2
+ while (newLength >= newBufferLength) {
+ newBufferLength *= 2
+ }
+ newBuffer = Buffer.allocUnsafe(newBufferLength)
+ }
+ // Move the remaining buffer to the new one
+ this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength)
+ this.buffer = newBuffer
+ this.bufferOffset = 0
+ }
+ // Concat the new buffer with the remaining one
+ buffer.copy(this.buffer, this.bufferOffset + this.bufferLength)
+ this.bufferLength = newLength
+ } else {
+ this.buffer = buffer
+ this.bufferOffset = 0
+ this.bufferLength = buffer.byteLength
+ }
+ }
+
+ private handlePacket(offset: number, code: number, length: number, bytes: Buffer): BackendMessage {
+ const { reader } = this
+
+ // NOTE: This undesirably retains the buffer in `this.reader` if the `parse*Message` calls below throw. However, those should only throw in the case of a protocol error, which normally results in the reader being discarded.
+ reader.setBuffer(offset, bytes)
+
+ let message: BackendMessage
+
+ switch (code) {
+ case MessageCodes.BindComplete:
+ message = bindComplete
+ break
+ case MessageCodes.ParseComplete:
+ message = parseComplete
+ break
+ case MessageCodes.CloseComplete:
+ message = closeComplete
+ break
+ case MessageCodes.NoData:
+ message = noData
+ break
+ case MessageCodes.PortalSuspended:
+ message = portalSuspended
+ break
+ case MessageCodes.CopyDone:
+ message = copyDone
+ break
+ case MessageCodes.ReplicationStart:
+ message = replicationStart
+ break
+ case MessageCodes.EmptyQuery:
+ message = emptyQuery
+ break
+ case MessageCodes.DataRow:
+ message = parseDataRowMessage(reader)
+ break
+ case MessageCodes.CommandComplete:
+ message = parseCommandCompleteMessage(reader)
+ break
+ case MessageCodes.ReadyForQuery:
+ message = parseReadyForQueryMessage(reader)
+ break
+ case MessageCodes.NotificationResponse:
+ message = parseNotificationMessage(reader)
+ break
+ case MessageCodes.AuthenticationResponse:
+ message = parseAuthenticationResponse(reader, length)
+ break
+ case MessageCodes.ParameterStatus:
+ message = parseParameterStatusMessage(reader)
+ break
+ case MessageCodes.BackendKeyData:
+ message = parseBackendKeyData(reader)
+ break
+ case MessageCodes.ErrorMessage:
+ message = parseErrorMessage(reader, 'error')
+ break
+ case MessageCodes.NoticeMessage:
+ message = parseErrorMessage(reader, 'notice')
+ break
+ case MessageCodes.RowDescriptionMessage:
+ message = parseRowDescriptionMessage(reader)
+ break
+ case MessageCodes.ParameterDescriptionMessage:
+ message = parseParameterDescriptionMessage(reader)
+ break
+ case MessageCodes.CopyIn:
+ message = parseCopyInMessage(reader)
+ break
+ case MessageCodes.CopyOut:
+ message = parseCopyOutMessage(reader)
+ break
+ case MessageCodes.CopyData:
+ message = parseCopyData(reader, length)
+ break
+ default:
+ return new DatabaseError('received invalid response: ' + code.toString(16), length, 'error')
+ }
+
+ reader.setBuffer(0, emptyBuffer)
+
+ message.length = length
+ return message
+ }
+}
+
+const parseReadyForQueryMessage = (reader: BufferReader) => {
+ const status = reader.string(1)
+ return new ReadyForQueryMessage(LATEINIT_LENGTH, status)
+}
+
+const parseCommandCompleteMessage = (reader: BufferReader) => {
+ const text = reader.cstring()
+ return new CommandCompleteMessage(LATEINIT_LENGTH, text)
+}
+
+const parseCopyData = (reader: BufferReader, length: number) => {
+ const chunk = reader.bytes(length - 4)
+ return new CopyDataMessage(LATEINIT_LENGTH, chunk)
+}
+
+const parseCopyInMessage = (reader: BufferReader) => parseCopyMessage(reader, 'copyInResponse')
+
+const parseCopyOutMessage = (reader: BufferReader) => parseCopyMessage(reader, 'copyOutResponse')
+
+const parseCopyMessage = (reader: BufferReader, messageName: MessageName) => {
+ const isBinary = reader.byte() !== 0
+ const columnCount = reader.int16()
+ const message = new CopyResponse(LATEINIT_LENGTH, messageName, isBinary, columnCount)
+ for (let i = 0; i < columnCount; i++) {
+ message.columnTypes[i] = reader.int16()
+ }
+ return message
+}
+
+const parseNotificationMessage = (reader: BufferReader) => {
+ const processId = reader.int32()
+ const channel = reader.cstring()
+ const payload = reader.cstring()
+ return new NotificationResponseMessage(LATEINIT_LENGTH, processId, channel, payload)
+}
+
+const parseRowDescriptionMessage = (reader: BufferReader) => {
+ const fieldCount = reader.int16()
+ const message = new RowDescriptionMessage(LATEINIT_LENGTH, fieldCount)
+ for (let i = 0; i < fieldCount; i++) {
+ message.fields[i] = parseField(reader)
+ }
+ return message
+}
+
+const parseField = (reader: BufferReader) => {
+ const name = reader.cstring()
+ const tableID = reader.uint32()
+ const columnID = reader.int16()
+ const dataTypeID = reader.uint32()
+ const dataTypeSize = reader.int16()
+ const dataTypeModifier = reader.int32()
+ const mode = reader.int16() === 0 ? 'text' : 'binary'
+ return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode)
+}
+
+const parseParameterDescriptionMessage = (reader: BufferReader) => {
+ const parameterCount = reader.int16()
+ const message = new ParameterDescriptionMessage(LATEINIT_LENGTH, parameterCount)
+ for (let i = 0; i < parameterCount; i++) {
+ message.dataTypeIDs[i] = reader.int32()
+ }
+ return message
+}
+
+const parseDataRowMessage = (reader: BufferReader) => {
+ const fieldCount = reader.int16()
+ const fields: any[] = new Array(fieldCount)
+ for (let i = 0; i < fieldCount; i++) {
+ const len = reader.int32()
+ // a -1 for length means the value of the field is null
+ fields[i] = len === -1 ? null : reader.string(len)
+ }
+ return new DataRowMessage(LATEINIT_LENGTH, fields)
+}
+
+const parseParameterStatusMessage = (reader: BufferReader) => {
+ const name = reader.cstring()
+ const value = reader.cstring()
+ return new ParameterStatusMessage(LATEINIT_LENGTH, name, value)
+}
+
+const parseBackendKeyData = (reader: BufferReader) => {
+ const processID = reader.int32()
+ const secretKey = reader.int32()
+ return new BackendKeyDataMessage(LATEINIT_LENGTH, processID, secretKey)
+}
+
+const parseAuthenticationResponse = (reader: BufferReader, length: number) => {
+ const code = reader.int32()
+ // TODO(bmc): maybe better types here
+ const message: BackendMessage & any = {
+ name: 'authenticationOk',
+ length,
+ }
+
+ switch (code) {
+ case 0: // AuthenticationOk
+ break
+ case 3: // AuthenticationCleartextPassword
+ if (message.length === 8) {
+ message.name = 'authenticationCleartextPassword'
+ }
+ break
+ case 5: // AuthenticationMD5Password
+ if (message.length === 12) {
+ message.name = 'authenticationMD5Password'
+ const salt = reader.bytes(4)
+ return new AuthenticationMD5Password(LATEINIT_LENGTH, salt)
+ }
+ break
+ case 10: // AuthenticationSASL
+ {
+ message.name = 'authenticationSASL'
+ message.mechanisms = []
+ let mechanism: string
+ do {
+ mechanism = reader.cstring()
+ if (mechanism) {
+ message.mechanisms.push(mechanism)
+ }
+ } while (mechanism)
+ }
+ break
+ case 11: // AuthenticationSASLContinue
+ message.name = 'authenticationSASLContinue'
+ message.data = reader.string(length - 8)
+ break
+ case 12: // AuthenticationSASLFinal
+ message.name = 'authenticationSASLFinal'
+ message.data = reader.string(length - 8)
+ break
+ default:
+ throw new Error('Unknown authenticationOk message type ' + code)
+ }
+ return message
+}
+
+const parseErrorMessage = (reader: BufferReader, name: MessageName) => {
+  const fields: Record<string, string> = {}
+ let fieldType = reader.string(1)
+ while (fieldType !== '\0') {
+ fields[fieldType] = reader.cstring()
+ fieldType = reader.string(1)
+ }
+
+ const messageValue = fields.M
+
+ const message =
+ name === 'notice'
+ ? new NoticeMessage(LATEINIT_LENGTH, messageValue)
+ : new DatabaseError(messageValue, LATEINIT_LENGTH, name)
+
+ message.severity = fields.S
+ message.code = fields.C
+ message.detail = fields.D
+ message.hint = fields.H
+ message.position = fields.P
+ message.internalPosition = fields.p
+ message.internalQuery = fields.q
+ message.where = fields.W
+ message.schema = fields.s
+ message.table = fields.t
+ message.column = fields.c
+ message.dataType = fields.d
+ message.constraint = fields.n
+ message.file = fields.F
+ message.line = fields.L
+ message.routine = fields.R
+ return message
+}
diff --git a/api/node_modules/pg-protocol/src/serializer.ts b/api/node_modules/pg-protocol/src/serializer.ts
new file mode 100644
index 000000000..bb0441f56
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/serializer.ts
@@ -0,0 +1,274 @@
+import { Writer } from './buffer-writer'
+
+const enum code {
+ startup = 0x70,
+ query = 0x51,
+ parse = 0x50,
+ bind = 0x42,
+ execute = 0x45,
+ flush = 0x48,
+ sync = 0x53,
+ end = 0x58,
+ close = 0x43,
+ describe = 0x44,
+ copyFromChunk = 0x64,
+ copyDone = 0x63,
+ copyFail = 0x66,
+}
+
+const writer = new Writer()
+
+const startup = (opts: Record<string, string>): Buffer => {
+ // protocol version
+ writer.addInt16(3).addInt16(0)
+ for (const key of Object.keys(opts)) {
+ writer.addCString(key).addCString(opts[key])
+ }
+
+ writer.addCString('client_encoding').addCString('UTF8')
+
+ const bodyBuffer = writer.addCString('').flush()
+ // this message is sent without a code
+
+ const length = bodyBuffer.length + 4
+
+ return new Writer().addInt32(length).add(bodyBuffer).flush()
+}
+
+const requestSsl = (): Buffer => {
+ const response = Buffer.allocUnsafe(8)
+ response.writeInt32BE(8, 0)
+ response.writeInt32BE(80877103, 4)
+ return response
+}
+
+const password = (password: string): Buffer => {
+ return writer.addCString(password).flush(code.startup)
+}
+
+const sendSASLInitialResponseMessage = function (mechanism: string, initialResponse: string): Buffer {
+ // 0x70 = 'p'
+ writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse)
+
+ return writer.flush(code.startup)
+}
+
+const sendSCRAMClientFinalMessage = function (additionalData: string): Buffer {
+ return writer.addString(additionalData).flush(code.startup)
+}
+
+const query = (text: string): Buffer => {
+ return writer.addCString(text).flush(code.query)
+}
+
+type ParseOpts = {
+ name?: string
+ types?: number[]
+ text: string
+}
+
+const emptyArray: any[] = []
+
+const parse = (query: ParseOpts): Buffer => {
+ // expect something like this:
+ // { name: 'queryName',
+ // text: 'select * from blah',
+ // types: ['int8', 'bool'] }
+
+ // normalize missing query names to allow for null
+ const name = query.name || ''
+ if (name.length > 63) {
+ console.error('Warning! Postgres only supports 63 characters for query names.')
+ console.error('You supplied %s (%s)', name, name.length)
+ console.error('This can cause conflicts and silent errors executing queries')
+ }
+
+ const types = query.types || emptyArray
+
+ const len = types.length
+
+ const buffer = writer
+ .addCString(name) // name of query
+ .addCString(query.text) // actual query text
+ .addInt16(len)
+
+ for (let i = 0; i < len; i++) {
+ buffer.addInt32(types[i])
+ }
+
+ return writer.flush(code.parse)
+}
+
+type ValueMapper = (param: any, index: number) => any
+
+type BindOpts = {
+ portal?: string
+ binary?: boolean
+ statement?: string
+ values?: any[]
+ // optional map from JS value to postgres value per parameter
+ valueMapper?: ValueMapper
+}
+
+const paramWriter = new Writer()
+
+// make this a const enum so typescript will inline the value
+const enum ParamType {
+ STRING = 0,
+ BINARY = 1,
+}
+
+const writeValues = function (values: any[], valueMapper?: ValueMapper): void {
+ for (let i = 0; i < values.length; i++) {
+ const mappedVal = valueMapper ? valueMapper(values[i], i) : values[i]
+ if (mappedVal == null) {
+ // add the param type (string) to the writer
+ writer.addInt16(ParamType.STRING)
+ // write -1 to the param writer to indicate null
+ paramWriter.addInt32(-1)
+ } else if (mappedVal instanceof Buffer) {
+ // add the param type (binary) to the writer
+ writer.addInt16(ParamType.BINARY)
+ // add the buffer to the param writer
+ paramWriter.addInt32(mappedVal.length)
+ paramWriter.add(mappedVal)
+ } else {
+ // add the param type (string) to the writer
+ writer.addInt16(ParamType.STRING)
+ paramWriter.addInt32(Buffer.byteLength(mappedVal))
+ paramWriter.addString(mappedVal)
+ }
+ }
+}
+
+const bind = (config: BindOpts = {}): Buffer => {
+ // normalize config
+ const portal = config.portal || ''
+ const statement = config.statement || ''
+ const binary = config.binary || false
+ const values = config.values || emptyArray
+ const len = values.length
+
+ writer.addCString(portal).addCString(statement)
+ writer.addInt16(len)
+
+ writeValues(values, config.valueMapper)
+
+ writer.addInt16(len)
+ writer.add(paramWriter.flush())
+
+ // all results use the same format code
+ writer.addInt16(1)
+ // format code
+ writer.addInt16(binary ? ParamType.BINARY : ParamType.STRING)
+ return writer.flush(code.bind)
+}
+
+type ExecOpts = {
+ portal?: string
+ rows?: number
+}
+
+const emptyExecute = Buffer.from([code.execute, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00])
+
+const execute = (config?: ExecOpts): Buffer => {
+ // this is the happy path for most queries
+ if (!config || (!config.portal && !config.rows)) {
+ return emptyExecute
+ }
+
+ const portal = config.portal || ''
+ const rows = config.rows || 0
+
+ const portalLength = Buffer.byteLength(portal)
+ const len = 4 + portalLength + 1 + 4
+  // one extra byte for the message code
+ const buff = Buffer.allocUnsafe(1 + len)
+ buff[0] = code.execute
+ buff.writeInt32BE(len, 1)
+ buff.write(portal, 5, 'utf-8')
+ buff[portalLength + 5] = 0 // null terminate portal cString
+ buff.writeUInt32BE(rows, buff.length - 4)
+ return buff
+}
+
+const cancel = (processID: number, secretKey: number): Buffer => {
+ const buffer = Buffer.allocUnsafe(16)
+ buffer.writeInt32BE(16, 0)
+ buffer.writeInt16BE(1234, 4)
+ buffer.writeInt16BE(5678, 6)
+ buffer.writeInt32BE(processID, 8)
+ buffer.writeInt32BE(secretKey, 12)
+ return buffer
+}
+
+type PortalOpts = {
+ type: 'S' | 'P'
+ name?: string
+}
+
+const cstringMessage = (code: code, string: string): Buffer => {
+ const stringLen = Buffer.byteLength(string)
+ const len = 4 + stringLen + 1
+  // one extra byte for the message code
+ const buffer = Buffer.allocUnsafe(1 + len)
+ buffer[0] = code
+ buffer.writeInt32BE(len, 1)
+ buffer.write(string, 5, 'utf-8')
+ buffer[len] = 0 // null terminate cString
+ return buffer
+}
+
+const emptyDescribePortal = writer.addCString('P').flush(code.describe)
+const emptyDescribeStatement = writer.addCString('S').flush(code.describe)
+
+const describe = (msg: PortalOpts): Buffer => {
+ return msg.name
+ ? cstringMessage(code.describe, `${msg.type}${msg.name || ''}`)
+ : msg.type === 'P'
+ ? emptyDescribePortal
+ : emptyDescribeStatement
+}
+
+const close = (msg: PortalOpts): Buffer => {
+ const text = `${msg.type}${msg.name || ''}`
+ return cstringMessage(code.close, text)
+}
+
+const copyData = (chunk: Buffer): Buffer => {
+ return writer.add(chunk).flush(code.copyFromChunk)
+}
+
+const copyFail = (message: string): Buffer => {
+ return cstringMessage(code.copyFail, message)
+}
+
+const codeOnlyBuffer = (code: code): Buffer => Buffer.from([code, 0x00, 0x00, 0x00, 0x04])
+
+const flushBuffer = codeOnlyBuffer(code.flush)
+const syncBuffer = codeOnlyBuffer(code.sync)
+const endBuffer = codeOnlyBuffer(code.end)
+const copyDoneBuffer = codeOnlyBuffer(code.copyDone)
+
+const serialize = {
+ startup,
+ password,
+ requestSsl,
+ sendSASLInitialResponseMessage,
+ sendSCRAMClientFinalMessage,
+ query,
+ parse,
+ bind,
+ execute,
+ describe,
+ close,
+ flush: () => flushBuffer,
+ sync: () => syncBuffer,
+ end: () => endBuffer,
+ copyData,
+ copyDone: () => copyDoneBuffer,
+ copyFail,
+ cancel,
+}
+
+export { serialize }
diff --git a/api/node_modules/pg-protocol/src/testing/buffer-list.ts b/api/node_modules/pg-protocol/src/testing/buffer-list.ts
new file mode 100644
index 000000000..bef75d405
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/testing/buffer-list.ts
@@ -0,0 +1,67 @@
+export default class BufferList {
+ constructor(public buffers: Buffer[] = []) {}
+
+ public add(buffer: Buffer, front?: boolean) {
+ this.buffers[front ? 'unshift' : 'push'](buffer)
+ return this
+ }
+
+ public addInt16(val: number, front?: boolean) {
+ return this.add(Buffer.from([val >>> 8, val >>> 0]), front)
+ }
+
+ public getByteLength() {
+ return this.buffers.reduce(function (previous, current) {
+ return previous + current.length
+ }, 0)
+ }
+
+ public addInt32(val: number, first?: boolean) {
+ return this.add(
+ Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]),
+ first
+ )
+ }
+
+ public addCString(val: string, front?: boolean) {
+ const len = Buffer.byteLength(val)
+ const buffer = Buffer.alloc(len + 1)
+ buffer.write(val)
+ buffer[len] = 0
+ return this.add(buffer, front)
+ }
+
+ public addString(val: string, front?: boolean) {
+ const len = Buffer.byteLength(val)
+ const buffer = Buffer.alloc(len)
+ buffer.write(val)
+ return this.add(buffer, front)
+ }
+
+ public addChar(char: string, first?: boolean) {
+ return this.add(Buffer.from(char, 'utf8'), first)
+ }
+
+ public addByte(byte: number) {
+ return this.add(Buffer.from([byte]))
+ }
+
+ public join(appendLength?: boolean, char?: string): Buffer {
+ let length = this.getByteLength()
+ if (appendLength) {
+ this.addInt32(length + 4, true)
+ return this.join(false, char)
+ }
+ if (char) {
+ this.addChar(char, true)
+ length++
+ }
+ const result = Buffer.alloc(length)
+ let index = 0
+ this.buffers.forEach(function (buffer) {
+ buffer.copy(result, index, 0)
+ index += buffer.length
+ })
+ return result
+ }
+}
diff --git a/api/node_modules/pg-protocol/src/testing/test-buffers.ts b/api/node_modules/pg-protocol/src/testing/test-buffers.ts
new file mode 100644
index 000000000..1f0d71f2d
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/testing/test-buffers.ts
@@ -0,0 +1,166 @@
+// https://www.postgresql.org/docs/current/protocol-message-formats.html
+import BufferList from './buffer-list'
+
+const buffers = {
+ readyForQuery: function () {
+ return new BufferList().add(Buffer.from('I')).join(true, 'Z')
+ },
+
+ authenticationOk: function () {
+ return new BufferList().addInt32(0).join(true, 'R')
+ },
+
+ authenticationCleartextPassword: function () {
+ return new BufferList().addInt32(3).join(true, 'R')
+ },
+
+ authenticationMD5Password: function () {
+ return new BufferList()
+ .addInt32(5)
+ .add(Buffer.from([1, 2, 3, 4]))
+ .join(true, 'R')
+ },
+
+ authenticationSASL: function () {
+ return new BufferList().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R')
+ },
+
+ authenticationSASLContinue: function () {
+ return new BufferList().addInt32(11).addString('data').join(true, 'R')
+ },
+
+ authenticationSASLFinal: function () {
+ return new BufferList().addInt32(12).addString('data').join(true, 'R')
+ },
+
+ parameterStatus: function (name: string, value: string) {
+ return new BufferList().addCString(name).addCString(value).join(true, 'S')
+ },
+
+ backendKeyData: function (processID: number, secretKey: number) {
+ return new BufferList().addInt32(processID).addInt32(secretKey).join(true, 'K')
+ },
+
+ commandComplete: function (string: string) {
+ return new BufferList().addCString(string).join(true, 'C')
+ },
+
+ rowDescription: function (fields: any[]) {
+ fields = fields || []
+ const buf = new BufferList()
+ buf.addInt16(fields.length)
+ fields.forEach(function (field) {
+ buf
+ .addCString(field.name)
+ .addInt32(field.tableID || 0)
+ .addInt16(field.attributeNumber || 0)
+ .addInt32(field.dataTypeID || 0)
+ .addInt16(field.dataTypeSize || 0)
+ .addInt32(field.typeModifier || 0)
+ .addInt16(field.formatCode || 0)
+ })
+ return buf.join(true, 'T')
+ },
+
+ parameterDescription: function (dataTypeIDs: number[]) {
+ dataTypeIDs = dataTypeIDs || []
+ const buf = new BufferList()
+ buf.addInt16(dataTypeIDs.length)
+ dataTypeIDs.forEach(function (dataTypeID) {
+ buf.addInt32(dataTypeID)
+ })
+ return buf.join(true, 't')
+ },
+
+ dataRow: function (columns: any[]) {
+ columns = columns || []
+ const buf = new BufferList()
+ buf.addInt16(columns.length)
+ columns.forEach(function (col) {
+ if (col == null) {
+ buf.addInt32(-1)
+ } else {
+ const strBuf = Buffer.from(col, 'utf8')
+ buf.addInt32(strBuf.length)
+ buf.add(strBuf)
+ }
+ })
+ return buf.join(true, 'D')
+ },
+
+ error: function (fields: any) {
+ return buffers.errorOrNotice(fields).join(true, 'E')
+ },
+
+ notice: function (fields: any) {
+ return buffers.errorOrNotice(fields).join(true, 'N')
+ },
+
+ errorOrNotice: function (fields: any) {
+ fields = fields || []
+ const buf = new BufferList()
+ fields.forEach(function (field: any) {
+ buf.addChar(field.type)
+ buf.addCString(field.value)
+ })
+ return buf.add(Buffer.from([0])) // terminator
+ },
+
+ parseComplete: function () {
+ return new BufferList().join(true, '1')
+ },
+
+ bindComplete: function () {
+ return new BufferList().join(true, '2')
+ },
+
+ notification: function (id: number, channel: string, payload: string) {
+ return new BufferList().addInt32(id).addCString(channel).addCString(payload).join(true, 'A')
+ },
+
+ emptyQuery: function () {
+ return new BufferList().join(true, 'I')
+ },
+
+ portalSuspended: function () {
+ return new BufferList().join(true, 's')
+ },
+
+ closeComplete: function () {
+ return new BufferList().join(true, '3')
+ },
+
+ copyIn: function (cols: number) {
+ const list = new BufferList()
+ // text mode
+ .addByte(0)
+ // column count
+ .addInt16(cols)
+ for (let i = 0; i < cols; i++) {
+ list.addInt16(i)
+ }
+ return list.join(true, 'G')
+ },
+
+ copyOut: function (cols: number) {
+ const list = new BufferList()
+ // text mode
+ .addByte(0)
+ // column count
+ .addInt16(cols)
+ for (let i = 0; i < cols; i++) {
+ list.addInt16(i)
+ }
+ return list.join(true, 'H')
+ },
+
+ copyData: function (bytes: Buffer) {
+ return new BufferList().add(bytes).join(true, 'd')
+ },
+
+ copyDone: function () {
+ return new BufferList().join(true, 'c')
+ },
+}
+
+export default buffers
diff --git a/api/node_modules/pg-protocol/src/types/chunky.d.ts b/api/node_modules/pg-protocol/src/types/chunky.d.ts
new file mode 100644
index 000000000..7389bda66
--- /dev/null
+++ b/api/node_modules/pg-protocol/src/types/chunky.d.ts
@@ -0,0 +1 @@
+declare module 'chunky'
diff --git a/api/node_modules/pg-types/.travis.yml b/api/node_modules/pg-types/.travis.yml
new file mode 100644
index 000000000..dd6b03329
--- /dev/null
+++ b/api/node_modules/pg-types/.travis.yml
@@ -0,0 +1,7 @@
+language: node_js
+node_js:
+ - '4'
+ - 'lts/*'
+ - 'node'
+env:
+ - PGUSER=postgres
diff --git a/api/node_modules/pg-types/Makefile b/api/node_modules/pg-types/Makefile
new file mode 100644
index 000000000..d7ec83d54
--- /dev/null
+++ b/api/node_modules/pg-types/Makefile
@@ -0,0 +1,14 @@
+.PHONY: publish-patch test
+
+test:
+ npm test
+
+patch: test
+ npm version patch -m "Bump version"
+ git push origin master --tags
+ npm publish
+
+minor: test
+ npm version minor -m "Bump version"
+ git push origin master --tags
+ npm publish
diff --git a/api/node_modules/pg-types/README.md b/api/node_modules/pg-types/README.md
new file mode 100644
index 000000000..54a3f2c6b
--- /dev/null
+++ b/api/node_modules/pg-types/README.md
@@ -0,0 +1,75 @@
+# pg-types
+
+This is the code that turns all the raw text from postgres into JavaScript types for [node-postgres](https://github.com/brianc/node-postgres.git)
+
+## use
+
+This module is consumed and exported from the root `pg` object of node-postgres. To access it, do the following:
+
+```js
+var types = require('pg').types
+```
+
+Generally what you'll want to do is override how a specific data-type is parsed and turned into a JavaScript type. By default the PostgreSQL backend server returns everything as strings. Every data type corresponds to a unique `OID` within the server, and these `OIDs` are sent back with the query response. So, you need to match a particular `OID` to a function you'd like to use to take the raw text input and produce a valid JavaScript object as a result. `null` values are never parsed.
+
+Let's do something I commonly like to do on projects: return 64-bit integers `(int8)` as JavaScript integers. Because JavaScript doesn't have support for 64-bit integers node-postgres cannot confidently parse `int8` data type results as numbers because if you have a _huge_ number it will overflow and the result you'd get back from node-postgres would not be the result in the database. That would be a __very bad thing__ so node-postgres just returns `int8` results as strings and leaves the parsing up to you. Let's say that you know you don't and won't ever have numbers greater than `int4` in your database, but you're tired of receiving results from the `COUNT(*)` function as strings (because that function returns `int8`). You would do this:
+
+```js
+var types = require('pg').types
+types.setTypeParser(20, function(val) {
+ return parseInt(val)
+})
+```
+
+__boom__: now you get numbers instead of strings.
+
+Just as another example -- not saying this is a good idea -- let's say you want to return all dates from your database as [moment](http://momentjs.com/docs/) objects. Okay, do this:
+
+```js
+var types = require('pg').types
+var moment = require('moment')
+var parseFn = function(val) {
+ return val === null ? null : moment(val)
+}
+types.setTypeParser(types.builtins.TIMESTAMPTZ, parseFn)
+types.setTypeParser(types.builtins.TIMESTAMP, parseFn)
+```
+_note: I've never done that with my dates, and I'm not 100% sure moment can parse all the date strings returned from postgres. It's just an example!_
+
+If you're thinking "gee, this seems pretty handy, but how can I get a list of all the OIDs in the database and what they correspond to?!?!?!" worry not:
+
+```bash
+$ psql -c "select typname, oid, typarray from pg_type order by oid"
+```
+
+If you want to find out the OID of a specific type:
+
+```bash
+$ psql -c "select typname, oid, typarray from pg_type where typname = 'daterange' order by oid"
+```
+
+:smile:
+
+## license
+
+The MIT License (MIT)
+
+Copyright (c) 2014 Brian M. Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/pg-types/index.d.ts b/api/node_modules/pg-types/index.d.ts
new file mode 100644
index 000000000..4bebcbe68
--- /dev/null
+++ b/api/node_modules/pg-types/index.d.ts
@@ -0,0 +1,137 @@
+export enum TypeId {
+ BOOL = 16,
+ BYTEA = 17,
+ CHAR = 18,
+ INT8 = 20,
+ INT2 = 21,
+ INT4 = 23,
+ REGPROC = 24,
+ TEXT = 25,
+ OID = 26,
+ TID = 27,
+ XID = 28,
+ CID = 29,
+ JSON = 114,
+ XML = 142,
+ PG_NODE_TREE = 194,
+ SMGR = 210,
+ PATH = 602,
+ POLYGON = 604,
+ CIDR = 650,
+ FLOAT4 = 700,
+ FLOAT8 = 701,
+ ABSTIME = 702,
+ RELTIME = 703,
+ TINTERVAL = 704,
+ CIRCLE = 718,
+ MACADDR8 = 774,
+ MONEY = 790,
+ MACADDR = 829,
+ INET = 869,
+ ACLITEM = 1033,
+ BPCHAR = 1042,
+ VARCHAR = 1043,
+ DATE = 1082,
+ TIME = 1083,
+ TIMESTAMP = 1114,
+ TIMESTAMPTZ = 1184,
+ INTERVAL = 1186,
+ TIMETZ = 1266,
+ BIT = 1560,
+ VARBIT = 1562,
+ NUMERIC = 1700,
+ REFCURSOR = 1790,
+ REGPROCEDURE = 2202,
+ REGOPER = 2203,
+ REGOPERATOR = 2204,
+ REGCLASS = 2205,
+ REGTYPE = 2206,
+ UUID = 2950,
+ TXID_SNAPSHOT = 2970,
+ PG_LSN = 3220,
+ PG_NDISTINCT = 3361,
+ PG_DEPENDENCIES = 3402,
+ TSVECTOR = 3614,
+ TSQUERY = 3615,
+ GTSVECTOR = 3642,
+ REGCONFIG = 3734,
+ REGDICTIONARY = 3769,
+ JSONB = 3802,
+ REGNAMESPACE = 4089,
+ REGROLE = 4096
+}
+
+export type builtinsTypes =
+ 'BOOL' |
+ 'BYTEA' |
+ 'CHAR' |
+ 'INT8' |
+ 'INT2' |
+ 'INT4' |
+ 'REGPROC' |
+ 'TEXT' |
+ 'OID' |
+ 'TID' |
+ 'XID' |
+ 'CID' |
+ 'JSON' |
+ 'XML' |
+ 'PG_NODE_TREE' |
+ 'SMGR' |
+ 'PATH' |
+ 'POLYGON' |
+ 'CIDR' |
+ 'FLOAT4' |
+ 'FLOAT8' |
+ 'ABSTIME' |
+ 'RELTIME' |
+ 'TINTERVAL' |
+ 'CIRCLE' |
+ 'MACADDR8' |
+ 'MONEY' |
+ 'MACADDR' |
+ 'INET' |
+ 'ACLITEM' |
+ 'BPCHAR' |
+ 'VARCHAR' |
+ 'DATE' |
+ 'TIME' |
+ 'TIMESTAMP' |
+ 'TIMESTAMPTZ' |
+ 'INTERVAL' |
+ 'TIMETZ' |
+ 'BIT' |
+ 'VARBIT' |
+ 'NUMERIC' |
+ 'REFCURSOR' |
+ 'REGPROCEDURE' |
+ 'REGOPER' |
+ 'REGOPERATOR' |
+ 'REGCLASS' |
+ 'REGTYPE' |
+ 'UUID' |
+ 'TXID_SNAPSHOT' |
+ 'PG_LSN' |
+ 'PG_NDISTINCT' |
+ 'PG_DEPENDENCIES' |
+ 'TSVECTOR' |
+ 'TSQUERY' |
+ 'GTSVECTOR' |
+ 'REGCONFIG' |
+ 'REGDICTIONARY' |
+ 'JSONB' |
+ 'REGNAMESPACE' |
+ 'REGROLE';
+
+export type TypesBuiltins = {[key in builtinsTypes]: TypeId};
+
+export type TypeFormat = 'text' | 'binary';
+
+export const builtins: TypesBuiltins;
+
+export function setTypeParser (id: TypeId, parseFn: ((value: string) => any)): void;
+export function setTypeParser (id: TypeId, format: TypeFormat, parseFn: (value: string) => any): void;
+
+export const getTypeParser: (id: TypeId, format?: TypeFormat) => any
+
+export const arrayParser: (source: string, transform: (entry: any) => any) => any[];
diff --git a/api/node_modules/pg-types/index.js b/api/node_modules/pg-types/index.js
new file mode 100644
index 000000000..952d8c279
--- /dev/null
+++ b/api/node_modules/pg-types/index.js
@@ -0,0 +1,47 @@
+var textParsers = require('./lib/textParsers');
+var binaryParsers = require('./lib/binaryParsers');
+var arrayParser = require('./lib/arrayParser');
+var builtinTypes = require('./lib/builtins');
+
+exports.getTypeParser = getTypeParser;
+exports.setTypeParser = setTypeParser;
+exports.arrayParser = arrayParser;
+exports.builtins = builtinTypes;
+
+var typeParsers = {
+ text: {},
+ binary: {}
+};
+
+//the empty parse function
+function noParse (val) {
+ return String(val);
+};
+
+//returns a function used to convert a specific type (specified by
+//oid) into a result javascript type
+//note: the oid can be obtained via the following sql query:
+//SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE';
+function getTypeParser (oid, format) {
+ format = format || 'text';
+ if (!typeParsers[format]) {
+ return noParse;
+ }
+ return typeParsers[format][oid] || noParse;
+};
+
+function setTypeParser (oid, format, parseFn) {
+ if(typeof format == 'function') {
+ parseFn = format;
+ format = 'text';
+ }
+ typeParsers[format][oid] = parseFn;
+};
+
+textParsers.init(function(oid, converter) {
+ typeParsers.text[oid] = converter;
+});
+
+binaryParsers.init(function(oid, converter) {
+ typeParsers.binary[oid] = converter;
+});
diff --git a/api/node_modules/pg-types/index.test-d.ts b/api/node_modules/pg-types/index.test-d.ts
new file mode 100644
index 000000000..d530e6efc
--- /dev/null
+++ b/api/node_modules/pg-types/index.test-d.ts
@@ -0,0 +1,21 @@
+import * as types from '.';
+import { expectType } from 'tsd';
+
+// builtins
+expectType(types.builtins);
+
+// getTypeParser
+const noParse = types.getTypeParser(types.builtins.NUMERIC, 'text');
+const numericParser = types.getTypeParser(types.builtins.NUMERIC, 'binary');
+expectType(noParse('noParse'));
+expectType(numericParser([200, 1, 0, 15]));
+
+// getArrayParser
+const value = types.arrayParser('{1,2,3}', (num) => parseInt(num));
+expectType(value);
+
+//setTypeParser
+types.setTypeParser(types.builtins.INT8, parseInt);
+types.setTypeParser(types.builtins.FLOAT8, parseFloat);
+types.setTypeParser(types.builtins.FLOAT8, 'binary', (data) => data[0]);
+types.setTypeParser(types.builtins.FLOAT8, 'text', parseFloat);
diff --git a/api/node_modules/pg-types/lib/arrayParser.js b/api/node_modules/pg-types/lib/arrayParser.js
new file mode 100644
index 000000000..81ccffbc8
--- /dev/null
+++ b/api/node_modules/pg-types/lib/arrayParser.js
@@ -0,0 +1,11 @@
+var array = require('postgres-array');
+
+module.exports = {
+ create: function (source, transform) {
+ return {
+ parse: function() {
+ return array.parse(source, transform);
+ }
+ };
+ }
+};
diff --git a/api/node_modules/pg-types/lib/binaryParsers.js b/api/node_modules/pg-types/lib/binaryParsers.js
new file mode 100644
index 000000000..e12c2f463
--- /dev/null
+++ b/api/node_modules/pg-types/lib/binaryParsers.js
@@ -0,0 +1,257 @@
+var parseInt64 = require('pg-int8');
+
+var parseBits = function(data, bits, offset, invert, callback) {
+ offset = offset || 0;
+ invert = invert || false;
+ callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; };
+ var offsetBytes = offset >> 3;
+
+ var inv = function(value) {
+ if (invert) {
+ return ~value & 0xff;
+ }
+
+ return value;
+ };
+
+ // read first (maybe partial) byte
+ var mask = 0xff;
+ var firstBits = 8 - (offset % 8);
+ if (bits < firstBits) {
+ mask = (0xff << (8 - bits)) & 0xff;
+ firstBits = bits;
+ }
+
+ if (offset) {
+ mask = mask >> (offset % 8);
+ }
+
+ var result = 0;
+ if ((offset % 8) + bits >= 8) {
+ result = callback(0, inv(data[offsetBytes]) & mask, firstBits);
+ }
+
+ // read bytes
+ var bytes = (bits + offset) >> 3;
+ for (var i = offsetBytes + 1; i < bytes; i++) {
+ result = callback(result, inv(data[i]), 8);
+ }
+
+ // bits to read, that are not a complete byte
+ var lastBits = (bits + offset) % 8;
+ if (lastBits > 0) {
+ result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits);
+ }
+
+ return result;
+};
+
+var parseFloatFromBits = function(data, precisionBits, exponentBits) {
+ var bias = Math.pow(2, exponentBits - 1) - 1;
+ var sign = parseBits(data, 1);
+ var exponent = parseBits(data, exponentBits, 1);
+
+ if (exponent === 0) {
+ return 0;
+ }
+
+ // parse mantissa
+ var precisionBitsCounter = 1;
+ var parsePrecisionBits = function(lastValue, newValue, bits) {
+ if (lastValue === 0) {
+ lastValue = 1;
+ }
+
+ for (var i = 1; i <= bits; i++) {
+ precisionBitsCounter /= 2;
+ if ((newValue & (0x1 << (bits - i))) > 0) {
+ lastValue += precisionBitsCounter;
+ }
+ }
+
+ return lastValue;
+ };
+
+ var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits);
+
+ // special cases
+ if (exponent == (Math.pow(2, exponentBits + 1) - 1)) {
+ if (mantissa === 0) {
+ return (sign === 0) ? Infinity : -Infinity;
+ }
+
+ return NaN;
+ }
+
+ // normal number
+ return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa;
+};
+
+var parseInt16 = function(value) {
+ if (parseBits(value, 1) == 1) {
+ return -1 * (parseBits(value, 15, 1, true) + 1);
+ }
+
+ return parseBits(value, 15, 1);
+};
+
+var parseInt32 = function(value) {
+ if (parseBits(value, 1) == 1) {
+ return -1 * (parseBits(value, 31, 1, true) + 1);
+ }
+
+ return parseBits(value, 31, 1);
+};
+
+var parseFloat32 = function(value) {
+ return parseFloatFromBits(value, 23, 8);
+};
+
+var parseFloat64 = function(value) {
+ return parseFloatFromBits(value, 52, 11);
+};
+
+var parseNumeric = function(value) {
+ var sign = parseBits(value, 16, 32);
+ if (sign == 0xc000) {
+ return NaN;
+ }
+
+ var weight = Math.pow(10000, parseBits(value, 16, 16));
+ var result = 0;
+
+ var digits = [];
+ var ndigits = parseBits(value, 16);
+ for (var i = 0; i < ndigits; i++) {
+ result += parseBits(value, 16, 64 + (16 * i)) * weight;
+ weight /= 10000;
+ }
+
+ var scale = Math.pow(10, parseBits(value, 16, 48));
+ return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale;
+};
+
+var parseDate = function(isUTC, value) {
+ var sign = parseBits(value, 1);
+ var rawValue = parseBits(value, 63, 1);
+
+ // discard usecs and shift from 2000 to 1970
+ var result = new Date((((sign === 0) ? 1 : -1) * rawValue / 1000) + 946684800000);
+
+ if (!isUTC) {
+ result.setTime(result.getTime() + result.getTimezoneOffset() * 60000);
+ }
+
+ // add microseconds to the date
+ result.usec = rawValue % 1000;
+ result.getMicroSeconds = function() {
+ return this.usec;
+ };
+ result.setMicroSeconds = function(value) {
+ this.usec = value;
+ };
+ result.getUTCMicroSeconds = function() {
+ return this.usec;
+ };
+
+ return result;
+};
+
+var parseArray = function(value) {
+ var dim = parseBits(value, 32);
+
+ var flags = parseBits(value, 32, 32);
+ var elementType = parseBits(value, 32, 64);
+
+ var offset = 96;
+ var dims = [];
+ for (var i = 0; i < dim; i++) {
+ // parse dimension
+ dims[i] = parseBits(value, 32, offset);
+ offset += 32;
+
+ // ignore lower bounds
+ offset += 32;
+ }
+
+ var parseElement = function(elementType) {
+ // parse content length
+ var length = parseBits(value, 32, offset);
+ offset += 32;
+
+ // parse null values
+ if (length == 0xffffffff) {
+ return null;
+ }
+
+ var result;
+ if ((elementType == 0x17) || (elementType == 0x14)) {
+ // int/bigint
+ result = parseBits(value, length * 8, offset);
+ offset += length * 8;
+ return result;
+ }
+ else if (elementType == 0x19) {
+ // string
+ result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3);
+ return result;
+ }
+ else {
+ console.log("ERROR: ElementType not implemented: " + elementType);
+ }
+ };
+
+ var parse = function(dimension, elementType) {
+ var array = [];
+ var i;
+
+ if (dimension.length > 1) {
+ var count = dimension.shift();
+ for (i = 0; i < count; i++) {
+ array[i] = parse(dimension, elementType);
+ }
+ dimension.unshift(count);
+ }
+ else {
+ for (i = 0; i < dimension[0]; i++) {
+ array[i] = parseElement(elementType);
+ }
+ }
+
+ return array;
+ };
+
+ return parse(dims, elementType);
+};
+
+var parseText = function(value) {
+ return value.toString('utf8');
+};
+
+var parseBool = function(value) {
+ if(value === null) return null;
+ return (parseBits(value, 8) > 0);
+};
+
+var init = function(register) {
+ register(20, parseInt64);
+ register(21, parseInt16);
+ register(23, parseInt32);
+ register(26, parseInt32);
+ register(1700, parseNumeric);
+ register(700, parseFloat32);
+ register(701, parseFloat64);
+ register(16, parseBool);
+ register(1114, parseDate.bind(null, false));
+ register(1184, parseDate.bind(null, true));
+ register(1000, parseArray);
+ register(1007, parseArray);
+ register(1016, parseArray);
+ register(1008, parseArray);
+ register(1009, parseArray);
+ register(25, parseText);
+};
+
+module.exports = {
+ init: init
+};
diff --git a/api/node_modules/pg-types/lib/builtins.js b/api/node_modules/pg-types/lib/builtins.js
new file mode 100644
index 000000000..f0c134a8a
--- /dev/null
+++ b/api/node_modules/pg-types/lib/builtins.js
@@ -0,0 +1,73 @@
+/**
+ * Following query was used to generate this file:
+
+ SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid)
+ FROM pg_type PT
+ WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only built-in Postgres types with stable OID (extension types are not guaranteed to be stable)
+ AND typtype = 'b' -- Only basic types
+ AND typelem = 0 -- Ignore aliases
+ AND typisdefined -- Ignore undefined types
+ */
+
+module.exports = {
+ BOOL: 16,
+ BYTEA: 17,
+ CHAR: 18,
+ INT8: 20,
+ INT2: 21,
+ INT4: 23,
+ REGPROC: 24,
+ TEXT: 25,
+ OID: 26,
+ TID: 27,
+ XID: 28,
+ CID: 29,
+ JSON: 114,
+ XML: 142,
+ PG_NODE_TREE: 194,
+ SMGR: 210,
+ PATH: 602,
+ POLYGON: 604,
+ CIDR: 650,
+ FLOAT4: 700,
+ FLOAT8: 701,
+ ABSTIME: 702,
+ RELTIME: 703,
+ TINTERVAL: 704,
+ CIRCLE: 718,
+ MACADDR8: 774,
+ MONEY: 790,
+ MACADDR: 829,
+ INET: 869,
+ ACLITEM: 1033,
+ BPCHAR: 1042,
+ VARCHAR: 1043,
+ DATE: 1082,
+ TIME: 1083,
+ TIMESTAMP: 1114,
+ TIMESTAMPTZ: 1184,
+ INTERVAL: 1186,
+ TIMETZ: 1266,
+ BIT: 1560,
+ VARBIT: 1562,
+ NUMERIC: 1700,
+ REFCURSOR: 1790,
+ REGPROCEDURE: 2202,
+ REGOPER: 2203,
+ REGOPERATOR: 2204,
+ REGCLASS: 2205,
+ REGTYPE: 2206,
+ UUID: 2950,
+ TXID_SNAPSHOT: 2970,
+ PG_LSN: 3220,
+ PG_NDISTINCT: 3361,
+ PG_DEPENDENCIES: 3402,
+ TSVECTOR: 3614,
+ TSQUERY: 3615,
+ GTSVECTOR: 3642,
+ REGCONFIG: 3734,
+ REGDICTIONARY: 3769,
+ JSONB: 3802,
+ REGNAMESPACE: 4089,
+ REGROLE: 4096
+};
diff --git a/api/node_modules/pg-types/lib/textParsers.js b/api/node_modules/pg-types/lib/textParsers.js
new file mode 100644
index 000000000..b1218bfe2
--- /dev/null
+++ b/api/node_modules/pg-types/lib/textParsers.js
@@ -0,0 +1,215 @@
+var array = require('postgres-array')
+var arrayParser = require('./arrayParser');
+var parseDate = require('postgres-date');
+var parseInterval = require('postgres-interval');
+var parseByteA = require('postgres-bytea');
+
+function allowNull (fn) {
+ return function nullAllowed (value) {
+ if (value === null) return value
+ return fn(value)
+ }
+}
+
+function parseBool (value) {
+ if (value === null) return value
+ return value === 'TRUE' ||
+ value === 't' ||
+ value === 'true' ||
+ value === 'y' ||
+ value === 'yes' ||
+ value === 'on' ||
+ value === '1';
+}
+
+function parseBoolArray (value) {
+ if (!value) return null
+ return array.parse(value, parseBool)
+}
+
+function parseBaseTenInt (string) {
+ return parseInt(string, 10)
+}
+
+function parseIntegerArray (value) {
+ if (!value) return null
+ return array.parse(value, allowNull(parseBaseTenInt))
+}
+
+function parseBigIntegerArray (value) {
+ if (!value) return null
+ return array.parse(value, allowNull(function (entry) {
+ return parseBigInteger(entry).trim()
+ }))
+}
+
+var parsePointArray = function(value) {
+ if(!value) { return null; }
+ var p = arrayParser.create(value, function(entry) {
+ if(entry !== null) {
+ entry = parsePoint(entry);
+ }
+ return entry;
+ });
+
+ return p.parse();
+};
+
+var parseFloatArray = function(value) {
+ if(!value) { return null; }
+ var p = arrayParser.create(value, function(entry) {
+ if(entry !== null) {
+ entry = parseFloat(entry);
+ }
+ return entry;
+ });
+
+ return p.parse();
+};
+
+var parseStringArray = function(value) {
+ if(!value) { return null; }
+
+ var p = arrayParser.create(value);
+ return p.parse();
+};
+
+var parseDateArray = function(value) {
+ if (!value) { return null; }
+
+ var p = arrayParser.create(value, function(entry) {
+ if (entry !== null) {
+ entry = parseDate(entry);
+ }
+ return entry;
+ });
+
+ return p.parse();
+};
+
+var parseIntervalArray = function(value) {
+ if (!value) { return null; }
+
+ var p = arrayParser.create(value, function(entry) {
+ if (entry !== null) {
+ entry = parseInterval(entry);
+ }
+ return entry;
+ });
+
+ return p.parse();
+};
+
+var parseByteAArray = function(value) {
+ if (!value) { return null; }
+
+ return array.parse(value, allowNull(parseByteA));
+};
+
+var parseInteger = function(value) {
+ return parseInt(value, 10);
+};
+
+var parseBigInteger = function(value) {
+ var valStr = String(value);
+ if (/^\d+$/.test(valStr)) { return valStr; }
+ return value;
+};
+
+var parseJsonArray = function(value) {
+ if (!value) { return null; }
+
+ return array.parse(value, allowNull(JSON.parse));
+};
+
+var parsePoint = function(value) {
+ if (value[0] !== '(') { return null; }
+
+ value = value.substring( 1, value.length - 1 ).split(',');
+
+ return {
+ x: parseFloat(value[0])
+ , y: parseFloat(value[1])
+ };
+};
+
+var parseCircle = function(value) {
+ if (value[0] !== '<' && value[1] !== '(') { return null; }
+
+ var point = '(';
+ var radius = '';
+ var pointParsed = false;
+ for (var i = 2; i < value.length - 1; i++){
+ if (!pointParsed) {
+ point += value[i];
+ }
+
+ if (value[i] === ')') {
+ pointParsed = true;
+ continue;
+ } else if (!pointParsed) {
+ continue;
+ }
+
+ if (value[i] === ','){
+ continue;
+ }
+
+ radius += value[i];
+ }
+ var result = parsePoint(point);
+ result.radius = parseFloat(radius);
+
+ return result;
+};
+
+var init = function(register) {
+ register(20, parseBigInteger); // int8
+ register(21, parseInteger); // int2
+ register(23, parseInteger); // int4
+ register(26, parseInteger); // oid
+ register(700, parseFloat); // float4/real
+ register(701, parseFloat); // float8/double
+ register(16, parseBool);
+ register(1082, parseDate); // date
+ register(1114, parseDate); // timestamp without timezone
+ register(1184, parseDate); // timestamp
+ register(600, parsePoint); // point
+ register(651, parseStringArray); // cidr[]
+ register(718, parseCircle); // circle
+ register(1000, parseBoolArray);
+ register(1001, parseByteAArray);
+ register(1005, parseIntegerArray); // _int2
+ register(1007, parseIntegerArray); // _int4
+ register(1028, parseIntegerArray); // oid[]
+ register(1016, parseBigIntegerArray); // _int8
+ register(1017, parsePointArray); // point[]
+ register(1021, parseFloatArray); // _float4
+ register(1022, parseFloatArray); // _float8
+ register(1231, parseFloatArray); // _numeric
+ register(1014, parseStringArray); //char
+ register(1015, parseStringArray); //varchar
+ register(1008, parseStringArray);
+ register(1009, parseStringArray);
+ register(1040, parseStringArray); // macaddr[]
+ register(1041, parseStringArray); // inet[]
+ register(1115, parseDateArray); // timestamp without time zone[]
+ register(1182, parseDateArray); // _date
+ register(1185, parseDateArray); // timestamp with time zone[]
+ register(1186, parseInterval);
+ register(1187, parseIntervalArray);
+ register(17, parseByteA);
+ register(114, JSON.parse.bind(JSON)); // json
+ register(3802, JSON.parse.bind(JSON)); // jsonb
+ register(199, parseJsonArray); // json[]
+ register(3807, parseJsonArray); // jsonb[]
+ register(3907, parseStringArray); // numrange[]
+ register(2951, parseStringArray); // uuid[]
+ register(791, parseStringArray); // money[]
+ register(1183, parseStringArray); // time[]
+ register(1270, parseStringArray); // timetz[]
+};
+
+module.exports = {
+ init: init
+};
diff --git a/api/node_modules/pg-types/package.json b/api/node_modules/pg-types/package.json
new file mode 100644
index 000000000..5f18026fc
--- /dev/null
+++ b/api/node_modules/pg-types/package.json
@@ -0,0 +1,42 @@
+{
+ "name": "pg-types",
+ "version": "2.2.0",
+ "description": "Query result type converters for node-postgres",
+ "main": "index.js",
+ "scripts": {
+ "test": "tape test/*.js | tap-spec && npm run test-ts",
+ "test-ts": "if-node-version '>= 8' tsd"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianc/node-pg-types.git"
+ },
+ "keywords": [
+ "postgres",
+ "PostgreSQL",
+ "pg"
+ ],
+ "author": "Brian M. Carlson",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/brianc/node-pg-types/issues"
+ },
+ "homepage": "https://github.com/brianc/node-pg-types",
+ "devDependencies": {
+ "if-node-version": "^1.1.1",
+ "pff": "^1.0.0",
+ "tap-spec": "^4.0.0",
+ "tape": "^4.0.0",
+ "tsd": "^0.7.4"
+ },
+ "dependencies": {
+ "pg-int8": "1.0.1",
+ "postgres-array": "~2.0.0",
+ "postgres-bytea": "~1.0.0",
+ "postgres-date": "~1.0.4",
+ "postgres-interval": "^1.1.0"
+ },
+ "engines": {
+ "node": ">=4"
+ }
+}
diff --git a/api/node_modules/pg-types/test/index.js b/api/node_modules/pg-types/test/index.js
new file mode 100644
index 000000000..b7d05cd68
--- /dev/null
+++ b/api/node_modules/pg-types/test/index.js
@@ -0,0 +1,24 @@
+
+var test = require('tape')
+var printf = require('pff')
+var getTypeParser = require('../').getTypeParser
+var types = require('./types')
+
+test('types', function (t) {
+ Object.keys(types).forEach(function (typeName) {
+ var type = types[typeName]
+ t.test(typeName, function (t) {
+ var parser = getTypeParser(type.id, type.format)
+ type.tests.forEach(function (tests) {
+ var input = tests[0]
+ var expected = tests[1]
+ var result = parser(input)
+ if (typeof expected === 'function') {
+ return expected(t, result)
+ }
+ t.equal(result, expected)
+ })
+ t.end()
+ })
+ })
+})
diff --git a/api/node_modules/pg-types/test/types.js b/api/node_modules/pg-types/test/types.js
new file mode 100644
index 000000000..af708a5c3
--- /dev/null
+++ b/api/node_modules/pg-types/test/types.js
@@ -0,0 +1,597 @@
+'use strict'
+
+exports['string/varchar'] = {
+ format: 'text',
+ id: 1043,
+ tests: [
+ ['bang', 'bang']
+ ]
+}
+
+exports['integer/int4'] = {
+ format: 'text',
+ id: 23,
+ tests: [
+ ['2147483647', 2147483647]
+ ]
+}
+
+exports['smallint/int2'] = {
+ format: 'text',
+ id: 21,
+ tests: [
+ ['32767', 32767]
+ ]
+}
+
+exports['bigint/int8'] = {
+ format: 'text',
+ id: 20,
+ tests: [
+ ['9223372036854775807', '9223372036854775807']
+ ]
+}
+
+exports.oid = {
+ format: 'text',
+ id: 26,
+ tests: [
+ ['103', 103]
+ ]
+}
+
+var bignum = '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628'
+exports.numeric = {
+ format: 'text',
+ id: 1700,
+ tests: [
+ [bignum, bignum]
+ ]
+}
+
+exports['real/float4'] = {
+ format: 'text',
+ id: 700,
+ tests: [
+ ['123.456', 123.456]
+ ]
+}
+
+exports['double precision / float 8'] = {
+ format: 'text',
+ id: 701,
+ tests: [
+ ['12345678.12345678', 12345678.12345678]
+ ]
+}
+
+exports.boolean = {
+ format: 'text',
+ id: 16,
+ tests: [
+ ['TRUE', true],
+ ['t', true],
+ ['true', true],
+ ['y', true],
+ ['yes', true],
+ ['on', true],
+ ['1', true],
+ ['f', false],
+ [null, null]
+ ]
+}
+
+exports.timestamptz = {
+ format: 'text',
+ id: 1184,
+ tests: [
+ [
+ '2010-10-31 14:54:13.74-05:30',
+ dateEquals(2010, 9, 31, 20, 24, 13, 740)
+ ],
+ [
+ '2011-01-23 22:05:00.68-06',
+ dateEquals(2011, 0, 24, 4, 5, 0, 680)
+ ],
+ [
+ '2010-10-30 14:11:12.730838Z',
+ dateEquals(2010, 9, 30, 14, 11, 12, 730)
+ ],
+ [
+ '2010-10-30 13:10:01+05',
+ dateEquals(2010, 9, 30, 8, 10, 1, 0)
+ ]
+ ]
+}
+
+exports.timestamp = {
+ format: 'text',
+ id: 1114,
+ tests: [
+ [
+ '2010-10-31 00:00:00',
+ function (t, value) {
+ t.equal(
+ value.toUTCString(),
+ new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString()
+ )
+ t.equal(
+ value.toString(),
+ new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString()
+ )
+ }
+ ]
+ ]
+}
+
+exports.date = {
+ format: 'text',
+ id: 1082,
+ tests: [
+ ['2010-10-31', function (t, value) {
+ var now = new Date(2010, 9, 31)
+ dateEquals(
+ 2010,
+ now.getUTCMonth(),
+ now.getUTCDate(),
+ now.getUTCHours(), 0, 0, 0)(t, value)
+ t.equal(value.getHours(), now.getHours())
+ }]
+ ]
+}
+
+exports.inet = {
+ format: 'text',
+ id: 869,
+ tests: [
+ ['8.8.8.8', '8.8.8.8'],
+ ['2001:4860:4860::8888', '2001:4860:4860::8888'],
+ ['127.0.0.1', '127.0.0.1'],
+ ['fd00:1::40e', 'fd00:1::40e'],
+ ['1.2.3.4', '1.2.3.4']
+ ]
+}
+
+exports.cidr = {
+ format: 'text',
+ id: 650,
+ tests: [
+ ['172.16.0.0/12', '172.16.0.0/12'],
+ ['fe80::/10', 'fe80::/10'],
+ ['fc00::/7', 'fc00::/7'],
+ ['192.168.0.0/24', '192.168.0.0/24'],
+ ['10.0.0.0/8', '10.0.0.0/8']
+ ]
+}
+
+exports.macaddr = {
+ format: 'text',
+ id: 829,
+ tests: [
+ ['08:00:2b:01:02:03', '08:00:2b:01:02:03'],
+ ['16:10:9f:0d:66:00', '16:10:9f:0d:66:00']
+ ]
+}
+
+exports.numrange = {
+ format: 'text',
+ id: 3906,
+ tests: [
+ ['[,]', '[,]'],
+ ['(,)', '(,)'],
+ ['(,]', '(,]'],
+ ['[1,)', '[1,)'],
+ ['[,1]', '[,1]'],
+ ['(1,2)', '(1,2)'],
+ ['(1,20.5]', '(1,20.5]']
+ ]
+}
+
+exports.interval = {
+ format: 'text',
+ id: 1186,
+ tests: [
+ ['01:02:03', function (t, value) {
+ t.equal(value.toPostgres(), '3 seconds 2 minutes 1 hours')
+ t.deepEqual(value, {hours: 1, minutes: 2, seconds: 3})
+ }],
+ ['01:02:03.456', function (t, value) {
+ t.deepEqual(value, {hours: 1, minutes:2, seconds: 3, milliseconds: 456})
+ }],
+ ['1 year -32 days', function (t, value) {
+ t.equal(value.toPostgres(), '-32 days 1 years')
+ t.deepEqual(value, {years: 1, days: -32})
+ }],
+ ['1 day -00:00:03', function (t, value) {
+ t.equal(value.toPostgres(), '-3 seconds 1 days')
+ t.deepEqual(value, {days: 1, seconds: -3})
+ }]
+ ]
+}
+
+exports.bytea = {
+ format: 'text',
+ id: 17,
+ tests: [
+ ['foo\\000\\200\\\\\\377', function (t, value) {
+ var buffer = new Buffer([102, 111, 111, 0, 128, 92, 255])
+ t.ok(buffer.equals(value))
+ }],
+ ['', function (t, value) {
+ var buffer = new Buffer(0)
+ t.ok(buffer.equals(value))
+ }]
+ ]
+}
+
+exports['array/boolean'] = {
+ format: 'text',
+ id: 1000,
+ tests: [
+ ['{true,false}', function (t, value) {
+ t.deepEqual(value, [true, false])
+ }]
+ ]
+}
+
+exports['array/char'] = {
+ format: 'text',
+ id: 1014,
+ tests: [
+ ['{foo,bar}', function (t, value) {
+ t.deepEqual(value, ['foo', 'bar'])
+ }]
+ ]
+}
+
+exports['array/varchar'] = {
+ format: 'text',
+ id: 1015,
+ tests: [
+ ['{foo,bar}', function (t, value) {
+ t.deepEqual(value, ['foo', 'bar'])
+ }]
+ ]
+}
+
+exports['array/text'] = {
+ format: 'text',
+ id: 1008,
+ tests: [
+ ['{foo}', function (t, value) {
+ t.deepEqual(value, ['foo'])
+ }]
+ ]
+}
+
+exports['array/bytea'] = {
+ format: 'text',
+ id: 1001,
+ tests: [
+ ['{"\\\\x00000000"}', function (t, value) {
+ var buffer = new Buffer('00000000', 'hex')
+ t.ok(Array.isArray(value))
+ t.equal(value.length, 1)
+ t.ok(buffer.equals(value[0]))
+ }],
+ ['{NULL,"\\\\x4e554c4c"}', function (t, value) {
+ var buffer = new Buffer('4e554c4c', 'hex')
+ t.ok(Array.isArray(value))
+ t.equal(value.length, 2)
+ t.equal(value[0], null)
+ t.ok(buffer.equals(value[1]))
+ }],
+ ]
+}
+
+exports['array/numeric'] = {
+ format: 'text',
+ id: 1231,
+ tests: [
+ ['{1.2,3.4}', function (t, value) {
+ t.deepEqual(value, [1.2, 3.4])
+ }]
+ ]
+}
+
+exports['array/int2'] = {
+ format: 'text',
+ id: 1005,
+ tests: [
+ ['{-32768, -32767, 32766, 32767}', function (t, value) {
+ t.deepEqual(value, [-32768, -32767, 32766, 32767])
+ }]
+ ]
+}
+
+exports['array/int4'] = {
+ format: 'text',
+ id: 1005,
+ tests: [
+ ['{-2147483648, -2147483647, 2147483646, 2147483647}', function (t, value) {
+ t.deepEqual(value, [-2147483648, -2147483647, 2147483646, 2147483647])
+ }]
+ ]
+}
+
+exports['array/int8'] = {
+ format: 'text',
+ id: 1016,
+ tests: [
+ [
+ '{-9223372036854775808, -9223372036854775807, 9223372036854775806, 9223372036854775807}',
+ function (t, value) {
+ t.deepEqual(value, [
+ '-9223372036854775808',
+ '-9223372036854775807',
+ '9223372036854775806',
+ '9223372036854775807'
+ ])
+ }
+ ]
+ ]
+}
+
+exports['array/json'] = {
+ format: 'text',
+ id: 199,
+ tests: [
+ [
+ '{{1,2},{[3],"[4,5]"},{null,NULL}}',
+ function (t, value) {
+ t.deepEqual(value, [
+ [1, 2],
+ [[3], [4, 5]],
+ [null, null],
+ ])
+ }
+ ]
+ ]
+}
+
+exports['array/jsonb'] = {
+ format: 'text',
+ id: 3807,
+ tests: exports['array/json'].tests
+}
+
+exports['array/point'] = {
+ format: 'text',
+ id: 1017,
+ tests: [
+ ['{"(25.1,50.5)","(10.1,40)"}', function (t, value) {
+ t.deepEqual(value, [{x: 25.1, y: 50.5}, {x: 10.1, y: 40}])
+ }]
+ ]
+}
+
+exports['array/oid'] = {
+ format: 'text',
+ id: 1028,
+ tests: [
+ ['{25864,25860}', function (t, value) {
+ t.deepEqual(value, [25864, 25860])
+ }]
+ ]
+}
+
+exports['array/float4'] = {
+ format: 'text',
+ id: 1021,
+ tests: [
+ ['{1.2, 3.4}', function (t, value) {
+ t.deepEqual(value, [1.2, 3.4])
+ }]
+ ]
+}
+
+exports['array/float8'] = {
+ format: 'text',
+ id: 1022,
+ tests: [
+ ['{-12345678.1234567, 12345678.12345678}', function (t, value) {
+ t.deepEqual(value, [-12345678.1234567, 12345678.12345678])
+ }]
+ ]
+}
+
+exports['array/date'] = {
+ format: 'text',
+ id: 1182,
+ tests: [
+ ['{2014-01-01,2015-12-31}', function (t, value) {
+ var expecteds = [new Date(2014, 0, 1), new Date(2015, 11, 31)]
+ t.equal(value.length, 2)
+ value.forEach(function (date, index) {
+ var expected = expecteds[index]
+ dateEquals(
+ expected.getUTCFullYear(),
+ expected.getUTCMonth(),
+ expected.getUTCDate(),
+ expected.getUTCHours(), 0, 0, 0)(t, date)
+ })
+ }]
+ ]
+}
+
+exports['array/interval'] = {
+ format: 'text',
+ id: 1187,
+ tests: [
+ ['{01:02:03,1 day -00:00:03}', function (t, value) {
+ var expecteds = [{hours: 1, minutes: 2, seconds: 3},
+ {days: 1, seconds: -3}]
+ t.equal(value.length, 2)
+ t.deepEqual(value, expecteds);
+ }]
+ ]
+}
+
+exports['array/inet'] = {
+ format: 'text',
+ id: 1041,
+ tests: [
+ ['{8.8.8.8}', function (t, value) {
+ t.deepEqual(value, ['8.8.8.8']);
+ }],
+ ['{2001:4860:4860::8888}', function (t, value) {
+ t.deepEqual(value, ['2001:4860:4860::8888']);
+ }],
+ ['{127.0.0.1,fd00:1::40e,1.2.3.4}', function (t, value) {
+ t.deepEqual(value, ['127.0.0.1', 'fd00:1::40e', '1.2.3.4']);
+ }]
+ ]
+}
+
+exports['array/cidr'] = {
+ format: 'text',
+ id: 651,
+ tests: [
+ ['{172.16.0.0/12}', function (t, value) {
+ t.deepEqual(value, ['172.16.0.0/12']);
+ }],
+ ['{fe80::/10}', function (t, value) {
+ t.deepEqual(value, ['fe80::/10']);
+ }],
+ ['{10.0.0.0/8,fc00::/7,192.168.0.0/24}', function (t, value) {
+ t.deepEqual(value, ['10.0.0.0/8', 'fc00::/7', '192.168.0.0/24']);
+ }]
+ ]
+}
+
+exports['array/macaddr'] = {
+ format: 'text',
+ id: 1040,
+ tests: [
+ ['{08:00:2b:01:02:03,16:10:9f:0d:66:00}', function (t, value) {
+ t.deepEqual(value, ['08:00:2b:01:02:03', '16:10:9f:0d:66:00']);
+ }]
+ ]
+}
+
+exports['array/numrange'] = {
+ format: 'text',
+ id: 3907,
+ tests: [
+ ['{"[1,2]","(4.5,8)","[10,40)","(-21.2,60.3]"}', function (t, value) {
+ t.deepEqual(value, ['[1,2]', '(4.5,8)', '[10,40)', '(-21.2,60.3]']);
+ }],
+ ['{"[,20]","[3,]","[,]","(,35)","(1,)","(,)"}', function (t, value) {
+ t.deepEqual(value, ['[,20]', '[3,]', '[,]', '(,35)', '(1,)', '(,)']);
+ }],
+ ['{"[,20)","[3,)","[,)","[,35)","[1,)","[,)"}', function (t, value) {
+ t.deepEqual(value, ['[,20)', '[3,)', '[,)', '[,35)', '[1,)', '[,)']);
+ }]
+ ]
+}
+
+exports['binary-string/varchar'] = {
+ format: 'binary',
+ id: 1043,
+ tests: [
+ ['bang', 'bang']
+ ]
+}
+
+exports['binary-integer/int4'] = {
+ format: 'binary',
+ id: 23,
+ tests: [
+ [[0, 0, 0, 100], 100]
+ ]
+}
+
+exports['binary-smallint/int2'] = {
+ format: 'binary',
+ id: 21,
+ tests: [
+ [[0, 101], 101]
+ ]
+}
+
+exports['binary-bigint/int8'] = {
+ format: 'binary',
+ id: 20,
+ tests: [
+ [new Buffer([0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), '9223372036854775807']
+ ]
+}
+
+exports['binary-oid'] = {
+ format: 'binary',
+ id: 26,
+ tests: [
+ [[0, 0, 0, 103], 103]
+ ]
+}
+
+exports['binary-numeric'] = {
+ format: 'binary',
+ id: 1700,
+ tests: [
+ [
+ [0, 2, 0, 0, 0, 0, 0, hex('0x64'), 0, 12, hex('0xd'), hex('0x48'), 0, 0, 0, 0],
+ 12.34
+ ]
+ ]
+}
+
+exports['binary-real/float4'] = {
+ format: 'binary',
+ id: 700,
+ tests: [
+ [['0x41', '0x48', '0x00', '0x00'].map(hex), 12.5]
+ ]
+}
+
+exports['binary-boolean'] = {
+ format: 'binary',
+ id: 16,
+ tests: [
+ [[1], true],
+ [[0], false],
+ [null, null]
+ ]
+}
+
+exports['binary-string'] = {
+ format: 'binary',
+ id: 25,
+ tests: [
+ [
+ new Buffer(['0x73', '0x6c', '0x61', '0x64', '0x64', '0x61'].map(hex)),
+ 'sladda'
+ ]
+ ]
+}
+
+exports.point = {
+ format: 'text',
+ id: 600,
+ tests: [
+ ['(25.1,50.5)', function (t, value) {
+ t.deepEqual(value, {x: 25.1, y: 50.5})
+ }]
+ ]
+}
+
+exports.circle = {
+ format: 'text',
+ id: 718,
+ tests: [
+ ['<(25,10),5>', function (t, value) {
+ t.deepEqual(value, {x: 25, y: 10, radius: 5})
+ }]
+ ]
+}
+
+function hex (string) {
+ return parseInt(string, 16)
+}
+
+function dateEquals () {
+ var timestamp = Date.UTC.apply(Date, arguments)
+ return function (t, value) {
+ t.equal(value.toUTCString(), new Date(timestamp).toUTCString())
+ }
+}
diff --git a/api/node_modules/pg/LICENSE b/api/node_modules/pg/LICENSE
new file mode 100644
index 000000000..5c1405646
--- /dev/null
+++ b/api/node_modules/pg/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2010 - 2021 Brian Carlson
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/api/node_modules/pg/README.md b/api/node_modules/pg/README.md
new file mode 100644
index 000000000..bf4effefb
--- /dev/null
+++ b/api/node_modules/pg/README.md
@@ -0,0 +1,95 @@
+# node-postgres
+
+[](http://travis-ci.org/brianc/node-postgres)
+
+
+
+Non-blocking PostgreSQL client for Node.js. Pure JavaScript and optional native libpq bindings.
+
+## Install
+
+```sh
+$ npm install pg
+```
+
+---
+
+## :star: [Documentation](https://node-postgres.com) :star:
+
+### Features
+
+- Pure JavaScript client and native libpq bindings share _the same API_
+- Connection pooling
+- Extensible JS ↔ PostgreSQL data-type coercion
+- Supported PostgreSQL features
+ - Parameterized queries
+ - Named statements with query plan caching
+ - Async notifications with `LISTEN/NOTIFY`
+ - Bulk import & export with `COPY TO/COPY FROM`
+
+### Extras
+
+node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture.
+The entire list can be found on our [wiki](https://github.com/brianc/node-postgres/wiki/Extras).
+
+## Support
+
+node-postgres is free software. If you encounter a bug with the library please open an issue on the [GitHub repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better!
+
+When you open an issue please provide:
+
+- version of Node
+- version of Postgres
+- smallest possible snippet of code to reproduce the problem
+
+You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on Twitter.
+
+## Sponsorship :two_hearts:
+
+node-postgres's continued development has been made possible in part by generous financial support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md).
+
+If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development.
+
+### Featured sponsor
+
+Special thanks to [medplum](https://medplum.com) for their generous and thoughtful support of node-postgres!
+
+
+
+## Contributing
+
+**:heart: contributions!**
+
+I will **happily** accept your pull request if it:
+
+- **has tests**
+- looks reasonable
+- does not break backwards compatibility
+
+If your change involves breaking backwards compatibility please point that out in the pull request & we can discuss & plan when and how to release it and what type of documentation or communication it will require.
+
+## Troubleshooting and FAQ
+
+The causes and solutions to common errors can be found among the [Frequently Asked Questions (FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ)
+
+## License
+
+Copyright (c) 2010-2020 Brian Carlson (brian.m.carlson@gmail.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/pg/esm/index.mjs b/api/node_modules/pg/esm/index.mjs
new file mode 100644
index 000000000..587d80c1e
--- /dev/null
+++ b/api/node_modules/pg/esm/index.mjs
@@ -0,0 +1,20 @@
+// ESM wrapper for pg
+import pg from '../lib/index.js'
+
+// Re-export all the properties
+export const Client = pg.Client
+export const Pool = pg.Pool
+export const Connection = pg.Connection
+export const types = pg.types
+export const Query = pg.Query
+export const DatabaseError = pg.DatabaseError
+export const escapeIdentifier = pg.escapeIdentifier
+export const escapeLiteral = pg.escapeLiteral
+export const Result = pg.Result
+export const TypeOverrides = pg.TypeOverrides
+
+// Also export the defaults
+export const defaults = pg.defaults
+
+// Re-export the default
+export default pg
diff --git a/api/node_modules/pg/lib/client.js b/api/node_modules/pg/lib/client.js
new file mode 100644
index 000000000..9200dded6
--- /dev/null
+++ b/api/node_modules/pg/lib/client.js
@@ -0,0 +1,743 @@
+const EventEmitter = require('events').EventEmitter
+const utils = require('./utils')
+const nodeUtils = require('util')
+const sasl = require('./crypto/sasl')
+const TypeOverrides = require('./type-overrides')
+
+const ConnectionParameters = require('./connection-parameters')
+const Query = require('./query')
+const defaults = require('./defaults')
+const Connection = require('./connection')
+const crypto = require('./crypto/utils')
+
+const activeQueryDeprecationNotice = nodeUtils.deprecate(
+ () => {},
+ 'Client.activeQuery is deprecated and will be removed in pg@9.0'
+)
+
+const queryQueueDeprecationNotice = nodeUtils.deprecate(
+ () => {},
+ 'Client.queryQueue is deprecated and will be removed in pg@9.0.'
+)
+
+const pgPassDeprecationNotice = nodeUtils.deprecate(
+ () => {},
+ 'pgpass support is deprecated and will be removed in pg@9.0. ' +
+ 'You can provide an async function as the password property to the Client/Pool constructor that returns a password instead. Within this function you can call the pgpass module in your own code.'
+)
+
+const byoPromiseDeprecationNotice = nodeUtils.deprecate(
+ () => {},
+ 'Passing a custom Promise implementation to the Client/Pool constructor is deprecated and will be removed in pg@9.0.'
+)
+
+const queryQueueLengthDeprecationNotice = nodeUtils.deprecate(
+ () => {},
+ 'Calling client.query() when the client is already executing a query is deprecated and will be removed in pg@9.0. Use async/await or an external async flow control mechanism instead.'
+)
+
+class Client extends EventEmitter {
+ constructor(config) {
+ super()
+
+ this.connectionParameters = new ConnectionParameters(config)
+ this.user = this.connectionParameters.user
+ this.database = this.connectionParameters.database
+ this.port = this.connectionParameters.port
+ this.host = this.connectionParameters.host
+
+ // "hiding" the password so it doesn't show up in stack traces
+ // or if the client is console.logged
+ Object.defineProperty(this, 'password', {
+ configurable: true,
+ enumerable: false,
+ writable: true,
+ value: this.connectionParameters.password,
+ })
+
+ this.replication = this.connectionParameters.replication
+
+ const c = config || {}
+
+ if (c.Promise) {
+ byoPromiseDeprecationNotice()
+ }
+ this._Promise = c.Promise || global.Promise
+ this._types = new TypeOverrides(c.types)
+ this._ending = false
+ this._ended = false
+ this._connecting = false
+ this._connected = false
+ this._connectionError = false
+ this._queryable = true
+ this._activeQuery = null
+
+ this.enableChannelBinding = Boolean(c.enableChannelBinding) // set true to use SCRAM-SHA-256-PLUS when offered
+ this.connection =
+ c.connection ||
+ new Connection({
+ stream: c.stream,
+ ssl: this.connectionParameters.ssl,
+ keepAlive: c.keepAlive || false,
+ keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0,
+ encoding: this.connectionParameters.client_encoding || 'utf8',
+ })
+ this._queryQueue = []
+ this.binary = c.binary || defaults.binary
+ this.processID = null
+ this.secretKey = null
+ this.ssl = this.connectionParameters.ssl || false
+ // As with Password, make SSL->Key (the private key) non-enumerable.
+ // It won't show up in stack traces
+ // or if the client is console.logged
+ if (this.ssl && this.ssl.key) {
+ Object.defineProperty(this.ssl, 'key', {
+ enumerable: false,
+ })
+ }
+
+ this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0
+ }
+
+ get activeQuery() {
+ activeQueryDeprecationNotice()
+ return this._activeQuery
+ }
+
+ set activeQuery(val) {
+ activeQueryDeprecationNotice()
+ this._activeQuery = val
+ }
+
+ _getActiveQuery() {
+ return this._activeQuery
+ }
+
+ _errorAllQueries(err) {
+ const enqueueError = (query) => {
+ process.nextTick(() => {
+ query.handleError(err, this.connection)
+ })
+ }
+
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery) {
+ enqueueError(activeQuery)
+ this._activeQuery = null
+ }
+
+ this._queryQueue.forEach(enqueueError)
+ this._queryQueue.length = 0
+ }
+
+ _connect(callback) {
+ const self = this
+ const con = this.connection
+ this._connectionCallback = callback
+
+ if (this._connecting || this._connected) {
+ const err = new Error('Client has already been connected. You cannot reuse a client.')
+ process.nextTick(() => {
+ callback(err)
+ })
+ return
+ }
+ this._connecting = true
+
+ if (this._connectionTimeoutMillis > 0) {
+ this.connectionTimeoutHandle = setTimeout(() => {
+ con._ending = true
+ con.stream.destroy(new Error('timeout expired'))
+ }, this._connectionTimeoutMillis)
+
+ if (this.connectionTimeoutHandle.unref) {
+ this.connectionTimeoutHandle.unref()
+ }
+ }
+
+ if (this.host && this.host.indexOf('/') === 0) {
+ con.connect(this.host + '/.s.PGSQL.' + this.port)
+ } else {
+ con.connect(this.port, this.host)
+ }
+
+ // once connection is established send startup message
+ con.on('connect', function () {
+ if (self.ssl) {
+ con.requestSsl()
+ } else {
+ con.startup(self.getStartupConf())
+ }
+ })
+
+ con.on('sslconnect', function () {
+ con.startup(self.getStartupConf())
+ })
+
+ this._attachListeners(con)
+
+ con.once('end', () => {
+ const error = this._ending ? new Error('Connection terminated') : new Error('Connection terminated unexpectedly')
+
+ clearTimeout(this.connectionTimeoutHandle)
+ this._errorAllQueries(error)
+ this._ended = true
+
+ if (!this._ending) {
+ // if the connection is ended without us calling .end()
+ // on this client then we have an unexpected disconnection
+ // treat this as an error unless we've already emitted an error
+ // during connection.
+ if (this._connecting && !this._connectionError) {
+ if (this._connectionCallback) {
+ this._connectionCallback(error)
+ } else {
+ this._handleErrorEvent(error)
+ }
+ } else if (!this._connectionError) {
+ this._handleErrorEvent(error)
+ }
+ }
+
+ process.nextTick(() => {
+ this.emit('end')
+ })
+ })
+ }
+
+ connect(callback) {
+ if (callback) {
+ this._connect(callback)
+ return
+ }
+
+ return new this._Promise((resolve, reject) => {
+ this._connect((error) => {
+ if (error) {
+ reject(error)
+ } else {
+ resolve(this)
+ }
+ })
+ })
+ }
+
+ _attachListeners(con) {
+ // password request handling
+ con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this))
+ // password request handling
+ con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this))
+ // password request handling (SASL)
+ con.on('authenticationSASL', this._handleAuthSASL.bind(this))
+ con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this))
+ con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this))
+ con.on('backendKeyData', this._handleBackendKeyData.bind(this))
+ con.on('error', this._handleErrorEvent.bind(this))
+ con.on('errorMessage', this._handleErrorMessage.bind(this))
+ con.on('readyForQuery', this._handleReadyForQuery.bind(this))
+ con.on('notice', this._handleNotice.bind(this))
+ con.on('rowDescription', this._handleRowDescription.bind(this))
+ con.on('dataRow', this._handleDataRow.bind(this))
+ con.on('portalSuspended', this._handlePortalSuspended.bind(this))
+ con.on('emptyQuery', this._handleEmptyQuery.bind(this))
+ con.on('commandComplete', this._handleCommandComplete.bind(this))
+ con.on('parseComplete', this._handleParseComplete.bind(this))
+ con.on('copyInResponse', this._handleCopyInResponse.bind(this))
+ con.on('copyData', this._handleCopyData.bind(this))
+ con.on('notification', this._handleNotification.bind(this))
+ }
+
+ _getPassword(cb) {
+ const con = this.connection
+ if (typeof this.password === 'function') {
+ this._Promise
+ .resolve()
+ .then(() => this.password(this.connectionParameters))
+ .then((pass) => {
+ if (pass !== undefined) {
+ if (typeof pass !== 'string') {
+ con.emit('error', new TypeError('Password must be a string'))
+ return
+ }
+ this.connectionParameters.password = this.password = pass
+ } else {
+ this.connectionParameters.password = this.password = null
+ }
+ cb()
+ })
+ .catch((err) => {
+ con.emit('error', err)
+ })
+ } else if (this.password !== null) {
+ cb()
+ } else {
+ try {
+ const pgPass = require('pgpass')
+ pgPass(this.connectionParameters, (pass) => {
+ if (undefined !== pass) {
+ pgPassDeprecationNotice()
+ this.connectionParameters.password = this.password = pass
+ }
+ cb()
+ })
+ } catch (e) {
+ this.emit('error', e)
+ }
+ }
+ }
+
+ _handleAuthCleartextPassword(msg) {
+ this._getPassword(() => {
+ this.connection.password(this.password)
+ })
+ }
+
+ _handleAuthMD5Password(msg) {
+ this._getPassword(async () => {
+ try {
+ const hashedPassword = await crypto.postgresMd5PasswordHash(this.user, this.password, msg.salt)
+ this.connection.password(hashedPassword)
+ } catch (e) {
+ this.emit('error', e)
+ }
+ })
+ }
+
+ _handleAuthSASL(msg) {
+ this._getPassword(() => {
+ try {
+ this.saslSession = sasl.startSession(msg.mechanisms, this.enableChannelBinding && this.connection.stream)
+ this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response)
+ } catch (err) {
+ this.connection.emit('error', err)
+ }
+ })
+ }
+
+ async _handleAuthSASLContinue(msg) {
+ try {
+ await sasl.continueSession(
+ this.saslSession,
+ this.password,
+ msg.data,
+ this.enableChannelBinding && this.connection.stream
+ )
+ this.connection.sendSCRAMClientFinalMessage(this.saslSession.response)
+ } catch (err) {
+ this.connection.emit('error', err)
+ }
+ }
+
+ _handleAuthSASLFinal(msg) {
+ try {
+ sasl.finalizeSession(this.saslSession, msg.data)
+ this.saslSession = null
+ } catch (err) {
+ this.connection.emit('error', err)
+ }
+ }
+
+ _handleBackendKeyData(msg) {
+ this.processID = msg.processID
+ this.secretKey = msg.secretKey
+ }
+
+ _handleReadyForQuery(msg) {
+ if (this._connecting) {
+ this._connecting = false
+ this._connected = true
+ clearTimeout(this.connectionTimeoutHandle)
+
+ // process possible callback argument to Client#connect
+ if (this._connectionCallback) {
+ this._connectionCallback(null, this)
+ // remove callback for proper error handling
+ // after the connect event
+ this._connectionCallback = null
+ }
+ this.emit('connect')
+ }
+ const activeQuery = this._getActiveQuery()
+ this._activeQuery = null
+ this.readyForQuery = true
+ if (activeQuery) {
+ activeQuery.handleReadyForQuery(this.connection)
+ }
+ this._pulseQueryQueue()
+ }
+
+ // if we receive an error event or error message
+ // during the connection process we handle it here
+ _handleErrorWhileConnecting(err) {
+ if (this._connectionError) {
+ // TODO(bmc): this is swallowing errors - we shouldn't do this
+ return
+ }
+ this._connectionError = true
+ clearTimeout(this.connectionTimeoutHandle)
+ if (this._connectionCallback) {
+ return this._connectionCallback(err)
+ }
+ this.emit('error', err)
+ }
+
+ // if we're connected and we receive an error event from the connection
+ // this means the socket is dead - do a hard abort of all queries and emit
+ // the socket error on the client as well
+ _handleErrorEvent(err) {
+ if (this._connecting) {
+ return this._handleErrorWhileConnecting(err)
+ }
+ this._queryable = false
+ this._errorAllQueries(err)
+ this.emit('error', err)
+ }
+
+ // handle error messages from the postgres backend
+ _handleErrorMessage(msg) {
+ if (this._connecting) {
+ return this._handleErrorWhileConnecting(msg)
+ }
+ const activeQuery = this._getActiveQuery()
+
+ if (!activeQuery) {
+ this._handleErrorEvent(msg)
+ return
+ }
+
+ this._activeQuery = null
+ activeQuery.handleError(msg, this.connection)
+ }
+
+ _handleRowDescription(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected rowDescription message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ // delegate rowDescription to active query
+ activeQuery.handleRowDescription(msg)
+ }
+
+ _handleDataRow(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected dataRow message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ // delegate dataRow to active query
+ activeQuery.handleDataRow(msg)
+ }
+
+ _handlePortalSuspended(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected portalSuspended message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ // delegate portalSuspended to active query
+ activeQuery.handlePortalSuspended(this.connection)
+ }
+
+ _handleEmptyQuery(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected emptyQuery message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ // delegate emptyQuery to active query
+ activeQuery.handleEmptyQuery(this.connection)
+ }
+
+ _handleCommandComplete(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected commandComplete message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ // delegate commandComplete to active query
+ activeQuery.handleCommandComplete(msg, this.connection)
+ }
+
+ _handleParseComplete() {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected parseComplete message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ // if a prepared statement has a name and properly parses
+ // we track that its already been executed so we don't parse
+ // it again on the same client
+ if (activeQuery.name) {
+ this.connection.parsedStatements[activeQuery.name] = activeQuery.text
+ }
+ }
+
+ _handleCopyInResponse(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected copyInResponse message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ activeQuery.handleCopyInResponse(this.connection)
+ }
+
+ _handleCopyData(msg) {
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery == null) {
+ const error = new Error('Received unexpected copyData message from backend.')
+ this._handleErrorEvent(error)
+ return
+ }
+ activeQuery.handleCopyData(msg, this.connection)
+ }
+
+ _handleNotification(msg) {
+ this.emit('notification', msg)
+ }
+
+ _handleNotice(msg) {
+ this.emit('notice', msg)
+ }
+
+ getStartupConf() {
+ const params = this.connectionParameters
+
+ const data = {
+ user: params.user,
+ database: params.database,
+ }
+
+ const appName = params.application_name || params.fallback_application_name
+ if (appName) {
+ data.application_name = appName
+ }
+ if (params.replication) {
+ data.replication = '' + params.replication
+ }
+ if (params.statement_timeout) {
+ data.statement_timeout = String(parseInt(params.statement_timeout, 10))
+ }
+ if (params.lock_timeout) {
+ data.lock_timeout = String(parseInt(params.lock_timeout, 10))
+ }
+ if (params.idle_in_transaction_session_timeout) {
+ data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10))
+ }
+ if (params.options) {
+ data.options = params.options
+ }
+
+ return data
+ }
+
+ cancel(client, query) {
+ if (client.activeQuery === query) {
+ const con = this.connection
+
+ if (this.host && this.host.indexOf('/') === 0) {
+ con.connect(this.host + '/.s.PGSQL.' + this.port)
+ } else {
+ con.connect(this.port, this.host)
+ }
+
+ // once connection is established send cancel message
+ con.on('connect', function () {
+ con.cancel(client.processID, client.secretKey)
+ })
+ } else if (client._queryQueue.indexOf(query) !== -1) {
+ client._queryQueue.splice(client._queryQueue.indexOf(query), 1)
+ }
+ }
+
+ setTypeParser(oid, format, parseFn) {
+ return this._types.setTypeParser(oid, format, parseFn)
+ }
+
+ getTypeParser(oid, format) {
+ return this._types.getTypeParser(oid, format)
+ }
+
+ // escapeIdentifier and escapeLiteral moved to utility functions & exported
+ // on PG
+ // re-exported here for backwards compatibility
+ escapeIdentifier(str) {
+ return utils.escapeIdentifier(str)
+ }
+
+ escapeLiteral(str) {
+ return utils.escapeLiteral(str)
+ }
+
+ _pulseQueryQueue() {
+ if (this.readyForQuery === true) {
+ this._activeQuery = this._queryQueue.shift()
+ const activeQuery = this._getActiveQuery()
+ if (activeQuery) {
+ this.readyForQuery = false
+ this.hasExecuted = true
+
+ const queryError = activeQuery.submit(this.connection)
+ if (queryError) {
+ process.nextTick(() => {
+ activeQuery.handleError(queryError, this.connection)
+ this.readyForQuery = true
+ this._pulseQueryQueue()
+ })
+ }
+ } else if (this.hasExecuted) {
+ this._activeQuery = null
+ this.emit('drain')
+ }
+ }
+ }
+
+ query(config, values, callback) {
+ // can take in strings, config object or query object
+ let query
+ let result
+ let readTimeout
+ let readTimeoutTimer
+ let queryCallback
+
+ if (config === null || config === undefined) {
+ throw new TypeError('Client was passed a null or undefined query')
+ } else if (typeof config.submit === 'function') {
+ readTimeout = config.query_timeout || this.connectionParameters.query_timeout
+ result = query = config
+ if (!query.callback) {
+ if (typeof values === 'function') {
+ query.callback = values
+ } else if (callback) {
+ query.callback = callback
+ }
+ }
+ } else {
+ readTimeout = config.query_timeout || this.connectionParameters.query_timeout
+ query = new Query(config, values, callback)
+ if (!query.callback) {
+ result = new this._Promise((resolve, reject) => {
+ query.callback = (err, res) => (err ? reject(err) : resolve(res))
+ }).catch((err) => {
+ // replace the stack trace that leads to `TCP.onStreamRead` with one that leads back to the
+ // application that created the query
+ Error.captureStackTrace(err)
+ throw err
+ })
+ }
+ }
+
+ if (readTimeout) {
+ queryCallback = query.callback || (() => {})
+
+ readTimeoutTimer = setTimeout(() => {
+ const error = new Error('Query read timeout')
+
+ process.nextTick(() => {
+ query.handleError(error, this.connection)
+ })
+
+ queryCallback(error)
+
+ // we already returned an error,
+ // just do nothing if query completes
+ query.callback = () => {}
+
+ // Remove from queue
+ const index = this._queryQueue.indexOf(query)
+ if (index > -1) {
+ this._queryQueue.splice(index, 1)
+ }
+
+ this._pulseQueryQueue()
+ }, readTimeout)
+
+ query.callback = (err, res) => {
+ clearTimeout(readTimeoutTimer)
+ queryCallback(err, res)
+ }
+ }
+
+ if (this.binary && !query.binary) {
+ query.binary = true
+ }
+
+ if (query._result && !query._result._types) {
+ query._result._types = this._types
+ }
+
+ if (!this._queryable) {
+ process.nextTick(() => {
+ query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection)
+ })
+ return result
+ }
+
+ if (this._ending) {
+ process.nextTick(() => {
+ query.handleError(new Error('Client was closed and is not queryable'), this.connection)
+ })
+ return result
+ }
+
+ if (this._queryQueue.length > 0) {
+ queryQueueLengthDeprecationNotice()
+ }
+ this._queryQueue.push(query)
+ this._pulseQueryQueue()
+ return result
+ }
+
+ ref() {
+ this.connection.ref()
+ }
+
+ unref() {
+ this.connection.unref()
+ }
+
+ end(cb) {
+ this._ending = true
+
+ // if we have never connected, then end is a noop, callback immediately
+ if (!this.connection._connecting || this._ended) {
+ if (cb) {
+ cb()
+ } else {
+ return this._Promise.resolve()
+ }
+ }
+
+ if (this._getActiveQuery() || !this._queryable) {
+ // if we have an active query we need to force a disconnect
+ // on the socket - otherwise a hung query could block end forever
+ this.connection.stream.destroy()
+ } else {
+ this.connection.end()
+ }
+
+ if (cb) {
+ this.connection.once('end', cb)
+ } else {
+ return new this._Promise((resolve) => {
+ this.connection.once('end', resolve)
+ })
+ }
+ }
+ get queryQueue() {
+ queryQueueDeprecationNotice()
+ return this._queryQueue
+ }
+}
+
+// expose a Query constructor
+Client.Query = Query
+
+module.exports = Client
diff --git a/api/node_modules/pg/lib/connection-parameters.js b/api/node_modules/pg/lib/connection-parameters.js
new file mode 100644
index 000000000..c153932bb
--- /dev/null
+++ b/api/node_modules/pg/lib/connection-parameters.js
@@ -0,0 +1,171 @@
+'use strict'
+
+const dns = require('dns')
+
+const defaults = require('./defaults')
+
+const parse = require('pg-connection-string').parse // parses a connection string
+
+const val = function (key, config, envVar) {
+ if (config[key]) {
+ return config[key]
+ }
+
+ if (envVar === undefined) {
+ envVar = process.env['PG' + key.toUpperCase()]
+ } else if (envVar === false) {
+ // do nothing ... use false
+ } else {
+ envVar = process.env[envVar]
+ }
+
+ return envVar || defaults[key]
+}
+
+const readSSLConfigFromEnvironment = function () {
+ switch (process.env.PGSSLMODE) {
+ case 'disable':
+ return false
+ case 'prefer':
+ case 'require':
+ case 'verify-ca':
+ case 'verify-full':
+ return true
+ case 'no-verify':
+ return { rejectUnauthorized: false }
+ }
+ return defaults.ssl
+}
+
+// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes
+const quoteParamValue = function (value) {
+ return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'"
+}
+
+const add = function (params, config, paramName) {
+ const value = config[paramName]
+ if (value !== undefined && value !== null) {
+ params.push(paramName + '=' + quoteParamValue(value))
+ }
+}
+
+class ConnectionParameters {
+ constructor(config) {
+ // if a string is passed, it is a raw connection string so we parse it into a config
+ config = typeof config === 'string' ? parse(config) : config || {}
+
+ // if the config has a connectionString defined, parse IT into the config we use
+ // this will override other default values with what is stored in connectionString
+ if (config.connectionString) {
+ config = Object.assign({}, config, parse(config.connectionString))
+ }
+
+ this.user = val('user', config)
+ this.database = val('database', config)
+
+ if (this.database === undefined) {
+ this.database = this.user
+ }
+
+ this.port = parseInt(val('port', config), 10)
+ this.host = val('host', config)
+
+ // "hiding" the password so it doesn't show up in stack traces
+ // or if the client is console.logged
+ Object.defineProperty(this, 'password', {
+ configurable: true,
+ enumerable: false,
+ writable: true,
+ value: val('password', config),
+ })
+
+ this.binary = val('binary', config)
+ this.options = val('options', config)
+
+ this.ssl = typeof config.ssl === 'undefined' ? readSSLConfigFromEnvironment() : config.ssl
+
+ if (typeof this.ssl === 'string') {
+ if (this.ssl === 'true') {
+ this.ssl = true
+ }
+ }
+ // support passing in ssl=no-verify via connection string
+ if (this.ssl === 'no-verify') {
+ this.ssl = { rejectUnauthorized: false }
+ }
+ if (this.ssl && this.ssl.key) {
+ Object.defineProperty(this.ssl, 'key', {
+ enumerable: false,
+ })
+ }
+
+ this.client_encoding = val('client_encoding', config)
+ this.replication = val('replication', config)
+ // a domain socket begins with '/'
+ this.isDomainSocket = !(this.host || '').indexOf('/')
+
+ this.application_name = val('application_name', config, 'PGAPPNAME')
+ this.fallback_application_name = val('fallback_application_name', config, false)
+ this.statement_timeout = val('statement_timeout', config, false)
+ this.lock_timeout = val('lock_timeout', config, false)
+ this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false)
+ this.query_timeout = val('query_timeout', config, false)
+
+ if (config.connectionTimeoutMillis === undefined) {
+ this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0
+ } else {
+ this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000)
+ }
+
+ if (config.keepAlive === false) {
+ this.keepalives = 0
+ } else if (config.keepAlive === true) {
+ this.keepalives = 1
+ }
+
+ if (typeof config.keepAliveInitialDelayMillis === 'number') {
+ this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000)
+ }
+ }
+
+ getLibpqConnectionString(cb) {
+ const params = []
+ add(params, this, 'user')
+ add(params, this, 'password')
+ add(params, this, 'port')
+ add(params, this, 'application_name')
+ add(params, this, 'fallback_application_name')
+ add(params, this, 'connect_timeout')
+ add(params, this, 'options')
+
+ const ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? { sslmode: this.ssl } : {}
+ add(params, ssl, 'sslmode')
+ add(params, ssl, 'sslca')
+ add(params, ssl, 'sslkey')
+ add(params, ssl, 'sslcert')
+ add(params, ssl, 'sslrootcert')
+
+ if (this.database) {
+ params.push('dbname=' + quoteParamValue(this.database))
+ }
+ if (this.replication) {
+ params.push('replication=' + quoteParamValue(this.replication))
+ }
+ if (this.host) {
+ params.push('host=' + quoteParamValue(this.host))
+ }
+ if (this.isDomainSocket) {
+ return cb(null, params.join(' '))
+ }
+ if (this.client_encoding) {
+ params.push('client_encoding=' + quoteParamValue(this.client_encoding))
+ }
+ dns.lookup(this.host, function (err, address) {
+ if (err) return cb(err, null)
+ params.push('hostaddr=' + quoteParamValue(address))
+ return cb(null, params.join(' '))
+ })
+ }
+}
+
+module.exports = ConnectionParameters
diff --git a/api/node_modules/pg/lib/connection.js b/api/node_modules/pg/lib/connection.js
new file mode 100644
index 000000000..027f93935
--- /dev/null
+++ b/api/node_modules/pg/lib/connection.js
@@ -0,0 +1,221 @@
+'use strict'
+
+const EventEmitter = require('events').EventEmitter
+
+const { parse, serialize } = require('pg-protocol')
+const { getStream, getSecureStream } = require('./stream')
+
+const flushBuffer = serialize.flush()
+const syncBuffer = serialize.sync()
+const endBuffer = serialize.end()
+
+// TODO(bmc) support binary mode at some point
+class Connection extends EventEmitter {
+ constructor(config) {
+ super()
+ config = config || {}
+
+ this.stream = config.stream || getStream(config.ssl)
+ if (typeof this.stream === 'function') {
+ this.stream = this.stream(config)
+ }
+
+ this._keepAlive = config.keepAlive
+ this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis
+ this.parsedStatements = {}
+ this.ssl = config.ssl || false
+ this._ending = false
+ this._emitMessage = false
+ const self = this
+ this.on('newListener', function (eventName) {
+ if (eventName === 'message') {
+ self._emitMessage = true
+ }
+ })
+ }
+
+ connect(port, host) {
+ const self = this
+
+ this._connecting = true
+ this.stream.setNoDelay(true)
+ this.stream.connect(port, host)
+
+ this.stream.once('connect', function () {
+ if (self._keepAlive) {
+ self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis)
+ }
+ self.emit('connect')
+ })
+
+ const reportStreamError = function (error) {
+ // errors about disconnections should be ignored during disconnect
+ if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) {
+ return
+ }
+ self.emit('error', error)
+ }
+ this.stream.on('error', reportStreamError)
+
+ this.stream.on('close', function () {
+ self.emit('end')
+ })
+
+ if (!this.ssl) {
+ return this.attachListeners(this.stream)
+ }
+
+ this.stream.once('data', function (buffer) {
+ const responseCode = buffer.toString('utf8')
+ switch (responseCode) {
+ case 'S': // Server supports SSL connections, continue with a secure connection
+ break
+ case 'N': // Server does not support SSL connections
+ self.stream.end()
+ return self.emit('error', new Error('The server does not support SSL connections'))
+ default:
+ // Any other response byte, including 'E' (ErrorResponse) indicating a server error
+ self.stream.end()
+ return self.emit('error', new Error('There was an error establishing an SSL connection'))
+ }
+ const options = {
+ socket: self.stream,
+ }
+
+ if (self.ssl !== true) {
+ Object.assign(options, self.ssl)
+
+ if ('key' in self.ssl) {
+ options.key = self.ssl.key
+ }
+ }
+
+ const net = require('net')
+ if (net.isIP && net.isIP(host) === 0) {
+ options.servername = host
+ }
+ try {
+ self.stream = getSecureStream(options)
+ } catch (err) {
+ return self.emit('error', err)
+ }
+ self.attachListeners(self.stream)
+ self.stream.on('error', reportStreamError)
+
+ self.emit('sslconnect')
+ })
+ }
+
+ attachListeners(stream) {
+ parse(stream, (msg) => {
+ const eventName = msg.name === 'error' ? 'errorMessage' : msg.name
+ if (this._emitMessage) {
+ this.emit('message', msg)
+ }
+ this.emit(eventName, msg)
+ })
+ }
+
+ requestSsl() {
+ this.stream.write(serialize.requestSsl())
+ }
+
+ startup(config) {
+ this.stream.write(serialize.startup(config))
+ }
+
+ cancel(processID, secretKey) {
+ this._send(serialize.cancel(processID, secretKey))
+ }
+
+ password(password) {
+ this._send(serialize.password(password))
+ }
+
+ sendSASLInitialResponseMessage(mechanism, initialResponse) {
+ this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse))
+ }
+
+ sendSCRAMClientFinalMessage(additionalData) {
+ this._send(serialize.sendSCRAMClientFinalMessage(additionalData))
+ }
+
+ _send(buffer) {
+ if (!this.stream.writable) {
+ return false
+ }
+ return this.stream.write(buffer)
+ }
+
+ query(text) {
+ this._send(serialize.query(text))
+ }
+
+ // send parse message
+ parse(query) {
+ this._send(serialize.parse(query))
+ }
+
+ // send bind message
+ bind(config) {
+ this._send(serialize.bind(config))
+ }
+
+ // send execute message
+ execute(config) {
+ this._send(serialize.execute(config))
+ }
+
+ flush() {
+ if (this.stream.writable) {
+ this.stream.write(flushBuffer)
+ }
+ }
+
+ sync() {
+ this._ending = true
+ this._send(syncBuffer)
+ }
+
+ ref() {
+ this.stream.ref()
+ }
+
+ unref() {
+ this.stream.unref()
+ }
+
+ end() {
+ // 0x58 = 'X'
+ this._ending = true
+ if (!this._connecting || !this.stream.writable) {
+ this.stream.end()
+ return
+ }
+ return this.stream.write(endBuffer, () => {
+ this.stream.end()
+ })
+ }
+
+ close(msg) {
+ this._send(serialize.close(msg))
+ }
+
+ describe(msg) {
+ this._send(serialize.describe(msg))
+ }
+
+ sendCopyFromChunk(chunk) {
+ this._send(serialize.copyData(chunk))
+ }
+
+ endCopyFrom() {
+ this._send(serialize.copyDone())
+ }
+
+ sendCopyFail(msg) {
+ this._send(serialize.copyFail(msg))
+ }
+}
+
+module.exports = Connection
diff --git a/api/node_modules/pg/lib/crypto/cert-signatures.js b/api/node_modules/pg/lib/crypto/cert-signatures.js
new file mode 100644
index 000000000..8d8df3425
--- /dev/null
+++ b/api/node_modules/pg/lib/crypto/cert-signatures.js
@@ -0,0 +1,122 @@
+function x509Error(msg, cert) {
+ return new Error('SASL channel binding: ' + msg + ' when parsing public certificate ' + cert.toString('base64'))
+}
+
+function readASN1Length(data, index) {
+ let length = data[index++]
+ if (length < 0x80) return { length, index }
+
+ const lengthBytes = length & 0x7f
+ if (lengthBytes > 4) throw x509Error('bad length', data)
+
+ length = 0
+ for (let i = 0; i < lengthBytes; i++) {
+ length = (length << 8) | data[index++]
+ }
+
+ return { length, index }
+}
+
+function readASN1OID(data, index) {
+ if (data[index++] !== 0x6) throw x509Error('non-OID data', data) // 6 = OID
+
+ const { length: OIDLength, index: indexAfterOIDLength } = readASN1Length(data, index)
+ index = indexAfterOIDLength
+ const lastIndex = index + OIDLength
+
+ const byte1 = data[index++]
+ let oid = ((byte1 / 40) >> 0) + '.' + (byte1 % 40)
+
+ while (index < lastIndex) {
+ // loop over numbers in OID
+ let value = 0
+ while (index < lastIndex) {
+ // loop over bytes in number
+ const nextByte = data[index++]
+ value = (value << 7) | (nextByte & 0x7f)
+ if (nextByte < 0x80) break
+ }
+ oid += '.' + value
+ }
+
+ return { oid, index }
+}
+
+function expectASN1Seq(data, index) {
+ if (data[index++] !== 0x30) throw x509Error('non-sequence data', data) // 30 = Sequence
+ return readASN1Length(data, index)
+}
+
+function signatureAlgorithmHashFromCertificate(data, index) {
+ // read this thread: https://www.postgresql.org/message-id/17760-b6c61e752ec07060%40postgresql.org
+ if (index === undefined) index = 0
+ index = expectASN1Seq(data, index).index
+ const { length: certInfoLength, index: indexAfterCertInfoLength } = expectASN1Seq(data, index)
+ index = indexAfterCertInfoLength + certInfoLength // skip over certificate info
+ index = expectASN1Seq(data, index).index // skip over signature length field
+ const { oid, index: indexAfterOID } = readASN1OID(data, index)
+ switch (oid) {
+ // RSA
+ case '1.2.840.113549.1.1.4':
+ return 'MD5'
+ case '1.2.840.113549.1.1.5':
+ return 'SHA-1'
+ case '1.2.840.113549.1.1.11':
+ return 'SHA-256'
+ case '1.2.840.113549.1.1.12':
+ return 'SHA-384'
+ case '1.2.840.113549.1.1.13':
+ return 'SHA-512'
+ case '1.2.840.113549.1.1.14':
+ return 'SHA-224'
+ case '1.2.840.113549.1.1.15':
+ return 'SHA512-224'
+ case '1.2.840.113549.1.1.16':
+ return 'SHA512-256'
+ // ECDSA
+ case '1.2.840.10045.4.1':
+ return 'SHA-1'
+ case '1.2.840.10045.4.3.1':
+ return 'SHA-224'
+ case '1.2.840.10045.4.3.2':
+ return 'SHA-256'
+ case '1.2.840.10045.4.3.3':
+ return 'SHA-384'
+ case '1.2.840.10045.4.3.4':
+ return 'SHA-512'
+ // RSASSA-PSS: hash is indicated separately
+ case '1.2.840.113549.1.1.10': {
+ index = indexAfterOID
+ index = expectASN1Seq(data, index).index
+ if (data[index++] !== 0xa0) throw x509Error('non-tag data', data) // a0 = constructed tag 0
+ index = readASN1Length(data, index).index // skip over tag length field
+ index = expectASN1Seq(data, index).index // skip over sequence length field
+ const { oid: hashOID } = readASN1OID(data, index)
+ switch (hashOID) {
+ // standalone hash OIDs
+ case '1.2.840.113549.2.5':
+ return 'MD5'
+ case '1.3.14.3.2.26':
+ return 'SHA-1'
+ case '2.16.840.1.101.3.4.2.1':
+ return 'SHA-256'
+ case '2.16.840.1.101.3.4.2.2':
+ return 'SHA-384'
+ case '2.16.840.1.101.3.4.2.3':
+ return 'SHA-512'
+ }
+ throw x509Error('unknown hash OID ' + hashOID, data)
+ }
+ // Ed25519 -- see https: return//github.com/openssl/openssl/issues/15477
+ case '1.3.101.110':
+ case '1.3.101.112': // ph
+ return 'SHA-512'
+ // Ed448 -- still not in pg 17.2 (if supported, digest would be SHAKE256 x 64 bytes)
+ case '1.3.101.111':
+ case '1.3.101.113': // ph
+ throw x509Error('Ed448 certificate channel binding is not currently supported by Postgres')
+ }
+ throw x509Error('unknown OID ' + oid, data)
+}
+
+module.exports = { signatureAlgorithmHashFromCertificate }
diff --git a/api/node_modules/pg/lib/crypto/sasl.js b/api/node_modules/pg/lib/crypto/sasl.js
new file mode 100644
index 000000000..47b77610c
--- /dev/null
+++ b/api/node_modules/pg/lib/crypto/sasl.js
@@ -0,0 +1,212 @@
+'use strict'
+const crypto = require('./utils')
+const { signatureAlgorithmHashFromCertificate } = require('./cert-signatures')
+
+function startSession(mechanisms, stream) {
+ const candidates = ['SCRAM-SHA-256']
+ if (stream) candidates.unshift('SCRAM-SHA-256-PLUS') // higher-priority, so placed first
+
+ const mechanism = candidates.find((candidate) => mechanisms.includes(candidate))
+
+ if (!mechanism) {
+ throw new Error('SASL: Only mechanism(s) ' + candidates.join(' and ') + ' are supported')
+ }
+
+ if (mechanism === 'SCRAM-SHA-256-PLUS' && typeof stream.getPeerCertificate !== 'function') {
+ // this should never happen if we are really talking to a Postgres server
+ throw new Error('SASL: Mechanism SCRAM-SHA-256-PLUS requires a certificate')
+ }
+
+ const clientNonce = crypto.randomBytes(18).toString('base64')
+ const gs2Header = mechanism === 'SCRAM-SHA-256-PLUS' ? 'p=tls-server-end-point' : stream ? 'y' : 'n'
+
+ return {
+ mechanism,
+ clientNonce,
+ response: gs2Header + ',,n=*,r=' + clientNonce,
+ message: 'SASLInitialResponse',
+ }
+}
+
+async function continueSession(session, password, serverData, stream) {
+ if (session.message !== 'SASLInitialResponse') {
+ throw new Error('SASL: Last message was not SASLInitialResponse')
+ }
+ if (typeof password !== 'string') {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a string')
+ }
+ if (password === '') {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: client password must be a non-empty string')
+ }
+ if (typeof serverData !== 'string') {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: serverData must be a string')
+ }
+
+ const sv = parseServerFirstMessage(serverData)
+
+ if (!sv.nonce.startsWith(session.clientNonce)) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce')
+ } else if (sv.nonce.length === session.clientNonce.length) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce is too short')
+ }
+
+ const clientFirstMessageBare = 'n=*,r=' + session.clientNonce
+ const serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration
+
+ // without channel binding:
+ let channelBinding = stream ? 'eSws' : 'biws' // 'y,,' or 'n,,', base64-encoded
+
+ // override if channel binding is in use:
+ if (session.mechanism === 'SCRAM-SHA-256-PLUS') {
+ const peerCert = stream.getPeerCertificate().raw
+ let hashName = signatureAlgorithmHashFromCertificate(peerCert)
+ if (hashName === 'MD5' || hashName === 'SHA-1') hashName = 'SHA-256'
+ const certHash = await crypto.hashByName(hashName, peerCert)
+ const bindingData = Buffer.concat([Buffer.from('p=tls-server-end-point,,'), Buffer.from(certHash)])
+ channelBinding = bindingData.toString('base64')
+ }
+
+ const clientFinalMessageWithoutProof = 'c=' + channelBinding + ',r=' + sv.nonce
+ const authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof
+
+ const saltBytes = Buffer.from(sv.salt, 'base64')
+ const saltedPassword = await crypto.deriveKey(password, saltBytes, sv.iteration)
+ const clientKey = await crypto.hmacSha256(saltedPassword, 'Client Key')
+ const storedKey = await crypto.sha256(clientKey)
+ const clientSignature = await crypto.hmacSha256(storedKey, authMessage)
+ const clientProof = xorBuffers(Buffer.from(clientKey), Buffer.from(clientSignature)).toString('base64')
+ const serverKey = await crypto.hmacSha256(saltedPassword, 'Server Key')
+ const serverSignatureBytes = await crypto.hmacSha256(serverKey, authMessage)
+
+ session.message = 'SASLResponse'
+ session.serverSignature = Buffer.from(serverSignatureBytes).toString('base64')
+ session.response = clientFinalMessageWithoutProof + ',p=' + clientProof
+}
+
+function finalizeSession(session, serverData) {
+ if (session.message !== 'SASLResponse') {
+ throw new Error('SASL: Last message was not SASLResponse')
+ }
+ if (typeof serverData !== 'string') {
+ throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: serverData must be a string')
+ }
+
+ const { serverSignature } = parseServerFinalMessage(serverData)
+
+ if (serverSignature !== session.serverSignature) {
+ throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match')
+ }
+}
+
+/**
+ * printable = %x21-2B / %x2D-7E
+ * ;; Printable ASCII except ",".
+ * ;; Note that any "printable" is also
+ * ;; a valid "value".
+ */
+function isPrintableChars(text) {
+ if (typeof text !== 'string') {
+ throw new TypeError('SASL: text must be a string')
+ }
+ return text
+ .split('')
+ .map((_, i) => text.charCodeAt(i))
+ .every((c) => (c >= 0x21 && c <= 0x2b) || (c >= 0x2d && c <= 0x7e))
+}
+
+/**
+ * base64-char = ALPHA / DIGIT / "/" / "+"
+ *
+ * base64-4 = 4base64-char
+ *
+ * base64-3 = 3base64-char "="
+ *
+ * base64-2 = 2base64-char "=="
+ *
+ * base64 = *base64-4 [base64-3 / base64-2]
+ */
+function isBase64(text) {
+ return /^(?:[a-zA-Z0-9+/]{4})*(?:[a-zA-Z0-9+/]{2}==|[a-zA-Z0-9+/]{3}=)?$/.test(text)
+}
+
+function parseAttributePairs(text) {
+ if (typeof text !== 'string') {
+ throw new TypeError('SASL: attribute pairs text must be a string')
+ }
+
+ return new Map(
+ text.split(',').map((attrValue) => {
+ if (!/^.=/.test(attrValue)) {
+ throw new Error('SASL: Invalid attribute pair entry')
+ }
+ const name = attrValue[0]
+ const value = attrValue.substring(2)
+ return [name, value]
+ })
+ )
+}
+
+function parseServerFirstMessage(data) {
+ const attrPairs = parseAttributePairs(data)
+
+ const nonce = attrPairs.get('r')
+ if (!nonce) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing')
+ } else if (!isPrintableChars(nonce)) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce must only contain printable characters')
+ }
+ const salt = attrPairs.get('s')
+ if (!salt) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing')
+ } else if (!isBase64(salt)) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt must be base64')
+ }
+ const iterationText = attrPairs.get('i')
+ if (!iterationText) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing')
+ } else if (!/^[1-9][0-9]*$/.test(iterationText)) {
+ throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: invalid iteration count')
+ }
+ const iteration = parseInt(iterationText, 10)
+
+ return {
+ nonce,
+ salt,
+ iteration,
+ }
+}
+
+function parseServerFinalMessage(serverData) {
+ const attrPairs = parseAttributePairs(serverData)
+ const serverSignature = attrPairs.get('v')
+ if (!serverSignature) {
+ throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature is missing')
+ } else if (!isBase64(serverSignature)) {
+ throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature must be base64')
+ }
+ return {
+ serverSignature,
+ }
+}
+
+function xorBuffers(a, b) {
+ if (!Buffer.isBuffer(a)) {
+ throw new TypeError('first argument must be a Buffer')
+ }
+ if (!Buffer.isBuffer(b)) {
+ throw new TypeError('second argument must be a Buffer')
+ }
+ if (a.length !== b.length) {
+ throw new Error('Buffer lengths must match')
+ }
+ if (a.length === 0) {
+ throw new Error('Buffers cannot be empty')
+ }
+ return Buffer.from(a.map((_, i) => a[i] ^ b[i]))
+}
+
+module.exports = {
+ startSession,
+ continueSession,
+ finalizeSession,
+}
diff --git a/api/node_modules/pg/lib/crypto/utils-legacy.js b/api/node_modules/pg/lib/crypto/utils-legacy.js
new file mode 100644
index 000000000..d70fdb638
--- /dev/null
+++ b/api/node_modules/pg/lib/crypto/utils-legacy.js
@@ -0,0 +1,43 @@
+'use strict'
+// This file contains crypto utility functions for versions of Node.js < 15.0.0,
+// which does not support the WebCrypto.subtle API.
+
+const nodeCrypto = require('crypto')
+
+function md5(string) {
+ return nodeCrypto.createHash('md5').update(string, 'utf-8').digest('hex')
+}
+
+// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html
+function postgresMd5PasswordHash(user, password, salt) {
+ const inner = md5(password + user)
+ const outer = md5(Buffer.concat([Buffer.from(inner), salt]))
+ return 'md5' + outer
+}
+
+function sha256(text) {
+ return nodeCrypto.createHash('sha256').update(text).digest()
+}
+
+function hashByName(hashName, text) {
+ hashName = hashName.replace(/(\D)-/, '$1') // e.g. SHA-256 -> SHA256
+ return nodeCrypto.createHash(hashName).update(text).digest()
+}
+
+function hmacSha256(key, msg) {
+ return nodeCrypto.createHmac('sha256', key).update(msg).digest()
+}
+
+async function deriveKey(password, salt, iterations) {
+ return nodeCrypto.pbkdf2Sync(password, salt, iterations, 32, 'sha256')
+}
+
+module.exports = {
+ postgresMd5PasswordHash,
+ randomBytes: nodeCrypto.randomBytes,
+ deriveKey,
+ sha256,
+ hashByName,
+ hmacSha256,
+ md5,
+}
diff --git a/api/node_modules/pg/lib/crypto/utils-webcrypto.js b/api/node_modules/pg/lib/crypto/utils-webcrypto.js
new file mode 100644
index 000000000..65aa4a182
--- /dev/null
+++ b/api/node_modules/pg/lib/crypto/utils-webcrypto.js
@@ -0,0 +1,89 @@
+const nodeCrypto = require('crypto')
+
+module.exports = {
+ postgresMd5PasswordHash,
+ randomBytes,
+ deriveKey,
+ sha256,
+ hashByName,
+ hmacSha256,
+ md5,
+}
+
+/**
+ * The Web Crypto API - grabbed from the Node.js library or the global
+ * @type Crypto
+ */
+// eslint-disable-next-line no-undef
+const webCrypto = nodeCrypto.webcrypto || globalThis.crypto
+/**
+ * The SubtleCrypto API for low level crypto operations.
+ * @type SubtleCrypto
+ */
+const subtleCrypto = webCrypto.subtle
+const textEncoder = new TextEncoder()
+
+/**
+ *
+ * @param {*} length
+ * @returns
+ */
+function randomBytes(length) {
+ return webCrypto.getRandomValues(Buffer.alloc(length))
+}
+
+async function md5(string) {
+ try {
+ return nodeCrypto.createHash('md5').update(string, 'utf-8').digest('hex')
+ } catch (e) {
+ // `createHash()` failed so we are probably not in Node.js, use the WebCrypto API instead.
+ // Note that the MD5 algorithm on WebCrypto is not available in Node.js.
+ // This is why we cannot just use WebCrypto in all environments.
+ const data = typeof string === 'string' ? textEncoder.encode(string) : string
+ const hash = await subtleCrypto.digest('MD5', data)
+ return Array.from(new Uint8Array(hash))
+ .map((b) => b.toString(16).padStart(2, '0'))
+ .join('')
+ }
+}
+
+// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html
+async function postgresMd5PasswordHash(user, password, salt) {
+ const inner = await md5(password + user)
+ const outer = await md5(Buffer.concat([Buffer.from(inner), salt]))
+ return 'md5' + outer
+}
+
+/**
+ * Create a SHA-256 digest of the given data
+ * @param {Buffer} data
+ */
+async function sha256(text) {
+ return await subtleCrypto.digest('SHA-256', text)
+}
+
+async function hashByName(hashName, text) {
+ return await subtleCrypto.digest(hashName, text)
+}
+
+/**
+ * Sign the message with the given key
+ * @param {ArrayBuffer} keyBuffer
+ * @param {string} msg
+ */
+async function hmacSha256(keyBuffer, msg) {
+ const key = await subtleCrypto.importKey('raw', keyBuffer, { name: 'HMAC', hash: 'SHA-256' }, false, ['sign'])
+ return await subtleCrypto.sign('HMAC', key, textEncoder.encode(msg))
+}
+
+/**
+ * Derive a key from the password and salt
+ * @param {string} password
+ * @param {Uint8Array} salt
+ * @param {number} iterations
+ */
+async function deriveKey(password, salt, iterations) {
+ const key = await subtleCrypto.importKey('raw', textEncoder.encode(password), 'PBKDF2', false, ['deriveBits'])
+ const params = { name: 'PBKDF2', hash: 'SHA-256', salt: salt, iterations: iterations }
+ return await subtleCrypto.deriveBits(params, key, 32 * 8, ['deriveBits'])
+}
diff --git a/api/node_modules/pg/lib/crypto/utils.js b/api/node_modules/pg/lib/crypto/utils.js
new file mode 100644
index 000000000..9644b150f
--- /dev/null
+++ b/api/node_modules/pg/lib/crypto/utils.js
@@ -0,0 +1,9 @@
+'use strict'
+
+const useLegacyCrypto = parseInt(process.versions && process.versions.node && process.versions.node.split('.')[0]) < 15
+if (useLegacyCrypto) {
+ // We are on an old version of Node.js that requires legacy crypto utilities.
+ module.exports = require('./utils-legacy')
+} else {
+ module.exports = require('./utils-webcrypto')
+}
diff --git a/api/node_modules/pg/lib/defaults.js b/api/node_modules/pg/lib/defaults.js
new file mode 100644
index 000000000..673696f79
--- /dev/null
+++ b/api/node_modules/pg/lib/defaults.js
@@ -0,0 +1,91 @@
+'use strict'
+
+let user
+try {
+ user = process.platform === 'win32' ? process.env.USERNAME : process.env.USER
+} catch {
+ // ignore, e.g., Deno without --allow-env
+}
+
+module.exports = {
+ // database host. defaults to localhost
+ host: 'localhost',
+
+ // database user's name
+ user,
+
+ // name of database to connect
+ database: undefined,
+
+ // database user's password
+ password: null,
+
+ // a Postgres connection string to be used instead of setting individual connection items
+ // NOTE: Setting this value will cause it to override any other value (such as database or user) defined
+ // in the defaults object.
+ connectionString: undefined,
+
+ // database port
+ port: 5432,
+
+ // number of rows to return at a time from a prepared statement's
+ // portal. 0 will return all rows at once
+ rows: 0,
+
+ // binary result mode
+ binary: false,
+
+ // Connection pool options - see https://github.com/brianc/node-pg-pool
+
+ // number of connections to use in connection pool
+ // 0 will disable connection pooling
+ max: 10,
+
+ // max milliseconds a client can go unused before it is removed
+ // from the pool and destroyed
+ idleTimeoutMillis: 30000,
+
+ client_encoding: '',
+
+ ssl: false,
+
+ application_name: undefined,
+
+ fallback_application_name: undefined,
+
+ options: undefined,
+
+ parseInputDatesAsUTC: false,
+
+ // max milliseconds any query using this connection will execute for before timing out in error.
+ // false=unlimited
+ statement_timeout: false,
+
+ // Abort any statement that waits longer than the specified duration in milliseconds while attempting to acquire a lock.
+ // false=unlimited
+ lock_timeout: false,
+
+ // Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds
+ // false=unlimited
+ idle_in_transaction_session_timeout: false,
+
+ // max milliseconds to wait for query to complete (client side)
+ query_timeout: false,
+
+ connect_timeout: 0,
+
+ keepalives: 1,
+
+ keepalives_idle: 0,
+}
+
+const pgTypes = require('pg-types')
+// save default parsers
+const parseBigInteger = pgTypes.getTypeParser(20, 'text')
+const parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text')
+
+// parse int8 so you can get your count values as actual numbers
+module.exports.__defineSetter__('parseInt8', function (val) {
+ pgTypes.setTypeParser(20, 'text', val ? pgTypes.getTypeParser(23, 'text') : parseBigInteger)
+ pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray)
+})
diff --git a/api/node_modules/pg/lib/index.js b/api/node_modules/pg/lib/index.js
new file mode 100644
index 000000000..e8b746149
--- /dev/null
+++ b/api/node_modules/pg/lib/index.js
@@ -0,0 +1,73 @@
+'use strict'
+
+const Client = require('./client')
+const defaults = require('./defaults')
+const Connection = require('./connection')
+const Result = require('./result')
+const utils = require('./utils')
+const Pool = require('pg-pool')
+const TypeOverrides = require('./type-overrides')
+const { DatabaseError } = require('pg-protocol')
+const { escapeIdentifier, escapeLiteral } = require('./utils')
+
+const poolFactory = (Client) => {
+ return class BoundPool extends Pool {
+ constructor(options) {
+ super(options, Client)
+ }
+ }
+}
+
+const PG = function (clientConstructor) {
+ this.defaults = defaults
+ this.Client = clientConstructor
+ this.Query = this.Client.Query
+ this.Pool = poolFactory(this.Client)
+ this._pools = []
+ this.Connection = Connection
+ this.types = require('pg-types')
+ this.DatabaseError = DatabaseError
+ this.TypeOverrides = TypeOverrides
+ this.escapeIdentifier = escapeIdentifier
+ this.escapeLiteral = escapeLiteral
+ this.Result = Result
+ this.utils = utils
+}
+
+let clientConstructor = Client
+
+let forceNative = false
+try {
+ forceNative = !!process.env.NODE_PG_FORCE_NATIVE
+} catch {
+ // ignore, e.g., Deno without --allow-env
+}
+
+if (forceNative) {
+ clientConstructor = require('./native')
+}
+
+module.exports = new PG(clientConstructor)
+
+// lazy require native module...the native module may not have installed
+Object.defineProperty(module.exports, 'native', {
+ configurable: true,
+ enumerable: false,
+ get() {
+ let native = null
+ try {
+ native = new PG(require('./native'))
+ } catch (err) {
+ if (err.code !== 'MODULE_NOT_FOUND') {
+ throw err
+ }
+ }
+
+ // overwrite module.exports.native so that getter is never called again
+ Object.defineProperty(module.exports, 'native', {
+ value: native,
+ })
+
+ return native
+ },
+})
diff --git a/api/node_modules/pg/lib/native/client.js b/api/node_modules/pg/lib/native/client.js
new file mode 100644
index 000000000..d8bb4dce5
--- /dev/null
+++ b/api/node_modules/pg/lib/native/client.js
@@ -0,0 +1,323 @@
+const nodeUtils = require('util')
+// eslint-disable-next-line
+var Native
+// eslint-disable-next-line no-useless-catch
+try {
+ // Wrap this `require()` in a try-catch to avoid upstream bundlers from complaining that this might not be available since it is an optional import
+ Native = require('pg-native')
+} catch (e) {
+ throw e
+}
+const TypeOverrides = require('../type-overrides')
+const EventEmitter = require('events').EventEmitter
+const util = require('util')
+const ConnectionParameters = require('../connection-parameters')
+
+const NativeQuery = require('./query')
+
+const queryQueueLengthDeprecationNotice = nodeUtils.deprecate(
+ () => {},
+ 'Calling client.query() when the client is already executing a query is deprecated and will be removed in pg@9.0. Use async/await or an external async flow control mechanism instead.'
+)
+
+const Client = (module.exports = function (config) {
+ EventEmitter.call(this)
+ config = config || {}
+
+ this._Promise = config.Promise || global.Promise
+ this._types = new TypeOverrides(config.types)
+
+ this.native = new Native({
+ types: this._types,
+ })
+
+ this._queryQueue = []
+ this._ending = false
+ this._connecting = false
+ this._connected = false
+ this._queryable = true
+
+ // keep these on the object for legacy reasons
+ // for the time being. TODO: deprecate all this jazz
+ const cp = (this.connectionParameters = new ConnectionParameters(config))
+ if (config.nativeConnectionString) cp.nativeConnectionString = config.nativeConnectionString
+ this.user = cp.user
+
+ // "hiding" the password so it doesn't show up in stack traces
+ // or if the client is console.logged
+ Object.defineProperty(this, 'password', {
+ configurable: true,
+ enumerable: false,
+ writable: true,
+ value: cp.password,
+ })
+ this.database = cp.database
+ this.host = cp.host
+ this.port = cp.port
+
+ // a hash to hold named queries
+ this.namedQueries = {}
+})
+
+Client.Query = NativeQuery
+
+util.inherits(Client, EventEmitter)
+
+Client.prototype._errorAllQueries = function (err) {
+ const enqueueError = (query) => {
+ process.nextTick(() => {
+ query.native = this.native
+ query.handleError(err)
+ })
+ }
+
+ if (this._hasActiveQuery()) {
+ enqueueError(this._activeQuery)
+ this._activeQuery = null
+ }
+
+ this._queryQueue.forEach(enqueueError)
+ this._queryQueue.length = 0
+}
+
+// connect to the backend
+// pass an optional callback to be called once connected
+// or with an error if there was a connection error
+Client.prototype._connect = function (cb) {
+ const self = this
+
+ if (this._connecting) {
+ process.nextTick(() => cb(new Error('Client has already been connected. You cannot reuse a client.')))
+ return
+ }
+
+ this._connecting = true
+
+ this.connectionParameters.getLibpqConnectionString(function (err, conString) {
+ if (self.connectionParameters.nativeConnectionString) conString = self.connectionParameters.nativeConnectionString
+ if (err) return cb(err)
+ self.native.connect(conString, function (err) {
+ if (err) {
+ self.native.end()
+ return cb(err)
+ }
+
+ // set internal states to connected
+ self._connected = true
+
+ // handle connection errors from the native layer
+ self.native.on('error', function (err) {
+ self._queryable = false
+ self._errorAllQueries(err)
+ self.emit('error', err)
+ })
+
+ self.native.on('notification', function (msg) {
+ self.emit('notification', {
+ channel: msg.relname,
+ payload: msg.extra,
+ })
+ })
+
+ // signal we are connected now
+ self.emit('connect')
+ self._pulseQueryQueue(true)
+
+ cb(null, this)
+ })
+ })
+}
+
+Client.prototype.connect = function (callback) {
+ if (callback) {
+ this._connect(callback)
+ return
+ }
+
+ return new this._Promise((resolve, reject) => {
+ this._connect((error) => {
+ if (error) {
+ reject(error)
+ } else {
+ resolve(this)
+ }
+ })
+ })
+}
+
+// send a query to the server
+// this method is highly overloaded to take
+// 1) string query, optional array of parameters, optional function callback
+// 2) object query with {
+// string query
+// optional array values,
+// optional function callback instead of as a separate parameter
+// optional string name to name & cache the query plan
+// optional string rowMode = 'array' for an array of results
+// }
+Client.prototype.query = function (config, values, callback) {
+ let query
+ let result
+ let readTimeout
+ let readTimeoutTimer
+ let queryCallback
+
+ if (config === null || config === undefined) {
+ throw new TypeError('Client was passed a null or undefined query')
+ } else if (typeof config.submit === 'function') {
+ readTimeout = config.query_timeout || this.connectionParameters.query_timeout
+ result = query = config
+ // accept query(new Query(...), (err, res) => { }) style
+ if (typeof values === 'function') {
+ config.callback = values
+ }
+ } else {
+ readTimeout = config.query_timeout || this.connectionParameters.query_timeout
+ query = new NativeQuery(config, values, callback)
+ if (!query.callback) {
+ let resolveOut, rejectOut
+ result = new this._Promise((resolve, reject) => {
+ resolveOut = resolve
+ rejectOut = reject
+ }).catch((err) => {
+ Error.captureStackTrace(err)
+ throw err
+ })
+ query.callback = (err, res) => (err ? rejectOut(err) : resolveOut(res))
+ }
+ }
+
+ if (readTimeout) {
+ queryCallback = query.callback || (() => {})
+
+ readTimeoutTimer = setTimeout(() => {
+ const error = new Error('Query read timeout')
+
+ process.nextTick(() => {
+ query.handleError(error, this.connection)
+ })
+
+ queryCallback(error)
+
+ // we already returned an error,
+ // just do nothing if query completes
+ query.callback = () => {}
+
+ // Remove from queue
+ const index = this._queryQueue.indexOf(query)
+ if (index > -1) {
+ this._queryQueue.splice(index, 1)
+ }
+
+ this._pulseQueryQueue()
+ }, readTimeout)
+
+ query.callback = (err, res) => {
+ clearTimeout(readTimeoutTimer)
+ queryCallback(err, res)
+ }
+ }
+
+ if (!this._queryable) {
+ query.native = this.native
+ process.nextTick(() => {
+ query.handleError(new Error('Client has encountered a connection error and is not queryable'))
+ })
+ return result
+ }
+
+ if (this._ending) {
+ query.native = this.native
+ process.nextTick(() => {
+ query.handleError(new Error('Client was closed and is not queryable'))
+ })
+ return result
+ }
+
+ if (this._queryQueue.length > 0) {
+ queryQueueLengthDeprecationNotice()
+ }
+
+ this._queryQueue.push(query)
+ this._pulseQueryQueue()
+ return result
+}
+
+// disconnect from the backend server
+Client.prototype.end = function (cb) {
+ const self = this
+
+ this._ending = true
+
+ if (!this._connected) {
+ this.once('connect', this.end.bind(this, cb))
+ }
+ let result
+ if (!cb) {
+ result = new this._Promise(function (resolve, reject) {
+ cb = (err) => (err ? reject(err) : resolve())
+ })
+ }
+
+ this.native.end(function () {
+ self._connected = false
+
+ self._errorAllQueries(new Error('Connection terminated'))
+
+ process.nextTick(() => {
+ self.emit('end')
+ if (cb) cb()
+ })
+ })
+ return result
+}
+
+Client.prototype._hasActiveQuery = function () {
+ return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end'
+}
+
+Client.prototype._pulseQueryQueue = function (initialConnection) {
+ if (!this._connected) {
+ return
+ }
+ if (this._hasActiveQuery()) {
+ return
+ }
+ const query = this._queryQueue.shift()
+ if (!query) {
+ if (!initialConnection) {
+ this.emit('drain')
+ }
+ return
+ }
+ this._activeQuery = query
+ query.submit(this)
+ const self = this
+ query.once('_done', function () {
+ self._pulseQueryQueue()
+ })
+}
+
+// attempt to cancel an in-progress query
+Client.prototype.cancel = function (query) {
+ if (this._activeQuery === query) {
+ this.native.cancel(function () {})
+ } else if (this._queryQueue.indexOf(query) !== -1) {
+ this._queryQueue.splice(this._queryQueue.indexOf(query), 1)
+ }
+}
+
+Client.prototype.ref = function () {}
+Client.prototype.unref = function () {}
+
+Client.prototype.setTypeParser = function (oid, format, parseFn) {
+ return this._types.setTypeParser(oid, format, parseFn)
+}
+
+Client.prototype.getTypeParser = function (oid, format) {
+ return this._types.getTypeParser(oid, format)
+}
+
+Client.prototype.isConnected = function () {
+ return this._connected
+}
diff --git a/api/node_modules/pg/lib/native/index.js b/api/node_modules/pg/lib/native/index.js
new file mode 100644
index 000000000..eead422a3
--- /dev/null
+++ b/api/node_modules/pg/lib/native/index.js
@@ -0,0 +1,2 @@
+'use strict'
+// entry point for the native bindings: re-export the libpq-backed Client
+module.exports = require('./client')
diff --git a/api/node_modules/pg/lib/native/query.js b/api/node_modules/pg/lib/native/query.js
new file mode 100644
index 000000000..e02294f63
--- /dev/null
+++ b/api/node_modules/pg/lib/native/query.js
@@ -0,0 +1,165 @@
+'use strict'
+
+const EventEmitter = require('events').EventEmitter
+const util = require('util')
+const utils = require('../utils')
+
+// Query implementation backed by the native libpq bindings; mirrors the
+// event/callback interface of lib/query.js so the two clients interchange.
+const NativeQuery = (module.exports = function (config, values, callback) {
+  EventEmitter.call(this)
+  config = utils.normalizeQueryConfig(config, values, callback)
+  this.text = config.text
+  this.values = config.values
+  this.name = config.name
+  this.queryMode = config.queryMode
+  this.callback = config.callback
+  this.state = 'new'
+  this._arrayMode = config.rowMode === 'array'
+
+  // if the 'row' event is listened for
+  // then emit them as they come in
+  // without setting singleRowMode to true
+  // this has almost no meaning because libpq
+  // reads all rows into memory before returning any
+  this._emitRowEvents = false
+  this.on(
+    'newListener',
+    function (event) {
+      if (event === 'row') this._emitRowEvents = true
+    }.bind(this)
+  )
+})
+
+util.inherits(NativeQuery, EventEmitter)
+
+// maps libpq's error field names onto the property names used by the
+// pure-javascript driver, so errors look identical under either backend
+const errorFieldMap = {
+  sqlState: 'code',
+  statementPosition: 'position',
+  messagePrimary: 'message',
+  context: 'where',
+  schemaName: 'schema',
+  tableName: 'table',
+  columnName: 'column',
+  dataTypeName: 'dataType',
+  constraintName: 'constraint',
+  sourceFile: 'file',
+  sourceLine: 'line',
+  sourceFunction: 'routine',
+}
+
+// decorate the error with server-reported fields, then deliver it via the
+// callback when one was supplied, otherwise as an 'error' event
+NativeQuery.prototype.handleError = function (err) {
+  // copy pq error fields into the error object
+  const fields = this.native.pq.resultErrorFields()
+  if (fields) {
+    for (const key in fields) {
+      const normalizedFieldName = errorFieldMap[key] || key
+      err[normalizedFieldName] = fields[key]
+    }
+  }
+  if (this.callback) {
+    this.callback(err)
+  } else {
+    this.emit('error', err)
+  }
+  this.state = 'error'
+}
+
+NativeQuery.prototype.then = function (onSuccess, onFailure) {
+ return this._getPromise().then(onSuccess, onFailure)
+}
+
+NativeQuery.prototype.catch = function (callback) {
+ return this._getPromise().catch(callback)
+}
+
+NativeQuery.prototype._getPromise = function () {
+ if (this._promise) return this._promise
+ this._promise = new Promise(
+ function (resolve, reject) {
+ this._once('end', resolve)
+ this._once('error', reject)
+ }.bind(this)
+ )
+ return this._promise
+}
+
+// called by the client to execute this query via libpq; dispatches to the
+// named, parameterized, or simple execution path based on the query config
+NativeQuery.prototype.submit = function (client) {
+  this.state = 'running'
+  const self = this
+  this.native = client.native
+  client.native.arrayMode = this._arrayMode
+
+  // completion handler shared by every libpq call below
+  let after = function (err, rows, results) {
+    client.native.arrayMode = false
+    setImmediate(function () {
+      self.emit('_done')
+    })
+
+    // handle possible query error
+    if (err) {
+      return self.handleError(err)
+    }
+
+    // emit row events for each row in the result
+    if (self._emitRowEvents) {
+      if (results.length > 1) {
+        // multi-statement query: rows is an array of row arrays
+        rows.forEach((rowOfRows, i) => {
+          rowOfRows.forEach((row) => {
+            self.emit('row', row, results[i])
+          })
+        })
+      } else {
+        rows.forEach(function (row) {
+          self.emit('row', row, results)
+        })
+      }
+    }
+
+    // handle successful result
+    self.state = 'end'
+    self.emit('end', results)
+    if (self.callback) {
+      self.callback(null, results)
+    }
+  }
+
+  // preserve legacy domain context across the native callback boundary
+  if (process.domain) {
+    after = process.domain.bind(after)
+  }
+
+  // named query
+  if (this.name) {
+    if (this.name.length > 63) {
+      console.error('Warning! Postgres only supports 63 characters for query names.')
+      console.error('You supplied %s (%s)', this.name, this.name.length)
+      console.error('This can cause conflicts and silent errors executing queries')
+    }
+    const values = (this.values || []).map(utils.prepareValue)
+
+    // check if the client has already executed this named query
+    // if so...just execute it again - skip the planning phase
+    if (client.namedQueries[this.name]) {
+      if (this.text && client.namedQueries[this.name] !== this.text) {
+        const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`)
+        return after(err)
+      }
+      return client.native.execute(this.name, values, after)
+    }
+    // plan the named query the first time, then execute it
+    return client.native.prepare(this.name, this.text, values.length, function (err) {
+      if (err) return after(err)
+      client.namedQueries[self.name] = self.text
+      return self.native.execute(self.name, values, after)
+    })
+  } else if (this.values) {
+    if (!Array.isArray(this.values)) {
+      const err = new Error('Query values must be an array')
+      return after(err)
+    }
+    const vals = this.values.map(utils.prepareValue)
+    client.native.query(this.text, vals, after)
+  } else if (this.queryMode === 'extended') {
+    // extended protocol requested explicitly, even with no parameters
+    client.native.query(this.text, [], after)
+  } else {
+    client.native.query(this.text, after)
+  }
+}
diff --git a/api/node_modules/pg/lib/query.js b/api/node_modules/pg/lib/query.js
new file mode 100644
index 000000000..64aab5ff2
--- /dev/null
+++ b/api/node_modules/pg/lib/query.js
@@ -0,0 +1,252 @@
+'use strict'
+
+const { EventEmitter } = require('events')
+
+const Result = require('./result')
+const utils = require('./utils')
+
+// A single query run over the pure-javascript protocol implementation.
+// Emits 'row'/'error'/'end' events and/or invokes the supplied callback;
+// the client drives it through the handle* methods as protocol messages
+// arrive from the backend.
+class Query extends EventEmitter {
+  constructor(config, values, callback) {
+    super()
+
+    config = utils.normalizeQueryConfig(config, values, callback)
+
+    this.text = config.text
+    this.values = config.values
+    this.rows = config.rows
+    this.types = config.types
+    this.name = config.name
+    this.queryMode = config.queryMode
+    this.binary = config.binary
+    // use unique portal name each time
+    this.portal = config.portal || ''
+    this.callback = config.callback
+    this._rowMode = config.rowMode
+    if (process.domain && config.callback) {
+      this.callback = process.domain.bind(config.callback)
+    }
+    this._result = new Result(this._rowMode, this.types)
+
+    // potential for multiple results
+    this._results = this._result
+    this._canceledDueToError = false
+  }
+
+  // whether this query must use the extended (parse/bind/execute) protocol
+  // instead of a simple query message
+  requiresPreparation() {
+    if (this.queryMode === 'extended') {
+      return true
+    }
+
+    // named queries must always be prepared
+    if (this.name) {
+      return true
+    }
+    // always prepare if there are max number of rows expected per
+    // portal execution
+    if (this.rows) {
+      return true
+    }
+    // don't prepare empty text queries
+    if (!this.text) {
+      return false
+    }
+    // prepare if there are values
+    if (!this.values) {
+      return false
+    }
+    return this.values.length > 0
+  }
+
+  _checkForMultirow() {
+    // if we already have a result with a command property
+    // then we've already executed one query in a multi-statement simple query
+    // turn our results into an array of results
+    if (this._result.command) {
+      if (!Array.isArray(this._results)) {
+        this._results = [this._result]
+      }
+      this._result = new Result(this._rowMode, this._result._types)
+      this._results.push(this._result)
+    }
+  }
+
+  // associates row metadata from the supplied
+  // message with this query object
+  // metadata used when parsing row results
+  handleRowDescription(msg) {
+    this._checkForMultirow()
+    this._result.addFields(msg.fields)
+    // buffer rows when a callback will consume the result, or when nobody
+    // listens for 'row' events (promise mode)
+    this._accumulateRows = this.callback || !this.listeners('row').length
+  }
+
+  // parse one DataRow message; parse errors are deferred until ReadyForQuery
+  handleDataRow(msg) {
+    let row
+
+    if (this._canceledDueToError) {
+      return
+    }
+
+    try {
+      row = this._result.parseRow(msg.fields)
+    } catch (err) {
+      this._canceledDueToError = err
+      return
+    }
+
+    this.emit('row', row, this._result)
+    if (this._accumulateRows) {
+      this._result.addRow(row)
+    }
+  }
+
+  handleCommandComplete(msg, connection) {
+    this._checkForMultirow()
+    this._result.addCommandComplete(msg)
+    // need to sync after each command complete of a prepared statement
+    // if we were using a row count which results in multiple calls to _getRows
+    if (this.rows) {
+      connection.sync()
+    }
+  }
+
+  // if a named prepared statement is created with empty query text
+  // the backend will send an emptyQuery message but *not* a command complete message
+  // since we pipeline sync immediately after execute we don't need to do anything here
+  // unless we have rows specified, in which case we did not pipeline the initial sync call
+  handleEmptyQuery(connection) {
+    if (this.rows) {
+      connection.sync()
+    }
+  }
+
+  handleError(err, connection) {
+    // need to sync after error during a prepared statement
+    if (this._canceledDueToError) {
+      err = this._canceledDueToError
+      this._canceledDueToError = false
+    }
+    // if callback supplied do not emit error event as uncaught error
+    // events will bubble up to node process
+    if (this.callback) {
+      return this.callback(err)
+    }
+    this.emit('error', err)
+  }
+
+  // final message of a query cycle: deliver the accumulated result(s)
+  handleReadyForQuery(con) {
+    if (this._canceledDueToError) {
+      return this.handleError(this._canceledDueToError, con)
+    }
+    if (this.callback) {
+      try {
+        this.callback(null, this._results)
+      } catch (err) {
+        // rethrow outside the protocol handler so user errors surface
+        process.nextTick(() => {
+          throw err
+        })
+      }
+    }
+    this.emit('end', this._results)
+  }
+
+  // called by the client when this query reaches the front of the queue;
+  // returns an Error (not thrown) for invalid configurations, null otherwise
+  submit(connection) {
+    if (typeof this.text !== 'string' && typeof this.name !== 'string') {
+      return new Error('A query must have either text or a name. Supplying neither is unsupported.')
+    }
+    const previous = connection.parsedStatements[this.name]
+    if (this.text && previous && this.text !== previous) {
+      return new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`)
+    }
+    if (this.values && !Array.isArray(this.values)) {
+      return new Error('Query values must be an array')
+    }
+    if (this.requiresPreparation()) {
+      // If we're using the extended query protocol we fire off several separate commands
+      // to the backend. On some versions of node & some operating system versions
+      // the network stack writes each message separately instead of buffering them together
+      // causing the client & network to send more slowly. Corking & uncorking the stream
+      // allows node to buffer up the messages internally before sending them all off at once.
+      // note: we're checking for existence of cork/uncork because some versions of streams
+      // might not have this (cloudflare?)
+      connection.stream.cork && connection.stream.cork()
+      try {
+        this.prepare(connection)
+      } finally {
+        // while unlikely for this.prepare to throw, if it does & we don't uncork this stream
+        // this client becomes unresponsive, so put in finally block "just in case"
+        connection.stream.uncork && connection.stream.uncork()
+      }
+    } else {
+      connection.query(this.text)
+    }
+    return null
+  }
+
+  // true when this named statement was already parsed on this connection
+  hasBeenParsed(connection) {
+    return this.name && connection.parsedStatements[this.name]
+  }
+
+  // backend paused after `rows` rows; request the next page of the portal
+  handlePortalSuspended(connection) {
+    this._getRows(connection, this.rows)
+  }
+
+  _getRows(connection, rows) {
+    connection.execute({
+      portal: this.portal,
+      rows: rows,
+    })
+    // if we're not reading pages of rows send the sync command
+    // to indicate the pipeline is finished
+    if (!rows) {
+      connection.sync()
+    } else {
+      // otherwise flush the call out to read more rows
+      connection.flush()
+    }
+  }
+
+  // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY
+  prepare(connection) {
+    // TODO refactor this poor encapsulation
+    if (!this.hasBeenParsed(connection)) {
+      connection.parse({
+        text: this.text,
+        name: this.name,
+        types: this.types,
+      })
+    }
+
+    // because we're mapping user supplied values to
+    // postgres wire protocol compatible values it could
+    // throw an exception, so try/catch this section
+    try {
+      connection.bind({
+        portal: this.portal,
+        statement: this.name,
+        values: this.values,
+        binary: this.binary,
+        valueMapper: utils.prepareValue,
+      })
+    } catch (err) {
+      this.handleError(err, connection)
+      return
+    }
+
+    connection.describe({
+      type: 'P',
+      name: this.portal || '',
+    })
+
+    this._getRows(connection, this.rows)
+  }
+
+  // we never supply a source stream, so refuse COPY FROM STDIN requests
+  handleCopyInResponse(connection) {
+    connection.sendCopyFail('No source stream defined')
+  }
+
+  handleCopyData(msg, connection) {
+    // noop
+  }
+}
+
+module.exports = Query
diff --git a/api/node_modules/pg/lib/result.js b/api/node_modules/pg/lib/result.js
new file mode 100644
index 000000000..0ab7bb80c
--- /dev/null
+++ b/api/node_modules/pg/lib/result.js
@@ -0,0 +1,109 @@
+'use strict'
+
+const types = require('pg-types')
+
+// parses the command tag plus up to two trailing integers out of a
+// CommandComplete string, e.g. 'INSERT 0 5' or 'UPDATE 42'
+const matchRegexp = /^([A-Za-z]+)(?: (\d+))?(?: (\d+))?/
+
+// result object returned from query
+// in the 'end' event and also
+// passed as second argument to provided callback
+class Result {
+  constructor(rowMode, types) {
+    this.command = null
+    this.rowCount = null
+    this.oid = null
+    this.rows = []
+    this.fields = []
+    this._parsers = undefined
+    this._types = types
+    this.RowCtor = null
+    this.rowAsArray = rowMode === 'array'
+    // in array mode rows are plain arrays instead of keyed objects
+    if (this.rowAsArray) {
+      this.parseRow = this._parseRowAsArray
+    }
+    this._prebuiltEmptyResultObject = null
+  }
+
+  // adds a command complete message
+  addCommandComplete(msg) {
+    let match
+    if (msg.text) {
+      // pure javascript
+      match = matchRegexp.exec(msg.text)
+    } else {
+      // native bindings
+      match = matchRegexp.exec(msg.command)
+    }
+    if (match) {
+      this.command = match[1]
+      if (match[3]) {
+        // COMMAND OID ROWS
+        this.oid = parseInt(match[2], 10)
+        this.rowCount = parseInt(match[3], 10)
+      } else if (match[2]) {
+        // COMMAND ROWS
+        this.rowCount = parseInt(match[2], 10)
+      }
+    }
+  }
+
+  // parse raw field values into a positional array (rowMode: 'array')
+  _parseRowAsArray(rowData) {
+    const row = new Array(rowData.length)
+    for (let i = 0, len = rowData.length; i < len; i++) {
+      const rawValue = rowData[i]
+      if (rawValue !== null) {
+        row[i] = this._parsers[i](rawValue)
+      } else {
+        row[i] = null
+      }
+    }
+    return row
+  }
+
+  // parse raw field values into an object keyed by column name
+  parseRow(rowData) {
+    // clone the prebuilt template so every row has all columns (null default)
+    const row = { ...this._prebuiltEmptyResultObject }
+    for (let i = 0, len = rowData.length; i < len; i++) {
+      const rawValue = rowData[i]
+      const field = this.fields[i].name
+      if (rawValue !== null) {
+        const v = this.fields[i].format === 'binary' ? Buffer.from(rawValue) : rawValue
+        row[field] = this._parsers[i](v)
+      } else {
+        row[field] = null
+      }
+    }
+    return row
+  }
+
+  addRow(row) {
+    this.rows.push(row)
+  }
+
+  addFields(fieldDescriptions) {
+    // clears field definitions
+    // multiple query statements in 1 action can result in multiple sets
+    // of rowDescriptions...eg: 'select NOW(); select 1::int;'
+    // you need to reset the fields
+    this.fields = fieldDescriptions
+    if (this.fields.length) {
+      this._parsers = new Array(fieldDescriptions.length)
+    }
+
+    const row = {}
+
+    for (let i = 0; i < fieldDescriptions.length; i++) {
+      const desc = fieldDescriptions[i]
+      row[desc.name] = null
+
+      // per-query type overrides win over the global pg-types registry
+      if (this._types) {
+        this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text')
+      } else {
+        this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text')
+      }
+    }
+
+    this._prebuiltEmptyResultObject = { ...row }
+  }
+}
+
+module.exports = Result
diff --git a/api/node_modules/pg/lib/stream.js b/api/node_modules/pg/lib/stream.js
new file mode 100644
index 000000000..edc301833
--- /dev/null
+++ b/api/node_modules/pg/lib/stream.js
@@ -0,0 +1,83 @@
+// resolve the stream implementations once, at module load time
+const { getStream, getSecureStream } = getStreamFuncs()
+
+module.exports = {
+  /**
+   * Get a socket stream compatible with the current runtime environment.
+   * @returns {Duplex}
+   */
+  getStream,
+  /**
+   * Get a TLS secured socket, compatible with the current environment,
+   * using the socket and other settings given in `options`.
+   * @returns {Duplex}
+   */
+  getSecureStream,
+}
+
+/**
+ * The stream functions that work in Node.js
+ */
+function getNodejsStreamFuncs() {
+  function getStream(ssl) {
+    // `ssl` is unused on Node.js; TLS is layered on later via getSecureStream
+    const net = require('net')
+    return new net.Socket()
+  }
+
+  function getSecureStream(options) {
+    const tls = require('tls')
+    return tls.connect(options)
+  }
+  return {
+    getStream,
+    getSecureStream,
+  }
+}
+
+/**
+ * The stream functions that work in Cloudflare Workers
+ */
+function getCloudflareStreamFuncs() {
+  function getStream(ssl) {
+    const { CloudflareSocket } = require('pg-cloudflare')
+    return new CloudflareSocket(ssl)
+  }
+
+  function getSecureStream(options) {
+    // Cloudflare sockets upgrade in place rather than wrapping a new stream
+    options.socket.startTls(options)
+    return options.socket
+  }
+  return {
+    getStream,
+    getSecureStream,
+  }
+}
+
+/**
+ * Are we running in a Cloudflare Worker?
+ *
+ * @returns true if the code is currently running inside a Cloudflare Worker.
+ */
+function isCloudflareRuntime() {
+  // Since 2022-03-21 the `global_navigator` compatibility flag is on for Cloudflare Workers
+  // which means that `navigator.userAgent` will be defined.
+  // eslint-disable-next-line no-undef
+  if (typeof navigator === 'object' && navigator !== null && typeof navigator.userAgent === 'string') {
+    // eslint-disable-next-line no-undef
+    return navigator.userAgent === 'Cloudflare-Workers'
+  }
+  // In case `navigator` or `navigator.userAgent` is not defined then try a more sneaky approach
+  if (typeof Response === 'function') {
+    // the `cf` init option only survives on Cloudflare's Response implementation
+    const resp = new Response(null, { cf: { thing: true } })
+    if (typeof resp.cf === 'object' && resp.cf !== null && resp.cf.thing) {
+      return true
+    }
+  }
+  return false
+}
+
+// pick the stream implementation pair matching the detected runtime
+function getStreamFuncs() {
+  if (isCloudflareRuntime()) {
+    return getCloudflareStreamFuncs()
+  }
+  return getNodejsStreamFuncs()
+}
diff --git a/api/node_modules/pg/lib/type-overrides.js b/api/node_modules/pg/lib/type-overrides.js
new file mode 100644
index 000000000..9d219e525
--- /dev/null
+++ b/api/node_modules/pg/lib/type-overrides.js
@@ -0,0 +1,35 @@
+'use strict'
+
+const types = require('pg-types')
+
+// Wraps a pg-types-compatible registry with per-instance parser overrides:
+// lookups consult the local override table first, then fall back to the
+// wrapped implementation.
+function TypeOverrides(userTypes) {
+  this._types = userTypes || types
+  this.text = {}
+  this.binary = {}
+}
+
+// return the override table for a wire format ('text' or 'binary')
+TypeOverrides.prototype.getOverrides = function (format) {
+  switch (format) {
+    case 'text':
+      return this.text
+    case 'binary':
+      return this.binary
+    default:
+      return {}
+  }
+}
+
+// register a parser for an oid; format defaults to 'text' when omitted
+TypeOverrides.prototype.setTypeParser = function (oid, format, parseFn) {
+  if (typeof format === 'function') {
+    parseFn = format
+    format = 'text'
+  }
+  this.getOverrides(format)[oid] = parseFn
+}
+
+TypeOverrides.prototype.getTypeParser = function (oid, format) {
+  format = format || 'text'
+  return this.getOverrides(format)[oid] || this._types.getTypeParser(oid, format)
+}
+
+module.exports = TypeOverrides
diff --git a/api/node_modules/pg/lib/utils.js b/api/node_modules/pg/lib/utils.js
new file mode 100644
index 000000000..e23a55e9a
--- /dev/null
+++ b/api/node_modules/pg/lib/utils.js
@@ -0,0 +1,217 @@
+'use strict'
+
+const defaults = require('./defaults')
+
+const util = require('util')
+const { isDate } = util.types || util // Node 8 doesn't have `util.types`
+
+// quote one array element, escaping backslashes and double quotes
+function escapeElement(elementRepresentation) {
+  const escaped = elementRepresentation.replace(/\\/g, '\\\\').replace(/"/g, '\\"')
+
+  return '"' + escaped + '"'
+}
+
+// convert a JS array to a postgres array literal
+// uses comma separator so won't work for types like box that use
+// a different array separator.
+function arrayString(val) {
+  let result = '{'
+  for (let i = 0; i < val.length; i++) {
+    if (i > 0) {
+      result = result + ','
+    }
+    if (val[i] === null || typeof val[i] === 'undefined') {
+      result = result + 'NULL'
+    } else if (Array.isArray(val[i])) {
+      // nested arrays recurse into nested literals
+      result = result + arrayString(val[i])
+    } else if (ArrayBuffer.isView(val[i])) {
+      let item = val[i]
+      // normalize typed-array views to a Buffer over the same bytes
+      if (!(item instanceof Buffer)) {
+        const buf = Buffer.from(item.buffer, item.byteOffset, item.byteLength)
+        if (buf.length === item.byteLength) {
+          item = buf
+        } else {
+          item = buf.slice(item.byteOffset, item.byteOffset + item.byteLength)
+        }
+      }
+      // binary data is rendered in hex (\x...) form
+      result += '\\\\x' + item.toString('hex')
+    } else {
+      result += escapeElement(prepareValue(val[i]))
+    }
+  }
+  result = result + '}'
+  return result
+}
+
+// converts values from javascript types
+// to their 'raw' counterparts for use as a postgres parameter
+// note: you can override this function to provide your own conversion mechanism
+// for complex types, etc...
+// `seen` tracks objects visited during toPostgres recursion (cycle guard)
+const prepareValue = function (val, seen) {
+  // null and undefined are both null for postgres
+  if (val == null) {
+    return null
+  }
+  if (typeof val === 'object') {
+    if (val instanceof Buffer) {
+      return val
+    }
+    if (ArrayBuffer.isView(val)) {
+      const buf = Buffer.from(val.buffer, val.byteOffset, val.byteLength)
+      if (buf.length === val.byteLength) {
+        return buf
+      }
+      return buf.slice(val.byteOffset, val.byteOffset + val.byteLength) // Node.js v4 does not support those Buffer.from params
+    }
+    if (isDate(val)) {
+      if (defaults.parseInputDatesAsUTC) {
+        return dateToStringUTC(val)
+      } else {
+        return dateToString(val)
+      }
+    }
+    if (Array.isArray(val)) {
+      return arrayString(val)
+    }
+
+    // any other object: custom toPostgres hook or JSON serialization
+    return prepareObject(val, seen)
+  }
+  return val.toString()
+}
+
+// serialize a plain object parameter: honor a custom toPostgres() hook
+// (guarding against circular toPostgres chains), otherwise JSON-encode
+function prepareObject(val, seen) {
+  if (val && typeof val.toPostgres === 'function') {
+    seen = seen || []
+    if (seen.indexOf(val) !== -1) {
+      throw new Error('circular reference detected while preparing "' + val + '" for query')
+    }
+    seen.push(val)
+
+    return prepareValue(val.toPostgres(prepareValue), seen)
+  }
+  return JSON.stringify(val)
+}
+
+// format a Date in local time as an ISO-like timestamp with explicit
+// timezone offset (and a trailing ' BC' for year <= 0)
+function dateToString(date) {
+  let offset = -date.getTimezoneOffset()
+
+  let year = date.getFullYear()
+  const isBCYear = year < 1
+  if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation
+
+  let ret =
+    String(year).padStart(4, '0') +
+    '-' +
+    String(date.getMonth() + 1).padStart(2, '0') +
+    '-' +
+    String(date.getDate()).padStart(2, '0') +
+    'T' +
+    String(date.getHours()).padStart(2, '0') +
+    ':' +
+    String(date.getMinutes()).padStart(2, '0') +
+    ':' +
+    String(date.getSeconds()).padStart(2, '0') +
+    '.' +
+    String(date.getMilliseconds()).padStart(3, '0')
+
+  // append the signed HH:MM offset from UTC
+  if (offset < 0) {
+    ret += '-'
+    offset *= -1
+  } else {
+    ret += '+'
+  }
+
+  ret += String(Math.floor(offset / 60)).padStart(2, '0') + ':' + String(offset % 60).padStart(2, '0')
+  if (isBCYear) ret += ' BC'
+  return ret
+}
+
+// same as dateToString, but rendered in UTC with a fixed +00:00 offset
+function dateToStringUTC(date) {
+  let year = date.getUTCFullYear()
+  const isBCYear = year < 1
+  if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation
+
+  let ret =
+    String(year).padStart(4, '0') +
+    '-' +
+    String(date.getUTCMonth() + 1).padStart(2, '0') +
+    '-' +
+    String(date.getUTCDate()).padStart(2, '0') +
+    'T' +
+    String(date.getUTCHours()).padStart(2, '0') +
+    ':' +
+    String(date.getUTCMinutes()).padStart(2, '0') +
+    ':' +
+    String(date.getUTCSeconds()).padStart(2, '0') +
+    '.' +
+    String(date.getUTCMilliseconds()).padStart(3, '0')
+
+  ret += '+00:00'
+  if (isBCYear) ret += ' BC'
+  return ret
+}
+
+// accept (text), (text, values), (text, values, callback), (text, callback)
+// and (configObject, ...) call shapes; return a single config object
+function normalizeQueryConfig(config, values, callback) {
+  // can take in strings or config objects
+  config = typeof config === 'string' ? { text: config } : config
+  if (values) {
+    if (typeof values === 'function') {
+      // (text, callback) shape: second argument is actually the callback
+      config.callback = values
+    } else {
+      config.values = values
+    }
+  }
+  if (callback) {
+    config.callback = callback
+  }
+  return config
+}
+
+// Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c
+// quote an identifier (table/column name) by doubling embedded double quotes
+const escapeIdentifier = function (str) {
+  return '"' + str.replace(/"/g, '""') + '"'
+}
+
+// quote a string literal, doubling quotes/backslashes; non-strings and
+// null/undefined collapse to the empty literal ''
+const escapeLiteral = function (str) {
+  let hasBackslash = false
+  let escaped = "'"
+
+  if (str == null) {
+    return "''"
+  }
+
+  if (typeof str !== 'string') {
+    return "''"
+  }
+
+  for (let i = 0; i < str.length; i++) {
+    const c = str[i]
+    if (c === "'") {
+      escaped += c + c
+    } else if (c === '\\') {
+      escaped += c + c
+      hasBackslash = true
+    } else {
+      escaped += c
+    }
+  }
+
+  escaped += "'"
+
+  // backslashes require the E'' (escape string) literal form
+  if (hasBackslash === true) {
+    escaped = ' E' + escaped
+  }
+
+  return escaped
+}
+
+module.exports = {
+  prepareValue: function prepareValueWrapper(value) {
+    // this ensures that extra arguments do not get passed into prepareValue
+    // by accident, eg: from calling values.map(utils.prepareValue)
+    return prepareValue(value)
+  },
+  normalizeQueryConfig,
+  escapeIdentifier,
+  escapeLiteral,
+}
diff --git a/api/node_modules/pg/package.json b/api/node_modules/pg/package.json
new file mode 100644
index 000000000..27f188507
--- /dev/null
+++ b/api/node_modules/pg/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "pg",
+ "version": "8.20.0",
+ "description": "PostgreSQL client - pure javascript & libpq with the same API",
+ "keywords": [
+ "database",
+ "libpq",
+ "pg",
+ "postgre",
+ "postgres",
+ "postgresql",
+ "rdbms"
+ ],
+ "homepage": "https://github.com/brianc/node-postgres",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/brianc/node-postgres.git",
+ "directory": "packages/pg"
+ },
+ "author": "Brian Carlson ",
+ "main": "./lib",
+ "exports": {
+ ".": {
+ "import": "./esm/index.mjs",
+ "require": "./lib/index.js",
+ "default": "./lib/index.js"
+ },
+ "./package.json": {
+ "default": "./package.json"
+ },
+ "./lib/*": "./lib/*.js",
+ "./lib/*.js": "./lib/*.js"
+ },
+ "dependencies": {
+ "pg-connection-string": "^2.12.0",
+ "pg-pool": "^3.13.0",
+ "pg-protocol": "^1.13.0",
+ "pg-types": "2.2.0",
+ "pgpass": "1.0.5"
+ },
+ "devDependencies": {
+ "@cloudflare/vitest-pool-workers": "0.8.23",
+ "@cloudflare/workers-types": "^4.20230404.0",
+ "async": "2.6.4",
+ "bluebird": "3.7.2",
+ "co": "4.6.0",
+ "pg-copy-streams": "0.3.0",
+ "typescript": "^4.0.3",
+ "vitest": "~3.0.9",
+ "wrangler": "^3.x"
+ },
+ "optionalDependencies": {
+ "pg-cloudflare": "^1.3.0"
+ },
+ "peerDependencies": {
+ "pg-native": ">=3.0.1"
+ },
+ "peerDependenciesMeta": {
+ "pg-native": {
+ "optional": true
+ }
+ },
+ "scripts": {
+ "test": "make test-all"
+ },
+ "files": [
+ "lib",
+ "esm",
+ "SPONSORS.md"
+ ],
+ "license": "MIT",
+ "engines": {
+ "node": ">= 16.0.0"
+ },
+ "gitHead": "c9070cc8d526fca65780cedc25c1966b57cf7532"
+}
diff --git a/api/node_modules/pgpass/README.md b/api/node_modules/pgpass/README.md
new file mode 100644
index 000000000..bbc51939e
--- /dev/null
+++ b/api/node_modules/pgpass/README.md
@@ -0,0 +1,74 @@
+# pgpass
+
+[![Build Status](https://github.com/hoegaarden/pgpass/workflows/CI/badge.svg?branch=master)](https://github.com/hoegaarden/pgpass/actions?query=workflow%3ACI+branch%3Amaster)
+
+## Install
+
+```sh
+npm install pgpass
+```
+
+## Usage
+```js
+var pgPass = require('pgpass');
+
+var connInfo = {
+ 'host' : 'pgserver' ,
+ 'user' : 'the_user_name' ,
+};
+
+pgPass(connInfo, function(pass){
+  connInfo.password = pass;
+ // connect to postgresql server
+});
+```
+
+## Description
+
+This module tries to read the `~/.pgpass` file (or the equivalent for windows systems). If the environment variable `PGPASSFILE` is set, this file is used instead. If everything goes right, the password from said file is passed to the callback; if the password cannot be read `undefined` is passed to the callback.
+
+Cases where `undefined` is returned:
+
+- the environment variable `PGPASSWORD` is set
+- the file cannot be read (wrong permissions, no such file, ...)
+- for non windows systems: the file is write-/readable by the group or by other users
+- there is no matching line for the given connection info
+
+There should be no need to use this module directly; it is already included in `node-postgres`.
+
+## Configuration
+
+The module reads the environment variable `PGPASS_NO_DEESCAPE` to decide if the read tokens from the password file should be de-escaped or not. Default is to do de-escaping. For further information on this see [this commit](https://github.com/postgres/postgres/commit/8d15e3ec4fcb735875a8a70a09ec0c62153c3329).
+
+
+## Tests
+
+There are tests in `./test/`; including linting and coverage testing. Running `npm test` runs:
+
+- `jshint`
+- `mocha` tests
+- `jscoverage` and `mocha -R html-cov`
+
+You can see the coverage report in `coverage.html`.
+
+
+## Development, Patches, Bugs, ...
+
+If you find bugs or have improvements, please feel free to open an issue on GitHub. If you provide a pull request, I'm more than happy to merge them, just make sure to add tests for your changes.
+
+## Links
+
+- https://github.com/hoegaarden/node-pgpass
+- http://www.postgresql.org/docs/current/static/libpq-pgpass.html
+- https://wiki.postgresql.org/wiki/Pgpass
+- https://github.com/postgres/postgres/blob/master/src/interfaces/libpq/fe-connect.c
+
+## License
+
+Copyright (c) 2013-2016 Hannes Hörl
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/api/node_modules/pgpass/lib/helper.js b/api/node_modules/pgpass/lib/helper.js
new file mode 100644
index 000000000..f9884601e
--- /dev/null
+++ b/api/node_modules/pgpass/lib/helper.js
@@ -0,0 +1,233 @@
+'use strict';
+
+var path = require('path')
+ , Stream = require('stream').Stream
+ , split = require('split2')
+ , util = require('util')
+ , defaultPort = 5432
+ , isWin = (process.platform === 'win32')
+ , warnStream = process.stderr
+;
+
+
+// stat(2) mode bit masks (octal values noted in the trailing comments)
+var S_IRWXG = 56 // 00070(8)
+  , S_IRWXO = 7 // 00007(8)
+  , S_IFMT = 61440 // 00170000(8)
+  , S_IFREG = 32768 // 0100000(8)
+;
+// true when the stat mode describes a regular file
+function isRegFile(mode) {
+  return ((mode & S_IFMT) == S_IFREG);
+}
+
+// the five colon-separated fields of a .pgpass line, in file order
+var fieldNames = [ 'host', 'port', 'database', 'user', 'password' ];
+var nrOfFields = fieldNames.length;
+var passKey = fieldNames[ nrOfFields -1 ];
+
+
+// printf-style warning helper; writes to the configurable warnStream
+// only when that stream is actually writable
+function warn() {
+  var isWritable = (
+    warnStream instanceof Stream &&
+    true === warnStream.writable
+  );
+
+  if (isWritable) {
+    var args = Array.prototype.slice.call(arguments).concat("\n");
+    warnStream.write( util.format.apply(util, args) );
+  }
+}
+
+
+// expose isWin as a settable property so tests can force windows behavior
+Object.defineProperty(module.exports, 'isWin', {
+  get : function() {
+    return isWin;
+  } ,
+  set : function(val) {
+    isWin = val;
+  }
+});
+
+
+// redirect warnings to another stream; returns the previous stream
+module.exports.warnTo = function(stream) {
+  var old = warnStream;
+  warnStream = stream;
+  return old;
+};
+
+// resolve the password file path: PGPASSFILE wins, otherwise the
+// platform-specific default location
+module.exports.getFileName = function(rawEnv){
+  var env = rawEnv || process.env;
+  var file = env.PGPASSFILE || (
+    isWin ?
+      path.join( env.APPDATA || './' , 'postgresql', 'pgpass.conf' ) :
+      path.join( env.HOME || './', '.pgpass' )
+  );
+  return file;
+};
+
+// decide whether the password file may be used at all, based on env
+// and (on non-windows) the file's type and permission bits
+module.exports.usePgPass = function(stats, fname) {
+  // an explicit PGPASSWORD always takes precedence over the file
+  if (Object.prototype.hasOwnProperty.call(process.env, 'PGPASSWORD')) {
+    return false;
+  }
+
+  // windows has no usable permission bits; skip the checks entirely
+  if (isWin) {
+    return true;
+  }
+
+  fname = fname || '';
+
+  if (! isRegFile(stats.mode)) {
+    warn('WARNING: password file "%s" is not a plain file', fname);
+    return false;
+  }
+
+  if (stats.mode & (S_IRWXG | S_IRWXO)) {
+    /* If password file is insecure, alert the user and ignore it. */
+    warn('WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less', fname);
+    return false;
+  }
+
+  return true;
+};
+
+
+// does a parsed .pgpass entry match the connection info?
+// compares every field except the password; '*' in the entry matches anything
+var matcher = module.exports.match = function(connInfo, entry) {
+  return fieldNames.slice(0, -1).reduce(function(prev, field, idx){
+    if (idx == 1) {
+      // the port: compare numerically, defaulting the connection's port
+      if ( Number( connInfo[field] || defaultPort ) === Number( entry[field] ) ) {
+        return prev && true;
+      }
+    }
+    return prev && (
+      entry[field] === '*' ||
+      entry[field] === connInfo[field]
+    );
+  }, true);
+};
+
+
+// read the pgpass stream line by line and call cb with the password from
+// the first valid, matching entry — or undefined on error / no match
+module.exports.getPassword = function(connInfo, stream, cb) {
+  var pass;
+  var lineStream = stream.pipe(split());
+
+  function onLine(line) {
+    var entry = parseLine(line);
+    if (entry && isValidEntry(entry) && matcher(connInfo, entry)) {
+      pass = entry[passKey];
+      lineStream.end(); // -> calls onEnd(), but pass is set now
+    }
+  }
+
+  var onEnd = function() {
+    stream.destroy();
+    cb(pass);
+  };
+
+  var onErr = function(err) {
+    // best-effort: a read error means "no password", not a crash
+    stream.destroy();
+    warn('WARNING: error on reading file: %s', err);
+    cb(undefined);
+  };
+
+  stream.on('error', onErr);
+  lineStream
+    .on('data', onLine)
+    .on('end', onEnd)
+    .on('error', onErr)
+  ;
+
+};
+
+
+// parse a single .pgpass line into { host, port, database, user, password }
+// or null for comments / lines too short to hold five fields;
+// ':' separates fields and may be escaped as '\:'
+var parseLine = module.exports.parseLine = function(line) {
+  // minimal valid line is 11 chars (e.g. '*:*:*:*:pw'-like); skip comments
+  if (line.length < 11 || line.match(/^\s+#/)) {
+    return null;
+  }
+
+  var curChar = '';
+  var prevChar = '';
+  var fieldIdx = 0;
+  var startIdx = 0;
+  var endIdx = 0;
+  var obj = {};
+  var isLastField = false;
+  var addToObj = function(idx, i0, i1) {
+    var field = line.substring(i0, i1);
+
+    // de-escape '\:' and '\\' unless explicitly disabled via env
+    if (! Object.hasOwnProperty.call(process.env, 'PGPASS_NO_DEESCAPE')) {
+      field = field.replace(/\\([:\\])/g, '$1');
+    }
+
+    obj[ fieldNames[idx] ] = field;
+  };
+
+  for (var i = 0 ; i < line.length-1 ; i += 1) {
+    curChar = line.charAt(i+1);
+    prevChar = line.charAt(i);
+
+    isLastField = (fieldIdx == nrOfFields-1);
+
+    if (isLastField) {
+      // the password runs to end of line (no i1 -> substring to the end)
+      addToObj(fieldIdx, startIdx);
+      break;
+    }
+
+    if (i >= 0 && curChar == ':' && prevChar !== '\\') {
+      addToObj(fieldIdx, startIdx, i+1);
+
+      startIdx = i+2;
+      fieldIdx += 1;
+    }
+  }
+
+  // only a line that yielded all five fields counts as an entry
+  obj = ( Object.keys(obj).length === nrOfFields ) ? obj : null;
+
+  return obj;
+};
+
+
+// validate a parsed entry: every field non-empty, and the port either '*'
+// or a positive integer within the safe-integer range
+var isValidEntry = module.exports.isValidEntry = function(entry){
+  // one validation rule per field index (see fieldNames order)
+  var rules = {
+    // host
+    0 : function(x){
+      return x.length > 0;
+    } ,
+    // port
+    1 : function(x){
+      if (x === '*') {
+        return true;
+      }
+      x = Number(x);
+      return (
+        isFinite(x) &&
+        x > 0 &&
+        x < 9007199254740992 &&
+        Math.floor(x) === x
+      );
+    } ,
+    // database
+    2 : function(x){
+      return x.length > 0;
+    } ,
+    // username
+    3 : function(x){
+      return x.length > 0;
+    } ,
+    // password
+    4 : function(x){
+      return x.length > 0;
+    }
+  };
+
+  for (var idx = 0 ; idx < fieldNames.length ; idx += 1) {
+    var rule = rules[idx];
+    var value = entry[ fieldNames[idx] ] || '';
+
+    var res = rule(value);
+    if (!res) {
+      return false;
+    }
+  }
+
+  return true;
+};
+
+
diff --git a/api/node_modules/pgpass/lib/index.js b/api/node_modules/pgpass/lib/index.js
new file mode 100644
index 000000000..ecfcf308e
--- /dev/null
+++ b/api/node_modules/pgpass/lib/index.js
@@ -0,0 +1,23 @@
+'use strict';
+
+var path = require('path')
+ , fs = require('fs')
+ , helper = require('./helper.js')
+;
+
+
+module.exports = function(connInfo, cb) {
+ var file = helper.getFileName();
+
+ fs.stat(file, function(err, stat){
+ if (err || !helper.usePgPass(stat, file)) {
+ return cb(undefined);
+ }
+
+ var st = fs.createReadStream(file);
+
+ helper.getPassword(connInfo, st, cb);
+ });
+};
+
+module.exports.warnTo = helper.warnTo;
diff --git a/api/node_modules/pgpass/package.json b/api/node_modules/pgpass/package.json
new file mode 100644
index 000000000..22bfe84e1
--- /dev/null
+++ b/api/node_modules/pgpass/package.json
@@ -0,0 +1,41 @@
+{
+ "name": "pgpass",
+ "version": "1.0.5",
+ "description": "Module for reading .pgpass",
+ "main": "lib/index",
+ "scripts": {
+ "pretest": "chmod 600 ./test/_pgpass",
+ "_hint": "jshint --exclude node_modules --verbose lib test",
+ "_test": "mocha --recursive -R list",
+ "_covered_test": "nyc --reporter html --reporter text \"$npm_execpath\" run _test",
+ "test": "\"$npm_execpath\" run _hint && \"$npm_execpath\" run _covered_test"
+ },
+ "author": "Hannes Hörl ",
+ "license": "MIT",
+ "dependencies": {
+ "split2": "^4.1.0"
+ },
+ "devDependencies": {
+ "jshint": "^2.12.0",
+ "mocha": "^8.2.0",
+ "nyc": "^15.1.0",
+ "pg": "^8.4.1",
+ "pg-escape": "^0.2.0",
+ "pg-native": "3.0.0",
+ "resumer": "0.0.0",
+ "tmp": "^0.2.1",
+ "which": "^2.0.2"
+ },
+ "keywords": [
+ "postgres",
+ "pg",
+ "pgpass",
+ "password",
+ "postgresql"
+ ],
+ "bugs": "https://github.com/hoegaarden/pgpass/issues",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/hoegaarden/pgpass.git"
+ }
+}
diff --git a/api/node_modules/postgres-array/index.d.ts b/api/node_modules/postgres-array/index.d.ts
new file mode 100644
index 000000000..88665bd91
--- /dev/null
+++ b/api/node_modules/postgres-array/index.d.ts
@@ -0,0 +1,4 @@
+
+export function parse(source: string): string[];
+export function parse(source: string, transform: (value: string) => T): T[];
+
diff --git a/api/node_modules/postgres-array/index.js b/api/node_modules/postgres-array/index.js
new file mode 100644
index 000000000..18bfd1636
--- /dev/null
+++ b/api/node_modules/postgres-array/index.js
@@ -0,0 +1,97 @@
+'use strict'
+
+exports.parse = function (source, transform) {
+ return new ArrayParser(source, transform).parse()
+}
+
/**
 * Recursive-descent parser for the Postgres array text format,
 * e.g. '{1,2,3}', '{{a},{b}}', '[0:2]={x,y,z}'.
 *
 * `transform` is applied to every non-null element; an unquoted NULL
 * becomes null. Nested arrays become nested JS arrays.
 */
class ArrayParser {
  constructor (source, transform) {
    this.source = source
    this.transform = transform || identity
    this.position = 0   // current read offset into source
    this.entries = []   // completed elements at this nesting level
    this.recorded = []  // characters of the element currently being read
    this.dimension = 0  // current brace-nesting depth
  }

  isEof () {
    return this.position >= this.source.length
  }

  // Consume one character; a backslash consumes the following character
  // too and flags it as escaped (so '\"' does not toggle quoting).
  nextCharacter () {
    var character = this.source[this.position++]
    if (character === '\\') {
      return {
        value: this.source[this.position++],
        escaped: true
      }
    }
    return {
      value: character,
      escaped: false
    }
  }

  record (character) {
    this.recorded.push(character)
  }

  // Finish the element currently buffered in `recorded`. Unquoted NULL
  // becomes null; quoted entries (includeEmpty=true) are kept verbatim,
  // so empty strings and a literal "NULL" survive.
  newEntry (includeEmpty) {
    var entry
    if (this.recorded.length > 0 || includeEmpty) {
      entry = this.recorded.join('')
      if (entry === 'NULL' && !includeEmpty) {
        entry = null
      }
      if (entry !== null) entry = this.transform(entry)
      this.entries.push(entry)
      this.recorded = []
    }
  }

  // Skip a leading dimension specification such as '[0:2]=' (emitted by
  // Postgres for arrays with non-default lower bounds).
  consumeDimensions () {
    if (this.source[0] === '[') {
      while (!this.isEof()) {
        var char = this.nextCharacter()
        if (char.value === '=') break
      }
    }
  }

  parse (nested) {
    var character, parser, quote
    this.consumeDimensions()
    while (!this.isEof()) {
      character = this.nextCharacter()
      if (character.value === '{' && !quote) {
        this.dimension++
        if (this.dimension > 1) {
          // Nested array: hand the rest of the source (including this
          // '{') to a child parser, then jump past what it consumed.
          parser = new ArrayParser(this.source.substr(this.position - 1), this.transform)
          this.entries.push(parser.parse(true))
          this.position += parser.position - 2
        }
      } else if (character.value === '}' && !quote) {
        this.dimension--
        if (!this.dimension) {
          this.newEntry()
          if (nested) return this.entries
        }
      } else if (character.value === '"' && !character.escaped) {
        // Closing quote commits the entry even when it is empty.
        if (quote) this.newEntry(true)
        quote = !quote
      } else if (character.value === ',' && !quote) {
        this.newEntry()
      } else {
        this.record(character.value)
      }
    }
    if (this.dimension !== 0) {
      throw new Error('array dimension not balanced')
    }
    return this.entries
  }
}
+
// Default element transform: hand the recorded string back unchanged.
function identity (x) {
  return x
}
diff --git a/api/node_modules/postgres-array/license b/api/node_modules/postgres-array/license
new file mode 100644
index 000000000..25c624701
--- /dev/null
+++ b/api/node_modules/postgres-array/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Ben Drucker (bendrucker.me)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/postgres-array/package.json b/api/node_modules/postgres-array/package.json
new file mode 100644
index 000000000..d6aa94e58
--- /dev/null
+++ b/api/node_modules/postgres-array/package.json
@@ -0,0 +1,35 @@
+{
+ "name": "postgres-array",
+ "main": "index.js",
+ "version": "2.0.0",
+ "description": "Parse postgres array columns",
+ "license": "MIT",
+ "repository": "bendrucker/postgres-array",
+ "author": {
+ "name": "Ben Drucker",
+ "email": "bvdrucker@gmail.com",
+ "url": "bendrucker.me"
+ },
+ "engines": {
+ "node": ">=4"
+ },
+ "scripts": {
+ "test": "standard && tape test.js"
+ },
+ "types": "index.d.ts",
+ "keywords": [
+ "postgres",
+ "array",
+ "parser"
+ ],
+ "dependencies": {},
+ "devDependencies": {
+ "standard": "^12.0.1",
+ "tape": "^4.0.0"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts",
+ "readme.md"
+ ]
+}
diff --git a/api/node_modules/postgres-array/readme.md b/api/node_modules/postgres-array/readme.md
new file mode 100644
index 000000000..b74b369df
--- /dev/null
+++ b/api/node_modules/postgres-array/readme.md
@@ -0,0 +1,43 @@
+# postgres-array [](https://travis-ci.org/bendrucker/postgres-array)
+
+> Parse postgres array columns
+
+
+## Install
+
+```
+$ npm install --save postgres-array
+```
+
+
+## Usage
+
+```js
+var postgresArray = require('postgres-array')
+
+postgresArray.parse('{1,2,3}', (value) => parseInt(value, 10))
+//=> [1, 2, 3]
+```
+
+## API
+
+#### `parse(input, [transform])` -> `array`
+
+##### input
+
+*Required*
+Type: `string`
+
+A Postgres array string.
+
+##### transform
+
+Type: `function`
+Default: `identity`
+
+A function that transforms non-null values inserted into the array.
+
+
+## License
+
+MIT © [Ben Drucker](http://bendrucker.me)
diff --git a/api/node_modules/postgres-bytea/index.js b/api/node_modules/postgres-bytea/index.js
new file mode 100644
index 000000000..b0dc4c78b
--- /dev/null
+++ b/api/node_modules/postgres-bytea/index.js
@@ -0,0 +1,33 @@
'use strict'

// Buffer.from is not available on very old Node versions; fall back to
// the legacy Buffer constructor there.
var bufferFrom = Buffer.from || Buffer

module.exports = parseBytea

/**
 * Parse the text representation of a Postgres `bytea` value into a
 * Buffer.
 *
 * Two output formats are supported:
 *  - hex format (PostgreSQL >= 9.0): '\x' followed by hex digits;
 *  - escape format: raw bytes with '\nnn' octal escapes and doubled
 *    backslashes ('\\') encoding literal backslashes.
 */
function parseBytea (input) {
  if (/^\\x/.test(input)) {
    // New 'hex' style response (pg >9.0): strip the '\x' prefix.
    return bufferFrom(input.substr(2), 'hex')
  }

  var output = ''
  var i = 0
  while (i < input.length) {
    if (input[i] !== '\\') {
      output += input[i]
      ++i
      continue
    }

    if (/[0-7]{3}/.test(input.substr(i + 1, 3))) {
      // '\nnn' octal escape encodes a single byte.
      output += String.fromCharCode(parseInt(input.substr(i + 1, 3), 8))
      i += 4
      continue
    }

    // Count the run of consecutive backslashes; each '\\' pair encodes
    // one literal backslash.
    var backslashes = 1
    while (i + backslashes < input.length && input[i + backslashes] === '\\') {
      backslashes++
    }

    if (backslashes === 1) {
      // Bug fix: a lone backslash that does not start an octal escape
      // is invalid escape-format input. The original advanced the
      // cursor by zero here (Math.floor(1 / 2) * 2 === 0) and looped
      // forever; skip the stray byte instead.
      ++i
      continue
    }

    var pairs = Math.floor(backslashes / 2)
    for (var k = 0; k < pairs; ++k) {
      output += '\\'
    }
    // Leave any unpaired trailing backslash for the next iteration so a
    // following octal escape (e.g. '\\\101') still parses correctly.
    i += pairs * 2
  }
  return bufferFrom(output, 'binary')
}
diff --git a/api/node_modules/postgres-bytea/license b/api/node_modules/postgres-bytea/license
new file mode 100644
index 000000000..25c624701
--- /dev/null
+++ b/api/node_modules/postgres-bytea/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Ben Drucker (bendrucker.me)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/postgres-bytea/package.json b/api/node_modules/postgres-bytea/package.json
new file mode 100644
index 000000000..344b9a01e
--- /dev/null
+++ b/api/node_modules/postgres-bytea/package.json
@@ -0,0 +1,34 @@
+{
+ "name": "postgres-bytea",
+ "main": "index.js",
+ "version": "1.0.1",
+ "description": "Postgres bytea parser",
+ "license": "MIT",
+ "repository": "bendrucker/postgres-bytea",
+ "author": {
+ "name": "Ben Drucker",
+ "email": "bvdrucker@gmail.com",
+ "url": "bendrucker.me"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "standard && tape test.js"
+ },
+ "keywords": [
+ "bytea",
+ "postgres",
+ "binary",
+ "parser"
+ ],
+ "dependencies": {},
+ "devDependencies": {
+ "tape": "^4.0.0",
+ "standard": "^4.0.0"
+ },
+ "files": [
+ "index.js",
+ "readme.md"
+ ]
+}
diff --git a/api/node_modules/postgres-bytea/readme.md b/api/node_modules/postgres-bytea/readme.md
new file mode 100644
index 000000000..4939c3be4
--- /dev/null
+++ b/api/node_modules/postgres-bytea/readme.md
@@ -0,0 +1,34 @@
+# postgres-bytea [](https://travis-ci.org/bendrucker/postgres-bytea)
+
+> Postgres bytea parser
+
+
+## Install
+
+```
+$ npm install --save postgres-bytea
+```
+
+
+## Usage
+
+```js
+var bytea = require('postgres-bytea');
+bytea('\\000\\100\\200')
+//=> buffer
+```
+
+## API
+
+#### `bytea(input)` -> `buffer`
+
+##### input
+
+*Required*
+Type: `string`
+
+A Postgres bytea binary string.
+
+## License
+
+MIT © [Ben Drucker](http://bendrucker.me)
diff --git a/api/node_modules/postgres-date/index.js b/api/node_modules/postgres-date/index.js
new file mode 100644
index 000000000..5dc73fbda
--- /dev/null
+++ b/api/node_modules/postgres-date/index.js
@@ -0,0 +1,116 @@
'use strict'

// Timestamp with time-of-day (optional fractional seconds, optional
// trailing ' BC'); deliberately permissive between seconds and the BC
// suffix so a timezone designator is tolerated.
var DATE_TIME = /(\d{1,})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?.*?( BC)?$/
// Plain date, e.g. '2011-01-23' or '0100-01-01 BC'.
var DATE = /^(\d{1,})-(\d{2})-(\d{2})( BC)?$/
// Zone designator: 'Z', '+hh', '-hh:mm', '+hh:mm:ss', ...
var TIME_ZONE = /([Z+-])(\d{2})?:?(\d{2})?:?(\d{2})?/
var INFINITY = /^-?infinity$/

/**
 * Parse a Postgres date/timestamp output string into a JS Date.
 *
 * - 'infinity' / '-infinity' map to +/-Infinity (numbers, not Dates).
 * - Timestamps carrying a zone designator are interpreted at that
 *   offset; timestamps without one, and plain YYYY-MM-DD dates, are
 *   parsed as local time.
 * - ' BC' years are converted to negative (astronomical) years.
 * - Returns null when the input matches none of the known formats.
 */
module.exports = function parseDate (isoDate) {
  if (INFINITY.test(isoDate)) {
    // Capitalize to Infinity before passing to Number
    return Number(isoDate.replace('i', 'I'))
  }
  var matches = DATE_TIME.exec(isoDate)

  if (!matches) {
    // Force YYYY-MM-DD dates to be parsed as local time
    return getDate(isoDate) || null
  }

  var isBC = !!matches[8]
  var year = parseInt(matches[1], 10)
  if (isBC) {
    year = bcYearToNegativeYear(year)
  }

  var month = parseInt(matches[2], 10) - 1
  // Day is intentionally left as a string; the Date constructor and
  // Date.UTC coerce it.
  var day = matches[3]
  var hour = parseInt(matches[4], 10)
  var minute = parseInt(matches[5], 10)
  var second = parseInt(matches[6], 10)

  // Fractional seconds ('.123') converted to milliseconds.
  var ms = matches[7]
  ms = ms ? 1000 * parseFloat(ms) : 0

  var date
  // != null distinguishes "offset of 0" (UTC) from "no zone at all".
  var offset = timeZoneOffset(isoDate)
  if (offset != null) {
    date = new Date(Date.UTC(year, month, day, hour, minute, second, ms))

    // Account for years from 0 to 99 being interpreted as 1900-1999
    // by Date.UTC / the multi-argument form of the Date constructor
    if (is0To99(year)) {
      date.setUTCFullYear(year)
    }

    if (offset !== 0) {
      date.setTime(date.getTime() - offset)
    }
  } else {
    // No zone designator: interpret as local time.
    date = new Date(year, month, day, hour, minute, second, ms)

    if (is0To99(year)) {
      date.setFullYear(year)
    }
  }

  return date
}
+
// Parse a plain 'YYYY-MM-DD[ BC]' date in local time; returns undefined
// when the input is not a bare date.
function getDate (isoDate) {
  var m = DATE.exec(isoDate)
  if (!m) {
    return
  }

  var year = parseInt(m[1], 10)
  if (m[4]) {
    // Trailing ' BC' suffix: convert to astronomical year numbering.
    year = bcYearToNegativeYear(year)
  }

  // Day is left as a string; the Date constructor coerces it.
  var parsed = new Date(year, parseInt(m[2], 10) - 1, m[3])

  // Years 0-99 would otherwise be mapped to 1900-1999 by the
  // multi-argument Date constructor.
  if (is0To99(year)) {
    parsed.setFullYear(year)
  }

  return parsed
}
+
// Extract the UTC offset, in milliseconds, from the time-of-day part of
// the timestamp. Recognized designators: 'Z', '+00', '-05', '+06:30',
// '+06:30:15'. Returns undefined when no zone designator is present
// (the caller then interprets the timestamp as local time).
function timeZoneOffset (isoDate) {
  // Fast path for the very common UTC suffix.
  if (isoDate.endsWith('+00')) {
    return 0
  }

  var zone = TIME_ZONE.exec(isoDate.split(' ')[1])
  if (!zone) return

  if (zone[1] === 'Z') {
    return 0
  }

  var seconds =
    parseInt(zone[2], 10) * 3600 +
    parseInt(zone[3] || 0, 10) * 60 +
    parseInt(zone[4] || 0, 10)

  return (zone[1] === '-' ? -seconds : seconds) * 1000
}
+
// Postgres renders BC years as positive numbers with a ' BC' suffix;
// astronomical numbering puts 1 BC at year 0, 2 BC at -1, and so on.
// See: https://github.com/bendrucker/postgres-date/issues/5
function bcYearToNegativeYear (year) {
  return 1 - year
}
+
// True for years the two-digit Date constructor quirk applies to.
function is0To99 (num) {
  return num < 100 && num >= 0
}
diff --git a/api/node_modules/postgres-date/license b/api/node_modules/postgres-date/license
new file mode 100644
index 000000000..25c624701
--- /dev/null
+++ b/api/node_modules/postgres-date/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Ben Drucker (bendrucker.me)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/postgres-date/package.json b/api/node_modules/postgres-date/package.json
new file mode 100644
index 000000000..6fddec71c
--- /dev/null
+++ b/api/node_modules/postgres-date/package.json
@@ -0,0 +1,33 @@
+{
+ "name": "postgres-date",
+ "main": "index.js",
+ "version": "1.0.7",
+ "description": "Postgres date column parser",
+ "license": "MIT",
+ "repository": "bendrucker/postgres-date",
+ "author": {
+ "name": "Ben Drucker",
+ "email": "bvdrucker@gmail.com",
+ "url": "bendrucker.me"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "standard && tape test.js"
+ },
+ "keywords": [
+ "postgres",
+ "date",
+ "parser"
+ ],
+ "dependencies": {},
+ "devDependencies": {
+ "standard": "^14.0.0",
+ "tape": "^5.0.0"
+ },
+ "files": [
+ "index.js",
+ "readme.md"
+ ]
+}
diff --git a/api/node_modules/postgres-date/readme.md b/api/node_modules/postgres-date/readme.md
new file mode 100644
index 000000000..095431a0f
--- /dev/null
+++ b/api/node_modules/postgres-date/readme.md
@@ -0,0 +1,49 @@
+# postgres-date [](https://travis-ci.org/bendrucker/postgres-date) [](https://greenkeeper.io/)
+
+> Postgres date output parser
+
+This package parses [date/time outputs](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME-OUTPUT) from Postgres into Javascript `Date` objects. Its goal is to match Postgres behavior and preserve data accuracy.
+
+If you find a case where a valid Postgres output results in incorrect parsing (including loss of precision), please [create a pull request](https://github.com/bendrucker/postgres-date/compare) and provide a failing test.
+
+**Supported Postgres Versions:** `>= 9.6`
+
+All prior versions of Postgres are likely compatible but not officially supported.
+
+## Install
+
+```
+$ npm install --save postgres-date
+```
+
+
+## Usage
+
+```js
+var parse = require('postgres-date')
+parse('2011-01-23 22:15:51Z')
+// => 2011-01-23T22:15:51.000Z
+```
+
+## API
+
+#### `parse(isoDate)` -> `date`
+
+##### isoDate
+
+*Required*
+Type: `string`
+
+A date string from Postgres.
+
+## Releases
+
+The following semantic versioning increments will be used for changes:
+
+* **Major**: Removal of support for Node.js versions or Postgres versions (not expected)
+* **Minor**: Unused, since Postgres returns dates in standard ISO 8601 format
+* **Patch**: Any fix for parsing behavior
+
+## License
+
+MIT © [Ben Drucker](http://bendrucker.me)
diff --git a/api/node_modules/postgres-interval/index.d.ts b/api/node_modules/postgres-interval/index.d.ts
new file mode 100644
index 000000000..f82b4c376
--- /dev/null
+++ b/api/node_modules/postgres-interval/index.d.ts
@@ -0,0 +1,20 @@
+declare namespace PostgresInterval {
+ export interface IPostgresInterval {
+ years?: number;
+ months?: number;
+ days?: number;
+ hours?: number;
+ minutes?: number;
+ seconds?: number;
+ milliseconds?: number;
+
+ toPostgres(): string;
+
+ toISO(): string;
+ toISOString(): string;
+ }
+}
+
+declare function PostgresInterval(raw: string): PostgresInterval.IPostgresInterval;
+
+export = PostgresInterval;
diff --git a/api/node_modules/postgres-interval/index.js b/api/node_modules/postgres-interval/index.js
new file mode 100644
index 000000000..8ecca800a
--- /dev/null
+++ b/api/node_modules/postgres-interval/index.js
@@ -0,0 +1,125 @@
'use strict'

var extend = require('xtend/mutable')

module.exports = PostgresInterval

/**
 * Value object for a Postgres `interval`. Callable with or without
 * `new`. The parsed fields (years, months, days, hours, minutes,
 * seconds, milliseconds) are copied onto the instance; zero/absent
 * fields are omitted entirely.
 */
function PostgresInterval (raw) {
  // Support invocation without `new`.
  if (!(this instanceof PostgresInterval)) {
    return new PostgresInterval(raw)
  }
  extend(this, parse(raw))
}
var properties = ['seconds', 'minutes', 'hours', 'days', 'months', 'years']
// Render the interval in Postgres's own input syntax, e.g.
// '3 seconds 2 minutes 1 hours'; returns '0' for an empty interval.
PostgresInterval.prototype.toPostgres = function () {
  var self = this
  var present = properties.filter(function (property) {
    return self.hasOwnProperty(property)
  })

  // In addition to `properties`, fractions of a second must force the
  // seconds component to be emitted.
  if (self.milliseconds && present.indexOf('seconds') < 0) {
    present.push('seconds')
  }

  if (present.length === 0) return '0'

  return present
    .map(function (property) {
      var value = self[property] || 0

      // Fold fractional seconds into the value, trimming trailing
      // zeroes (and a bare trailing dot).
      if (property === 'seconds' && self.milliseconds) {
        value = (value + self.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '')
      }

      return value + ' ' + property
    })
    .join(' ')
}
+
var propertiesISOEquivalent = {
  years: 'Y',
  months: 'M',
  days: 'D',
  hours: 'H',
  minutes: 'M',
  seconds: 'S'
}
var dateProperties = ['years', 'months', 'days']
var timeProperties = ['hours', 'minutes', 'seconds']
/**
 * Render the interval as an ISO 8601 duration, e.g. 'P1Y0M3DT4H5M6S'.
 * Zero components are emitted explicitly ('P0Y0M0DT0H0M0S' for an
 * empty interval).
 */
PostgresInterval.prototype.toISOString = PostgresInterval.prototype.toISO = function () {
  var datePart = dateProperties
    .map(buildProperty, this)
    .join('')

  var timePart = timeProperties
    .map(buildProperty, this)
    .join('')

  return 'P' + datePart + 'T' + timePart

  function buildProperty (property) {
    var value = this[property] || 0

    // Fold fractional seconds into the seconds value, trimming trailing
    // zeroes. Bug fix: also strip a bare trailing '.' (previously
    // /0+$/ turned '2.000000' into '2.', yielding invalid ISO 8601 such
    // as 'PT2.S'); now consistent with toPostgres() above.
    if (property === 'seconds' && this.milliseconds) {
      value = (value + this.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '')
    }

    return value + propertiesISOEquivalent[property]
  }
}
+
// Building blocks for the interval regex, matching Postgres's default
// ('postgres') interval output style,
// e.g. '1 year 2 mons 3 days 04:05:06.789'.
var NUMBER = '([+-]?\\d+)'
var YEAR = NUMBER + '\\s+years?'
var MONTH = NUMBER + '\\s+mons?'
var DAY = NUMBER + '\\s+days?'
// Time part: optional sign, hours:minutes:seconds with up to six
// fractional-second (microsecond) digits.
var TIME = '([+-])?([\\d]*):(\\d\\d):(\\d\\d)\\.?(\\d{1,6})?'
var INTERVAL = new RegExp([YEAR, MONTH, DAY, TIME].map(function (regexString) {
  return '(' + regexString + ')?'
})
  .join('\\s*'))

// Positions of values in regex match
// (each section above is wrapped in an extra capturing group, hence the
// gaps; group 8 is the time part's sign).
var positions = {
  years: 2,
  months: 4,
  days: 6,
  hours: 9,
  minutes: 10,
  seconds: 11,
  milliseconds: 12
}
// We can use negative time
var negatives = ['hours', 'minutes', 'seconds', 'milliseconds']
+
// Convert the fractional-seconds capture (1-6 digits, trailing zeroes
// omitted) to fractional milliseconds: right-pad to six microsecond
// digits, then divide.
function parseMilliseconds (fraction) {
  var micro = fraction
  while (micro.length < 6) {
    micro += '0'
  }
  return parseInt(micro, 10) / 1000
}
+
// Parse a Postgres interval output string into a plain object of
// non-zero components; returns {} for empty/falsy input.
function parse (interval) {
  if (!interval) return {}

  var matches = INTERVAL.exec(interval)
  // Group 8 is the sign of the hh:mm:ss part; it applies to all time
  // components (Postgres prints a single leading '-').
  var negateTime = matches[8] === '-'
  var result = {}

  Object.keys(positions).forEach(function (property) {
    var raw = matches[positions[property]]
    // Skip components absent from the string.
    if (!raw) return

    // "milliseconds" are microsecond digits with trailing zeroes
    // omitted; everything else is a plain integer.
    var value = property === 'milliseconds'
      ? parseMilliseconds(raw)
      : parseInt(raw, 10)

    // Zero values are omitted from the result entirely.
    if (!value) return

    if (negateTime && negatives.indexOf(property) !== -1) {
      value = -value
    }
    result[property] = value
  })

  return result
}
diff --git a/api/node_modules/postgres-interval/license b/api/node_modules/postgres-interval/license
new file mode 100644
index 000000000..25c624701
--- /dev/null
+++ b/api/node_modules/postgres-interval/license
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) Ben Drucker (bendrucker.me)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/postgres-interval/package.json b/api/node_modules/postgres-interval/package.json
new file mode 100644
index 000000000..95520a0e6
--- /dev/null
+++ b/api/node_modules/postgres-interval/package.json
@@ -0,0 +1,36 @@
+{
+ "name": "postgres-interval",
+ "main": "index.js",
+ "version": "1.2.0",
+ "description": "Parse Postgres interval columns",
+ "license": "MIT",
+ "repository": "bendrucker/postgres-interval",
+ "author": {
+ "name": "Ben Drucker",
+ "email": "bvdrucker@gmail.com",
+ "url": "bendrucker.me"
+ },
+ "engines": {
+ "node": ">=0.10.0"
+ },
+ "scripts": {
+ "test": "standard && tape test.js"
+ },
+ "keywords": [
+ "postgres",
+ "interval",
+ "parser"
+ ],
+ "dependencies": {
+ "xtend": "^4.0.0"
+ },
+ "devDependencies": {
+ "tape": "^4.0.0",
+ "standard": "^12.0.1"
+ },
+ "files": [
+ "index.js",
+ "index.d.ts",
+ "readme.md"
+ ]
+}
diff --git a/api/node_modules/postgres-interval/readme.md b/api/node_modules/postgres-interval/readme.md
new file mode 100644
index 000000000..53cda4ade
--- /dev/null
+++ b/api/node_modules/postgres-interval/readme.md
@@ -0,0 +1,48 @@
+# postgres-interval [](https://travis-ci.org/bendrucker/postgres-interval) [](https://greenkeeper.io/)
+
+> Parse Postgres interval columns
+
+
+## Install
+
+```
+$ npm install --save postgres-interval
+```
+
+
+## Usage
+
+```js
+var parse = require('postgres-interval')
+var interval = parse('01:02:03')
+//=> {hours: 1, minutes: 2, seconds: 3}
+interval.toPostgres()
+// 3 seconds 2 minutes 1 hours
+interval.toISO()
+// P0Y0M0DT1H2M3S
+```
+
+## API
+
+#### `parse(pgInterval)` -> `interval`
+
+##### pgInterval
+
+*Required*
+Type: `string`
+
+A Postgres interval string.
+
+#### `interval.toPostgres()` -> `string`
+
+Returns an interval string. This allows the interval object to be passed into prepared statements.
+
+#### `interval.toISOString()` -> `string`
+
+Returns an [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) compliant string.
+
+Also available as `interval.toISO()` for backwards compatibility.
+
+## License
+
+MIT © [Ben Drucker](http://bendrucker.me)
diff --git a/api/node_modules/split2/LICENSE b/api/node_modules/split2/LICENSE
new file mode 100644
index 000000000..a91afe5b7
--- /dev/null
+++ b/api/node_modules/split2/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2014-2018, Matteo Collina
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/api/node_modules/split2/README.md b/api/node_modules/split2/README.md
new file mode 100644
index 000000000..36f03ab67
--- /dev/null
+++ b/api/node_modules/split2/README.md
@@ -0,0 +1,85 @@
+# Split2(matcher, mapper, options)
+
+
+
+Break up a stream and reassemble it so that each line is a chunk.
+`split2` is inspired by [@dominictarr](https://github.com/dominictarr) [`split`](https://github.com/dominictarr/split) module,
+and it is totally API compatible with it.
+However, it is based on Node.js core [`Transform`](https://nodejs.org/api/stream.html#stream_new_stream_transform_options).
+
+`matcher` may be a `String`, or a `RegExp`. Example, read every line in a file ...
+
+``` js
+ fs.createReadStream(file)
+ .pipe(split2())
+ .on('data', function (line) {
+ //each chunk now is a separate line!
+ })
+
+```
+
+`split` takes the same arguments as `string.split` except it defaults to '/\r?\n/', and the optional `limit` parameter is ignored.
+[String#split](https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/String/split)
+
+`split` takes an optional options object as its third argument, which
+is directly passed as a
+[Transform](https://nodejs.org/api/stream.html#stream_new_stream_transform_options)
+option.
+
+Additionally, the `.maxLength` and `.skipOverflow` options are implemented, which set limits on the internal
+buffer size and the stream's behavior when the limit is exceeded. There is no limit unless `maxLength` is set. When
+the internal buffer size exceeds `maxLength`, the stream emits an error by default. You may also set `skipOverflow` to
+true to suppress the error and instead skip past any lines that cause the internal buffer to exceed `maxLength`.
+
+Calling `.destroy` will make the stream emit `close`. Use this to perform cleanup logic
+
+``` js
+var splitFile = function(filename) {
+ var file = fs.createReadStream(filename)
+
+ return file
+ .pipe(split2())
+ .on('close', function() {
+ // destroy the file stream in case the split stream was destroyed
+ file.destroy()
+ })
+}
+
+var stream = splitFile('my-file.txt')
+
+stream.destroy() // will destroy the input file stream
+```
+
+# NDJ - Newline Delimited Json
+
+`split2` accepts a function which transforms each line.
+
+``` js
+fs.createReadStream(file)
+ .pipe(split2(JSON.parse))
+ .on('data', function (obj) {
+ //each chunk now is a js object
+ })
+ .on("error", function(error) {
+ //handling parsing errors
+ })
+```
+
+However, in [@dominictarr](https://github.com/dominictarr) [`split`](https://github.com/dominictarr/split) the mapper
+is wrapped in a try-catch, while here it is not: if your parsing logic can throw, wrap it yourself. Otherwise, you can also rely on the stream's error handling when the mapper function throws.
+
+# License
+
+Copyright (c) 2014-2021, Matteo Collina
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/api/node_modules/split2/bench.js b/api/node_modules/split2/bench.js
new file mode 100644
index 000000000..15ec5df99
--- /dev/null
+++ b/api/node_modules/split2/bench.js
@@ -0,0 +1,27 @@
'use strict'

// Micro-benchmark comparing split2 against the binary-split package.
// Dev-only script: fastbench and binary-split are not runtime deps.
const split = require('./')
const bench = require('fastbench')
const binarySplit = require('binary-split')
const fs = require('fs')

// Stream this package's package.json through split2; cb fires at
// stream end. resume() keeps data flowing with no 'data' listener.
function benchSplit (cb) {
  fs.createReadStream('package.json')
    .pipe(split())
    .on('end', cb)
    .resume()
}

// Same workload through binary-split, for comparison.
function benchBinarySplit (cb) {
  fs.createReadStream('package.json')
    .pipe(binarySplit())
    .on('end', cb)
    .resume()
}

// 10000 iterations per benchmark; passing `run` as its own completion
// callback executes the whole suite a second time.
const run = bench([
  benchSplit,
  benchBinarySplit
], 10000)

run(run)
diff --git a/api/node_modules/split2/index.js b/api/node_modules/split2/index.js
new file mode 100644
index 000000000..9b59f6cee
--- /dev/null
+++ b/api/node_modules/split2/index.js
@@ -0,0 +1,141 @@
+/*
+Copyright (c) 2014-2021, Matteo Collina
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+*/
+
+'use strict'
+
+const { Transform } = require('stream')
+const { StringDecoder } = require('string_decoder')
+const kLast = Symbol('last')
+const kDecoder = Symbol('decoder')
+
+function transform (chunk, enc, cb) {
+ let list
+ if (this.overflow) { // Line buffer is full. Skip to start of next line.
+ const buf = this[kDecoder].write(chunk)
+ list = buf.split(this.matcher)
+
+ if (list.length === 1) return cb() // Line ending not found. Discard entire chunk.
+
+ // Line ending found. Discard trailing fragment of previous line and reset overflow state.
+ list.shift()
+ this.overflow = false
+ } else {
+ this[kLast] += this[kDecoder].write(chunk)
+ list = this[kLast].split(this.matcher)
+ }
+
+ this[kLast] = list.pop()
+
+ for (let i = 0; i < list.length; i++) {
+ try {
+ push(this, this.mapper(list[i]))
+ } catch (error) {
+ return cb(error)
+ }
+ }
+
+ this.overflow = this[kLast].length > this.maxLength
+ if (this.overflow && !this.skipOverflow) {
+ cb(new Error('maximum buffer reached'))
+ return
+ }
+
+ cb()
+}
+
+function flush (cb) {
+ // forward any gibberish left in there
+ this[kLast] += this[kDecoder].end()
+
+ if (this[kLast]) {
+ try {
+ push(this, this.mapper(this[kLast]))
+ } catch (error) {
+ return cb(error)
+ }
+ }
+
+ cb()
+}
+
+function push (self, val) {
+ if (val !== undefined) {
+ self.push(val)
+ }
+}
+
+function noop (incoming) {
+ return incoming
+}
+
+function split (matcher, mapper, options) {
+ // Set defaults for any arguments not supplied.
+ matcher = matcher || /\r?\n/
+ mapper = mapper || noop
+ options = options || {}
+
+ // Test arguments explicitly.
+ switch (arguments.length) {
+ case 1:
+ // If mapper is only argument.
+ if (typeof matcher === 'function') {
+ mapper = matcher
+ matcher = /\r?\n/
+ // If options is only argument.
+ } else if (typeof matcher === 'object' && !(matcher instanceof RegExp) && !matcher[Symbol.split]) {
+ options = matcher
+ matcher = /\r?\n/
+ }
+ break
+
+ case 2:
+ // If mapper and options are arguments.
+ if (typeof matcher === 'function') {
+ options = mapper
+ mapper = matcher
+ matcher = /\r?\n/
+ // If matcher and options are arguments.
+ } else if (typeof mapper === 'object') {
+ options = mapper
+ mapper = noop
+ }
+ }
+
+ options = Object.assign({}, options)
+ options.autoDestroy = true
+ options.transform = transform
+ options.flush = flush
+ options.readableObjectMode = true
+
+ const stream = new Transform(options)
+
+ stream[kLast] = ''
+ stream[kDecoder] = new StringDecoder('utf8')
+ stream.matcher = matcher
+ stream.mapper = mapper
+ stream.maxLength = options.maxLength
+ stream.skipOverflow = options.skipOverflow || false
+ stream.overflow = false
+ stream._destroy = function (err, cb) {
+ // Weird Node v12 bug that we need to work around
+ this._writableState.errorEmitted = false
+ cb(err)
+ }
+
+ return stream
+}
+
+module.exports = split
diff --git a/api/node_modules/split2/package.json b/api/node_modules/split2/package.json
new file mode 100644
index 000000000..e04bcc81c
--- /dev/null
+++ b/api/node_modules/split2/package.json
@@ -0,0 +1,39 @@
+{
+ "name": "split2",
+ "version": "4.2.0",
+ "description": "split a Text Stream into a Line Stream, using Stream 3",
+ "main": "index.js",
+ "scripts": {
+ "lint": "standard --verbose",
+ "unit": "nyc --lines 100 --branches 100 --functions 100 --check-coverage --reporter=text tape test.js",
+ "coverage": "nyc --reporter=html --reporter=cobertura --reporter=text tape test/test.js",
+ "test:report": "npm run lint && npm run unit:report",
+ "test": "npm run lint && npm run unit",
+ "legacy": "tape test.js"
+ },
+ "pre-commit": [
+ "test"
+ ],
+ "website": "https://github.com/mcollina/split2",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/mcollina/split2.git"
+ },
+ "bugs": {
+ "url": "http://github.com/mcollina/split2/issues"
+ },
+ "engines": {
+ "node": ">= 10.x"
+ },
+ "author": "Matteo Collina ",
+ "license": "ISC",
+ "devDependencies": {
+ "binary-split": "^1.0.3",
+ "callback-stream": "^1.1.0",
+ "fastbench": "^1.0.0",
+ "nyc": "^15.0.1",
+ "pre-commit": "^1.1.2",
+ "standard": "^17.0.0",
+ "tape": "^5.0.0"
+ }
+}
diff --git a/api/node_modules/split2/test.js b/api/node_modules/split2/test.js
new file mode 100644
index 000000000..a7f98385d
--- /dev/null
+++ b/api/node_modules/split2/test.js
@@ -0,0 +1,409 @@
+'use strict'
+
+const test = require('tape')
+const split = require('./')
+const callback = require('callback-stream')
+const strcb = callback.bind(null, { decodeStrings: false })
+const objcb = callback.bind(null, { objectMode: true })
+
+test('split two lines on end', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello\nworld')
+})
+
+test('split two lines on two writes', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.write('hello')
+ input.write('\nworld')
+ input.end()
+})
+
+test('split four lines on three writes', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world', 'bye', 'world'])
+ }))
+
+ input.write('hello\nwor')
+ input.write('ld\nbye\nwo')
+ input.write('rld')
+ input.end()
+})
+
+test('accumulate multiple writes', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['helloworld'])
+ }))
+
+ input.write('hello')
+ input.write('world')
+ input.end()
+})
+
+test('split using a custom string matcher', function (t) {
+ t.plan(2)
+
+ const input = split('~')
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello~world')
+})
+
+test('split using a custom regexp matcher', function (t) {
+ t.plan(2)
+
+ const input = split(/~/)
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello~world')
+})
+
+test('support an option argument', function (t) {
+ t.plan(2)
+
+ const input = split({ highWaterMark: 2 })
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello\nworld')
+})
+
+test('support a mapper function', function (t) {
+ t.plan(2)
+
+ const a = { a: '42' }
+ const b = { b: '24' }
+
+ const input = split(JSON.parse)
+
+ input.pipe(objcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, [a, b])
+ }))
+
+ input.write(JSON.stringify(a))
+ input.write('\n')
+ input.end(JSON.stringify(b))
+})
+
+test('split lines windows-style', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello\r\nworld')
+})
+
+test('splits a buffer', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end(Buffer.from('hello\nworld'))
+})
+
+test('do not end on undefined', function (t) {
+ t.plan(2)
+
+ const input = split(function (line) { })
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, [])
+ }))
+
+ input.end(Buffer.from('hello\nworld'))
+})
+
+test('has destroy method', function (t) {
+ t.plan(1)
+
+ const input = split(function (line) { })
+
+ input.on('close', function () {
+ t.ok(true, 'close emitted')
+ t.end()
+ })
+
+ input.destroy()
+})
+
+test('support custom matcher and mapper', function (t) {
+ t.plan(4)
+
+ const a = { a: '42' }
+ const b = { b: '24' }
+ const input = split('~', JSON.parse)
+
+ t.equal(input.matcher, '~')
+ t.equal(typeof input.mapper, 'function')
+
+ input.pipe(objcb(function (err, list) {
+ t.notOk(err, 'no errors')
+ t.deepEqual(list, [a, b])
+ }))
+
+ input.write(JSON.stringify(a))
+ input.write('~')
+ input.end(JSON.stringify(b))
+})
+
+test('support custom matcher and options', function (t) {
+ t.plan(6)
+
+ const input = split('~', { highWaterMark: 1024 })
+
+ t.equal(input.matcher, '~')
+ t.equal(typeof input.mapper, 'function')
+ t.equal(input._readableState.highWaterMark, 1024)
+ t.equal(input._writableState.highWaterMark, 1024)
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello~world')
+})
+
+test('support mapper and options', function (t) {
+ t.plan(6)
+
+ const a = { a: '42' }
+ const b = { b: '24' }
+ const input = split(JSON.parse, { highWaterMark: 1024 })
+
+ t.ok(input.matcher instanceof RegExp, 'matcher is RegExp')
+ t.equal(typeof input.mapper, 'function')
+ t.equal(input._readableState.highWaterMark, 1024)
+ t.equal(input._writableState.highWaterMark, 1024)
+
+ input.pipe(objcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, [a, b])
+ }))
+
+ input.write(JSON.stringify(a))
+ input.write('\n')
+ input.end(JSON.stringify(b))
+})
+
+test('split utf8 chars', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['烫烫烫', '锟斤拷'])
+ }))
+
+ const buf = Buffer.from('烫烫烫\r\n锟斤拷', 'utf8')
+ for (let i = 0; i < buf.length; ++i) {
+ input.write(buf.slice(i, i + 1))
+ }
+ input.end()
+})
+
+test('split utf8 chars 2by2', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['烫烫烫', '烫烫烫'])
+ }))
+
+ const str = '烫烫烫\r\n烫烫烫'
+ const buf = Buffer.from(str, 'utf8')
+ for (let i = 0; i < buf.length; i += 2) {
+ input.write(buf.slice(i, i + 2))
+ }
+ input.end()
+})
+
+test('split lines when the \n comes at the end of a chunk', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.write('hello\n')
+ input.end('world')
+})
+
+test('truncated utf-8 char', function (t) {
+ t.plan(2)
+
+ const input = split()
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['烫' + Buffer.from('e7', 'hex').toString()])
+ }))
+
+ const str = '烫烫'
+ const buf = Buffer.from(str, 'utf8')
+
+ input.write(buf.slice(0, 3))
+ input.end(buf.slice(3, 4))
+})
+
+test('maximum buffer limit', function (t) {
+ t.plan(1)
+
+ const input = split({ maxLength: 2 })
+ input.on('error', function (err) {
+ t.ok(err)
+ })
+
+ input.resume()
+
+ input.write('hey')
+})
+
+test('readable highWaterMark', function (t) {
+ const input = split()
+ t.equal(input._readableState.highWaterMark, 16)
+ t.end()
+})
+
+test('maxLength < chunk size', function (t) {
+ t.plan(2)
+
+ const input = split({ maxLength: 2 })
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['a', 'b'])
+ }))
+
+ input.end('a\nb')
+})
+
+test('maximum buffer limit w/skip', function (t) {
+ t.plan(2)
+
+ const input = split({ maxLength: 2, skipOverflow: true })
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['a', 'b', 'c'])
+ }))
+
+ input.write('a\n123')
+ input.write('456')
+ input.write('789\nb\nc')
+ input.end()
+})
+
+test("don't modify the options object", function (t) {
+ t.plan(2)
+
+ const options = {}
+ const input = split(options)
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.same(options, {})
+ }))
+
+ input.end()
+})
+
+test('mapper throws flush', function (t) {
+ t.plan(1)
+ const error = new Error()
+ const input = split(function () {
+ throw error
+ })
+
+ input.on('error', (err, list) => {
+ t.same(err, error)
+ })
+ input.end('hello')
+})
+
+test('mapper throws on transform', function (t) {
+ t.plan(1)
+
+ const error = new Error()
+ const input = split(function (l) {
+ throw error
+ })
+
+ input.on('error', (err) => {
+ t.same(err, error)
+ })
+ input.write('a')
+ input.write('\n')
+ input.end('b')
+})
+
+test('supports Symbol.split', function (t) {
+ t.plan(2)
+
+ const input = split({
+ [Symbol.split] (str) {
+ return str.split('~')
+ }
+ })
+
+ input.pipe(strcb(function (err, list) {
+ t.error(err)
+ t.deepEqual(list, ['hello', 'world'])
+ }))
+
+ input.end('hello~world')
+})
diff --git a/api/node_modules/xtend/.jshintrc b/api/node_modules/xtend/.jshintrc
new file mode 100644
index 000000000..77887b5f0
--- /dev/null
+++ b/api/node_modules/xtend/.jshintrc
@@ -0,0 +1,30 @@
+{
+ "maxdepth": 4,
+ "maxstatements": 200,
+ "maxcomplexity": 12,
+ "maxlen": 80,
+ "maxparams": 5,
+
+ "curly": true,
+ "eqeqeq": true,
+ "immed": true,
+ "latedef": false,
+ "noarg": true,
+ "noempty": true,
+ "nonew": true,
+ "undef": true,
+ "unused": "vars",
+ "trailing": true,
+
+ "quotmark": true,
+ "expr": true,
+ "asi": true,
+
+ "browser": false,
+ "esnext": true,
+ "devel": false,
+ "node": false,
+ "nonstandard": false,
+
+ "predef": ["require", "module", "__dirname", "__filename"]
+}
diff --git a/api/node_modules/xtend/LICENSE b/api/node_modules/xtend/LICENSE
new file mode 100644
index 000000000..0099f4f6c
--- /dev/null
+++ b/api/node_modules/xtend/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+Copyright (c) 2012-2014 Raynos.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/api/node_modules/xtend/README.md b/api/node_modules/xtend/README.md
new file mode 100644
index 000000000..4a2703cff
--- /dev/null
+++ b/api/node_modules/xtend/README.md
@@ -0,0 +1,32 @@
+# xtend
+
+[![browser support][3]][4]
+
+[](http://github.com/badges/stability-badges)
+
+Extend like a boss
+
+xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence.
+
+## Examples
+
+```js
+var extend = require("xtend")
+
+// extend returns a new object. Does not mutate arguments
+var combination = extend({
+ a: "a",
+ b: "c"
+}, {
+ b: "b"
+})
+// { a: "a", b: "b" }
+```
+
+## Stability status: Locked
+
+## MIT Licensed
+
+
+ [3]: http://ci.testling.com/Raynos/xtend.png
+ [4]: http://ci.testling.com/Raynos/xtend
diff --git a/api/node_modules/xtend/immutable.js b/api/node_modules/xtend/immutable.js
new file mode 100644
index 000000000..94889c9de
--- /dev/null
+++ b/api/node_modules/xtend/immutable.js
@@ -0,0 +1,19 @@
+module.exports = extend
+
+var hasOwnProperty = Object.prototype.hasOwnProperty;
+
+function extend() {
+ var target = {}
+
+ for (var i = 0; i < arguments.length; i++) {
+ var source = arguments[i]
+
+ for (var key in source) {
+ if (hasOwnProperty.call(source, key)) {
+ target[key] = source[key]
+ }
+ }
+ }
+
+ return target
+}
diff --git a/api/node_modules/xtend/mutable.js b/api/node_modules/xtend/mutable.js
new file mode 100644
index 000000000..72debede6
--- /dev/null
+++ b/api/node_modules/xtend/mutable.js
@@ -0,0 +1,17 @@
+module.exports = extend
+
+var hasOwnProperty = Object.prototype.hasOwnProperty;
+
+function extend(target) {
+ for (var i = 1; i < arguments.length; i++) {
+ var source = arguments[i]
+
+ for (var key in source) {
+ if (hasOwnProperty.call(source, key)) {
+ target[key] = source[key]
+ }
+ }
+ }
+
+ return target
+}
diff --git a/api/node_modules/xtend/package.json b/api/node_modules/xtend/package.json
new file mode 100644
index 000000000..f7a39d10a
--- /dev/null
+++ b/api/node_modules/xtend/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "xtend",
+ "version": "4.0.2",
+ "description": "extend like a boss",
+ "keywords": [
+ "extend",
+ "merge",
+ "options",
+ "opts",
+ "object",
+ "array"
+ ],
+ "author": "Raynos ",
+ "repository": "git://github.com/Raynos/xtend.git",
+ "main": "immutable",
+ "scripts": {
+ "test": "node test"
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "tape": "~1.1.0"
+ },
+ "homepage": "https://github.com/Raynos/xtend",
+ "contributors": [
+ {
+ "name": "Jake Verbaten"
+ },
+ {
+ "name": "Matt Esch"
+ }
+ ],
+ "bugs": {
+ "url": "https://github.com/Raynos/xtend/issues",
+ "email": "raynos2@gmail.com"
+ },
+ "license": "MIT",
+ "testling": {
+ "files": "test.js",
+ "browsers": [
+ "ie/7..latest",
+ "firefox/16..latest",
+ "firefox/nightly",
+ "chrome/22..latest",
+ "chrome/canary",
+ "opera/12..latest",
+ "opera/next",
+ "safari/5.1..latest",
+ "ipad/6.0..latest",
+ "iphone/6.0..latest"
+ ]
+ },
+ "engines": {
+ "node": ">=0.4"
+ }
+}
diff --git a/api/node_modules/xtend/test.js b/api/node_modules/xtend/test.js
new file mode 100644
index 000000000..b895b42b3
--- /dev/null
+++ b/api/node_modules/xtend/test.js
@@ -0,0 +1,103 @@
+var test = require("tape")
+var extend = require("./")
+var mutableExtend = require("./mutable")
+
+test("merge", function(assert) {
+ var a = { a: "foo" }
+ var b = { b: "bar" }
+
+ assert.deepEqual(extend(a, b), { a: "foo", b: "bar" })
+ assert.end()
+})
+
+test("replace", function(assert) {
+ var a = { a: "foo" }
+ var b = { a: "bar" }
+
+ assert.deepEqual(extend(a, b), { a: "bar" })
+ assert.end()
+})
+
+test("undefined", function(assert) {
+ var a = { a: undefined }
+ var b = { b: "foo" }
+
+ assert.deepEqual(extend(a, b), { a: undefined, b: "foo" })
+ assert.deepEqual(extend(b, a), { a: undefined, b: "foo" })
+ assert.end()
+})
+
+test("handle 0", function(assert) {
+ var a = { a: "default" }
+ var b = { a: 0 }
+
+ assert.deepEqual(extend(a, b), { a: 0 })
+ assert.deepEqual(extend(b, a), { a: "default" })
+ assert.end()
+})
+
+test("is immutable", function (assert) {
+ var record = {}
+
+ extend(record, { foo: "bar" })
+ assert.equal(record.foo, undefined)
+ assert.end()
+})
+
+test("null as argument", function (assert) {
+ var a = { foo: "bar" }
+ var b = null
+ var c = void 0
+
+ assert.deepEqual(extend(b, a, c), { foo: "bar" })
+ assert.end()
+})
+
+test("mutable", function (assert) {
+ var a = { foo: "bar" }
+
+ mutableExtend(a, { bar: "baz" })
+
+ assert.equal(a.bar, "baz")
+ assert.end()
+})
+
+test("null prototype", function(assert) {
+ var a = { a: "foo" }
+ var b = Object.create(null)
+ b.b = "bar";
+
+ assert.deepEqual(extend(a, b), { a: "foo", b: "bar" })
+ assert.end()
+})
+
+test("null prototype mutable", function (assert) {
+ var a = { foo: "bar" }
+ var b = Object.create(null)
+ b.bar = "baz";
+
+ mutableExtend(a, b)
+
+ assert.equal(a.bar, "baz")
+ assert.end()
+})
+
+test("prototype pollution", function (assert) {
+ var a = {}
+ var maliciousPayload = '{"__proto__":{"oops":"It works!"}}'
+
+ assert.strictEqual(a.oops, undefined)
+ extend({}, maliciousPayload)
+ assert.strictEqual(a.oops, undefined)
+ assert.end()
+})
+
+test("prototype pollution mutable", function (assert) {
+ var a = {}
+ var maliciousPayload = '{"__proto__":{"oops":"It works!"}}'
+
+ assert.strictEqual(a.oops, undefined)
+ mutableExtend({}, maliciousPayload)
+ assert.strictEqual(a.oops, undefined)
+ assert.end()
+})
diff --git a/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/01-login-page.png b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/01-login-page.png
new file mode 100644
index 000000000..a3f643c3c
Binary files /dev/null and b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/01-login-page.png differ
diff --git a/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/02-form-visible.png b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/02-form-visible.png
new file mode 100644
index 000000000..677150301
Binary files /dev/null and b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/02-form-visible.png differ
diff --git a/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/03-creds-filled.png b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/03-creds-filled.png
new file mode 100644
index 000000000..fda7bbcb8
Binary files /dev/null and b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/03-creds-filled.png differ
diff --git a/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/page@de50e8d8e7292e51ac275646b59e8fd4.webm b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/page@de50e8d8e7292e51ac275646b59e8fd4.webm
new file mode 100644
index 000000000..2b9350cd3
Binary files /dev/null and b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/page@de50e8d8e7292e51ac275646b59e8fd4.webm differ
diff --git a/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/results.json b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/results.json
new file mode 100644
index 000000000..3170d1a39
--- /dev/null
+++ b/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/results.json
@@ -0,0 +1,34 @@
+{
+ "ts": "2026-04-19T18-27-21-814Z",
+ "tests": [
+ {
+ "name": "load_login",
+ "pass": true,
+ "status": 200
+ },
+ {
+ "name": "manual_toggle_opens_form",
+ "pass": true
+ },
+ {
+ "name": "fill_credentials",
+ "pass": true
+ },
+ {
+ "name": "exception",
+ "pass": false,
+ "error": "response.text: Protocol error (Network.getResponseBody): No resource with given identifier found"
+ }
+ ],
+ "video": "/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/page@de50e8d8e7292e51ac275646b59e8fd4.webm",
+ "summary": {
+ "total": 4,
+ "pass": 3,
+ "fail": 1
+ },
+ "screenshots": [
+ "/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/01-login-page.png",
+ "/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/02-form-visible.png",
+ "/api/playwright-results/v41-login-wtp-2026-04-19T18-27-21-814Z/03-creds-filled.png"
+ ]
+}
\ No newline at end of file
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/01-login-page.png b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/01-login-page.png
new file mode 100644
index 000000000..a3f643c3c
Binary files /dev/null and b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/01-login-page.png differ
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/02-form-visible.png b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/02-form-visible.png
new file mode 100644
index 000000000..4edee4db7
Binary files /dev/null and b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/02-form-visible.png differ
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/03-creds-filled.png b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/03-creds-filled.png
new file mode 100644
index 000000000..f53b2b8a1
Binary files /dev/null and b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/03-creds-filled.png differ
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/04-after-submit.png b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/04-after-submit.png
new file mode 100644
index 000000000..4d466d013
Binary files /dev/null and b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/04-after-submit.png differ
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/05-wtp-loaded.png b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/05-wtp-loaded.png
new file mode 100644
index 000000000..30429a74b
Binary files /dev/null and b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/05-wtp-loaded.png differ
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/page@478c101dc40ab3e7e2f0572eeeafe0ff.webm b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/page@478c101dc40ab3e7e2f0572eeeafe0ff.webm
new file mode 100644
index 000000000..11603ed91
Binary files /dev/null and b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/page@478c101dc40ab3e7e2f0572eeeafe0ff.webm differ
diff --git a/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/results.json b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/results.json
new file mode 100644
index 000000000..fab71a415
--- /dev/null
+++ b/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/results.json
@@ -0,0 +1,59 @@
+{
+ "ts": "2026-04-19T18-27-49-044Z",
+ "tests": [
+ {
+ "name": "load_login",
+ "pass": true,
+ "status": 200
+ },
+ {
+ "name": "manual_toggle",
+ "pass": true
+ },
+ {
+ "name": "fill_credentials",
+ "pass": true
+ },
+ {
+ "name": "submit_redirect",
+ "pass": true,
+ "url": "https://weval-consulting.com/products/workspace.html"
+ },
+ {
+ "name": "session_cookie_set",
+ "pass": true,
+ "cookie": "PHPSESSID"
+ },
+ {
+ "name": "wtp_access",
+ "pass": true,
+ "status": 200,
+ "url": "https://weval-consulting.com/weval-technology-platform.html"
+ },
+ {
+ "name": "wtp_not_redirect_login",
+ "pass": true,
+ "title": "WEVAL Technology Platform — All-in-One ERP Portal",
+ "body_length": 129323,
+ "url": "https://weval-consulting.com/weval-technology-platform.html"
+ },
+ {
+ "name": "logout",
+ "pass": true,
+ "body": "{\"ok\":true}"
+ }
+ ],
+ "summary": {
+ "total": 8,
+ "pass": 8,
+ "fail": 0
+ },
+ "video": "https://weval-consulting.com/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/page@478c101dc40ab3e7e2f0572eeeafe0ff.webm",
+ "screenshots": [
+ "https://weval-consulting.com/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/01-login-page.png",
+ "https://weval-consulting.com/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/02-form-visible.png",
+ "https://weval-consulting.com/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/03-creds-filled.png",
+ "https://weval-consulting.com/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/04-after-submit.png",
+ "https://weval-consulting.com/api/playwright-results/v41b-login-wtp-2026-04-19T18-27-49-044Z/05-wtp-loaded.png"
+ ]
+}
\ No newline at end of file
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/01-tile-highlighted.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/01-tile-highlighted.png
new file mode 100644
index 000000000..693e0dbd5
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/01-tile-highlighted.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/03-erp-with-drill.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/03-erp-with-drill.png
new file mode 100644
index 000000000..b7e5ca7ad
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/03-erp-with-drill.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/04-erp-drill-open.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/04-erp-drill-open.png
new file mode 100644
index 000000000..7ca18129f
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/04-erp-drill-open.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/05-drillable-cards.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/05-drillable-cards.png
new file mode 100644
index 000000000..823b7ff9f
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/05-drillable-cards.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/06-drillable-expanded.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/06-drillable-expanded.png
new file mode 100644
index 000000000..44f1f93d8
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/06-drillable-expanded.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/07-wtp-auth.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/07-wtp-auth.png
new file mode 100644
index 000000000..70865c2a6
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/07-wtp-auth.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/08-sitemap.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/08-sitemap.png
new file mode 100644
index 000000000..b11847ab6
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/08-sitemap.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/09-wevia-master.png b/api/playwright-results/v81-video-2026-04-19T18-25-27/09-wevia-master.png
new file mode 100644
index 000000000..70865c2a6
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/09-wevia-master.png differ
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/results.json b/api/playwright-results/v81-video-2026-04-19T18-25-27/results.json
new file mode 100644
index 000000000..f2ab1e380
--- /dev/null
+++ b/api/playwright-results/v81-video-2026-04-19T18-25-27/results.json
@@ -0,0 +1,64 @@
+{
+ "ts": "2026-04-19T18:26:17.744Z",
+ "test": "v8.1 VIDEO COMPLETE drill-down + cross-pages + chat WEVIA",
+ "tests": [
+ {
+ "name": "training_drill_overlay_opens",
+ "pass": false,
+ "error": "no visible tile"
+ },
+ {
+ "name": "erp_drill_works",
+ "pass": true,
+ "wired": 20,
+ "clicked": true
+ },
+ {
+ "name": "drillable_native_works",
+ "pass": true,
+ "cards": 9
+ },
+ {
+ "name": "wtp_auth_guard",
+ "pass": true,
+ "title": "WEVAL — Login"
+ },
+ {
+ "name": "sitemap_260",
+ "pass": true,
+ "total": "261"
+ },
+ {
+ "name": "wevia_5_conversations",
+ "pass": true,
+ "conversations": [
+ {
+ "msg": "salut",
+ "intent": "wevia_greetings"
+ },
+ {
+ "msg": "andon",
+ "intent": "wevia_andon_status"
+ },
+ {
+ "msg": "dormants 220",
+ "intent": "wevia_dormants_220"
+ },
+ {
+ "msg": "v60 bridges",
+ "intent": "wevia_v60_bridges"
+ },
+ {
+ "msg": "drill down",
+ "intent": "wevia_ux_drill"
+ }
+ ]
+ }
+ ],
+ "total": 6,
+ "pass": 5,
+ "fail": 1,
+ "video": "v81-video-complete.webm",
+ "screenshots": 9,
+ "out": "/var/www/html/api/playwright-results/v81-video-2026-04-19T18-25-27"
+}
\ No newline at end of file
diff --git a/api/playwright-results/v81-video-2026-04-19T18-25-27/v81-video-complete.webm b/api/playwright-results/v81-video-2026-04-19T18-25-27/v81-video-complete.webm
new file mode 100644
index 000000000..cbfdc759a
Binary files /dev/null and b/api/playwright-results/v81-video-2026-04-19T18-25-27/v81-video-complete.webm differ
diff --git a/api/scan-erp-gaps-all.sh b/api/scan-erp-gaps-all.sh
new file mode 100755
index 000000000..dacd9c5e3
--- /dev/null
+++ b/api/scan-erp-gaps-all.sh
@@ -0,0 +1,81 @@
+#!/usr/bin/env bash
+# WEVAL — Master ERP Gap Scanner (D+C+B)
+# Usage: scan-erp-gaps-all.sh
+# Runs LLM gen (D) + RSS feeds (C) + Playwright G2 (B), then aggregates.
+set -u
+
+LOG_DIR=/var/log/weval
+mkdir -p "$LOG_DIR"
+TS=$(date +%Y%m%dT%H%M%S)
+
+echo "═══════════════════════════════════════════"
+echo "WEVAL ERP GAP SCANNER · $TS"
+echo "═══════════════════════════════════════════"
+
+# D — LLM sovereign (fastest, most reliable)
+echo ""
+echo "▶ OPTION D · LLM sovereign cascade"
+timeout 480 python3 /var/www/html/api/scan-erp-gaps-llm.py > "$LOG_DIR/scan-llm-$TS.log" 2>&1
+D_CODE=$?
+tail -3 "$LOG_DIR/scan-llm-$TS.log"
+echo " exit=$D_CODE log=$LOG_DIR/scan-llm-$TS.log"
+
+# C — RSS feeds
+echo ""
+echo "▶ OPTION C · RSS consulting feeds"
+timeout 180 python3 /var/www/html/api/scan-erp-gaps-rss.py > "$LOG_DIR/scan-rss-$TS.log" 2>&1
+C_CODE=$?
+tail -3 "$LOG_DIR/scan-rss-$TS.log"
+echo " exit=$C_CODE log=$LOG_DIR/scan-rss-$TS.log"
+
+# B — Playwright G2 scrape (slowest, skip if node missing)
+echo ""
+echo "▶ OPTION B · Playwright G2 public reviews"
+if command -v node >/dev/null 2>&1; then
+ # Ensure deps installed in /var/www/html/api (one-time)
+ if [ ! -d /var/www/html/api/node_modules/playwright ]; then
+ echo " [BOOTSTRAP] installing node deps..."
+ cd /var/www/html/api && npm install playwright pg --no-save 2>&1 | tail -3
+ fi
+ cd /var/www/html/api
+ timeout 600 node /var/www/html/api/scan-erp-gaps-playwright.js > "$LOG_DIR/scan-pw-$TS.log" 2>&1
+ B_CODE=$?
+ tail -3 "$LOG_DIR/scan-pw-$TS.log"
+ echo " exit=$B_CODE log=$LOG_DIR/scan-pw-$TS.log"
+else
+ echo " [SKIP] node not found"
+ B_CODE=-1
+fi
+
+# Summary
+echo ""
+echo "═══════════════════════════════════════════"
+echo "SUMMARY · $(date -Iseconds)"
+echo "═══════════════════════════════════════════"
+PGPASSWORD=admin123 psql -h 10.1.0.3 -U admin -d adx_system -c "
+SELECT
+ CASE
+ WHEN query LIKE 'llm_%' THEN 'LLM (D)'
+ WHEN query LIKE 'rss_%' THEN 'RSS (C)'
+ WHEN query LIKE 'playwright_%' THEN 'Playwright (B)'
+ ELSE 'Other (searxng)'
+ END AS source,
+ COUNT(DISTINCT erp_id) AS erps,
+ COUNT(*) AS gaps,
+ ROUND(AVG(confidence_score)::NUMERIC,3) AS avg_conf
+FROM erp_gap_scans
+GROUP BY source
+ORDER BY gaps DESC;"
+
+echo ""
+echo "TOP ERPS BY GAP COUNT:"
+PGPASSWORD=admin123 psql -h 10.1.0.3 -U admin -d adx_system -c "
+SELECT erp_id, COUNT(*) AS gaps, ROUND(AVG(confidence_score)::NUMERIC,3) AS avg_conf
+FROM erp_gap_scans
+GROUP BY erp_id
+ORDER BY gaps DESC
+LIMIT 15;"
+
+echo ""
+echo "═══ DONE ═══"
+exit 0
diff --git a/api/scan-erp-gaps-playwright.js b/api/scan-erp-gaps-playwright.js
new file mode 100644
index 000000000..9b32b3d4a
--- /dev/null
+++ b/api/scan-erp-gaps-playwright.js
@@ -0,0 +1,206 @@
+// WEVAL — ERP Gap Scanner via Playwright (Option B)
+// Scrape G2/TrustRadius/Capterra public review pages for the "Cons" / "Dislikes" sections
+// Usage: node scan-erp-gaps-playwright.js [erp_id|all]
+// Doctrine #5 ON CONFLICT DO NOTHING via unique constraint
+
+const { chromium } = require('playwright');
+const { Pool } = require('pg');
+
+const DB = {
+ host: '10.1.0.3',
+ port: 5432,
+ database: 'adx_system',
+ user: 'admin',
+ password: 'admin123',
+ connectionTimeoutMillis: 5000,
+};
+
+// Known public review URLs per ERP (G2 and TrustRadius only — publicly accessible)
+const ERP_URLS = {
+ sap_s4hana: [
+ 'https://www.g2.com/products/sap-s-4hana-cloud/reviews',
+ ],
+ sap_b1: [
+ 'https://www.g2.com/products/sap-business-one/reviews',
+ ],
+ oracle_netsuite: [
+ 'https://www.g2.com/products/oracle-netsuite/reviews',
+ ],
+ oracle_fusion: [
+ 'https://www.g2.com/products/oracle-fusion-cloud-erp/reviews',
+ ],
+ sage_x3: [
+ 'https://www.g2.com/products/sage-x3/reviews',
+ ],
+ sage_intacct: [
+ 'https://www.g2.com/products/sage-intacct/reviews',
+ ],
+ odoo: [
+ 'https://www.g2.com/products/odoo-erp/reviews',
+ ],
+ ms_d365_fo: [
+ 'https://www.g2.com/products/dynamics-365-finance/reviews',
+ ],
+ ms_d365_bc: [
+ 'https://www.g2.com/products/dynamics-365-business-central/reviews',
+ ],
+ workday: [
+ 'https://www.g2.com/products/workday-hcm/reviews',
+ ],
+ salesforce: [
+ 'https://www.g2.com/products/salesforce-sales-cloud/reviews',
+ ],
+ infor_m3: [
+ 'https://www.g2.com/products/infor-cloudsuite-industrial/reviews',
+ ],
+ ifs: [
+ 'https://www.g2.com/products/ifs-cloud/reviews',
+ ],
+ epicor: [
+ 'https://www.g2.com/products/epicor-kinetic/reviews',
+ ],
+ acumatica: [
+ 'https://www.g2.com/products/acumatica-erp/reviews',
+ ],
+ deltek: [
+ 'https://www.g2.com/products/deltek-costpoint/reviews',
+ ],
+ servicenow: [
+ 'https://www.g2.com/products/servicenow-itsm/reviews',
+ ],
+ veeva: [
+ 'https://www.g2.com/products/veeva-vault-clinical-suite/reviews',
+ ],
+};
+
+const ERP_NAMES = {
+ sap_s4hana: 'SAP S/4HANA',
+ sap_b1: 'SAP Business One',
+ oracle_netsuite: 'Oracle NetSuite',
+ oracle_fusion: 'Oracle Fusion Cloud',
+ sage_x3: 'Sage X3',
+ sage_intacct: 'Sage Intacct',
+ odoo: 'Odoo',
+ ms_d365_fo: 'Microsoft Dynamics 365 F&O',
+ ms_d365_bc: 'Microsoft Dynamics 365 Business Central',
+ workday: 'Workday',
+ salesforce: 'Salesforce',
+ infor_m3: 'Infor M3',
+ ifs: 'IFS Cloud',
+ epicor: 'Epicor Kinetic',
+ acumatica: 'Acumatica Cloud',
+ deltek: 'Deltek Costpoint',
+ servicenow: 'ServiceNow',
+ veeva: 'Veeva Vault',
+};
+
+async function scrapeG2(browser, erp_id, url) {
+ const context = await browser.newContext({
+ userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
+ viewport: { width: 1280, height: 800 },
+ });
+ const page = await context.newPage();
+ const results = [];
+
+ try {
+ await page.goto(url, { waitUntil: 'domcontentloaded', timeout: 25000 });
+ await page.waitForTimeout(3000);
+
+ // Extract "Dislikes" / "Cons" sections from reviews
+ const extracted = await page.evaluate(() => {
+ const out = [];
+ // Strategy 1: look for text following "Dislikes" / "What do you dislike" / "Cons"
+ const textNodes = Array.from(document.querySelectorAll('p, div[class*="review"], li'));
+ for (const el of textNodes) {
+ const txt = (el.innerText || '').trim();
+ if (!txt || txt.length < 40 || txt.length > 1500) continue;
+ // Is this a "dislike" / "con" snippet ?
+ const lc = txt.toLowerCase();
+ if (
+ lc.match(/\b(dislike|drawback|limitation|painpoint|pain point|missing|lacks|difficult|slow|complicated|bug|issue|problem|frustrat|workaround|hard to)\b/i)
+ ) {
+ out.push({
+ snippet: txt.substring(0, 1000),
+ title: txt.substring(0, 100),
+ });
+ }
+ }
+ // Dedupe by first 60 chars
+ const seen = new Set();
+ return out.filter(x => {
+ const k = x.snippet.substring(0, 60);
+ if (seen.has(k)) return false;
+ seen.add(k);
+ return true;
+ }).slice(0, 15);
+ });
+
+ for (const r of extracted) {
+ results.push({
+ erp_id,
+ erp_name: ERP_NAMES[erp_id] || erp_id,
+ source_url: `${url}#pain-${Date.now()}-${results.length}`,
+ title: r.title,
+ snippet: r.snippet,
+ confidence: 0.65, // Playwright scraped from public G2 reviews — medium-high
+ keywords: ['g2_review', 'playwright_scrape'],
+ });
+ }
+ } catch (e) {
+ console.error(` [ERR] ${url}: ${e.message}`);
+ } finally {
+ await page.close();
+ await context.close();
+ }
+
+ return results;
+}
+
+async function storeResults(pool, results) {
+ let inserted = 0;
+ for (const r of results) {
+ try {
+ const res = await pool.query(
+ `INSERT INTO erp_gap_scans (erp_id, erp_name, query, source_url, title, snippet, confidence_score, keywords)
+ VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
+ ON CONFLICT DO NOTHING`,
+ [r.erp_id, r.erp_name, 'playwright_g2', r.source_url, r.title, r.snippet, r.confidence, r.keywords]
+ );
+ if (res.rowCount > 0) inserted++;
+ } catch (e) {
+ // ignore
+ }
+ }
+ return inserted;
+}
+
+(async () => {
+ const target = process.argv[2];
+ console.log(`═══ SCAN-ERP-GAPS-PLAYWRIGHT · ${new Date().toISOString()} ═══`);
+
+ const pool = new Pool(DB);
+ const browser = await chromium.launch({ headless: true, args: ['--no-sandbox'] });
+
+ let totalInserted = 0;
+ const erpIds = target && ERP_URLS[target] ? [target] : Object.keys(ERP_URLS);
+
+ for (const erp_id of erpIds) {
+ for (const url of ERP_URLS[erp_id]) {
+ console.log(`\n━━━ ${erp_id} · ${url}`);
+ const results = await scrapeG2(browser, erp_id, url);
+ console.log(` → ${results.length} snippets extracted`);
+ if (results.length > 0) {
+ const ins = await storeResults(pool, results);
+ console.log(` → ${ins} inserted`);
+ totalInserted += ins;
+ }
+ // Pace between scrapes
+ await new Promise(r => setTimeout(r, 2000));
+ }
+ }
+
+ await browser.close();
+ await pool.end();
+ console.log(`\n═══ DONE · inserted=${totalInserted} ═══`);
+ process.exit(0);
+})();
diff --git a/api/scan-erp-gaps-rss.py b/api/scan-erp-gaps-rss.py
new file mode 100755
index 000000000..182e8697d
--- /dev/null
+++ b/api/scan-erp-gaps-rss.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python3
+"""
+WEVAL — ERP Gap Scanner via RSS/ATOM feeds (Option C)
+Subscribe à flux consulting/tech/vendor et extrait mentions des 25 ERPs avec keywords pain.
+Stocke dans erp_gap_scans avec source_url = URL de l'article.
+
+Sources publiques (no auth required):
+- Reddit r/ERP (JSON API)
+- CIO.com ERP tag
+- TechRepublic ERP
+- SAP/Oracle/Microsoft blog RSS (release notes mentioning issues)
+- Gartner public blog
+- G2 recently added (limited)
+
+Doctrine #5 INSERT ON CONFLICT DO NOTHING.
+Doctrine #4 honnêteté — tag source clairement.
+"""
+import sys, json, time, re, urllib.request, urllib.error, html
+from datetime import datetime
+import psycopg2
+
+try:
+ import feedparser
+except ImportError:
+ print("ERR: pip install feedparser", file=sys.stderr)
+ sys.exit(1)
+
+DB_CONFIG = dict(host="10.1.0.3", port=5432, dbname="adx_system", user="admin", password="admin123", connect_timeout=5)
+
+# Publicly accessible RSS feeds (no auth)
+RSS_FEEDS = [
+ ("Reddit_r_ERP", "https://www.reddit.com/r/ERP/.rss"),
+ ("Reddit_r_sap", "https://www.reddit.com/r/SAP/.rss"),
+ ("Reddit_r_netsuite", "https://www.reddit.com/r/netsuite/.rss"),
+ ("Reddit_r_Dynamics365","https://www.reddit.com/r/Dynamics365/.rss"),
+ ("Reddit_r_salesforce","https://www.reddit.com/r/salesforce/.rss"),
+ ("Reddit_r_workday", "https://www.reddit.com/r/Workday/.rss"),
+ ("Reddit_r_Odoo", "https://www.reddit.com/r/Odoo/.rss"),
+ ("CIO_ERP", "https://www.cio.com/feed/"),
+ ("TechRepublic_Enterprise", "https://www.techrepublic.com/rssfeeds/topic/enterprise-software/"),
+ ("ComputerWeekly_ERP", "https://www.computerweekly.com/rss/IT-for-transport-and-travel-industry.xml"),
+ ("ITWorldCanada_ERP", "https://www.itworldcanada.com/feed"),
+ ("ERPToday", "https://erp.today/feed/"),
+ ("DiginomicaERP", "https://diginomica.com/topic/erp/rss.xml"),
+ ("CXToday", "https://www.cxtoday.com/crm/feed/"),
+]
+
+# ERP name -> id mapping (for extraction)
+ERP_MAP = {
+ "SAP S/4HANA": "sap_s4hana", "S/4HANA": "sap_s4hana", "S/4 HANA": "sap_s4hana",
+ "SAP Business One": "sap_b1", "SAP B1": "sap_b1", "Business One": "sap_b1",
+ "Oracle E-Business": "oracle_ebs", "Oracle EBS": "oracle_ebs", "E-Business Suite": "oracle_ebs",
+ "Oracle Fusion": "oracle_fusion", "Fusion Cloud": "oracle_fusion",
+ "NetSuite": "oracle_netsuite",
+ "Sage X3": "sage_x3",
+ "Sage 100": "sage_100",
+ "Sage Intacct": "sage_intacct", "Intacct": "sage_intacct",
+ "Odoo": "odoo",
+ "Dynamics 365 F&O": "ms_d365_fo", "D365 F&O": "ms_d365_fo", "Dynamics 365 Finance": "ms_d365_fo", "D365FO": "ms_d365_fo",
+ "Dynamics 365 Business Central": "ms_d365_bc", "D365 BC": "ms_d365_bc", "Business Central": "ms_d365_bc",
+ "Dynamics 365 Customer Engagement": "ms_d365_ce", "D365 CE": "ms_d365_ce", "Dynamics CRM": "ms_d365_ce",
+ "Workday": "workday",
+ "Salesforce": "salesforce",
+ "Infor M3": "infor_m3",
+ "Infor CloudSuite": "infor_cs", "Infor CS": "infor_cs",
+ "IFS Cloud": "ifs", "IFS Applications": "ifs",
+ "Epicor": "epicor", "Kinetic": "epicor",
+ "QAD": "qad",
+ "Acumatica": "acumatica",
+ "Priority": "priority",
+ "Deltek": "deltek", "Costpoint": "deltek",
+ "ServiceNow": "servicenow",
+ "Veeva": "veeva",
+ "Temenos": "temenos",
+}
+
+# Keywords (English + French) for pain-detection
+PAIN_KW = [
+ "pain", "limitation", "limits", "issue", "problem", "bug", "slow", "crash",
+ "complaint", "drawback", "weakness", "shortcoming", "bottleneck", "broken",
+ "frustrating", "workaround", "manual", "difficult", "lacks", "missing",
+ "challenge", "struggle", "outdated", "legacy", "expensive", "complex",
+ # FR
+ "lent", "manque", "limitation", "problème", "difficulté", "bogue", "bug",
+ "archaïque", "obsolète", "manuel", "complexe", "difficile", "frustrant",
+]
+
+def normalize_entry(entry):
+ title = html.unescape(entry.get("title", ""))
+ summary = html.unescape(entry.get("summary", entry.get("description", "")))
+ # Strip HTML tags
+ summary = re.sub(r"<[^>]+>", " ", summary)
+ summary = re.sub(r"\s+", " ", summary).strip()
+ link = entry.get("link", "")
+ return title, summary, link
+
+def detect_erps_mentioned(text):
+ """Return list of erp_id mentioned in text"""
+ text_lc = text.lower()
+ found = set()
+ for alias, erp_id in ERP_MAP.items():
+ if alias.lower() in text_lc:
+ found.add(erp_id)
+ return list(found)
+
+def score_pain(text):
+ """Return (score 0..1, matched keywords)"""
+ text_lc = text.lower()
+ matches = [kw for kw in PAIN_KW if kw in text_lc]
+ # Normalize: 5+ kw = 1.0
+ score = min(1.0, len(matches) / 5.0)
+ return round(score, 3), matches
+
+def main():
+ print(f"═══ SCAN-ERP-GAPS-RSS · {datetime.now().isoformat()} ═══")
+ conn = psycopg2.connect(**DB_CONFIG)
+ cur = conn.cursor()
+
+ total_feeds = 0
+ total_entries = 0
+ total_matches = 0
+ total_inserted = 0
+
+ for feed_name, feed_url in RSS_FEEDS:
+ print(f"\n━━━ {feed_name} ━━━")
+ try:
+ # feedparser doesn't always respect timeout; set socket default
+ import socket
+ socket.setdefaulttimeout(10)
+ feed = feedparser.parse(feed_url)
+ entries = feed.entries[:30] # top 30 latest
+ total_feeds += 1
+ total_entries += len(entries)
+ print(f" → {len(entries)} entries")
+ except Exception as e:
+ print(f" [ERR] {e}")
+ continue
+
+ feed_matches = 0
+ feed_inserted = 0
+
+ for entry in entries:
+ title, summary, link = normalize_entry(entry)
+ combined = f"{title} {summary}"
+ if not combined.strip() or not link:
+ continue
+
+ erps = detect_erps_mentioned(combined)
+ if not erps:
+ continue
+
+ score, kws = score_pain(combined)
+ if score < 0.1: # at least 1 pain keyword
+ continue
+
+ feed_matches += 1
+
+ # Insert one row per ERP mentioned
+ for erp_id in erps:
+ erp_name = [k for k, v in ERP_MAP.items() if v == erp_id and len(k) > 3]
+ erp_name = erp_name[0] if erp_name else erp_id
+ try:
+ cur.execute("""
+ INSERT INTO erp_gap_scans (erp_id, erp_name, query, source_url, title, snippet, confidence_score, keywords)
+ VALUES (%s, %s, %s, %s, %s, %s, %s, %s)
+ ON CONFLICT (erp_id, source_url) DO NOTHING
+ """, (
+ erp_id, erp_name,
+ f"rss_{feed_name}",
+ link[:500], title[:500], summary[:1500],
+ score,
+ kws + ["rss", feed_name]
+ ))
+ if cur.rowcount > 0:
+ feed_inserted += 1
+ total_inserted += 1
+ except Exception as e:
+ pass
+
+ total_matches += 1
+
+ conn.commit()
+ print(f" matches={feed_matches}, inserted={feed_inserted}")
+
+ cur.close()
+ conn.close()
+ print(f"\n═══ DONE · feeds={total_feeds} · entries={total_entries} · matches={total_matches} · inserted={total_inserted} ═══")
+ return 0
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/api/v41-playwright-login-wtp.js b/api/v41-playwright-login-wtp.js
new file mode 100755
index 000000000..75fef8c20
--- /dev/null
+++ b/api/v41-playwright-login-wtp.js
@@ -0,0 +1,99 @@
+const { chromium } = require('/var/www/html/api/node_modules/playwright');
+const fs = require('fs');
+
+(async () => {
+ const ts = new Date().toISOString().replace(/[:.]/g, '-');
+ const outDir = `/var/www/html/api/playwright-results/v41b-login-wtp-${ts}`;
+ fs.mkdirSync(outDir, { recursive: true });
+
+ const browser = await chromium.launch({ headless: true });
+ const context = await browser.newContext({
+ ignoreHTTPSErrors: true,
+ recordVideo: { dir: outDir, size: { width: 1280, height: 720 } }
+ });
+ const page = await context.newPage();
+
+ const results = { ts, tests: [] };
+ let screenshotIdx = 0;
+ const shot = async (name) => {
+ screenshotIdx++;
+ const p = `${outDir}/${String(screenshotIdx).padStart(2,'0')}-${name}.png`;
+ await page.screenshot({ path: p, fullPage: true });
+ return p;
+ };
+
+ try {
+ // 1. Load login
+ const r1 = await page.goto('https://weval-consulting.com/login.html', { waitUntil: 'domcontentloaded', timeout: 15000 });
+ await shot('login-page');
+ results.tests.push({ name: 'load_login', pass: r1.status() === 200, status: r1.status() });
+
+ // 2. Open manual form
+ await page.waitForTimeout(1500);
+ await page.click('text=Connexion manuelle');
+ await page.waitForTimeout(500);
+ const userVis = await page.isVisible('#user');
+ await shot('form-visible');
+ results.tests.push({ name: 'manual_toggle', pass: userVis });
+
+ // 3. Fill creds
+ await page.fill('#user', 'yacine');
+ await page.fill('#pass', 'YacineWeval2026');
+ await shot('creds-filled');
+ results.tests.push({ name: 'fill_credentials', pass: true });
+
+ // 4. Submit (simple click, wait for URL change)
+ await page.click('#btn');
+ await page.waitForTimeout(4000);
+ const urlAfter = page.url();
+ await shot('after-submit');
+ const loginSuccess = urlAfter.includes('workspace') || urlAfter.includes('technology-platform') || !urlAfter.includes('login.html');
+ results.tests.push({ name: 'submit_redirect', pass: loginSuccess, url: urlAfter });
+
+ // 5. Check session cookie
+ const cookies = await context.cookies();
+ const phpsess = cookies.find(c => c.name === 'PHPSESSID');
+ results.tests.push({ name: 'session_cookie_set', pass: !!phpsess, cookie: phpsess ? phpsess.name : null });
+
+ // 6. Access WTP
+ const r6 = await page.goto('https://weval-consulting.com/weval-technology-platform.html', { waitUntil: 'domcontentloaded', timeout: 15000 });
+ await page.waitForTimeout(3000);
+ await shot('wtp-loaded');
+ results.tests.push({ name: 'wtp_access', pass: r6.status() === 200, status: r6.status(), url: page.url() });
+
+ // 7. Check WTP content (not login redirect)
+ const title = await page.title();
+ const bodyText = await page.textContent('body');
+ const hasWTPContent = title.includes('WEVAL') || (bodyText && bodyText.length > 1000);
+ const isLoginPage = page.url().includes('login.html');
+ results.tests.push({ name: 'wtp_not_redirect_login', pass: !isLoginPage && hasWTPContent, title, body_length: bodyText?.length, url: page.url() });
+
+ // 8. Logout test
+ const r8 = await page.goto('https://weval-consulting.com/api/weval-auth-session.php?action=logout', { timeout: 10000 });
+ const logoutBody = await page.textContent('body');
+ results.tests.push({ name: 'logout', pass: r8.status() === 200 && logoutBody && logoutBody.includes('true'), body: logoutBody?.substring(0, 100) });
+
+ } catch (e) {
+ await shot('error').catch(()=>{});
+ results.tests.push({ name: 'exception', pass: false, error: e.message.substring(0, 300) });
+ }
+
+ results.summary = {
+ total: results.tests.length,
+ pass: results.tests.filter(t => t.pass).length,
+ fail: results.tests.filter(t => !t.pass).length
+ };
+
+ await page.close();
+ await context.close();
+ await browser.close();
+
+ const videos = fs.readdirSync(outDir).filter(f => f.endsWith('.webm'));
+ if (videos.length > 0) results.video = `https://weval-consulting.com/api/playwright-results/v41b-login-wtp-${ts}/${videos[0]}`;
+
+ const pngs = fs.readdirSync(outDir).filter(f => f.endsWith('.png'));
+ results.screenshots = pngs.map(s => `https://weval-consulting.com/api/playwright-results/v41b-login-wtp-${ts}/${s}`);
+
+ fs.writeFileSync(`${outDir}/results.json`, JSON.stringify(results, null, 2));
+ console.log(JSON.stringify(results, null, 2));
+})();
diff --git a/api/v81-video-latest.json b/api/v81-video-latest.json
new file mode 100644
index 000000000..f2ab1e380
--- /dev/null
+++ b/api/v81-video-latest.json
@@ -0,0 +1,64 @@
+{
+ "ts": "2026-04-19T18:26:17.744Z",
+ "test": "v8.1 VIDEO COMPLETE drill-down + cross-pages + chat WEVIA",
+ "tests": [
+ {
+ "name": "training_drill_overlay_opens",
+ "pass": false,
+ "error": "no visible tile"
+ },
+ {
+ "name": "erp_drill_works",
+ "pass": true,
+ "wired": 20,
+ "clicked": true
+ },
+ {
+ "name": "drillable_native_works",
+ "pass": true,
+ "cards": 9
+ },
+ {
+ "name": "wtp_auth_guard",
+ "pass": true,
+ "title": "WEVAL — Login"
+ },
+ {
+ "name": "sitemap_260",
+ "pass": true,
+ "total": "261"
+ },
+ {
+ "name": "wevia_5_conversations",
+ "pass": true,
+ "conversations": [
+ {
+ "msg": "salut",
+ "intent": "wevia_greetings"
+ },
+ {
+ "msg": "andon",
+ "intent": "wevia_andon_status"
+ },
+ {
+ "msg": "dormants 220",
+ "intent": "wevia_dormants_220"
+ },
+ {
+ "msg": "v60 bridges",
+ "intent": "wevia_v60_bridges"
+ },
+ {
+ "msg": "drill down",
+ "intent": "wevia_ux_drill"
+ }
+ ]
+ }
+ ],
+ "total": 6,
+ "pass": 5,
+ "fail": 1,
+ "video": "v81-video-complete.webm",
+ "screenshots": 9,
+ "out": "/var/www/html/api/playwright-results/v81-video-2026-04-19T18-25-27"
+}
\ No newline at end of file
diff --git a/api/v83-business-kpi-latest.json b/api/v83-business-kpi-latest.json
index e54487dcc..dd4a6b1f0 100644
--- a/api/v83-business-kpi-latest.json
+++ b/api/v83-business-kpi-latest.json
@@ -1,7 +1,7 @@
{
"ok": true,
"version": "V83-business-kpi",
- "ts": "2026-04-19T18:24:35+00:00",
+ "ts": "2026-04-19T18:27:57+00:00",
"summary": {
"total_categories": 7,
"total_kpis": 56,
diff --git a/api/wave-wiring-queue.json b/api/wave-wiring-queue.json
index f0924463f..4b4ea0eef 100644
--- a/api/wave-wiring-queue.json
+++ b/api/wave-wiring-queue.json
@@ -4628,5 +4628,41 @@
"status": "PENDING_APPROVAL",
"created_at": "2026-04-19T18:22:24+00:00",
"source": "opus4-autowire-early-v2"
+ },
+ "347": {
+ "name": "scan_erp_gaps_all",
+ "triggers": [
+ "scan erp gaps all",
+ "lance scan erp gaps",
+ "run all erp scanners"
+ ],
+ "cmd": "bash \/var\/www\/html\/api\/scan-erp-gaps-all.sh",
+ "status": "PENDING_APPROVAL",
+ "created_at": "2026-04-19T18:26:03+00:00",
+ "source": "opus4-autowire-early-v2"
+ },
+ "348": {
+ "name": "scan_erp_gaps_llm",
+ "triggers": [
+ "scan erp gaps llm",
+ "llm gen erp gaps",
+ "sovereign erp gaps"
+ ],
+ "cmd": "python3 \/var\/www\/html\/api\/scan-erp-gaps-llm.py",
+ "status": "PENDING_APPROVAL",
+ "created_at": "2026-04-19T18:26:03+00:00",
+ "source": "opus4-autowire-early-v2"
+ },
+ "349": {
+ "name": "show_erp_gaps_stats",
+ "triggers": [
+ "show erp gaps stats",
+ "erp gaps summary",
+ "gap scan summary"
+ ],
+ "cmd": "psql postgresql:\/\/admin:admin123@10.1.0.3:5432\/adx_system -c 'select erp_id,count(*) gaps,round(avg(confidence_score)::numeric,3) avg_conf,max(scanned_at) last from erp_gap_scans group by erp_id order by gaps desc;'",
+ "status": "PENDING_SECURITY_REVIEW",
+ "created_at": "2026-04-19T18:26:03+00:00",
+ "source": "opus4-autowire-early-v2"
}
}
\ No newline at end of file
diff --git a/api/wevia-apple-scan.php b/api/wevia-apple-scan.php
index 788553090..2dd65d463 100644
--- a/api/wevia-apple-scan.php
+++ b/api/wevia-apple-scan.php
@@ -49,22 +49,39 @@ function extract_oss($text) {
return $hits;
}
-// Call Qwen VL via wevia-vision-vl.php for image understanding
+// Call Gemini 2.5 Flash direct with inline_data - 100% free tier vision
function call_vision_vl($image_url, $prompt) {
- $ch = curl_init('http://127.0.0.1/api/wevia-vision-vl.php');
+ $key = '';
+ foreach (@file('/etc/weval/secrets.env') ?: [] as $l) {
+ $l = trim($l); if (!$l || $l[0]==='#') continue;
+ $parts = explode('=', $l, 2); if (count($parts)<2) continue;
+ if (trim($parts[0]) === 'GEMINI_KEY') { $key = trim($parts[1]); break; }
+ }
+ if (!$key) return '(no GEMINI_KEY)';
+ $local = preg_replace('#^https?://[^/]+#', '/var/www/html', $image_url);
+ if (!file_exists($local)) return '(file not found: '.$local.')';
+ $img_b64 = base64_encode(file_get_contents($local));
+ $mime = mime_content_type($local) ?: 'image/png';
+ $body = ['contents' => [['parts' => [
+ ['inline_data' => ['mime_type' => $mime, 'data' => $img_b64]],
+ ['text' => $prompt]
+ ]]]];
+ $ch = curl_init("https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:generateContent?key=$key");
curl_setopt_array($ch, [
CURLOPT_POST => true,
CURLOPT_RETURNTRANSFER => true,
CURLOPT_TIMEOUT => 45,
- CURLOPT_POSTFIELDS => http_build_query(['image_url'=>$image_url, 'prompt'=>$prompt]),
- CURLOPT_SSL_VERIFYPEER => false,
+ CURLOPT_CONNECTTIMEOUT => 5,
+ CURLOPT_HTTPHEADER => ['Content-Type: application/json'],
+ CURLOPT_POSTFIELDS => json_encode($body),
]);
$r = curl_exec($ch);
+ $code = curl_getinfo($ch, CURLINFO_HTTP_CODE);
curl_close($ch);
+ if ($code !== 200) return "(gemini http $code)";
$d = json_decode($r, true);
- return $d['response'] ?? '';
+ return $d['candidates'][0]['content']['parts'][0]['text'] ?? '(no text)';
}
-
if ($action === 'status') {
$scans = load_scans();
echo json_encode([
diff --git a/api/wired-pending/intent-opus4-playwright_login_wtp.php b/api/wired-pending/intent-opus4-playwright_login_wtp.php
new file mode 100644
index 000000000..dc217671d
--- /dev/null
+++ b/api/wired-pending/intent-opus4-playwright_login_wtp.php
@@ -0,0 +1,17 @@
+ "playwright_login_wtp",
+ "triggers" => array(
+ "playwright login wtp",
+ "playwright e2e login",
+ "test login wtp video",
+ "run v41 playwright",
+ "playwright wtp test",
+ "e2e login video",
+ ),
+ "cmd" => "sudo -u www-data HOME=/var/www PLAYWRIGHT_BROWSERS_PATH=/var/www/.cache/ms-playwright node /var/www/html/api/v41-playwright-login-wtp.js 2>&1 | tail -c 2000",
+ "status" => "EXECUTED",
+ "created_at" => "2026-04-19T18:28:00+00:00",
+ "source" => "opus-yacine-v41",
+ "description" => "Playwright E2E REAL login yacine+WTP access with video capture",
+);
diff --git a/api/wired-pending/intent-opus4-scan_erp_gaps_all.php b/api/wired-pending/intent-opus4-scan_erp_gaps_all.php
new file mode 100644
index 000000000..6b1e4f194
--- /dev/null
+++ b/api/wired-pending/intent-opus4-scan_erp_gaps_all.php
@@ -0,0 +1,14 @@
+ 'scan_erp_gaps_all',
+ 'triggers' =>
+ array (
+ 0 => 'scan erp gaps all',
+ 1 => 'lance scan erp gaps',
+ 2 => 'run all erp scanners',
+ ),
+ 'cmd' => 'bash /var/www/html/api/scan-erp-gaps-all.sh',
+ 'status' => 'PENDING_APPROVAL',
+ 'created_at' => '2026-04-19T18:26:03+00:00',
+ 'source' => 'opus4-autowire-early-v2',
+);
diff --git a/api/wired-pending/intent-opus4-scan_erp_gaps_llm.php b/api/wired-pending/intent-opus4-scan_erp_gaps_llm.php
new file mode 100644
index 000000000..cbea78997
--- /dev/null
+++ b/api/wired-pending/intent-opus4-scan_erp_gaps_llm.php
@@ -0,0 +1,14 @@
+ 'scan_erp_gaps_llm',
+ 'triggers' =>
+ array (
+ 0 => 'scan erp gaps llm',
+ 1 => 'llm gen erp gaps',
+ 2 => 'sovereign erp gaps',
+ ),
+ 'cmd' => 'python3 /var/www/html/api/scan-erp-gaps-llm.py',
+ 'status' => 'PENDING_APPROVAL',
+ 'created_at' => '2026-04-19T18:26:03+00:00',
+ 'source' => 'opus4-autowire-early-v2',
+);
diff --git a/api/wired-pending/intent-opus4-show_erp_gaps_stats.php b/api/wired-pending/intent-opus4-show_erp_gaps_stats.php
new file mode 100644
index 000000000..a7a709834
--- /dev/null
+++ b/api/wired-pending/intent-opus4-show_erp_gaps_stats.php
@@ -0,0 +1,14 @@
+ 'show_erp_gaps_stats',
+ 'triggers' =>
+ array (
+ 0 => 'show erp gaps stats',
+ 1 => 'erp gaps summary',
+ 2 => 'gap scan summary',
+ ),
+ 'cmd' => 'psql postgresql://admin:admin123@10.1.0.3:5432/adx_system -c \'select erp_id,count(*) gaps,round(avg(confidence_score)::numeric,3) avg_conf,max(scanned_at) last from erp_gap_scans group by erp_id order by gaps desc;\'',
+ 'status' => 'PENDING_SECURITY_REVIEW',
+ 'created_at' => '2026-04-19T18:26:03+00:00',
+ 'source' => 'opus4-autowire-early-v2',
+);
diff --git a/data/wevia-apple-scans.json b/data/wevia-apple-scans.json
index c680a2ce6..88db5a449 100644
--- a/data/wevia-apple-scans.json
+++ b/data/wevia-apple-scans.json
@@ -22,9 +22,113 @@
"docker_images": 0
},
"image_url": "https:\/\/weval-consulting.com\/data\/wevia-apple-uploads\/apple_69e51d66b62457.37078168.png"
+ },
+ {
+ "id": "apple_69e51e1459f6f1.35131794",
+ "filename": "test-erp-page.png",
+ "stored_as": "apple_69e51e1459f6f1.35131794.png",
+ "size_bytes": 307961,
+ "scan_ms": 15768,
+ "scanned_at": "2026-04-19T18:25:40+00:00",
+ "caption": "Retest Gemini 2.5 Flash",
+ "ocr_text": "@ WEVAL ERP Gap-Fill Agents.\n\nOffre service commerciale — Agents Al autonomes pour combler les gaps fonctionnels des ERPs\nSAP\/Oracle\/Sage, par département métier, adaptables à 7 verticaux industriels.\n\n| © Les ERPs SAP\/Oracle\/Sage ne couvrent pas 100% des besoins métier. Nos agents Al comblent les gaps là où ERPs échouent.\n\nRISQUES CATALOGUÉS ERP GAPS IDENTIFIÉS VERTICAUX COUVERTS AGENTS PACK TOTAL\n8 critical -14 high 7 départements Retail - Pharma - Banque « Industrie 23 clients identifies\n\nServices - Conseil - Energie\n\n| @ Matrice des risques 5x5 — Likelihood x impact, mitigés par agents WEVAL\n\nLEGENDE SEVERITE\n\nFaible (1-4) IMPACT 1 IMPACT 2 IMPACT 3\nible\n\n1 Modéré (5-9)\n\nI élevé (10-14) cf o\n\nI critique (15-25)\n\ni\n\nChaque cellule = nombre de risques. ü\n\nCliquez pour filtrer la liste ci-dessous.\n? o\na o\n7 o\n\nIMPACT 4\n\n{ wo - isers=25 { eas - ours { por - 1445-20\n\nTAM\n\nPELINE\n\n7.3 me\n\n23 clients x revenue avg\n\nIMPACT 5\n\nV67 Registry — 84 ERP agents \/ 96\ntotal\n\nSavings potentiel : 21.11 M€\/an\nPaperclip source-of-truth + API V67\n",
+ "vision_text": "Here is the extracted information from the image:\n\n1) **All text visible:**\n * Logout\n * ← WTP Portal\n * Print offre\n * WEVAL ERP Gap-Fill Agents.\n * Offre service commerciale — Agents AI autonomes pour combler les gaps fonctionnels des ERPs SAP\/Oracle\/Sage, par département métier, adaptables à 7 verticaux industriels.\n * 💡 Les ERPs SAP\/Oracle\/Sage ne couvrent pas 100% des besoins métier. Nos agents AI comblent les gaps là où ERPs échouent.\n * RISQUES CATALOGUÉS\n * 25\n * 8 critical • 14 high\n * ERP GAPS IDENTIFIÉS\n * 33\n * 7 départements\n * VERTICAUX COUVERTS\n * 7\n * Retail • Pharma • Banque • Industrie • Services • Conseil • Énergie\n * AGENTS PACK TOTAL\n * 149\n * 23 clients identifiés\n * TAM PIPELINE\n * 7.3 M€\n * 23 clients x revenue avg\n * Matrice des risques 5x5 — Likelihood x Impact, mitigés par agents WEVAL\n * LÉGENDE SÉVÉRITÉ\n * Faible (1-4)\n * Modéré (5-9)\n * Élevé (10-14)\n * Critique (15-25)\n * Chaque cellule = nombre de risques. Cliquez pour filtrer la liste ci-dessous.\n * IMPACT 1\n * IMPACT 2\n * IMPACT 3\n * IMPACT 4\n * IMPACT 5\n * L=5\n * L=4\n * L=3\n * L=2\n * L=1\n * . 
(dots in matrix cells)\n * 1\n * 2\n * 2\n * 7\n * 3\n * 3\n * 5\n * 2\n * R02 • L5xI5=25 (partially visible)\n * R13 • L5xI5=25 (partially visible)\n * R01 • L4xI5=20 (partially visible)\n * V67 Registry — 84 ERP agents \/ 96 total (from tooltip)\n * Savings potentiel : 21.11 M€\/an (from tooltip)\n * Paperclip source-of-truth • API V67 (from tooltip)\n\n2) **Names of software tools, frameworks, AI agents, open-source projects, SaaS platforms mentioned:**\n * WEVAL (Company\/Product name)\n * ERP (Enterprise Resource Planning - a category of software)\n * AI Agents \/ Agents AI (Type of autonomous software entities)\n * SAP (ERP vendor\/software)\n * Oracle (ERP vendor\/software)\n * Sage (ERP vendor\/software)\n * V67 Registry (Platform\/System name, from tooltip)\n * API V67 (Specific API name, from tooltip)\n\n3) **Architecture diagrams components:**\n * None explicitly visible as diagrams. \"AI Agents\" and \"API V67\" are mentioned textually as components but not visually represented in an architectural diagram.\n\n4) **GitHub URLs or repo names:**\n * None visible.\n\n5) **Docker images:**\n * None visible.\n\n6) **Programming languages visible:**\n * None visible.",
+ "oss_extracted": {
+ "github_urls": [],
+ "project_names": [
+ "docker"
+ ],
+ "stacks": [],
+ "docker_images": []
+ },
+ "counts": {
+ "github_urls": 0,
+ "project_names": 1,
+ "docker_images": 0
+ },
+ "image_url": "https:\/\/weval-consulting.com\/data\/wevia-apple-uploads\/apple_69e51e1459f6f1.35131794.png"
+ },
+ {
+ "id": "apple_69e51e3b8155b7.97045246",
+ "filename": "test-rich.png",
+ "stored_as": "apple_69e51e3b8155b7.97045246.png",
+ "size_bytes": 25649,
+ "scan_ms": 3732,
+ "scanned_at": "2026-04-19T18:26:07+00:00",
+ "caption": "OSS Discovery page - rich OSS mentions",
+ "ocr_text": "",
+ "vision_text": "Here's the extracted information from the image:\n\n1) **All text visible:**\n * WEVAL\n * Espace sécurisé\n * UTILISATEUR\n * MOT DE PASSE\n * Se connecter\n * WEVAL Consulting — Casablanca\n\n2) **Names of software tools, frameworks, AI agents, open-source projects, SaaS platforms mentioned:**\n * WEVAL (Appears to be a company or product name, potentially a SaaS platform or internal tool)\n * WEVAL Consulting (Company name)\n\n3) **Architecture diagrams components:**\n * None visible.\n\n4) **GitHub URLs or repo names:**\n * None visible.\n\n5) **Docker images:**\n * None visible.\n\n6) **Programming languages visible:**\n * None visible.",
+ "oss_extracted": {
+ "github_urls": [],
+ "project_names": [
+ "docker"
+ ],
+ "stacks": [],
+ "docker_images": []
+ },
+ "counts": {
+ "github_urls": 0,
+ "project_names": 1,
+ "docker_images": 0
+ },
+ "image_url": "https:\/\/weval-consulting.com\/data\/wevia-apple-uploads\/apple_69e51e3b8155b7.97045246.png"
+ },
+ {
+ "id": "apple_69e51e61b96415.08941175",
+ "filename": "yacine_img.png",
+ "stored_as": "apple_69e51e61b96415.08941175.png",
+ "size_bytes": 162664,
+ "scan_ms": 26241,
+ "scanned_at": "2026-04-19T18:27:08+00:00",
+ "caption": "QA Hub V72 screenshot Yacine",
+ "ocr_text": "@ QA Hub — Quality Assurance Central\n\nL99 : NonReg - Risk - Hallucination - Lean 60- TOC - Zero regression\n\nY 199 NONREG M DPMO SIGMA 3% INTENTS WIRED @ TOC BOTTLENECK\n\n153\/153 60 149 es\nGoldratt Theory of Constraints\n\n100% - 20260416 143247 target: 3.4 DPMO (60) WEVIA Master orchestrator\n\n# HALLU NOT EVAL @ ACTION PLAN # SERVICES UP\n\nTruthfulQA - HaluEval - FActScore RAGAS Dynamic items WEVIA managed Apache\/Sovereign\/Qdrant\/PG\/Redis\/Gitea\n\n© QUICK ACCESS\n\n© WEVIA Training © WEVIA Master M WTP Dashboard @ DG Center © Sales Hub V72 4 055 Discovery © Pitch Y 199 SON\n\n@ QA HUB V72 — UNIFIED QUALITY DASHBOARD\n\nAgrège V67 (dashboard), V70 (honest), V71 (risk+plan). Auto-refresh 20s. Zero fake + Zero regression : Lean 6 + TOC + NIST Al RMF + ISO 23894 + Anthropic RSP.\n\n@ RISK SCORE\n\n57.7%\n\nNIST Al RME \/ ISO 23894\n\n¥ refreshed 18:37:29\n",
+ "vision_text": "Here's the extracted information from the image:\n\n---\n\n**1) All text visible:**\n\n* **Browser\/Header:**\n * `weval-consulting.com\/qa-hub.html`\n * `Wevads`\n * `Calendly`\n * `Designs.ai - Creativ`\n * `claude-code`\n * `Create - AI Image G...`\n * `Entreprise d...`\n * `Vistex Contracts Ov...`\n * `googlecba1a80ba9...`\n * `SAP S\/4HANA for Ri...`\n * `What is SAP Vistex?...`\n * `125 %`\n * `Réinitialiser`\n * `Tous les favoris`\n * `Logout`\n * `refreshed 18:37:29`\n* **Main Dashboard - Header:**\n * `QA Hub — Quality Assurance Central`\n * `L99 • NonReg • Risk • Hallucination • Lean 6σ • TOC • Zero regression`\n* **Dashboard Cards:**\n * **L99 NONREG:**\n * `L99 NONREG`\n * `153\/153`\n * `100% • 20260416_143247`\n * **DPMO SIGMA:**\n * `DPMO SIGMA`\n * `6σ`\n * `target: 3.4 DPMO (6σ)`\n * **INTENTS WIRED:**\n * `INTENTS WIRED`\n * `149`\n * `WEVIA Master orchestrator`\n * **TOC BOTTLENECK:**\n * `TOC BOTTLENECK`\n * `Close \/ Contract`\n * `Goldratt Theory of Constraints`\n * **RISK SCORE:**\n * `RISK SCORE`\n * `57.7%`\n * `NIST AI RMF \/ ISO 23894`\n * **HALLU NOT EVAL:**\n * `HALLU NOT EVAL`\n * `7\/7`\n * `TruthfulQA • HaluEval • FactScore • RAGAS`\n * **ACTION PLAN:**\n * `ACTION PLAN`\n * `13`\n * `Dynamic items WEVIA managed`\n * **SERVICES UP:**\n * `SERVICES UP`\n * `100%`\n * `Apache\/Sovereign\/Qdrant\/PG\/Redis\/Gitea`\n* **Quick Access:**\n * `QUICK ACCESS`\n * `WEVIA Training`\n * `WEVIA Master`\n * `WTP Dashboard`\n * `DG Center`\n * `Sales Hub V72`\n * `OSS Discovery`\n * `Pitch`\n * `L99 JSON`\n* **QA Hub V72 Section:**\n * `QA HUB V72 — UNIFIED QUALITY DASHBOARD`\n * `Agrège V67 (dashboard), V70 (honest), V71 (risk+plan). Auto-refresh 20s. 
Zero fake • Zero regression • Lean 6σ + TOC + NIST AI RMF + ISO 23894 + Anthropic RSP.`\n* **Windows Taskbar (bottom):**\n * `Taper ici pour rechercher`\n * `Liens`\n * `23°C Ciel couvert`\n * `18:37`\n * `19\/04\/2026`\n\n---\n\n**2) Names of software tools, frameworks, AI agents, open-source projects, SaaS platforms mentioned:**\n\n* **SaaS Platforms\/Tools (from browser tabs):**\n * `Wevads`\n * `Calendly`\n * `Designs.ai`\n * `Vistex`\n * `SAP S\/4HANA`\n* **AI Agents\/Models\/Frameworks:**\n * `Claude` (from `claude-code` tab)\n * `TruthfulQA`\n * `HaluEval`\n * `FactScore`\n * `RAGAS`\n * `Anthropic RSP` (Refers to Anthropic's Responsible Scaling Policy, implying the use or consideration of Anthropic's AI models\/practices)\n* **Quality\/Management Frameworks\/Methodologies:**\n * `Lean 6σ` (Lean Six Sigma)\n * `TOC` (Theory of Constraints, mentioned with `G",
+ "oss_extracted": {
+ "github_urls": [],
+ "project_names": [
+ "qdrant",
+ "postgres",
+ "redis",
+ "docker",
+ "gitea"
+ ],
+ "stacks": [],
+ "docker_images": []
+ },
+ "counts": {
+ "github_urls": 0,
+ "project_names": 5,
+ "docker_images": 0
+ },
+ "image_url": "https:\/\/weval-consulting.com\/data\/wevia-apple-uploads\/apple_69e51e61b96415.08941175.png"
+ },
+ {
+ "id": "apple_69e51e952fdf65.84699201",
+ "filename": "yacine_img.png",
+ "stored_as": "apple_69e51e952fdf65.84699201.png",
+ "size_bytes": 247877,
+ "scan_ms": 21802,
+ "scanned_at": "2026-04-19T18:27:54+00:00",
+ "caption": "ERP Gap-Fill page screenshot Yacine",
+ "ocr_text": "Verticals couverts\n\n| \/ ERP Gap-Fill Catalog —7 départements x SAP\/Oracle\/Sage\n\n© Finance & Controlling\nSAP FI-GL SAP FI-AP SAP FI-AR SAP CO-PA\nOracle Financials Cloud Oracle Risk Mgmt Cloud\n\nSage X3 Finance\n\n@ Real-\n© Cash Flow Predictor AI\n\nime cash flow prediction 90 days\n\nEnergie\n\n% Al-powered invoice 3-way match anomaly\n\n© Invoice Anomaly Detector\n\n% Cross-ERP consolidation automatique\n\n multi-ERP Consolidator\n\nHolding Grands groupes\n\n#4. Close mensuel auto en 3 jours (vs 10-15)\n© Fast Close Agent\n\nSage 100 Compta\n\nClient satisfaction (pilot)\n\n@ Supply Chain & Procurement\n\nSAP MM SAP Ariba SAP IBP SAP APO Oracle SCM Cloud\n\nOracle Procurement Cloud Sage X3 Supply\n\n@, Stockout prediction ML (14j horizon)\n\n© Stockout Predictor agent\n\n#4 Nendor fraud detection (shell companies)\n© Vendor Fraud Agent\n\nBanque Pharma Public\n\n# Dynamic safety stock optimization\n© Safety Stock AI\n\n#4 Multi-echelon inventory balancing\n© Inventory Balancer\n\n@, Supplier risk scoring temps réel\n\n© Supplier Risk Scorer\n\ne\/5 \/ 4.5\/5\n\nAgents coverage\n\n& Manufacturing & Production\n\nSAP PP SAP PM SAP QM SAP MES Oracle Manufacturing Cloud\n\nOracle PLM Cloud Sage X3 Production\n\n%, OEE real-time per machine (5s granularité)\n© OEE Live Agent\n\nIndustrie Pharma\n\n# Predictive maintenance (sound\/vibration)\n© Predictive Maint AI\n\nIndustrie\n\n@ Bottleneck detection TOC dynamique\n© ToC Bottleneck Agent\n\nIndustrie Pharma\n\n7% SPC anomaly detection temps réel\n© SPC Real-time Agent\n\n@ Digital twin simulation line\n\n© Digital Twin Agent\n",
+ "vision_text": "Here is the extracted information from the image:\n\n### 1) All Text Visible:\n\n* **Browser\/Header:**\n * weval-consulting.com\/erp-gap-fill-offer.html\n * Wewads\n * Calendly\n * Designs.ai - Creativ...\n * claude-code\n * Create - AI Image G...\n * Entreprise de consei...\n * Vistex Contracts Ov...\n * googlecba1a80ba9...\n * SAP S\/4HANA for Ri...\n * What is SAP Vistex?...\n * 1909 - Chargebacks...\n * Queue | Vistex-Day...\n * Tous les favoris\n * Verticals couverts\n * 7#\n * Client satisfaction (pilot)\n * 0\/5 \/ 4.5\/5\n * Agents coverage\n * 25#\n * Logout\n* **Main Content (ERP Gap-Fill Catalog):**\n * ERP Gap-Fill Catalog — 7 départements x SAP\/Oracle\/Sage\n * **Finance & Controlling**\n * SAP FI-GL\n * SAP FI-AP\n * SAP FI-AR\n * SAP CO-PA\n * SAP CO-CCA\n * Oracle Financials Cloud\n * Oracle Risk Mgmt Cloud\n * Sage 100 Compta\n * Sage X3 Finance\n * Real-time cash flow prediction 90 days\n * Cash Flow Predictor AI\n * ERP limite : SAP FI only historical\n * Retail\n * Industrie\n * Énergie\n * AI-powered invoice 3-way match anomaly\n * Invoice Anomaly Detector\n * ERP limite : Sage match 2-way only\n * Tous\n * Cross-ERP consolidation automatique\n * Multi-ERP Consolidator\n * ERP limite : Oracle cloud slow cross-book\n * Holding\n * Grands groupes\n * Close mensuel auto en 3 jours (vs 10-15)\n * Fast Close Agent\n * ERP limite : SAP FI manual reconcil\n * Tous\n * Budget vs reality alerts temps réel\n * **Supply Chain & Procurement**\n * SAP MM\n * SAP Ariba\n * SAP IBP\n * SAP APO\n * Oracle SCM Cloud\n * Oracle Procurement Cloud\n * Sage X3 Supply\n * Stockout prediction ML (14j horizon)\n * Stockout Predictor Agent\n * ERP limite : SAP MM reactive MRP\n * Retail\n * Pharma\n * Industrie\n * Vendor fraud detection (shell companies)\n * Vendor Fraud Agent\n * ERP limite : Oracle Proc manual audit\n * Banque\n * Pharma\n * Public\n * Dynamic safety stock optimization\n * Safety Stock AI\n * ERP limite : Sage X3 fixed buffer\n * 
Retail\n * Industrie\n * Multi-echelon inventory balancing\n * Inventory Balancer\n * ERP limite : SAP APO complex setup\n * Retail\n * Pharma\n * Industrie\n * Supplier risk scoring temps réel\n * Supplier Risk Scorer\n * **Manufacturing & Production**\n * SAP PP\n * SAP PM\n * SAP QM\n * SAP MES\n * Oracle Manufacturing Cloud\n * Oracle PLM Cloud\n * Sage X3 Production\n ",
+ "oss_extracted": {
+ "github_urls": [],
+ "project_names": [
+ "docker"
+ ],
+ "stacks": [],
+ "docker_images": []
+ },
+ "counts": {
+ "github_urls": 0,
+ "project_names": 1,
+ "docker_images": 0
+ },
+ "image_url": "https:\/\/weval-consulting.com\/data\/wevia-apple-uploads\/apple_69e51e952fdf65.84699201.png"
}
],
- "total": 1,
- "oss_total": 0,
- "last_scan": "2026-04-19T18:22:32+00:00"
+ "total": 5,
+ "oss_total": 8,
+ "last_scan": "2026-04-19T18:27:54+00:00"
}
\ No newline at end of file
diff --git a/data/wevia-apple-uploads/apple_69e51e1459f6f1.35131794.png b/data/wevia-apple-uploads/apple_69e51e1459f6f1.35131794.png
new file mode 100644
index 000000000..3de815762
Binary files /dev/null and b/data/wevia-apple-uploads/apple_69e51e1459f6f1.35131794.png differ
diff --git a/data/wevia-apple-uploads/apple_69e51e3b8155b7.97045246.png b/data/wevia-apple-uploads/apple_69e51e3b8155b7.97045246.png
new file mode 100644
index 000000000..eb0e30af0
Binary files /dev/null and b/data/wevia-apple-uploads/apple_69e51e3b8155b7.97045246.png differ
diff --git a/data/wevia-apple-uploads/apple_69e51e61b96415.08941175.png b/data/wevia-apple-uploads/apple_69e51e61b96415.08941175.png
new file mode 100644
index 000000000..49a6b7610
Binary files /dev/null and b/data/wevia-apple-uploads/apple_69e51e61b96415.08941175.png differ
diff --git a/data/wevia-apple-uploads/apple_69e51e952fdf65.84699201.png b/data/wevia-apple-uploads/apple_69e51e952fdf65.84699201.png
new file mode 100644
index 000000000..145a3d00b
Binary files /dev/null and b/data/wevia-apple-uploads/apple_69e51e952fdf65.84699201.png differ