From ec3534b75096e690756208ce371c75cd474bae7b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 18:24:59 +0800 Subject: [PATCH 1/9] initial commit: able to add into todo database --- index.js | 44 +- node_modules/.bin/semver | 1 + node_modules/buffer-writer/.travis.yml | 7 + node_modules/buffer-writer/LICENSE | 19 + node_modules/buffer-writer/README.md | 48 + node_modules/buffer-writer/index.js | 129 ++ node_modules/buffer-writer/package.json | 57 + node_modules/buffer-writer/test/mocha.opts | 1 + .../buffer-writer/test/writer-tests.js | 218 +++ node_modules/packet-reader/.travis.yml | 8 + node_modules/packet-reader/README.md | 87 ++ node_modules/packet-reader/index.js | 65 + node_modules/packet-reader/package.json | 52 + node_modules/packet-reader/test/index.js | 148 ++ node_modules/pg-connection-string/LICENSE | 21 + node_modules/pg-connection-string/README.md | 72 + node_modules/pg-connection-string/index.d.ts | 15 + node_modules/pg-connection-string/index.js | 89 ++ .../pg-connection-string/package.json | 67 + node_modules/pg-int8/LICENSE | 13 + node_modules/pg-int8/README.md | 16 + node_modules/pg-int8/index.js | 100 ++ node_modules/pg-int8/package.json | 52 + node_modules/pg-pool/LICENSE | 21 + node_modules/pg-pool/README.md | 376 ++++++ node_modules/pg-pool/index.js | 403 ++++++ node_modules/pg-pool/package.json | 67 + .../pg-pool/test/bring-your-own-promise.js | 42 + .../pg-pool/test/connection-strings.js | 29 + .../pg-pool/test/connection-timeout.js | 229 ++++ node_modules/pg-pool/test/ending.js | 40 + node_modules/pg-pool/test/error-handling.js | 260 ++++ node_modules/pg-pool/test/events.js | 86 ++ node_modules/pg-pool/test/idle-timeout.js | 87 ++ node_modules/pg-pool/test/index.js | 226 ++++ node_modules/pg-pool/test/logging.js | 20 + node_modules/pg-pool/test/max-uses.js | 98 ++ .../pg-pool/test/releasing-clients.js | 54 + node_modules/pg-pool/test/setup.js | 10 + node_modules/pg-pool/test/sizing.js | 58 + node_modules/pg-pool/test/submittable.js | 19 + node_modules/pg-pool/test/timeout.js | 0 node_modules/pg-pool/test/verify.js | 25 + node_modules/pg-protocol/LICENSE | 21 + .../pg-protocol/dist/BufferReader.d.ts | 14 + node_modules/pg-protocol/dist/BufferReader.js | 48 + .../pg-protocol/dist/BufferReader.js.map | 1 + .../pg-protocol/dist/BufferWriter.d.ts | 20 + node_modules/pg-protocol/dist/BufferWriter.js | 109 ++ .../pg-protocol/dist/BufferWriter.js.map | 1 + node_modules/pg-protocol/dist/b.d.ts | 1 + node_modules/pg-protocol/dist/b.js | 25 + node_modules/pg-protocol/dist/b.js.map | 1 + .../pg-protocol/dist/buffer-reader.d.ts | 14 + .../pg-protocol/dist/buffer-reader.js | 49 + .../pg-protocol/dist/buffer-reader.js.map | 1 + .../pg-protocol/dist/buffer-writer.d.ts | 16 + .../pg-protocol/dist/buffer-writer.js | 80 ++ .../pg-protocol/dist/buffer-writer.js.map | 1 + node_modules/pg-protocol/dist/connection.d.ts | 22 + node_modules/pg-protocol/dist/connection.js | 311 +++++ .../pg-protocol/dist/connection.js.map | 1 + .../pg-protocol/dist/inbound-parser.test.d.ts | 1 + .../pg-protocol/dist/inbound-parser.test.js | 483 +++++++ .../dist/inbound-parser.test.js.map | 1 + node_modules/pg-protocol/dist/index.d.ts | 5 + node_modules/pg-protocol/dist/index.js | 12 + node_modules/pg-protocol/dist/index.js.map | 1 + node_modules/pg-protocol/dist/messages.d.ts | 182 +++ node_modules/pg-protocol/dist/messages.js | 150 +++ node_modules/pg-protocol/dist/messages.js.map | 1 + .../dist/outbound-serializer.test.d.ts | 1 + 
.../dist/outbound-serializer.test.js | 220 +++ .../dist/outbound-serializer.test.js.map | 1 + node_modules/pg-protocol/dist/parser.d.ts | 37 + node_modules/pg-protocol/dist/parser.js | 299 +++++ node_modules/pg-protocol/dist/parser.js.map | 1 + node_modules/pg-protocol/dist/serializer.d.ts | 41 + node_modules/pg-protocol/dist/serializer.js | 193 +++ .../pg-protocol/dist/serializer.js.map | 1 + .../pg-protocol/dist/testing/buffer-list.d.ts | 15 + .../pg-protocol/dist/testing/buffer-list.js | 68 + .../dist/testing/buffer-list.js.map | 1 + .../dist/testing/test-buffers.d.ts | 30 + .../pg-protocol/dist/testing/test-buffers.js | 137 ++ .../dist/testing/test-buffers.js.map | 1 + node_modules/pg-protocol/package.json | 51 + node_modules/pg-protocol/src/b.ts | 28 + node_modules/pg-protocol/src/buffer-reader.ts | 53 + node_modules/pg-protocol/src/buffer-writer.ts | 85 ++ .../pg-protocol/src/inbound-parser.test.ts | 522 ++++++++ node_modules/pg-protocol/src/index.ts | 11 + node_modules/pg-protocol/src/messages.ts | 222 +++ .../src/outbound-serializer.test.ts | 243 ++++ node_modules/pg-protocol/src/parser.ts | 377 ++++++ node_modules/pg-protocol/src/serializer.ts | 264 ++++ .../pg-protocol/src/testing/buffer-list.ts | 75 ++ .../pg-protocol/src/testing/test-buffers.ts | 156 +++ .../pg-protocol/src/types/chunky.d.ts | 1 + node_modules/pg-protocol/tsconfig.json | 24 + node_modules/pg-types/.travis.yml | 7 + node_modules/pg-types/Makefile | 14 + node_modules/pg-types/README.md | 75 ++ node_modules/pg-types/index.d.ts | 137 ++ node_modules/pg-types/index.js | 47 + node_modules/pg-types/index.test-d.ts | 21 + node_modules/pg-types/lib/arrayParser.js | 11 + node_modules/pg-types/lib/binaryParsers.js | 257 ++++ node_modules/pg-types/lib/builtins.js | 73 + node_modules/pg-types/lib/textParsers.js | 215 +++ node_modules/pg-types/package.json | 69 + node_modules/pg-types/test/index.js | 24 + node_modules/pg-types/test/types.js | 597 +++++++++ node_modules/pg/LICENSE | 21 + node_modules/pg/README.md | 96 ++ node_modules/pg/lib/client.js | 604 +++++++++ node_modules/pg/lib/connection-parameters.js | 156 +++ node_modules/pg/lib/connection.js | 208 +++ node_modules/pg/lib/defaults.js | 80 ++ node_modules/pg/lib/index.js | 56 + node_modules/pg/lib/native/client.js | 299 +++++ node_modules/pg/lib/native/index.js | 2 + node_modules/pg/lib/native/query.js | 165 +++ node_modules/pg/lib/query.js | 228 ++++ node_modules/pg/lib/result.js | 100 ++ node_modules/pg/lib/sasl.js | 151 +++ node_modules/pg/lib/type-overrides.js | 35 + node_modules/pg/lib/utils.js | 186 +++ node_modules/pg/package.json | 82 ++ node_modules/pgpass/.npmignore | 10 + node_modules/pgpass/README.md | 74 + node_modules/pgpass/lib/helper.js | 233 ++++ node_modules/pgpass/lib/index.js | 23 + node_modules/pgpass/package.json | 71 + node_modules/postgres-array/index.d.ts | 4 + node_modules/postgres-array/index.js | 97 ++ node_modules/postgres-array/license | 21 + node_modules/postgres-array/package.json | 67 + node_modules/postgres-array/readme.md | 43 + node_modules/postgres-bytea/index.js | 31 + node_modules/postgres-bytea/license | 21 + node_modules/postgres-bytea/package.json | 66 + node_modules/postgres-bytea/readme.md | 34 + node_modules/postgres-date/index.js | 110 ++ node_modules/postgres-date/license | 21 + node_modules/postgres-date/package.json | 65 + node_modules/postgres-date/readme.md | 49 + node_modules/postgres-interval/index.d.ts | 20 + node_modules/postgres-interval/index.js | 125 ++ node_modules/postgres-interval/license | 21 + 
node_modules/postgres-interval/package.json | 68 + node_modules/postgres-interval/readme.md | 48 + node_modules/semver/.npmignore | 1 + node_modules/semver/LICENSE | 27 + node_modules/semver/Makefile | 24 + node_modules/semver/README.md | 303 +++++ node_modules/semver/bin/semver | 133 ++ node_modules/semver/foot.js.txt | 6 + node_modules/semver/head.js.txt | 2 + node_modules/semver/package.json | 53 + node_modules/semver/semver.browser.js | 1187 ++++++++++++++++ node_modules/semver/semver.browser.js.gz | Bin 0 -> 7938 bytes node_modules/semver/semver.js | 1191 +++++++++++++++++ node_modules/semver/semver.min.js | 1 + node_modules/semver/semver.min.js.gz | Bin 0 -> 3756 bytes node_modules/semver/test/amd.js | 15 + node_modules/semver/test/big-numbers.js | 24 + node_modules/semver/test/clean.js | 29 + node_modules/semver/test/gtr.js | 173 +++ node_modules/semver/test/index.js | 684 ++++++++++ node_modules/semver/test/ltr.js | 181 +++ node_modules/semver/test/major-minor-patch.js | 72 + node_modules/semver/test/no-module.js | 19 + node_modules/split/.npmignore | 3 + node_modules/split/.travis.yml | 3 + node_modules/split/LICENCE | 22 + node_modules/split/examples/pretty.js | 26 + node_modules/split/index.js | 63 + node_modules/split/package.json | 62 + node_modules/split/readme.markdown | 72 + node_modules/split/test/options.asynct.js | 46 + .../split/test/partitioned_unicode.js | 34 + node_modules/split/test/split.asynct.js | 137 ++ node_modules/split/test/try_catch.asynct.js | 51 + node_modules/through/.travis.yml | 5 + node_modules/through/LICENSE.APACHE2 | 15 + node_modules/through/LICENSE.MIT | 24 + node_modules/through/index.js | 108 ++ node_modules/through/package.json | 68 + node_modules/through/readme.markdown | 64 + node_modules/through/test/async.js | 28 + node_modules/through/test/auto-destroy.js | 30 + node_modules/through/test/buffering.js | 71 + node_modules/through/test/end.js | 45 + node_modules/through/test/index.js | 133 ++ node_modules/xtend/.jshintrc | 30 + node_modules/xtend/LICENSE | 20 + node_modules/xtend/README.md | 32 + node_modules/xtend/immutable.js | 19 + node_modules/xtend/mutable.js | 17 + node_modules/xtend/package.json | 86 ++ node_modules/xtend/test.js | 103 ++ package-lock.json | 119 ++ package.json | 22 + 204 files changed, 19207 insertions(+), 23 deletions(-) create mode 120000 node_modules/.bin/semver create mode 100644 node_modules/buffer-writer/.travis.yml create mode 100644 node_modules/buffer-writer/LICENSE create mode 100644 node_modules/buffer-writer/README.md create mode 100644 node_modules/buffer-writer/index.js create mode 100644 node_modules/buffer-writer/package.json create mode 100644 node_modules/buffer-writer/test/mocha.opts create mode 100644 node_modules/buffer-writer/test/writer-tests.js create mode 100644 node_modules/packet-reader/.travis.yml create mode 100644 node_modules/packet-reader/README.md create mode 100644 node_modules/packet-reader/index.js create mode 100644 node_modules/packet-reader/package.json create mode 100644 node_modules/packet-reader/test/index.js create mode 100644 node_modules/pg-connection-string/LICENSE create mode 100644 node_modules/pg-connection-string/README.md create mode 100644 node_modules/pg-connection-string/index.d.ts create mode 100644 node_modules/pg-connection-string/index.js create mode 100644 node_modules/pg-connection-string/package.json create mode 100644 node_modules/pg-int8/LICENSE create mode 100644 node_modules/pg-int8/README.md create mode 100644 node_modules/pg-int8/index.js create mode 
100644 node_modules/pg-int8/package.json create mode 100644 node_modules/pg-pool/LICENSE create mode 100644 node_modules/pg-pool/README.md create mode 100644 node_modules/pg-pool/index.js create mode 100644 node_modules/pg-pool/package.json create mode 100644 node_modules/pg-pool/test/bring-your-own-promise.js create mode 100644 node_modules/pg-pool/test/connection-strings.js create mode 100644 node_modules/pg-pool/test/connection-timeout.js create mode 100644 node_modules/pg-pool/test/ending.js create mode 100644 node_modules/pg-pool/test/error-handling.js create mode 100644 node_modules/pg-pool/test/events.js create mode 100644 node_modules/pg-pool/test/idle-timeout.js create mode 100644 node_modules/pg-pool/test/index.js create mode 100644 node_modules/pg-pool/test/logging.js create mode 100644 node_modules/pg-pool/test/max-uses.js create mode 100644 node_modules/pg-pool/test/releasing-clients.js create mode 100644 node_modules/pg-pool/test/setup.js create mode 100644 node_modules/pg-pool/test/sizing.js create mode 100644 node_modules/pg-pool/test/submittable.js create mode 100644 node_modules/pg-pool/test/timeout.js create mode 100644 node_modules/pg-pool/test/verify.js create mode 100644 node_modules/pg-protocol/LICENSE create mode 100644 node_modules/pg-protocol/dist/BufferReader.d.ts create mode 100644 node_modules/pg-protocol/dist/BufferReader.js create mode 100644 node_modules/pg-protocol/dist/BufferReader.js.map create mode 100644 node_modules/pg-protocol/dist/BufferWriter.d.ts create mode 100644 node_modules/pg-protocol/dist/BufferWriter.js create mode 100644 node_modules/pg-protocol/dist/BufferWriter.js.map create mode 100644 node_modules/pg-protocol/dist/b.d.ts create mode 100644 node_modules/pg-protocol/dist/b.js create mode 100644 node_modules/pg-protocol/dist/b.js.map create mode 100644 node_modules/pg-protocol/dist/buffer-reader.d.ts create mode 100644 node_modules/pg-protocol/dist/buffer-reader.js create mode 100644 node_modules/pg-protocol/dist/buffer-reader.js.map create mode 100644 node_modules/pg-protocol/dist/buffer-writer.d.ts create mode 100644 node_modules/pg-protocol/dist/buffer-writer.js create mode 100644 node_modules/pg-protocol/dist/buffer-writer.js.map create mode 100644 node_modules/pg-protocol/dist/connection.d.ts create mode 100644 node_modules/pg-protocol/dist/connection.js create mode 100644 node_modules/pg-protocol/dist/connection.js.map create mode 100644 node_modules/pg-protocol/dist/inbound-parser.test.d.ts create mode 100644 node_modules/pg-protocol/dist/inbound-parser.test.js create mode 100644 node_modules/pg-protocol/dist/inbound-parser.test.js.map create mode 100644 node_modules/pg-protocol/dist/index.d.ts create mode 100644 node_modules/pg-protocol/dist/index.js create mode 100644 node_modules/pg-protocol/dist/index.js.map create mode 100644 node_modules/pg-protocol/dist/messages.d.ts create mode 100644 node_modules/pg-protocol/dist/messages.js create mode 100644 node_modules/pg-protocol/dist/messages.js.map create mode 100644 node_modules/pg-protocol/dist/outbound-serializer.test.d.ts create mode 100644 node_modules/pg-protocol/dist/outbound-serializer.test.js create mode 100644 node_modules/pg-protocol/dist/outbound-serializer.test.js.map create mode 100644 node_modules/pg-protocol/dist/parser.d.ts create mode 100644 node_modules/pg-protocol/dist/parser.js create mode 100644 node_modules/pg-protocol/dist/parser.js.map create mode 100644 node_modules/pg-protocol/dist/serializer.d.ts create mode 100644 
node_modules/pg-protocol/dist/serializer.js create mode 100644 node_modules/pg-protocol/dist/serializer.js.map create mode 100644 node_modules/pg-protocol/dist/testing/buffer-list.d.ts create mode 100644 node_modules/pg-protocol/dist/testing/buffer-list.js create mode 100644 node_modules/pg-protocol/dist/testing/buffer-list.js.map create mode 100644 node_modules/pg-protocol/dist/testing/test-buffers.d.ts create mode 100644 node_modules/pg-protocol/dist/testing/test-buffers.js create mode 100644 node_modules/pg-protocol/dist/testing/test-buffers.js.map create mode 100644 node_modules/pg-protocol/package.json create mode 100644 node_modules/pg-protocol/src/b.ts create mode 100644 node_modules/pg-protocol/src/buffer-reader.ts create mode 100644 node_modules/pg-protocol/src/buffer-writer.ts create mode 100644 node_modules/pg-protocol/src/inbound-parser.test.ts create mode 100644 node_modules/pg-protocol/src/index.ts create mode 100644 node_modules/pg-protocol/src/messages.ts create mode 100644 node_modules/pg-protocol/src/outbound-serializer.test.ts create mode 100644 node_modules/pg-protocol/src/parser.ts create mode 100644 node_modules/pg-protocol/src/serializer.ts create mode 100644 node_modules/pg-protocol/src/testing/buffer-list.ts create mode 100644 node_modules/pg-protocol/src/testing/test-buffers.ts create mode 100644 node_modules/pg-protocol/src/types/chunky.d.ts create mode 100644 node_modules/pg-protocol/tsconfig.json create mode 100644 node_modules/pg-types/.travis.yml create mode 100644 node_modules/pg-types/Makefile create mode 100644 node_modules/pg-types/README.md create mode 100644 node_modules/pg-types/index.d.ts create mode 100644 node_modules/pg-types/index.js create mode 100644 node_modules/pg-types/index.test-d.ts create mode 100644 node_modules/pg-types/lib/arrayParser.js create mode 100644 node_modules/pg-types/lib/binaryParsers.js create mode 100644 node_modules/pg-types/lib/builtins.js create mode 100644 node_modules/pg-types/lib/textParsers.js create mode 100644 node_modules/pg-types/package.json create mode 100644 node_modules/pg-types/test/index.js create mode 100644 node_modules/pg-types/test/types.js create mode 100644 node_modules/pg/LICENSE create mode 100644 node_modules/pg/README.md create mode 100644 node_modules/pg/lib/client.js create mode 100644 node_modules/pg/lib/connection-parameters.js create mode 100644 node_modules/pg/lib/connection.js create mode 100644 node_modules/pg/lib/defaults.js create mode 100644 node_modules/pg/lib/index.js create mode 100644 node_modules/pg/lib/native/client.js create mode 100644 node_modules/pg/lib/native/index.js create mode 100644 node_modules/pg/lib/native/query.js create mode 100644 node_modules/pg/lib/query.js create mode 100644 node_modules/pg/lib/result.js create mode 100644 node_modules/pg/lib/sasl.js create mode 100644 node_modules/pg/lib/type-overrides.js create mode 100644 node_modules/pg/lib/utils.js create mode 100644 node_modules/pg/package.json create mode 100644 node_modules/pgpass/.npmignore create mode 100644 node_modules/pgpass/README.md create mode 100644 node_modules/pgpass/lib/helper.js create mode 100644 node_modules/pgpass/lib/index.js create mode 100644 node_modules/pgpass/package.json create mode 100644 node_modules/postgres-array/index.d.ts create mode 100644 node_modules/postgres-array/index.js create mode 100644 node_modules/postgres-array/license create mode 100644 node_modules/postgres-array/package.json create mode 100644 node_modules/postgres-array/readme.md create mode 100644 
node_modules/postgres-bytea/index.js create mode 100644 node_modules/postgres-bytea/license create mode 100644 node_modules/postgres-bytea/package.json create mode 100644 node_modules/postgres-bytea/readme.md create mode 100644 node_modules/postgres-date/index.js create mode 100644 node_modules/postgres-date/license create mode 100644 node_modules/postgres-date/package.json create mode 100644 node_modules/postgres-date/readme.md create mode 100644 node_modules/postgres-interval/index.d.ts create mode 100644 node_modules/postgres-interval/index.js create mode 100644 node_modules/postgres-interval/license create mode 100644 node_modules/postgres-interval/package.json create mode 100644 node_modules/postgres-interval/readme.md create mode 100644 node_modules/semver/.npmignore create mode 100644 node_modules/semver/LICENSE create mode 100644 node_modules/semver/Makefile create mode 100644 node_modules/semver/README.md create mode 100755 node_modules/semver/bin/semver create mode 100644 node_modules/semver/foot.js.txt create mode 100644 node_modules/semver/head.js.txt create mode 100644 node_modules/semver/package.json create mode 100644 node_modules/semver/semver.browser.js create mode 100644 node_modules/semver/semver.browser.js.gz create mode 100644 node_modules/semver/semver.js create mode 100644 node_modules/semver/semver.min.js create mode 100644 node_modules/semver/semver.min.js.gz create mode 100644 node_modules/semver/test/amd.js create mode 100644 node_modules/semver/test/big-numbers.js create mode 100644 node_modules/semver/test/clean.js create mode 100644 node_modules/semver/test/gtr.js create mode 100644 node_modules/semver/test/index.js create mode 100644 node_modules/semver/test/ltr.js create mode 100644 node_modules/semver/test/major-minor-patch.js create mode 100644 node_modules/semver/test/no-module.js create mode 100644 node_modules/split/.npmignore create mode 100644 node_modules/split/.travis.yml create mode 100644 node_modules/split/LICENCE create mode 100644 node_modules/split/examples/pretty.js create mode 100644 node_modules/split/index.js create mode 100644 node_modules/split/package.json create mode 100644 node_modules/split/readme.markdown create mode 100644 node_modules/split/test/options.asynct.js create mode 100644 node_modules/split/test/partitioned_unicode.js create mode 100644 node_modules/split/test/split.asynct.js create mode 100644 node_modules/split/test/try_catch.asynct.js create mode 100644 node_modules/through/.travis.yml create mode 100644 node_modules/through/LICENSE.APACHE2 create mode 100644 node_modules/through/LICENSE.MIT create mode 100644 node_modules/through/index.js create mode 100644 node_modules/through/package.json create mode 100644 node_modules/through/readme.markdown create mode 100644 node_modules/through/test/async.js create mode 100644 node_modules/through/test/auto-destroy.js create mode 100644 node_modules/through/test/buffering.js create mode 100644 node_modules/through/test/end.js create mode 100644 node_modules/through/test/index.js create mode 100644 node_modules/xtend/.jshintrc create mode 100644 node_modules/xtend/LICENSE create mode 100644 node_modules/xtend/README.md create mode 100644 node_modules/xtend/immutable.js create mode 100644 node_modules/xtend/mutable.js create mode 100644 node_modules/xtend/package.json create mode 100644 node_modules/xtend/test.js create mode 100644 package-lock.json create mode 100644 package.json diff --git a/index.js b/index.js index 3907d3b5..a9a49aff 100644 --- a/index.js +++ b/index.js @@ 
-1,9 +1,11 @@ console.log("works!!", process.argv[2]); - +const operation = process.argv[2]; +const chore = process.argv[3]; +let choreStatus = false; const pg = require('pg'); const configs = { - user: 'akira', + user: 'aurelialim', host: '127.0.0.1', database: 'todo', port: 5432, @@ -11,26 +13,22 @@ const configs = { const client = new pg.Client(configs); -let queryDoneCallback = (err, result) => { - if (err) { - console.log("query error", err.message); - } else { - console.log("result", result.rows ); - } - client.end(); -}; - -let clientConnectionCallback = (err) => { - - if( err ){ - console.log( "error", err.message ); - } - let text = "INSERT INTO todo (name) VALUES ($1) RETURNING id"; - const values = ["hello"]; - - client.query(text, values, queryDoneCallback); -}; - -client.connect(clientConnectionCallback); +client.connect((error)=> { + if(error){ + console.log('ERROR AT CONNECT', error.message) + } else { + if(operation ==="add"){ + queryText = 'INSERT INTO items (name, done) VALUES ($1, $2) RETURNING id'; + values = [chore, choreStatus]; + client.query(queryText,values, (err, res) => { + if (err) { + console.log("query error", err.message); + } else { + console.log("result", res.rows); + } + }); + } + } +}) \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 120000 index 00000000..317eb293 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1 @@ +../semver/bin/semver \ No newline at end of file diff --git a/node_modules/buffer-writer/.travis.yml b/node_modules/buffer-writer/.travis.yml new file mode 100644 index 00000000..8e59bb39 --- /dev/null +++ b/node_modules/buffer-writer/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - 4 + - 6 + - 8 + - 10 + - 11 diff --git a/node_modules/buffer-writer/LICENSE b/node_modules/buffer-writer/LICENSE new file mode 100644 index 00000000..72dc60d8 --- /dev/null +++ b/node_modules/buffer-writer/LICENSE @@ -0,0 +1,19 @@ +The MIT License (MIT) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/buffer-writer/README.md b/node_modules/buffer-writer/README.md new file mode 100644 index 00000000..81eccc05 --- /dev/null +++ b/node_modules/buffer-writer/README.md @@ -0,0 +1,48 @@ +# buffer-writer + +[![Build Status](https://secure.travis-ci.org/brianc/node-buffer-writer.png?branch=master)](http://travis-ci.org/brianc/node-buffer-writer) + +Fast & efficient buffer writer used to keep memory usage low by internally recycling a single large buffer. 
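A minimal usage sketch of the API documented below (this snippet is an illustration pieced together from the method descriptions and the writer tests included later in this patch, not part of the upstream README):

```js
var Writer = require('buffer-writer');

var writer = new Writer();
writer.addInt16(3);       // two big-endian bytes: 0x00 0x03
writer.addCString('hi');  // 'hi' plus a trailing null byte

// flush(code) prepends the 5-byte header (1 code byte + a big-endian Int32
// length that counts itself plus the payload) and resets the writer.
var packet = writer.flush(0x50);
// packet: <Buffer 50 00 00 00 09 00 03 68 69 00>
```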
+ +Used as the binary protocol writer in [node-postgres](https://github.com/brianc/node-postgres) + +Since postgres requires big endian encoding, this only writes big endian numbers for now, but can & probably will easily be extended to write little endian as well. + +I'll admit this has a few postgres specific things I might need to take out in the future, such as `addHeader` + +## api + +`var writer = new (require('buffer-writer')());` + +### writer.addInt32(num) + +Writes a 4-byte big endian binary encoded number to the end of the buffer. + +### writer.addInt16(num) + +Writes a 2-byte big endian binary encoded number to the end of the buffer. + +### writer.addCString(string) + +Writes a string to the buffer `utf8` encoded and adds a null character (`\0`) at the end. + +### var buffer = writer.addHeader(char) + +Writes the 5 byte PostgreSQL required header to the beginning of the buffer. (1 byte for character, 1 BE Int32 for length of the buffer) + +### var buffer = writer.join() + +Collects all data in the writer and joins it into a single, new buffer. + +### var buffer = writer.flush(char) + +Writes the 5 byte postgres required message header, collects all data in the writer and joins it into a single, new buffer, and then resets the writer. + +## thoughts + +This is kind of node-postgres specific. If you're interested in using this for a more general purpose thing, lemme know. +I would love to work with you on getting this more reusable for your needs. + +## license + +MIT diff --git a/node_modules/buffer-writer/index.js b/node_modules/buffer-writer/index.js new file mode 100644 index 00000000..f3c119e0 --- /dev/null +++ b/node_modules/buffer-writer/index.js @@ -0,0 +1,129 @@ +//binary data writer tuned for creating +//postgres message packets as effeciently as possible by reusing the +//same buffer to avoid memcpy and limit memory allocations +var Writer = module.exports = function (size) { + this.size = size || 1024; + this.buffer = Buffer.alloc(this.size + 5); + this.offset = 5; + this.headerPosition = 0; +}; + +//resizes internal buffer if not enough size left +Writer.prototype._ensure = function (size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.alloc(newSize); + oldBuffer.copy(this.buffer); + } +}; + +Writer.prototype.addInt32 = function (num) { + this._ensure(4); + this.buffer[this.offset++] = (num >>> 24 & 0xFF); + this.buffer[this.offset++] = (num >>> 16 & 0xFF); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; +}; + +Writer.prototype.addInt16 = function (num) { + this._ensure(2); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; +}; + +//for versions of node requiring 'length' as 3rd argument to buffer.write +var writeString = function (buffer, string, offset, len) { + buffer.write(string, offset, len); +}; + +//overwrite function for older versions of node +if (Buffer.prototype.write.length === 3) { + writeString = function (buffer, string, offset, len) { + buffer.write(string, offset); + }; +} + +Writer.prototype.addCString = function (string) { + //just write a 0 for empty or null strings + if (!string) { + this._ensure(1); + } else { + var len = 
Buffer.byteLength(string); + this._ensure(len + 1); //+1 for null terminator + writeString(this.buffer, string, this.offset, len); + this.offset += len; + } + + this.buffer[this.offset++] = 0; // null terminator + return this; +}; + +Writer.prototype.addChar = function (c) { + this._ensure(1); + writeString(this.buffer, c, this.offset, 1); + this.offset++; + return this; +}; + +Writer.prototype.addString = function (string) { + string = string || ""; + var len = Buffer.byteLength(string); + this._ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; +}; + +Writer.prototype.getByteLength = function () { + return this.offset - 5; +}; + +Writer.prototype.add = function (otherBuffer) { + this._ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; +}; + +Writer.prototype.clear = function () { + this.offset = 5; + this.headerPosition = 0; + this.lastEnd = 0; +}; + +//appends a header block to all the written data since the last +//subsequent header or to the beginning if there is only one data block +Writer.prototype.addHeader = function (code, last) { + var origOffset = this.offset; + this.offset = this.headerPosition; + this.buffer[this.offset++] = code; + //length is everything in this packet minus the code + this.addInt32(origOffset - (this.headerPosition + 1)); + //set next header position + this.headerPosition = origOffset; + //make space for next header + this.offset = origOffset; + if (!last) { + this._ensure(5); + this.offset += 5; + } +}; + +Writer.prototype.join = function (code) { + if (code) { + this.addHeader(code, true); + } + return this.buffer.slice(code ? 0 : 5, this.offset); +}; + +Writer.prototype.flush = function (code) { + var result = this.join(code); + this.clear(); + return result; +}; diff --git a/node_modules/buffer-writer/package.json b/node_modules/buffer-writer/package.json new file mode 100644 index 00000000..e51a3482 --- /dev/null +++ b/node_modules/buffer-writer/package.json @@ -0,0 +1,57 @@ +{ + "_from": "buffer-writer@2.0.0", + "_id": "buffer-writer@2.0.0", + "_inBundle": false, + "_integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "_location": "/buffer-writer", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "buffer-writer@2.0.0", + "name": "buffer-writer", + "escapedName": "buffer-writer", + "rawSpec": "2.0.0", + "saveSpec": null, + "fetchSpec": "2.0.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "_shasum": "ce7eb81a38f7829db09c873f2fbb792c0c98ec04", + "_spec": "buffer-writer@2.0.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-buffer-writer/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "a fast, efficient buffer writer", + "devDependencies": { + "mocha": "5.2.0" + }, + "engines": { + "node": ">=4" + }, + "homepage": "https://github.com/brianc/node-buffer-writer#readme", + "keywords": [ + "buffer", + "writer", + "builder" + ], + "license": "MIT", + "main": "index.js", + "name": "buffer-writer", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-buffer-writer.git" + }, + "scripts": { + "test": "mocha --throw-deprecation" + }, + "version": "2.0.0" +} diff --git a/node_modules/buffer-writer/test/mocha.opts b/node_modules/buffer-writer/test/mocha.opts new file mode 100644 index 00000000..5efaf24d --- /dev/null +++ b/node_modules/buffer-writer/test/mocha.opts @@ -0,0 +1 @@ +--ui tdd diff --git a/node_modules/buffer-writer/test/writer-tests.js b/node_modules/buffer-writer/test/writer-tests.js new file mode 100644 index 00000000..ded91c86 --- /dev/null +++ b/node_modules/buffer-writer/test/writer-tests.js @@ -0,0 +1,218 @@ +var Writer = require(__dirname + "/../"); + +var assert = require('assert'); +var util = require('util'); + +assert.equalBuffers = function (actual, expected) { + var spit = function (actual, expected) { + console.log(""); + console.log("actual " + util.inspect(actual)); + console.log("expect " + util.inspect(expected)); + console.log(""); + }; + if (actual.length != expected.length) { + spit(actual, expected); + assert.strictEqual(actual.length, expected.length); + } + for (var i = 0; i < actual.length; i++) { + if (actual[i] != expected[i]) { + spit(actual, expected); + } + assert.strictEqual(actual[i], expected[i]); + } +}; + +suite('adding int32', function () { + var testAddingInt32 = function (int, expectedBuffer) { + test('writes ' + int, function () { + var subject = new Writer(); + var result = subject.addInt32(int).join(); + assert.equalBuffers(result, expectedBuffer); + }); + }; + + testAddingInt32(0, [0, 0, 0, 0]); + testAddingInt32(1, [0, 0, 0, 1]); + testAddingInt32(256, [0, 0, 1, 0]); + test('writes largest int32', function () { + //todo need to find largest int32 when I have internet access + return false; + }); + + test('writing multiple int32s', function () { + var subject = new Writer(); + var result = subject.addInt32(1).addInt32(10).addInt32(0).join(); + assert.equalBuffers(result, [0, 0, 0, 1, 0, 0, 0, 0x0a, 0, 0, 0, 0]); + }); + + suite('having to resize the buffer', function () { + test('after resize correct result returned', function () { + var subject = new Writer(10); + subject.addInt32(1).addInt32(1).addInt32(1); + assert.equalBuffers(subject.join(), [0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1]); + }); + }); +}); + +suite('int16', function () { + test('writes 0', function () { + var subject = new Writer(); + var result = subject.addInt16(0).join(); + assert.equalBuffers(result, [0, 0]); + }); + + test('writes 400', function () { + var subject = new Writer(); + var result = subject.addInt16(400).join(); + assert.equalBuffers(result, [1, 0x90]); + }); + + test('writes many', function () { + var subject = new Writer(); + var result = subject.addInt16(0).addInt16(1).addInt16(2).join(); + assert.equalBuffers(result, [0, 0, 0, 1, 0, 2]); + }); + + test('resizes if internal buffer fills up', function () { + var subject = new Writer(3); + var result = subject.addInt16(2).addInt16(3).join(); + assert.equalBuffers(result, [0, 2, 0, 3]); + }); + +}); + 
+suite('cString', function () { + test('writes empty cstring', function () { + var subject = new Writer(); + var result = subject.addCString().join(); + assert.equalBuffers(result, [0]); + }); + + test('writes two empty cstrings', function () { + var subject = new Writer(); + var result = subject.addCString("").addCString("").join(); + assert.equalBuffers(result, [0, 0]); + }); + + + test('writes non-empty cstring', function () { + var subject = new Writer(); + var result = subject.addCString("!!!").join(); + assert.equalBuffers(result, [33, 33, 33, 0]); + }); + + test('resizes if reached end', function () { + var subject = new Writer(3); + var result = subject.addCString("!!!").join(); + assert.equalBuffers(result, [33, 33, 33, 0]); + }); + + test('writes multiple cstrings', function () { + var subject = new Writer(); + var result = subject.addCString("!").addCString("!").join(); + assert.equalBuffers(result, [33, 0, 33, 0]); + }); + +}); + +test('writes char', function () { + var subject = new Writer(2); + var result = subject.addChar('a').addChar('b').addChar('c').join(); + assert.equalBuffers(result, [0x61, 0x62, 0x63]); +}); + +test('gets correct byte length', function () { + var subject = new Writer(5); + assert.strictEqual(subject.getByteLength(), 0); + subject.addInt32(0); + assert.strictEqual(subject.getByteLength(), 4); + subject.addCString("!"); + assert.strictEqual(subject.getByteLength(), 6); +}); + +test('can add arbitrary buffer to the end', function () { + var subject = new Writer(4); + subject.addCString("!!!") + var result = subject.add(Buffer.from("@@@")).join(); + assert.equalBuffers(result, [33, 33, 33, 0, 0x40, 0x40, 0x40]); +}); + +suite('can write normal string', function () { + var subject = new Writer(4); + var result = subject.addString("!").join(); + assert.equalBuffers(result, [33]); + test('can write cString too', function () { + var result = subject.addCString("!").join(); + assert.equalBuffers(result, [33, 33, 0]); + }); + test('can resize', function () { + var result = subject.addString("!!").join(); + assert.equalBuffers(result, [33, 33, 0, 33, 33]); + }); +}); + + +suite('clearing', function () { + var subject = new Writer(); + subject.addCString("@!!#!#"); + subject.addInt32(10401); + test('clears', function () { + subject.clear(); + assert.equalBuffers(subject.join(), []); + }); + test('writing more', function () { + var joinedResult = subject.addCString("!").addInt32(9).addInt16(2).join(); + assert.equalBuffers(joinedResult, [33, 0, 0, 0, 0, 9, 0, 2]); + }); + test('returns result', function () { + var flushedResult = subject.flush(); + assert.equalBuffers(flushedResult, [33, 0, 0, 0, 0, 9, 0, 2]) + }); + test('clears the writer', function () { + assert.equalBuffers(subject.join(), []) + assert.equalBuffers(subject.flush(), []) + }); +}); + +test("resizing to much larger", function () { + var subject = new Writer(2); + var string = "!!!!!!!!"; + var result = subject.addCString(string).flush(); + assert.equalBuffers(result, [33, 33, 33, 33, 33, 33, 33, 33, 0]); +}); + +suite("flush", function () { + test('added as a hex code to a full writer', function () { + var subject = new Writer(2); + var result = subject.addCString("!").flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); + }); + + test('added as a hex code to a non-full writer', function () { + var subject = new Writer(10).addCString("!"); + var joinedResult = subject.join(0x50); + var result = subject.flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0]); + }); 
+ + test('added as a hex code to a buffer which requires resizing', function () { + var result = new Writer(2).addCString("!!!!!!!!").flush(0x50); + assert.equalBuffers(result, [0x50, 0, 0, 0, 0x0D, 33, 33, 33, 33, 33, 33, 33, 33, 0]); + }); +}); + +suite("header", function () { + test('adding two packets with headers', function () { + var subject = new Writer(10).addCString("!"); + subject.addHeader(0x50); + subject.addCString("!!"); + subject.addHeader(0x40); + subject.addCString("!"); + var result = subject.flush(0x10); + assert.equalBuffers(result, [0x50, 0, 0, 0, 6, 33, 0, 0x40, 0, 0, 0, 7, 33, 33, 0, 0x10, 0, 0, 0, 6, 33, 0]); + }); +}); + + + + diff --git a/node_modules/packet-reader/.travis.yml b/node_modules/packet-reader/.travis.yml new file mode 100644 index 00000000..ac2e9eeb --- /dev/null +++ b/node_modules/packet-reader/.travis.yml @@ -0,0 +1,8 @@ +language: node_js + +node_js: "10" +matrix: + include: + - node_js: "4" + - node_js: "6" + - node_js: "8" diff --git a/node_modules/packet-reader/README.md b/node_modules/packet-reader/README.md new file mode 100644 index 00000000..5ae3ef85 --- /dev/null +++ b/node_modules/packet-reader/README.md @@ -0,0 +1,87 @@ +node-packet-reader +================== + +Handy little well tested module for reading length-prefixed binary packets. + +Since buffers come off a socket in randomly sized chunks you can't expect them to cleanly +break on packet boundaries. This module allows you to push buffers in and read +full packets out the other side, so you can get to parsing right away and not have +to manage concatenating partial buffers and searching through them for packets. + +## install + +` $ npm install packet-reader ` + +## example + +```js +var Reader = require('packet-reader') + +var reader = new Reader() +//assuming you have a socket emitting `data` events +socket.on('data', function(buffer) { + reader.addChunk(buffer) + var packet = reader.read() + while(packet) { + //do something with fully parsed packet + } +}) +``` + + +here's a more full featured example: + +let's assume our "packet" for our protocol is 32-bit Big Endian length-prefixed strings +so a "hello world" packet would look something like [length, string] +`[0, 0, 0 0x0B, h, e, l, l, o, w, o, r, l, d]` + +```js +var Transform = require('stream').Transform +var Reader = require('packet-reader') +var reader = new Reader() +var parser = new Transform() +parser._transform = function(chunk, encoding, cb) { + reader.addChunk(chunk) + var packet = reader.read() + while(packet) { + this.push(packet.toString('utf8')) + packet = reader.read() + } + cb() +} + +var server = net.createServer(function(socket) { + socket.pipe(parser).pipe(stdout) +}) + +``` + +There are a few config options for setting optional pre-length padding byte. Read the tests for details. + +## License + +MIT + +Copyright 2015 Brian M. Carlson +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
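The packet-reader README above defers the header/padding options to the tests; purely as an illustration (mirroring the PostgreSQL-style configuration exercised in this module's test suite later in the patch), those options look like:

```js
var Reader = require('packet-reader')

// PostgreSQL-style framing: a 1-byte type header, then a length word that
// counts itself, so the decoded length is adjusted by -4.
var reader = new Reader({
  headerSize: 1,
  lengthPadding: -4
})

reader.addChunk(Buffer.from([1, 0, 0, 0, 8, 0, 0, 2, 0]))
var body = reader.read()  // 4-byte body <Buffer 00 00 02 00>; reader.header === 1
```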
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/packet-reader/index.js b/node_modules/packet-reader/index.js new file mode 100644 index 00000000..5e97e217 --- /dev/null +++ b/node_modules/packet-reader/index.js @@ -0,0 +1,65 @@ +var assert = require('assert') + +var Reader = module.exports = function(options) { + //TODO - remove for version 1.0 + if(typeof options == 'number') { + options = { headerSize: options } + } + options = options || {} + this.offset = 0 + this.lastChunk = false + this.chunk = null + this.chunkLength = 0 + this.headerSize = options.headerSize || 0 + this.lengthPadding = options.lengthPadding || 0 + this.header = null + assert(this.headerSize < 2, 'pre-length header of more than 1 byte length not currently supported') +} + +Reader.prototype.addChunk = function(chunk) { + if (!this.chunk || this.offset === this.chunkLength) { + this.chunk = chunk + this.chunkLength = chunk.length + this.offset = 0 + return + } + + var newChunkLength = chunk.length + var newLength = this.chunkLength + newChunkLength + + if (newLength > this.chunk.length) { + var newBufferLength = this.chunk.length * 2 + while (newLength >= newBufferLength) { + newBufferLength *= 2 + } + var newBuffer = Buffer.alloc(newBufferLength) + this.chunk.copy(newBuffer) + this.chunk = newBuffer + } + chunk.copy(this.chunk, this.chunkLength) + this.chunkLength = newLength +} + +Reader.prototype.read = function() { + if(this.chunkLength < (this.headerSize + 4 + this.offset)) { + return false + } + + if(this.headerSize) { + this.header = this.chunk[this.offset] + } + + //read length of next item + var length = this.chunk.readUInt32BE(this.offset + this.headerSize) + this.lengthPadding + + //next item spans more chunks than we have + var remaining = this.chunkLength - (this.offset + 4 + this.headerSize) + if(length > remaining) { + return false + } + + this.offset += (this.headerSize + 4) + var result = this.chunk.slice(this.offset, this.offset + length) + this.offset += length + return result +} diff --git a/node_modules/packet-reader/package.json b/node_modules/packet-reader/package.json new file mode 100644 index 00000000..1dbafa9b --- /dev/null +++ b/node_modules/packet-reader/package.json @@ -0,0 +1,52 @@ +{ + "_from": "packet-reader@1.0.0", + "_id": "packet-reader@1.0.0", + "_inBundle": false, + "_integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==", + "_location": "/packet-reader", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "packet-reader@1.0.0", + "name": "packet-reader", + "escapedName": "packet-reader", + "rawSpec": "1.0.0", + "saveSpec": null, + "fetchSpec": "1.0.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "_shasum": "9238e5480dedabacfe1fe3f2771063f164157d74", + "_spec": "packet-reader@1.0.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-packet-reader/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Read binary packets...", + "devDependencies": { + "mocha": "~1.21.5" + }, + "directories": { + "test": "test" + }, + "homepage": "https://github.com/brianc/node-packet-reader", + "license": "MIT", + "main": "index.js", + "name": "packet-reader", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-packet-reader.git" + }, + "scripts": { + "test": "mocha" + }, + "version": "1.0.0" +} diff --git a/node_modules/packet-reader/test/index.js b/node_modules/packet-reader/test/index.js new file mode 100644 index 00000000..0e2eedbf --- /dev/null +++ b/node_modules/packet-reader/test/index.js @@ -0,0 +1,148 @@ +var assert = require('assert') +var Reader = require('../') +describe('packet-reader', function() { + beforeEach(function() { + this.reader = new Reader(1) + }) + + it('reads perfect 1 length buffer', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1])) + var result = this.reader.read() + assert.equal(result.length, 1) + assert.equal(result[0], 1) + assert.strictEqual(false, this.reader.read()) + }) + + it('reads perfect longer buffer', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 4, 1, 2, 3, 4])) + var result = this.reader.read() + assert.equal(result.length, 4) + assert.strictEqual(false, this.reader.read()) + }) + + it('reads two parts', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1])) + var result = this.reader.read() + assert.strictEqual(false, result) + this.reader.addChunk(Buffer.from([2])) + var result = this.reader.read() + assert.equal(result.length, 1, 'should return 1 length buffer') + assert.equal(result[0], 2) + assert.strictEqual(this.reader.read(), false) + }) + + it('reads multi-part', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 16])) + assert.equal(false, this.reader.read()) + this.reader.addChunk(Buffer.from([1, 2, 3, 4, 5, 6, 7, 8])) + assert.equal(false, this.reader.read()) + this.reader.addChunk(Buffer.from([9, 10, 11, 12, 13, 14, 15, 16])) + var result = this.reader.read() + assert.equal(result.length, 16) + }) + + it('resets internal buffer at end of packet', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 16])) + this.reader.addChunk(Buffer.from([1, 2, 3, 4, 5, 6, 7, 8])) + this.reader.addChunk(Buffer.from([9, 10, 11, 12, 13, 14, 15, 16])) + var result = this.reader.read() + assert.equal(result.length, 16) + + var newChunk = Buffer.from([0, 0, 0, 0, 16]) + this.reader.addChunk(newChunk) + assert.equal(this.reader.offset, 0, 'should have been reset to 0.') + assert.strictEqual(this.reader.chunk, newChunk) + }) + + it('reads multiple messages from single chunk', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 2, 1, 2])) + var result = this.reader.read() + assert.equal(result.length, 1, 'should have 1 length buffer') + assert.equal(result[0], 1) + var result = this.reader.read() + assert.equal(result.length, 2, 'should have 2 length buffer but was ' + result.length) + assert.equal(result[0], 1) + assert.equal(result[1], 2) + assert.strictEqual(false, this.reader.read()) + }) + + it('reads 1 and a split', function() { + this.reader.addChunk(Buffer.from([0, 0, 0, 0, 1, 1, 0, 0]))//, 0, 0, 2, 1, 2])) + var result = this.reader.read() + assert.equal(result.length, 1, 'should have 1 length buffer') + assert.equal(result[0], 1) + var result = this.reader.read() + 
assert.strictEqual(result, false) + + this.reader.addChunk(Buffer.from([0, 0, 2, 1, 2])) + var result = this.reader.read() + assert.equal(result.length, 2, 'should have 2 length buffer but was ' + result.length) + assert.equal(result[0], 1) + assert.equal(result[1], 2) + assert.strictEqual(false, this.reader.read()) + }) +}) + +describe('variable length header', function() { + beforeEach(function() { + this.reader = new Reader() + }) + + it('reads double message buffers', function() { + this.reader.addChunk(Buffer.from([ + 0, 0, 0, 1, 1, + 0, 0, 0, 2, 1, 2])) + var result = this.reader.read() + assert(result) + assert.equal(result.length, 1) + assert.equal(result[0], 1) + result = this.reader.read() + assert(result) + assert.equal(result.length, 2) + assert.equal(result[0], 1) + assert.equal(result[1], 2) + assert.strictEqual(this.reader.read(), false) + }) +}) + +describe('1 length code', function() { + beforeEach(function() { + this.reader = new Reader(1) + }) + + it('reads code', function() { + this.reader.addChunk(Buffer.from([9, 0, 0, 0, 1, 1])) + var result = this.reader.read() + assert(result) + assert.equal(this.reader.header, 9) + assert.equal(result.length, 1) + assert.equal(result[0], 1) + }) + + it('is set on uncompleted read', function() { + assert.equal(this.reader.header, null) + this.reader.addChunk(Buffer.from([2, 0, 0, 0, 1])) + assert.strictEqual(this.reader.read(), false) + assert.equal(this.reader.header, 2) + }) +}) + +describe('postgres style packet', function() { + beforeEach(function() { + this.reader = new Reader({ + headerSize: 1, + lengthPadding: -4 + }) + }) + + it('reads with padded length', function() { + this.reader.addChunk(Buffer.from([1, 0, 0, 0, 8, 0, 0, 2, 0])) + var result = this.reader.read() + assert(result) + assert.equal(result.length, 4) + assert.equal(result[0], 0) + assert.equal(result[1], 0) + assert.equal(result[2], 2) + assert.equal(result[3], 0) + }) +}) diff --git a/node_modules/pg-connection-string/LICENSE b/node_modules/pg-connection-string/LICENSE new file mode 100644 index 00000000..b068a6cb --- /dev/null +++ b/node_modules/pg-connection-string/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Iced Development + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/pg-connection-string/README.md b/node_modules/pg-connection-string/README.md new file mode 100644 index 00000000..d5b45ab9 --- /dev/null +++ b/node_modules/pg-connection-string/README.md @@ -0,0 +1,72 @@ +pg-connection-string +==================== + +[![NPM](https://nodei.co/npm/pg-connection-string.png?compact=true)](https://nodei.co/npm/pg-connection-string/) + +[![Build Status](https://travis-ci.org/iceddev/pg-connection-string.svg?branch=master)](https://travis-ci.org/iceddev/pg-connection-string) +[![Coverage Status](https://coveralls.io/repos/github/iceddev/pg-connection-string/badge.svg?branch=master)](https://coveralls.io/github/iceddev/pg-connection-string?branch=master) + +Functions for dealing with a PostgresSQL connection string + +`parse` method taken from [node-postgres](https://github.com/brianc/node-postgres.git) +Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) +MIT License + +## Usage + +```js +var parse = require('pg-connection-string').parse; + +var config = parse('postgres://someuser:somepassword@somehost:381/somedatabase') +``` + +The resulting config contains a subset of the following properties: + +* `host` - Postgres server hostname or, for UNIX doamain sockets, the socket filename +* `port` - port on which to connect +* `user` - User with which to authenticate to the server +* `password` - Corresponding password +* `database` - Database name within the server +* `client_encoding` - string encoding the client will use +* `ssl`, either a boolean or an object with properties + * `cert` + * `key` + * `ca` +* any other query parameters (for example, `application_name`) are preserved intact. + +## Connection Strings + +The short summary of acceptable URLs is: + + * `socket:?` - UNIX domain socket + * `postgres://:@:/?` - TCP connection + +But see below for more details. + +### UNIX Domain Sockets + +When user and password are not given, the socket path follows `socket:`, as in `socket:/var/run/pgsql`. +This form can be shortened to just a path: `/var/run/pgsql`. + +When user and password are given, they are included in the typical URL positions, with an empty `host`, as in `socket://user:pass@/var/run/pgsql`. + +Query parameters follow a `?` character, including the following special query parameters: + + * `db=` - sets the database name (urlencoded) + * `encoding=` - sets the `client_encoding` property + +### TCP Connections + +TCP connections to the Postgres server are indicated with `pg:` or `postgres:` schemes (in fact, any scheme but `socket:` is accepted). +If username and password are included, they should be urlencoded. +The database name, however, should *not* be urlencoded. + +Query parameters follow a `?` character, including the following special query parameters: + * `host=` - sets `host` property, overriding the URL's host + * `encoding=` - sets the `client_encoding` property + * `ssl=1`, `ssl=true`, `ssl=0`, `ssl=false` - sets `ssl` to true or false, accordingly + * `sslcert=` - reads data from the given file and includes the result as `ssl.cert` + * `sslkey=` - reads data from the given file and includes the result as `ssl.key` + * `sslrootcert=` - reads data from the given file and includes the result as `ssl.ca` + +A bare relative URL, such as `salesdata`, will indicate a database name while leaving other properties empty. 
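As a concrete illustration of the URL forms and query parameters described above (the hostnames and values here are invented for the example, and the result comments show only the key fields):

```js
var parse = require('pg-connection-string').parse;

// TCP URL with extra query parameters.
parse('postgres://someuser:somepassword@db.example.com:5432/todo?application_name=cli-todo&ssl=1');
// -> user: 'someuser', password: 'somepassword', host: 'db.example.com',
//    port: '5432', database: 'todo', application_name: 'cli-todo', ssl: true

// UNIX domain socket with the special query parameters.
parse('socket:/var/run/postgresql?db=todo&encoding=utf8');
// -> host: '/var/run/postgresql', database: 'todo', client_encoding: 'utf8'
```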
diff --git a/node_modules/pg-connection-string/index.d.ts b/node_modules/pg-connection-string/index.d.ts new file mode 100644 index 00000000..3081270e --- /dev/null +++ b/node_modules/pg-connection-string/index.d.ts @@ -0,0 +1,15 @@ +export function parse(connectionString: string): ConnectionOptions + +export interface ConnectionOptions { + host: string | null + password?: string + user?: string + port?: string | null + database: string | null | undefined + client_encoding?: string + ssl?: boolean | string + + application_name?: string + fallback_application_name?: string + options?: string +} diff --git a/node_modules/pg-connection-string/index.js b/node_modules/pg-connection-string/index.js new file mode 100644 index 00000000..65951c37 --- /dev/null +++ b/node_modules/pg-connection-string/index.js @@ -0,0 +1,89 @@ +'use strict' + +var url = require('url') +var fs = require('fs') + +//Parse method copied from https://github.com/brianc/node-postgres +//Copyright (c) 2010-2014 Brian Carlson (brian.m.carlson@gmail.com) +//MIT License + +//parses a connection string +function parse(str) { + //unix socket + if (str.charAt(0) === '/') { + var config = str.split(' ') + return { host: config[0], database: config[1] } + } + + // url parse expects spaces encoded as %20 + var result = url.parse( + / |%[^a-f0-9]|%[a-f0-9][^a-f0-9]/i.test(str) ? encodeURI(str).replace(/\%25(\d\d)/g, '%$1') : str, + true + ) + var config = result.query + for (var k in config) { + if (Array.isArray(config[k])) { + config[k] = config[k][config[k].length - 1] + } + } + + var auth = (result.auth || ':').split(':') + config.user = auth[0] + config.password = auth.splice(1).join(':') + + config.port = result.port + if (result.protocol == 'socket:') { + config.host = decodeURI(result.pathname) + config.database = result.query.db + config.client_encoding = result.query.encoding + return config + } + if (!config.host) { + // Only set the host if there is no equivalent query param. + config.host = result.hostname + } + + // If the host is missing it might be a URL-encoded path to a socket. + var pathname = result.pathname + if (!config.host && pathname && /^%2f/i.test(pathname)) { + var pathnameSplit = pathname.split('/') + config.host = decodeURIComponent(pathnameSplit[0]) + pathname = pathnameSplit.splice(1).join('/') + } + // result.pathname is not always guaranteed to have a '/' prefix (e.g. relative urls) + // only strip the slash if it is present. 
+ if (pathname && pathname.charAt(0) === '/') { + pathname = pathname.slice(1) || null + } + config.database = pathname && decodeURI(pathname) + + if (config.ssl === 'true' || config.ssl === '1') { + config.ssl = true + } + + if (config.ssl === '0') { + config.ssl = false + } + + if (config.sslcert || config.sslkey || config.sslrootcert) { + config.ssl = {} + } + + if (config.sslcert) { + config.ssl.cert = fs.readFileSync(config.sslcert).toString() + } + + if (config.sslkey) { + config.ssl.key = fs.readFileSync(config.sslkey).toString() + } + + if (config.sslrootcert) { + config.ssl.ca = fs.readFileSync(config.sslrootcert).toString() + } + + return config +} + +module.exports = parse + +parse.parse = parse diff --git a/node_modules/pg-connection-string/package.json b/node_modules/pg-connection-string/package.json new file mode 100644 index 00000000..0b81eed5 --- /dev/null +++ b/node_modules/pg-connection-string/package.json @@ -0,0 +1,67 @@ +{ + "_from": "pg-connection-string@^2.3.0", + "_id": "pg-connection-string@2.3.0", + "_inBundle": false, + "_integrity": "sha512-ukMTJXLI7/hZIwTW7hGMZJ0Lj0S2XQBCJ4Shv4y1zgQ/vqVea+FLhzywvPj0ujSuofu+yA4MYHGZPTsgjBgJ+w==", + "_location": "/pg-connection-string", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-connection-string@^2.3.0", + "name": "pg-connection-string", + "escapedName": "pg-connection-string", + "rawSpec": "^2.3.0", + "saveSpec": null, + "fetchSpec": "^2.3.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.3.0.tgz", + "_shasum": "c13fcb84c298d0bfa9ba12b40dd6c23d946f55d6", + "_spec": "pg-connection-string@^2.3.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "author": { + "name": "Blaine Bublitz", + "email": "blaine@iceddev.com", + "url": "http://iceddev.com/" + }, + "bugs": { + "url": "https://github.com/iceddev/pg-connection-string/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Functions for dealing with a PostgresSQL connection string", + "devDependencies": { + "chai": "^4.1.1", + "coveralls": "^3.0.4", + "istanbul": "^0.4.5", + "mocha": "^7.1.2" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "homepage": "https://github.com/iceddev/pg-connection-string", + "keywords": [ + "pg", + "connection", + "string", + "parse" + ], + "license": "MIT", + "main": "./index.js", + "name": "pg-connection-string", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git" + }, + "scripts": { + "check-coverage": "istanbul check-coverage --statements 100 --branches 100 --lines 100 --functions 100", + "coveralls": "cat ./coverage/lcov.info | ./node_modules/.bin/coveralls", + "test": "istanbul cover _mocha && npm run check-coverage" + }, + "types": "./index.d.ts", + "version": "2.3.0" +} diff --git a/node_modules/pg-int8/LICENSE b/node_modules/pg-int8/LICENSE new file mode 100644 index 00000000..c56c9731 --- /dev/null +++ b/node_modules/pg-int8/LICENSE @@ -0,0 +1,13 @@ +Copyright © 2017, Charmander <~@charmander.me> + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/pg-int8/README.md b/node_modules/pg-int8/README.md new file mode 100644 index 00000000..ef2e6084 --- /dev/null +++ b/node_modules/pg-int8/README.md @@ -0,0 +1,16 @@ +[![Build status][ci image]][ci] + +64-bit big-endian signed integer-to-string conversion designed for [pg][]. + +```js +const readInt8 = require('pg-int8'); + +readInt8(Buffer.from([0, 1, 2, 3, 4, 5, 6, 7])) +// '283686952306183' +``` + + + [pg]: https://github.com/brianc/node-postgres + + [ci]: https://travis-ci.org/charmander/pg-int8 + [ci image]: https://api.travis-ci.org/charmander/pg-int8.svg diff --git a/node_modules/pg-int8/index.js b/node_modules/pg-int8/index.js new file mode 100644 index 00000000..db779750 --- /dev/null +++ b/node_modules/pg-int8/index.js @@ -0,0 +1,100 @@ +'use strict'; + +// selected so (BASE - 1) * 0x100000000 + 0xffffffff is a safe integer +var BASE = 1000000; + +function readInt8(buffer) { + var high = buffer.readInt32BE(0); + var low = buffer.readUInt32BE(4); + var sign = ''; + + if (high < 0) { + high = ~high + (low === 0); + low = (~low + 1) >>> 0; + sign = '-'; + } + + var result = ''; + var carry; + var t; + var digits; + var pad; + var l; + var i; + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + high = high / BASE >>> 0; + + t = 0x100000000 * carry + low; + low = t / BASE >>> 0; + digits = '' + (t - BASE * low); + + if (low === 0 && high === 0) { + return sign + digits + result; + } + + pad = ''; + l = 6 - digits.length; + + for (i = 0; i < l; i++) { + pad += '0'; + } + + result = pad + digits + result; + } + + { + carry = high % BASE; + t = 0x100000000 * carry + low; + digits = '' + t % BASE; + + return sign + digits + result; + } +} + +module.exports = readInt8; diff --git a/node_modules/pg-int8/package.json b/node_modules/pg-int8/package.json new file mode 100644 index 00000000..1ab2504a --- /dev/null +++ b/node_modules/pg-int8/package.json @@ -0,0 +1,52 @@ +{ + "_from": "pg-int8@1.0.1", + "_id": "pg-int8@1.0.1", + "_inBundle": false, + "_integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "_location": "/pg-int8", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "pg-int8@1.0.1", + "name": "pg-int8", + "escapedName": "pg-int8", + "rawSpec": "1.0.1", + "saveSpec": null, + "fetchSpec": "1.0.1" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "_shasum": "943bd463bf5b71b4170115f80f8efc9a0c0eb78c", + "_spec": 
"pg-int8@1.0.1", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg-types", + "bugs": { + "url": "https://github.com/charmander/pg-int8/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "64-bit big-endian signed integer-to-string conversion", + "devDependencies": { + "@charmander/eslint-config-base": "1.0.2", + "tap": "10.7.3" + }, + "engines": { + "node": ">=4.0.0" + }, + "files": [ + "index.js" + ], + "homepage": "https://github.com/charmander/pg-int8#readme", + "license": "ISC", + "name": "pg-int8", + "repository": { + "type": "git", + "url": "git+https://github.com/charmander/pg-int8.git" + }, + "scripts": { + "test": "tap test" + }, + "version": "1.0.1" +} diff --git a/node_modules/pg-pool/LICENSE b/node_modules/pg-pool/LICENSE new file mode 100644 index 00000000..4e905814 --- /dev/null +++ b/node_modules/pg-pool/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/pg-pool/README.md b/node_modules/pg-pool/README.md new file mode 100644 index 00000000..f1c81ae5 --- /dev/null +++ b/node_modules/pg-pool/README.md @@ -0,0 +1,376 @@ +# pg-pool +[![Build Status](https://travis-ci.org/brianc/node-pg-pool.svg?branch=master)](https://travis-ci.org/brianc/node-pg-pool) + +A connection pool for node-postgres + +## install +```sh +npm i pg-pool pg +``` + +## use + +### create + +to use pg-pool you must first create an instance of a pool + +```js +var Pool = require('pg-pool') + +// by default the pool uses the same +// configuration as whatever `pg` version you have installed +var pool = new Pool() + +// you can pass properties to the pool +// these properties are passed unchanged to both the node-postgres Client constructor +// and the node-pool (https://github.com/coopernurse/node-pool) constructor +// allowing you to fully configure the behavior of both +var pool2 = new Pool({ + database: 'postgres', + user: 'brianc', + password: 'secret!', + port: 5432, + ssl: true, + max: 20, // set pool max size to 20 + idleTimeoutMillis: 1000, // close idle clients after 1 second + connectionTimeoutMillis: 1000, // return an error after 1 second if connection could not be established + maxUses: 7500, // close (and replace) a connection after it has been used 7500 times (see below for discussion) +}) + +//you can supply a custom client constructor +//if you want to use the native postgres client +var NativeClient = require('pg').native.Client +var nativePool = new Pool({ Client: NativeClient }) + +//you can even pool pg-native clients directly +var PgNativeClient = require('pg-native') +var pgNativePool = new Pool({ Client: PgNativeClient }) +``` + +##### Note: +The Pool constructor does not support passing a Database URL as the parameter. To use pg-pool on heroku, for example, you need to parse the URL into a config object. Here is an example of how to parse a Database URL. 
+ +```js +const Pool = require('pg-pool'); +const url = require('url') + +const params = url.parse(process.env.DATABASE_URL); +const auth = params.auth.split(':'); + +const config = { + user: auth[0], + password: auth[1], + host: params.hostname, + port: params.port, + database: params.pathname.split('/')[1], + ssl: true +}; + +const pool = new Pool(config); + +/* + Transforms, 'progres://DBuser:secret@DBHost:#####/myDB', into + config = { + user: 'DBuser', + password: 'secret', + host: 'DBHost', + port: '#####', + database: 'myDB', + ssl: true + } +*/ +``` + +### acquire clients with a promise + +pg-pool supports a fully promise-based api for acquiring clients + +```js +var pool = new Pool() +pool.connect().then(client => { + client.query('select $1::text as name', ['pg-pool']).then(res => { + client.release() + console.log('hello from', res.rows[0].name) + }) + .catch(e => { + client.release() + console.error('query error', e.message, e.stack) + }) +}) +``` + +### plays nice with async/await + +this ends up looking much nicer if you're using [co](https://github.com/tj/co) or async/await: + +```js +// with async/await +(async () => { + var pool = new Pool() + var client = await pool.connect() + try { + var result = await client.query('select $1::text as name', ['brianc']) + console.log('hello from', result.rows[0]) + } finally { + client.release() + } +})().catch(e => console.error(e.message, e.stack)) + +// with co +co(function * () { + var client = yield pool.connect() + try { + var result = yield client.query('select $1::text as name', ['brianc']) + console.log('hello from', result.rows[0]) + } finally { + client.release() + } +}).catch(e => console.error(e.message, e.stack)) +``` + +### your new favorite helper method + +because its so common to just run a query and return the client to the pool afterward pg-pool has this built-in: + +```js +var pool = new Pool() +var time = await pool.query('SELECT NOW()') +var name = await pool.query('select $1::text as name', ['brianc']) +console.log(name.rows[0].name, 'says hello at', time.rows[0].name) +``` + +you can also use a callback here if you'd like: + +```js +var pool = new Pool() +pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + console.log(res.rows[0].name) // brianc +}) +``` + +__pro tip:__ unless you need to run a transaction (which requires a single client for multiple queries) or you +have some other edge case like [streaming rows](https://github.com/brianc/node-pg-query-stream) or using a [cursor](https://github.com/brianc/node-pg-cursor) +you should almost always just use `pool.query`. Its easy, it does the right thing :tm:, and wont ever forget to return +clients back to the pool after the query is done. + +### drop-in backwards compatible + +pg-pool still and will always support the traditional callback api for acquiring a client. This is the exact API node-postgres has shipped with for years: + +```js +var pool = new Pool() +pool.connect((err, client, done) => { + if (err) return done(err) + + client.query('SELECT $1::text as name', ['pg-pool'], (err, res) => { + done() + if (err) { + return console.error('query error', e.message, e.stack) + } + console.log('hello from', res.rows[0].name) + }) +}) +``` + +### shut it down + +When you are finished with the pool if all the clients are idle the pool will close them after `config.idleTimeoutMillis` and your app +will shutdown gracefully. 
If you don't want to wait for the timeout you can end the pool as follows: + +```js +var pool = new Pool() +var client = await pool.connect() +console.log(await client.query('select now()')) +client.release() +await pool.end() +``` + +### a note on instances + +The pool should be a __long-lived object__ in your application. Generally you'll want to instantiate one pool when your app starts up and use the same instance of the pool throughout the lifetime of your application. If you are frequently creating a new pool within your code you likely don't have your pool initialization code in the correct place. Example: + +```js +// assume this is a file in your program at ./your-app/lib/db.js + +// correct usage: create the pool and let it live +// 'globally' here, controlling access to it through exported methods +var pool = new pg.Pool() + +// this is the right way to export the query method +module.exports.query = (text, values) => { + console.log('query:', text, values) + return pool.query(text, values) +} + +// this would be the WRONG way to export the connect method +module.exports.connect = () => { + // notice how we would be creating a pool instance here + // every time we called 'connect' to get a new client? + // that's a bad thing & results in creating an unbounded + // number of pools & therefore connections + var aPool = new pg.Pool() + return aPool.connect() +} +``` + +### events + +Every instance of a `Pool` is an event emitter. These instances emit the following events: + +#### error + +Emitted whenever an idle client in the pool encounters an error. This is common when your PostgreSQL server shuts down, reboots, or a network partition otherwise causes it to become unavailable while your pool has connected clients. + +Example: + +```js +const Pool = require('pg-pool') +const pool = new Pool() + +// attach an error handler to the pool for when a connected, idle client +// receives an error by being disconnected, etc +pool.on('error', function(error, client) { + // handle this in the same way you would treat process.on('uncaughtException') + // it is supplied the error as well as the idle client which received the error +}) +``` + +#### connect + +Fired whenever the pool creates a __new__ `pg.Client` instance and successfully connects it to the backend. + +Example: + +```js +const Pool = require('pg-pool') +const pool = new Pool() + +var count = 0 + +pool.on('connect', client => { + client.count = count++ +}) + +pool + .connect() + .then(client => { + return client + .query('SELECT $1::int AS "clientCount"', [client.count]) + .then(res => console.log(res.rows[0].clientCount)) // outputs 0 + .then(() => client) + }) + .then(client => client.release()) + +``` + +#### acquire + +Fired whenever the a client is acquired from the pool + +Example: + +This allows you to count the number of clients which have ever been acquired from the pool. + +```js +var Pool = require('pg-pool') +var pool = new Pool() + +var acquireCount = 0 +pool.on('acquire', function (client) { + acquireCount++ +}) + +var connectCount = 0 +pool.on('connect', function () { + connectCount++ +}) + +for (var i = 0; i < 200; i++) { + pool.query('SELECT NOW()') +} + +setTimeout(function () { + console.log('connect count:', connectCount) // output: connect count: 10 + console.log('acquire count:', acquireCount) // output: acquire count: 200 +}, 100) + +``` + +### environment variables + +pg-pool & node-postgres support some of the same environment variables as `psql` supports. 
The most common are: + +``` +PGDATABASE=my_db +PGUSER=username +PGPASSWORD="my awesome password" +PGPORT=5432 +PGSSLMODE=require +``` + +Usually I will export these into my local environment via a `.env` file with environment settings or export them in `~/.bash_profile` or something similar. This way I get configurability which works with both the postgres suite of tools (`psql`, `pg_dump`, `pg_restore`) and node, I can vary the environment variables locally and in production, and it supports the concept of a [12-factor app](http://12factor.net/) out of the box. + +## bring your own promise + +In versions of node `<=0.12.x` there is no native promise implementation available globally. You can polyfill the promise globally like this: + +```js +// first run `npm install promise-polyfill --save +if (typeof Promise == 'undefined') { + global.Promise = require('promise-polyfill') +} +``` + +You can use any other promise implementation you'd like. The pool also allows you to configure the promise implementation on a per-pool level: + +```js +var bluebirdPool = new Pool({ + Promise: require('bluebird') +}) +``` + +__please note:__ in node `<=0.12.x` the pool will throw if you do not provide a promise constructor in one of the two ways mentioned above. In node `>=4.0.0` the pool will use the native promise implementation by default; however, the two methods above still allow you to "bring your own." + +## maxUses and read-replica autoscaling (e.g. AWS Aurora) + +The maxUses config option can help an application instance rebalance load against a replica set that has been auto-scaled after the connection pool is already full of healthy connections. + +The mechanism here is that a connection is considered "expended" after it has been acquired and released `maxUses` number of times. Depending on the load on your system, this means there will be an approximate time in which any given connection will live, thus creating a window for rebalancing. + +Imagine a scenario where you have 10 app instances providing an API running against a replica cluster of 3 that are accessed via a round-robin DNS entry. Each instance runs a connection pool size of 20. With an ambient load of 50 requests per second, the connection pool will likely fill up in a few minutes with healthy connections. + +If you have weekly bursts of traffic which peak at 1,000 requests per second, you might want to grow your replicas to 10 during this period. Without setting `maxUses`, the new replicas will not be adopted by the app servers without an intervention -- namely, restarting each in turn in order to build up new connection pools that are balanced against all the replicas. Adding additional app server instances will help to some extent because they will adopt all the replicas in an even way, but the initial app servers will continue to focus additional load on the original replicas. + +This is where the `maxUses` configuration option comes into play. Setting `maxUses` to 7500 will ensure that over a period of 30 minutes or so the new replicas will be adopted as the pre-existing connections are closed and replaced with new ones, thus creating a window for eventual balance. 
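As a concrete sketch of what that implies for configuration (the numbers below are hypothetical scaffolding chosen to match the scenario just described; only `max` and `maxUses` are actual pg-pool options, and the general formula is given just below):

```js
const Pool = require('pg-pool')

// hypothetical sizing inputs for the rebalancing scenario described above
const rebalanceWindowSeconds = 1800 // target a ~30 minute rebalancing window
const totalRequestsPerSecond = 1000 // peak load across all app instances
const numAppInstances = 10
const poolSize = 25

const pool = new Pool({
  max: poolSize,
  // 1800 * 1000 / 10 / 25 = 7200 acquisitions before a connection is retired
  maxUses: (rebalanceWindowSeconds * totalRequestsPerSecond) / numAppInstances / poolSize,
})
```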
+ +You'll want to test based on your own scenarios, but one way to make a first guess at `maxUses` is to identify an acceptable window for rebalancing and then solve for the value: + +``` +maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize +``` + +In the example above, assuming we acquire and release 1 connection per request and we are aiming for a 30 minute rebalancing window: + +``` +maxUses = rebalanceWindowSeconds * totalRequestsPerSecond / numAppInstances / poolSize + 7200 = 1800 * 1000 / 10 / 25 +``` + +## tests + +To run tests clone the repo, `npm i` in the working dir, and then run `npm test` + +## contributions + +I love contributions. Please make sure they have tests, and submit a PR. If you're not sure if the issue is worth it or will be accepted it never hurts to open an issue to begin the conversation. If you're interested in keeping up with node-postgres releated stuff, you can follow me on twitter at [@briancarlson](https://twitter.com/briancarlson) - I generally announce any noteworthy updates there. + +## license + +The MIT License (MIT) +Copyright (c) 2016 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/pg-pool/index.js b/node_modules/pg-pool/index.js new file mode 100644 index 00000000..eef490f9 --- /dev/null +++ b/node_modules/pg-pool/index.js @@ -0,0 +1,403 @@ +'use strict' +const EventEmitter = require('events').EventEmitter + +const NOOP = function () {} + +const removeWhere = (list, predicate) => { + const i = list.findIndex(predicate) + + return i === -1 ? undefined : list.splice(i, 1)[0] +} + +class IdleItem { + constructor(client, idleListener, timeoutId) { + this.client = client + this.idleListener = idleListener + this.timeoutId = timeoutId + } +} + +class PendingItem { + constructor(callback) { + this.callback = callback + } +} + +function throwOnDoubleRelease() { + throw new Error('Release called on client which has already been released to the pool.') +} + +function promisify(Promise, callback) { + if (callback) { + return { callback: callback, result: undefined } + } + let rej + let res + const cb = function (err, client) { + err ? 
rej(err) : res(client) + } + const result = new Promise(function (resolve, reject) { + res = resolve + rej = reject + }) + return { callback: cb, result: result } +} + +function makeIdleListener(pool, client) { + return function idleListener(err) { + err.client = client + + client.removeListener('error', idleListener) + client.on('error', () => { + pool.log('additional client error after disconnection due to error', err) + }) + pool._remove(client) + // TODO - document that once the pool emits an error + // the client has already been closed & purged and is unusable + pool.emit('error', err, client) + } +} + +class Pool extends EventEmitter { + constructor(options, Client) { + super() + this.options = Object.assign({}, options) + + if (options != null && 'password' in options) { + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this.options, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: options.password, + }) + } + + this.options.max = this.options.max || this.options.poolSize || 10 + this.options.maxUses = this.options.maxUses || Infinity + this.log = this.options.log || function () {} + this.Client = this.options.Client || Client || require('pg').Client + this.Promise = this.options.Promise || global.Promise + + if (typeof this.options.idleTimeoutMillis === 'undefined') { + this.options.idleTimeoutMillis = 10000 + } + + this._clients = [] + this._idle = [] + this._pendingQueue = [] + this._endCallback = undefined + this.ending = false + this.ended = false + } + + _isFull() { + return this._clients.length >= this.options.max + } + + _pulseQueue() { + this.log('pulse queue') + if (this.ended) { + this.log('pulse queue ended') + return + } + if (this.ending) { + this.log('pulse queue on ending') + if (this._idle.length) { + this._idle.slice().map((item) => { + this._remove(item.client) + }) + } + if (!this._clients.length) { + this.ended = true + this._endCallback() + } + return + } + // if we don't have any waiting, do nothing + if (!this._pendingQueue.length) { + this.log('no queued requests') + return + } + // if we don't have any idle clients and we have no more room do nothing + if (!this._idle.length && this._isFull()) { + return + } + const pendingItem = this._pendingQueue.shift() + if (this._idle.length) { + const idleItem = this._idle.pop() + clearTimeout(idleItem.timeoutId) + const client = idleItem.client + const idleListener = idleItem.idleListener + + return this._acquireClient(client, pendingItem, idleListener, false) + } + if (!this._isFull()) { + return this.newClient(pendingItem) + } + throw new Error('unexpected condition') + } + + _remove(client) { + const removed = removeWhere(this._idle, (item) => item.client === client) + + if (removed !== undefined) { + clearTimeout(removed.timeoutId) + } + + this._clients = this._clients.filter((c) => c !== client) + client.end() + this.emit('remove', client) + } + + connect(cb) { + if (this.ending) { + const err = new Error('Cannot use a pool after calling end on the pool') + return cb ? 
cb(err) : this.Promise.reject(err) + } + + const response = promisify(this.Promise, cb) + const result = response.result + + // if we don't have to connect a new client, don't do so + if (this._clients.length >= this.options.max || this._idle.length) { + // if we have idle clients schedule a pulse immediately + if (this._idle.length) { + process.nextTick(() => this._pulseQueue()) + } + + if (!this.options.connectionTimeoutMillis) { + this._pendingQueue.push(new PendingItem(response.callback)) + return result + } + + const queueCallback = (err, res, done) => { + clearTimeout(tid) + response.callback(err, res, done) + } + + const pendingItem = new PendingItem(queueCallback) + + // set connection timeout on checking out an existing client + const tid = setTimeout(() => { + // remove the callback from pending waiters because + // we're going to call it with a timeout error + removeWhere(this._pendingQueue, (i) => i.callback === queueCallback) + pendingItem.timedOut = true + response.callback(new Error('timeout exceeded when trying to connect')) + }, this.options.connectionTimeoutMillis) + + this._pendingQueue.push(pendingItem) + return result + } + + this.newClient(new PendingItem(response.callback)) + + return result + } + + newClient(pendingItem) { + const client = new this.Client(this.options) + this._clients.push(client) + const idleListener = makeIdleListener(this, client) + + this.log('checking client timeout') + + // connection timeout logic + let tid + let timeoutHit = false + if (this.options.connectionTimeoutMillis) { + tid = setTimeout(() => { + this.log('ending client due to timeout') + timeoutHit = true + // force kill the node driver, and let libpq do its teardown + client.connection ? client.connection.stream.destroy() : client.end() + }, this.options.connectionTimeoutMillis) + } + + this.log('connecting new client') + client.connect((err) => { + if (tid) { + clearTimeout(tid) + } + client.on('error', idleListener) + if (err) { + this.log('client failed to connect', err) + // remove the dead client from our list of clients + this._clients = this._clients.filter((c) => c !== client) + if (timeoutHit) { + err.message = 'Connection terminated due to connection timeout' + } + + // this client won’t be released, so move on immediately + this._pulseQueue() + + if (!pendingItem.timedOut) { + pendingItem.callback(err, undefined, NOOP) + } + } else { + this.log('new client connected') + + return this._acquireClient(client, pendingItem, idleListener, true) + } + }) + } + + // acquire a client for a pending work item + _acquireClient(client, pendingItem, idleListener, isNew) { + if (isNew) { + this.emit('connect', client) + } + + this.emit('acquire', client) + + client.release = this._releaseOnce(client, idleListener) + + client.removeListener('error', idleListener) + + if (!pendingItem.timedOut) { + if (isNew && this.options.verify) { + this.options.verify(client, (err) => { + if (err) { + client.release(err) + return pendingItem.callback(err, undefined, NOOP) + } + + pendingItem.callback(undefined, client, client.release) + }) + } else { + pendingItem.callback(undefined, client, client.release) + } + } else { + if (isNew && this.options.verify) { + this.options.verify(client, client.release) + } else { + client.release() + } + } + } + + // returns a function that wraps _release and throws if called more than once + _releaseOnce(client, idleListener) { + let released = false + + return (err) => { + if (released) { + throwOnDoubleRelease() + } + + released = true + this._release(client, 
idleListener, err) + } + } + + // release a client back to the poll, include an error + // to remove it from the pool + _release(client, idleListener, err) { + client.on('error', idleListener) + + client._poolUseCount = (client._poolUseCount || 0) + 1 + + // TODO(bmc): expose a proper, public interface _queryable and _ending + if (err || this.ending || !client._queryable || client._ending || client._poolUseCount >= this.options.maxUses) { + if (client._poolUseCount >= this.options.maxUses) { + this.log('remove expended client') + } + this._remove(client) + this._pulseQueue() + return + } + + // idle timeout + let tid + if (this.options.idleTimeoutMillis) { + tid = setTimeout(() => { + this.log('remove idle client') + this._remove(client) + }, this.options.idleTimeoutMillis) + } + + this._idle.push(new IdleItem(client, idleListener, tid)) + this._pulseQueue() + } + + query(text, values, cb) { + // guard clause against passing a function as the first parameter + if (typeof text === 'function') { + const response = promisify(this.Promise, text) + setImmediate(function () { + return response.callback(new Error('Passing a function as the first parameter to pool.query is not supported')) + }) + return response.result + } + + // allow plain text query without values + if (typeof values === 'function') { + cb = values + values = undefined + } + const response = promisify(this.Promise, cb) + cb = response.callback + + this.connect((err, client) => { + if (err) { + return cb(err) + } + + let clientReleased = false + const onError = (err) => { + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + cb(err) + } + + client.once('error', onError) + this.log('dispatching query') + client.query(text, values, (err, res) => { + this.log('query dispatched') + client.removeListener('error', onError) + if (clientReleased) { + return + } + clientReleased = true + client.release(err) + if (err) { + return cb(err) + } else { + return cb(undefined, res) + } + }) + }) + return response.result + } + + end(cb) { + this.log('ending') + if (this.ending) { + const err = new Error('Called end on pool more than once') + return cb ? cb(err) : this.Promise.reject(err) + } + this.ending = true + const promised = promisify(this.Promise, cb) + this._endCallback = promised.callback + this._pulseQueue() + return promised.result + } + + get waitingCount() { + return this._pendingQueue.length + } + + get idleCount() { + return this._idle.length + } + + get totalCount() { + return this._clients.length + } +} +module.exports = Pool diff --git a/node_modules/pg-pool/package.json b/node_modules/pg-pool/package.json new file mode 100644 index 00000000..7a813fcf --- /dev/null +++ b/node_modules/pg-pool/package.json @@ -0,0 +1,67 @@ +{ + "_from": "pg-pool@^3.2.1", + "_id": "pg-pool@3.2.1", + "_inBundle": false, + "_integrity": "sha512-BQDPWUeKenVrMMDN9opfns/kZo4lxmSWhIqo+cSAF7+lfi9ZclQbr9vfnlNaPr8wYF3UYjm5X0yPAhbcgqNOdA==", + "_location": "/pg-pool", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-pool@^3.2.1", + "name": "pg-pool", + "escapedName": "pg-pool", + "rawSpec": "^3.2.1", + "saveSpec": null, + "fetchSpec": "^3.2.1" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.1.tgz", + "_shasum": "5f4afc0f58063659aeefa952d36af49fa28b30e0", + "_spec": "pg-pool@^3.2.1", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "author": { + "name": "Brian M. 
Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-pg-pool/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "Connection pool for node-postgres", + "devDependencies": { + "bluebird": "3.4.1", + "co": "4.6.0", + "expect.js": "0.3.1", + "lodash": "^4.17.11", + "mocha": "^7.1.2", + "pg-cursor": "^1.3.0" + }, + "directories": { + "test": "test" + }, + "gitHead": "f3136a7d5d5498280924b3e06f47f8ce80dbe4e6", + "homepage": "https://github.com/brianc/node-pg-pool#readme", + "keywords": [ + "pg", + "postgres", + "pool", + "database" + ], + "license": "MIT", + "main": "index.js", + "name": "pg-pool", + "peerDependencies": { + "pg": ">=8.0" + }, + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git" + }, + "scripts": { + "test": " node_modules/.bin/mocha" + }, + "version": "3.2.1" +} diff --git a/node_modules/pg-pool/test/bring-your-own-promise.js b/node_modules/pg-pool/test/bring-your-own-promise.js new file mode 100644 index 00000000..e905ccc0 --- /dev/null +++ b/node_modules/pg-pool/test/bring-your-own-promise.js @@ -0,0 +1,42 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const BluebirdPromise = require('bluebird') + +const Pool = require('../') + +const checkType = (promise) => { + expect(promise).to.be.a(BluebirdPromise) + return promise.catch((e) => undefined) +} + +describe('Bring your own promise', function () { + it( + 'uses supplied promise for operations', + co.wrap(function* () { + const pool = new Pool({ Promise: BluebirdPromise }) + const client1 = yield checkType(pool.connect()) + client1.release() + yield checkType(pool.query('SELECT NOW()')) + const client2 = yield checkType(pool.connect()) + // TODO - make sure pg supports BYOP as well + client2.release() + yield checkType(pool.end()) + }) + ) + + it( + 'uses promises in errors', + co.wrap(function* () { + const pool = new Pool({ Promise: BluebirdPromise, port: 48484 }) + yield checkType(pool.connect()) + yield checkType(pool.end()) + yield checkType(pool.connect()) + yield checkType(pool.query()) + yield checkType(pool.end()) + }) + ) +}) diff --git a/node_modules/pg-pool/test/connection-strings.js b/node_modules/pg-pool/test/connection-strings.js new file mode 100644 index 00000000..de45830d --- /dev/null +++ b/node_modules/pg-pool/test/connection-strings.js @@ -0,0 +1,29 @@ +const expect = require('expect.js') +const describe = require('mocha').describe +const it = require('mocha').it +const Pool = require('../') + +describe('Connection strings', function () { + it('pool delegates connectionString property to client', function (done) { + const connectionString = 'postgres://foo:bar@baz:1234/xur' + + const pool = new Pool({ + // use a fake client so we can check we're passed the connectionString + Client: function (args) { + expect(args.connectionString).to.equal(connectionString) + return { + connect: function (cb) { + cb(new Error('testing')) + }, + on: function () {}, + } + }, + connectionString: connectionString, + }) + + pool.connect(function (err, client) { + expect(err).to.not.be(undefined) + done() + }) + }) +}) diff --git a/node_modules/pg-pool/test/connection-timeout.js b/node_modules/pg-pool/test/connection-timeout.js new file mode 100644 index 00000000..05e8931d --- /dev/null +++ b/node_modules/pg-pool/test/connection-timeout.js @@ -0,0 +1,229 @@ +'use strict' +const net = require('net') +const co = require('co') +const expect 
= require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it +const before = require('mocha').before +const after = require('mocha').after + +const Pool = require('../') + +describe('connection timeout', () => { + const connectionFailure = new Error('Temporary connection failure') + + before((done) => { + this.server = net.createServer((socket) => { + socket.on('data', () => { + // discard any buffered data or the server wont terminate + }) + }) + + this.server.listen(() => { + this.port = this.server.address().port + done() + }) + }) + + after((done) => { + this.server.close(done) + }) + + it('should callback with an error if timeout is passed', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' }) + pool.connect((err, client, release) => { + expect(err).to.be.an(Error) + expect(err.message).to.contain('timeout') + expect(client).to.equal(undefined) + expect(pool.idleCount).to.equal(0) + done() + }) + }) + + it('should reject promise with an error if timeout is passed', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 10, port: this.port, host: 'localhost' }) + pool.connect().catch((err) => { + expect(err).to.be.an(Error) + expect(err.message).to.contain('timeout') + expect(pool.idleCount).to.equal(0) + done() + }) + }) + + it( + 'should handle multiple timeouts', + co.wrap( + function* () { + const errors = [] + const pool = new Pool({ connectionTimeoutMillis: 1, port: this.port, host: 'localhost' }) + for (var i = 0; i < 15; i++) { + try { + yield pool.connect() + } catch (e) { + errors.push(e) + } + } + expect(errors).to.have.length(15) + }.bind(this) + ) + ) + + it('should timeout on checkout of used connection', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + release() + pool.end(done) + }) + }) + }) + + it('should not break further pending checkouts on a timeout', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 200, max: 1 }) + pool.connect((err, client, releaseOuter) => { + expect(err).to.be(undefined) + + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + releaseOuter() + }) + + setTimeout(() => { + pool.connect((err, client, releaseInner) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + releaseInner() + pool.end(done) + }) + }, 100) + }) + }) + + it('should timeout on query if all clients are busy', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.query('select now()', (err, result) => { + expect(err).to.be.an(Error) + expect(result).to.be(undefined) + release() + pool.end(done) + }) + }) + }) + + it('should recover from timeout errors', (done) => { + const pool = new Pool({ connectionTimeoutMillis: 100, max: 1 }) + pool.connect((err, client, release) => { + expect(err).to.be(undefined) + expect(client).to.not.be(undefined) + pool.query('select now()', (err, result) => { + expect(err).to.be.an(Error) + expect(result).to.be(undefined) + release() + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + 
}) + }) + + it('continues processing after a connection failure', (done) => { + const Client = require('pg').Client + const orgConnect = Client.prototype.connect + let called = false + + Client.prototype.connect = function (cb) { + // Simulate a failure on first call + if (!called) { + called = true + + return setTimeout(() => { + cb(connectionFailure) + }, 100) + } + // And pass-through the second call + orgConnect.call(this, cb) + } + + const pool = new Pool({ + Client: Client, + connectionTimeoutMillis: 1000, + max: 1, + }) + + pool.connect((err, client, release) => { + expect(err).to.be(connectionFailure) + + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + + it('releases newly connected clients if the queued already timed out', (done) => { + const Client = require('pg').Client + + const orgConnect = Client.prototype.connect + + let connection = 0 + + Client.prototype.connect = function (cb) { + // Simulate a failure on first call + if (connection === 0) { + connection++ + + return setTimeout(() => { + cb(connectionFailure) + }, 300) + } + + // And second connect taking > connection timeout + if (connection === 1) { + connection++ + + return setTimeout(() => { + orgConnect.call(this, cb) + }, 1000) + } + + orgConnect.call(this, cb) + } + + const pool = new Pool({ + Client: Client, + connectionTimeoutMillis: 1000, + max: 1, + }) + + // Direct connect + pool.connect((err, client, release) => { + expect(err).to.be(connectionFailure) + }) + + // Queued + let called = 0 + pool.connect((err, client, release) => { + // Verify the callback is only called once + expect(called++).to.be(0) + expect(err).to.be.an(Error) + + pool.query('select $1::text as name', ['brianc'], (err, res) => { + expect(err).to.be(undefined) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) +}) diff --git a/node_modules/pg-pool/test/ending.js b/node_modules/pg-pool/test/ending.js new file mode 100644 index 00000000..e1839b46 --- /dev/null +++ b/node_modules/pg-pool/test/ending.js @@ -0,0 +1,40 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool ending', () => { + it('ends without being used', (done) => { + const pool = new Pool() + pool.end(done) + }) + + it('ends with a promise', () => { + return new Pool().end() + }) + + it( + 'ends with clients', + co.wrap(function* () { + const pool = new Pool() + const res = yield pool.query('SELECT $1::text as name', ['brianc']) + expect(res.rows[0].name).to.equal('brianc') + return pool.end() + }) + ) + + it( + 'allows client to finish', + co.wrap(function* () { + const pool = new Pool() + const query = pool.query('SELECT $1::text as name', ['brianc']) + yield pool.end() + const res = yield query + expect(res.rows[0].name).to.equal('brianc') + }) + ) +}) diff --git a/node_modules/pg-pool/test/error-handling.js b/node_modules/pg-pool/test/error-handling.js new file mode 100644 index 00000000..fea1d114 --- /dev/null +++ b/node_modules/pg-pool/test/error-handling.js @@ -0,0 +1,260 @@ +'use strict' +const net = require('net') +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool error handling', function () { + it('Should complete these queries without dying', 
function (done) { + const pool = new Pool() + let errors = 0 + let shouldGet = 0 + function runErrorQuery() { + shouldGet++ + return new Promise(function (resolve, reject) { + pool + .query("SELECT 'asd'+1 ") + .then(function (res) { + reject(res) // this should always error + }) + .catch(function (err) { + errors++ + resolve(err) + }) + }) + } + const ps = [] + for (let i = 0; i < 5; i++) { + ps.push(runErrorQuery()) + } + Promise.all(ps).then(function () { + expect(shouldGet).to.eql(errors) + pool.end(done) + }) + }) + + describe('calling release more than once', () => { + it( + 'should throw each time', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + client.release() + expect(() => client.release()).to.throwError() + expect(() => client.release()).to.throwError() + return yield pool.end() + }) + ) + + it('should throw each time with callbacks', function (done) { + const pool = new Pool() + + pool.connect(function (err, client, clientDone) { + expect(err).not.to.be.an(Error) + clientDone() + + expect(() => clientDone()).to.throwError() + expect(() => clientDone()).to.throwError() + + pool.end(done) + }) + }) + }) + + describe('calling connect after end', () => { + it('should return an error', function* () { + const pool = new Pool() + const res = yield pool.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + const wait = pool.end() + pool.query('select now()') + yield wait + expect(() => pool.query('select now()')).to.reject() + }) + }) + + describe('using an ended pool', () => { + it('rejects all additional promises', (done) => { + const pool = new Pool() + const promises = [] + pool.end().then(() => { + const squash = (promise) => promise.catch((e) => 'okay!') + promises.push(squash(pool.connect())) + promises.push(squash(pool.query('SELECT NOW()'))) + promises.push(squash(pool.end())) + Promise.all(promises).then((res) => { + expect(res).to.eql(['okay!', 'okay!', 'okay!']) + done() + }) + }) + }) + + it('returns an error on all additional callbacks', (done) => { + const pool = new Pool() + pool.end(() => { + pool.query('SELECT *', (err) => { + expect(err).to.be.an(Error) + pool.connect((err) => { + expect(err).to.be.an(Error) + pool.end((err) => { + expect(err).to.be.an(Error) + done() + }) + }) + }) + }) + }) + }) + + describe('error from idle client', () => { + it( + 'removes client from pool', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.waitingCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + client.release() + yield new Promise((resolve, reject) => { + process.nextTick(() => { + let poolError + pool.once('error', (err) => { + poolError = err + }) + + let clientError + client.once('error', (err) => { + clientError = err + }) + + client.emit('error', new Error('expected')) + + expect(clientError.message).to.equal('expected') + expect(poolError.message).to.equal('expected') + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + pool.end().then(resolve, reject) + }) + }) + }) + ) + }) + + describe('error from in-use client', () => { + it( + 'keeps the client in the pool', + co.wrap(function* () { + const pool = new Pool() + const client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.waitingCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + + yield new Promise((resolve, reject) => { + process.nextTick(() => { + let poolError + pool.once('error', (err) 
=> { + poolError = err + }) + + let clientError + client.once('error', (err) => { + clientError = err + }) + + client.emit('error', new Error('expected')) + + expect(clientError.message).to.equal('expected') + expect(poolError).not.to.be.ok() + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + client.release() + pool.end().then(resolve, reject) + }) + }) + }) + ) + }) + + describe('passing a function to pool.query', () => { + it('calls back with error', (done) => { + const pool = new Pool() + console.log('passing fn to query') + pool.query((err) => { + expect(err).to.be.an(Error) + pool.end(done) + }) + }) + }) + + describe('pool with lots of errors', () => { + it( + 'continues to work and provide new clients', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + const errors = [] + for (var i = 0; i < 20; i++) { + try { + yield pool.query('invalid sql') + } catch (err) { + errors.push(err) + } + } + expect(errors).to.have.length(20) + expect(pool.idleCount).to.equal(0) + expect(pool.query).to.be.a(Function) + const res = yield pool.query('SELECT $1::text as name', ['brianc']) + expect(res.rows).to.have.length(1) + expect(res.rows[0].name).to.equal('brianc') + return pool.end() + }) + ) + }) + + it('should continue with queued items after a connection failure', (done) => { + const closeServer = net + .createServer((socket) => { + socket.destroy() + }) + .unref() + + closeServer.listen(() => { + const pool = new Pool({ max: 1, port: closeServer.address().port, host: 'localhost' }) + pool.connect((err) => { + expect(err).to.be.an(Error) + if (err.code) { + expect(err.code).to.be('ECONNRESET') + } + }) + pool.connect((err) => { + expect(err).to.be.an(Error) + if (err.code) { + expect(err.code).to.be('ECONNRESET') + } + closeServer.close(() => { + pool.end(done) + }) + }) + }) + }) + + it('handles post-checkout client failures in pool.query', (done) => { + const pool = new Pool({ max: 1 }) + pool.on('error', () => { + // We double close the connection in this test, prevent exception caused by that + }) + pool.query('SELECT pg_sleep(5)', [], (err) => { + expect(err).to.be.an(Error) + done() + }) + + setTimeout(() => { + pool._clients[0].end() + }, 1000) + }) +}) diff --git a/node_modules/pg-pool/test/events.js b/node_modules/pg-pool/test/events.js new file mode 100644 index 00000000..61979247 --- /dev/null +++ b/node_modules/pg-pool/test/events.js @@ -0,0 +1,86 @@ +'use strict' + +const expect = require('expect.js') +const EventEmitter = require('events').EventEmitter +const describe = require('mocha').describe +const it = require('mocha').it +const Pool = require('../') + +describe('events', function () { + it('emits connect before callback', function (done) { + const pool = new Pool() + let emittedClient = false + pool.on('connect', function (client) { + emittedClient = client + }) + + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + pool.end() + expect(client).to.be(emittedClient) + done() + }) + }) + + it('emits "connect" only with a successful connection', function () { + const pool = new Pool({ + // This client will always fail to connect + Client: mockClient({ + connect: function (cb) { + process.nextTick(() => { + cb(new Error('bad news')) + }) + }, + }), + }) + pool.on('connect', function () { + throw new Error('should never get here') + }) + return pool.connect().catch((e) => expect(e.message).to.equal('bad news')) + }) + + it('emits acquire every time a client is acquired', function (done) { + const pool = new 
Pool() + let acquireCount = 0 + pool.on('acquire', function (client) { + expect(client).to.be.ok() + acquireCount++ + }) + for (let i = 0; i < 10; i++) { + pool.connect(function (err, client, release) { + if (err) return done(err) + release() + }) + pool.query('SELECT now()') + } + setTimeout(function () { + expect(acquireCount).to.be(20) + pool.end(done) + }, 100) + }) + + it('emits error and client if an idle client in the pool hits an error', function (done) { + const pool = new Pool() + pool.connect(function (err, client) { + expect(err).to.equal(undefined) + client.release() + setImmediate(function () { + client.emit('error', new Error('problem')) + }) + pool.once('error', function (err, errClient) { + expect(err.message).to.equal('problem') + expect(errClient).to.equal(client) + done() + }) + }) + }) +}) + +function mockClient(methods) { + return function () { + const client = new EventEmitter() + Object.assign(client, methods) + return client + } +} diff --git a/node_modules/pg-pool/test/idle-timeout.js b/node_modules/pg-pool/test/idle-timeout.js new file mode 100644 index 00000000..fd9fba4a --- /dev/null +++ b/node_modules/pg-pool/test/idle-timeout.js @@ -0,0 +1,87 @@ +'use strict' +const co = require('co') +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +const wait = (time) => new Promise((resolve) => setTimeout(resolve, time)) + +describe('idle timeout', () => { + it('should timeout and remove the client', (done) => { + const pool = new Pool({ idleTimeoutMillis: 10 }) + pool.query('SELECT NOW()') + pool.on('remove', () => { + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + done() + }) + }) + + it( + 'times out and removes clients when others are also removed', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 10 }) + const clientA = yield pool.connect() + const clientB = yield pool.connect() + clientA.release() + clientB.release(new Error()) + + const removal = new Promise((resolve) => { + pool.on('remove', () => { + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + resolve() + }) + }) + + const timeout = wait(100).then(() => Promise.reject(new Error('Idle timeout failed to occur'))) + + try { + yield Promise.race([removal, timeout]) + } finally { + pool.end() + } + }) + ) + + it( + 'can remove idle clients and recreate them', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 1 }) + const results = [] + for (var i = 0; i < 20; i++) { + let query = pool.query('SELECT NOW()') + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(1) + results.push(yield query) + yield wait(2) + expect(pool.idleCount).to.equal(0) + expect(pool.totalCount).to.equal(0) + } + expect(results).to.have.length(20) + }) + ) + + it( + 'does not time out clients which are used', + co.wrap(function* () { + const pool = new Pool({ idleTimeoutMillis: 1 }) + const results = [] + for (var i = 0; i < 20; i++) { + let client = yield pool.connect() + expect(pool.totalCount).to.equal(1) + expect(pool.idleCount).to.equal(0) + yield wait(10) + results.push(yield client.query('SELECT NOW()')) + client.release() + expect(pool.idleCount).to.equal(1) + expect(pool.totalCount).to.equal(1) + } + expect(results).to.have.length(20) + return pool.end() + }) + ) +}) diff --git a/node_modules/pg-pool/test/index.js b/node_modules/pg-pool/test/index.js new file mode 100644 index 00000000..57a68e01 --- /dev/null +++ 
b/node_modules/pg-pool/test/index.js @@ -0,0 +1,226 @@ +'use strict' +const expect = require('expect.js') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool', function () { + describe('with callbacks', function () { + it('works totally unconfigured', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + if (err) return done(err) + client.query('SELECT NOW()', function (err, res) { + release() + if (err) return done(err) + expect(res.rows).to.have.length(1) + pool.end(done) + }) + }) + }) + + it('passes props to clients', function (done) { + const pool = new Pool({ binary: true }) + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + expect(client.binary).to.eql(true) + pool.end(done) + }) + }) + + it('can run a query with a callback without parameters', function (done) { + const pool = new Pool() + pool.query('SELECT 1 as num', function (err, res) { + expect(res.rows[0]).to.eql({ num: 1 }) + pool.end(function () { + done(err) + }) + }) + }) + + it('can run a query with a callback', function (done) { + const pool = new Pool() + pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + expect(res.rows[0]).to.eql({ name: 'brianc' }) + pool.end(function () { + done(err) + }) + }) + }) + + it('passes connection errors to callback', function (done) { + const pool = new Pool({ port: 53922 }) + pool.query('SELECT $1::text as name', ['brianc'], function (err, res) { + expect(res).to.be(undefined) + expect(err).to.be.an(Error) + // a connection error should not polute the pool with a dead client + expect(pool.totalCount).to.equal(0) + pool.end(function (err) { + done(err) + }) + }) + }) + + it('does not pass client to error callback', function (done) { + const pool = new Pool({ port: 58242 }) + pool.connect(function (err, client, release) { + expect(err).to.be.an(Error) + expect(client).to.be(undefined) + expect(release).to.be.a(Function) + pool.end(done) + }) + }) + + it('removes client if it errors in background', function (done) { + const pool = new Pool() + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + client.testString = 'foo' + setTimeout(function () { + client.emit('error', new Error('on purpose')) + }, 10) + }) + pool.on('error', function (err) { + expect(err.message).to.be('on purpose') + expect(err.client).to.not.be(undefined) + expect(err.client.testString).to.be('foo') + err.client.connection.stream.on('end', function () { + pool.end(done) + }) + }) + }) + + it('should not change given options', function (done) { + const options = { max: 10 } + const pool = new Pool(options) + pool.connect(function (err, client, release) { + release() + if (err) return done(err) + expect(options).to.eql({ max: 10 }) + pool.end(done) + }) + }) + + it('does not create promises when connecting', function (done) { + const pool = new Pool() + const returnValue = pool.connect(function (err, client, release) { + release() + if (err) return done(err) + pool.end(done) + }) + expect(returnValue).to.be(undefined) + }) + + it('does not create promises when querying', function (done) { + const pool = new Pool() + const returnValue = pool.query('SELECT 1 as num', function (err) { + pool.end(function () { + done(err) + }) + }) + expect(returnValue).to.be(undefined) + }) + + it('does not create promises when ending', function (done) { + const pool = new Pool() + const returnValue = 
pool.end(done) + expect(returnValue).to.be(undefined) + }) + + it('never calls callback syncronously', function (done) { + const pool = new Pool() + pool.connect((err, client) => { + if (err) throw err + client.release() + setImmediate(() => { + let called = false + pool.connect((err, client) => { + if (err) throw err + called = true + client.release() + setImmediate(() => { + pool.end(done) + }) + }) + expect(called).to.equal(false) + }) + }) + }) + }) + + describe('with promises', function () { + it('connects, queries, and disconnects', function () { + const pool = new Pool() + return pool.connect().then(function (client) { + return client.query('select $1::text as name', ['hi']).then(function (res) { + expect(res.rows).to.eql([{ name: 'hi' }]) + client.release() + return pool.end() + }) + }) + }) + + it('executes a query directly', () => { + const pool = new Pool() + return pool.query('SELECT $1::text as name', ['hi']).then((res) => { + expect(res.rows).to.have.length(1) + expect(res.rows[0].name).to.equal('hi') + return pool.end() + }) + }) + + it('properly pools clients', function () { + const pool = new Pool({ poolSize: 9 }) + const promises = _.times(30, function () { + return pool.connect().then(function (client) { + return client.query('select $1::text as name', ['hi']).then(function (res) { + client.release() + return res + }) + }) + }) + return Promise.all(promises).then(function (res) { + expect(res).to.have.length(30) + expect(pool.totalCount).to.be(9) + return pool.end() + }) + }) + + it('supports just running queries', function () { + const pool = new Pool({ poolSize: 9 }) + const text = 'select $1::text as name' + const values = ['hi'] + const query = { text: text, values: values } + const promises = _.times(30, () => pool.query(query)) + return Promise.all(promises).then(function (queries) { + expect(queries).to.have.length(30) + return pool.end() + }) + }) + + it('recovers from query errors', function () { + const pool = new Pool() + + const errors = [] + const promises = _.times(30, () => { + return pool.query('SELECT asldkfjasldkf').catch(function (e) { + errors.push(e) + }) + }) + return Promise.all(promises).then(() => { + expect(errors).to.have.length(30) + expect(pool.totalCount).to.equal(0) + expect(pool.idleCount).to.equal(0) + return pool.query('SELECT $1::text as name', ['hi']).then(function (res) { + expect(res.rows).to.eql([{ name: 'hi' }]) + return pool.end() + }) + }) + }) + }) +}) diff --git a/node_modules/pg-pool/test/logging.js b/node_modules/pg-pool/test/logging.js new file mode 100644 index 00000000..839603b7 --- /dev/null +++ b/node_modules/pg-pool/test/logging.js @@ -0,0 +1,20 @@ +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('logging', function () { + it('logs to supplied log function if given', function () { + const messages = [] + const log = function (msg) { + messages.push(msg) + } + const pool = new Pool({ log: log }) + return pool.query('SELECT NOW()').then(function () { + expect(messages.length).to.be.greaterThan(0) + return pool.end() + }) + }) +}) diff --git a/node_modules/pg-pool/test/max-uses.js b/node_modules/pg-pool/test/max-uses.js new file mode 100644 index 00000000..c94ddec6 --- /dev/null +++ b/node_modules/pg-pool/test/max-uses.js @@ -0,0 +1,98 @@ +const expect = require('expect.js') +const co = require('co') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const 
Pool = require('../') + +describe('maxUses', () => { + it( + 'can create a single client and use it once', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const res = yield client.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + client.release() + pool.end() + }) + ) + + it( + 'getting a connection a second time returns the same connection and releasing it also closes it', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + client.release() + const client2 = yield pool.connect() + expect(client).to.equal(client2) + expect(client2._ending).to.equal(false) + client2.release() + expect(client2._ending).to.equal(true) + return yield pool.end() + }) + ) + + it( + 'getting a connection a third time returns a new connection', + co.wrap(function* () { + const pool = new Pool({ maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + client.release() + const client2 = yield pool.connect() + expect(client).to.equal(client2) + client2.release() + const client3 = yield pool.connect() + expect(client3).not.to.equal(client2) + client3.release() + return yield pool.end() + }) + ) + + it( + 'getting a connection from a pending request gets a fresh client when the released candidate is expended', + co.wrap(function* () { + const pool = new Pool({ max: 1, maxUses: 2 }) + expect(pool.waitingCount).to.equal(0) + const client1 = yield pool.connect() + pool.connect().then((client2) => { + expect(client2).to.equal(client1) + expect(pool.waitingCount).to.equal(1) + // Releasing the client this time should also expend it since maxUses is 2, causing client3 to be a fresh client + client2.release() + }) + const client3Promise = pool.connect().then((client3) => { + // client3 should be a fresh client since client2's release caused the first client to be expended + expect(pool.waitingCount).to.equal(0) + expect(client3).not.to.equal(client1) + return client3.release() + }) + // There should be two pending requests since we have 3 connect requests but a max size of 1 + expect(pool.waitingCount).to.equal(2) + // Releasing the client should not yet expend it since maxUses is 2 + client1.release() + yield client3Promise + return yield pool.end() + }) + ) + + it( + 'logs when removing an expended client', + co.wrap(function* () { + const messages = [] + const log = function (msg) { + messages.push(msg) + } + const pool = new Pool({ maxUses: 1, log }) + const client = yield pool.connect() + client.release() + expect(messages).to.contain('remove expended client') + return yield pool.end() + }) + ) +}) diff --git a/node_modules/pg-pool/test/releasing-clients.js b/node_modules/pg-pool/test/releasing-clients.js new file mode 100644 index 00000000..da8e09c1 --- /dev/null +++ b/node_modules/pg-pool/test/releasing-clients.js @@ -0,0 +1,54 @@ +const Pool = require('../') + +const expect = require('expect.js') +const net = require('net') + +describe('releasing clients', () => { + it('removes a client which cannot be queried', async () => { + // make a pool w/ only 1 client + const pool = new Pool({ max: 1 }) + expect(pool.totalCount).to.eql(0) + const client = await pool.connect() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + // reach into the client and sever its connection + client.connection.end() + + // wait for the client to error out + const 
err = await new Promise((resolve) => client.once('error', resolve)) + expect(err).to.be.ok() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + + // try to return it to the pool - this removes it because its broken + client.release() + expect(pool.totalCount).to.eql(0) + expect(pool.idleCount).to.eql(0) + + // make sure pool still works + const { rows } = await pool.query('SELECT NOW()') + expect(rows).to.have.length(1) + await pool.end() + }) + + it('removes a client which is ending', async () => { + // make a pool w/ only 1 client + const pool = new Pool({ max: 1 }) + expect(pool.totalCount).to.eql(0) + const client = await pool.connect() + expect(pool.totalCount).to.eql(1) + expect(pool.idleCount).to.eql(0) + // end the client gracefully (but you shouldn't do this with pooled clients) + client.end() + + // try to return it to the pool + client.release() + expect(pool.totalCount).to.eql(0) + expect(pool.idleCount).to.eql(0) + + // make sure pool still works + const { rows } = await pool.query('SELECT NOW()') + expect(rows).to.have.length(1) + await pool.end() + }) +}) diff --git a/node_modules/pg-pool/test/setup.js b/node_modules/pg-pool/test/setup.js new file mode 100644 index 00000000..811e956d --- /dev/null +++ b/node_modules/pg-pool/test/setup.js @@ -0,0 +1,10 @@ +const crash = (reason) => { + process.on(reason, (err) => { + console.error(reason, err.stack) + process.exit(-1) + }) +} + +crash('unhandledRejection') +crash('uncaughtError') +crash('warning') diff --git a/node_modules/pg-pool/test/sizing.js b/node_modules/pg-pool/test/sizing.js new file mode 100644 index 00000000..e7863ba0 --- /dev/null +++ b/node_modules/pg-pool/test/sizing.js @@ -0,0 +1,58 @@ +const expect = require('expect.js') +const co = require('co') +const _ = require('lodash') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('pool size of 1', () => { + it( + 'can create a single client and use it once', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const res = yield client.query('SELECT $1::text as name', ['hi']) + expect(res.rows[0].name).to.equal('hi') + client.release() + pool.end() + }) + ) + + it( + 'can create a single client and use it multiple times', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + expect(pool.waitingCount).to.equal(0) + const client = yield pool.connect() + const wait = pool.connect() + expect(pool.waitingCount).to.equal(1) + client.release() + const client2 = yield wait + expect(client).to.equal(client2) + client2.release() + return yield pool.end() + }) + ) + + it( + 'can only send 1 query at a time', + co.wrap(function* () { + const pool = new Pool({ max: 1 }) + + // the query text column name changed in PostgreSQL 9.2 + const versionResult = yield pool.query('SHOW server_version_num') + const version = parseInt(versionResult.rows[0].server_version_num, 10) + const queryColumn = version < 90200 ? 
'current_query' : 'query' + + const queryText = 'SELECT COUNT(*) as counts FROM pg_stat_activity WHERE ' + queryColumn + ' = $1' + const queries = _.times(20, () => pool.query(queryText, [queryText])) + const results = yield Promise.all(queries) + const counts = results.map((res) => parseInt(res.rows[0].counts, 10)) + expect(counts).to.eql(_.times(20, (i) => 1)) + return yield pool.end() + }) + ) +}) diff --git a/node_modules/pg-pool/test/submittable.js b/node_modules/pg-pool/test/submittable.js new file mode 100644 index 00000000..7a1574d4 --- /dev/null +++ b/node_modules/pg-pool/test/submittable.js @@ -0,0 +1,19 @@ +'use strict' +const Cursor = require('pg-cursor') +const expect = require('expect.js') +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('submittle', () => { + it('is returned from the query method', false, (done) => { + const pool = new Pool() + const cursor = pool.query(new Cursor('SELECT * from generate_series(0, 1000)')) + cursor.read((err, rows) => { + expect(err).to.be(undefined) + expect(!!rows).to.be.ok() + cursor.close(done) + }) + }) +}) diff --git a/node_modules/pg-pool/test/timeout.js b/node_modules/pg-pool/test/timeout.js new file mode 100644 index 00000000..e69de29b diff --git a/node_modules/pg-pool/test/verify.js b/node_modules/pg-pool/test/verify.js new file mode 100644 index 00000000..e7ae1dd8 --- /dev/null +++ b/node_modules/pg-pool/test/verify.js @@ -0,0 +1,25 @@ +'use strict' +const expect = require('expect.js') + +const describe = require('mocha').describe +const it = require('mocha').it + +const Pool = require('../') + +describe('verify', () => { + it('verifies a client with a callback', false, (done) => { + const pool = new Pool({ + verify: (client, cb) => { + client.release() + cb(new Error('nope')) + }, + }) + + pool.connect((err, client) => { + expect(err).to.be.an(Error) + expect(err.message).to.be('nope') + pool.end() + done() + }) + }) +}) diff --git a/node_modules/pg-protocol/LICENSE b/node_modules/pg-protocol/LICENSE new file mode 100644 index 00000000..aa66489d --- /dev/null +++ b/node_modules/pg-protocol/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2020 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/pg-protocol/dist/BufferReader.d.ts b/node_modules/pg-protocol/dist/BufferReader.d.ts new file mode 100644 index 00000000..8970d77f --- /dev/null +++ b/node_modules/pg-protocol/dist/BufferReader.d.ts @@ -0,0 +1,14 @@ +/// +export declare class BufferReader { + private offset; + private buffer; + private encoding; + constructor(offset?: number); + setBuffer(offset: number, buffer: Buffer): void; + int16(): number; + byte(): number; + int32(): number; + string(length: number): string; + cstring(): string; + bytes(length: number): Buffer; +} diff --git a/node_modules/pg-protocol/dist/BufferReader.js b/node_modules/pg-protocol/dist/BufferReader.js new file mode 100644 index 00000000..60186a51 --- /dev/null +++ b/node_modules/pg-protocol/dist/BufferReader.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const emptyBuffer = Buffer.allocUnsafe(0); +class BufferReader { + constructor(offset = 0) { + this.offset = offset; + this.buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding + this.encoding = 'utf-8'; + } + setBuffer(offset, buffer) { + this.offset = offset; + this.buffer = buffer; + } + int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + string(length) { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); + this.offset += length; + return result; + } + cstring() { + var start = this.offset; + var end = this.buffer.indexOf(0, start); + this.offset = end + 1; + return this.buffer.toString(this.encoding, start, end); + } + bytes(length) { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result; + } +} +exports.BufferReader = BufferReader; +//# sourceMappingURL=BufferReader.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/BufferReader.js.map b/node_modules/pg-protocol/dist/BufferReader.js.map new file mode 100644 index 00000000..a4c367c7 --- /dev/null +++ b/node_modules/pg-protocol/dist/BufferReader.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"BufferReader.js","sourceRoot":"","sources":["../src/BufferReader.ts"],"names":[],"mappings":";;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAC;AAE1C,MAAa,YAAY;IAIvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAH9B,WAAM,GAAW,WAAW,CAAC;QACrC,uCAAuC;QAC/B,aAAQ,GAAW,OAAO,CAAC;IAEnC,CAAC;IACM,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;IACvB,CAAC;IACM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QACjB,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,EAAE,CAAC;QACd,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QACpD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;QACjB,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC;QACtF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC;QACtB,OAAO,MAAM,CAAC;IAChB,CAAC;IACM,OAAO;QACZ,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC;QACxB,IAAI,GAAG,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,EAAE,KAAK,CAAC,CAAC;QACxC,IAAI,CAAC,MAAM,GAAG,GAAG,GAAG,CAAC,CAAC;QACtB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,CAAC,CAAC;IACzD,CAAC;IACM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC;QACpE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAC;QACtB,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAzCD,oCAyCC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/BufferWriter.d.ts b/node_modules/pg-protocol/dist/BufferWriter.d.ts new file mode 100644 index 00000000..da94e8f0 --- /dev/null +++ b/node_modules/pg-protocol/dist/BufferWriter.d.ts @@ -0,0 +1,20 @@ +/// +export declare class Writer { + private buffer; + private offset; + private headerPosition; + private readonly encoding; + constructor(size?: number); + private _ensure; + addInt32(num: number): Writer; + addInt16(num: number): Writer; + addCString(string: string): Writer; + addChar(c: string): Writer; + addString(string?: string): Writer; + getByteLength(): number; + add(otherBuffer: Buffer): Writer; + clear(): void; + addHeader(code: number, last?: boolean): void; + join(code?: number): Buffer; + flush(code?: number): Buffer; +} diff --git a/node_modules/pg-protocol/dist/BufferWriter.js b/node_modules/pg-protocol/dist/BufferWriter.js new file mode 100644 index 00000000..1370527f --- /dev/null +++ b/node_modules/pg-protocol/dist/BufferWriter.js @@ -0,0 +1,109 @@ +"use strict"; +//binary data writer tuned for creating +//postgres message packets as effeciently as possible by reusing the +//same buffer to avoid memcpy and limit memory allocations +Object.defineProperty(exports, "__esModule", { value: true }); +class Writer { + constructor(size = 1024) { + this.offset = 5; + this.headerPosition = 0; + this.encoding = 'utf-8'; + this.buffer = Buffer.alloc(size + 5); + } + _ensure(size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.alloc(newSize); + oldBuffer.copy(this.buffer); + } + } + addInt32(num) { + this._ensure(4); + this.buffer[this.offset++] = (num >>> 24 & 0xFF); + this.buffer[this.offset++] = (num >>> 16 & 
0xFF); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; + } + addInt16(num) { + this._ensure(2); + this.buffer[this.offset++] = (num >>> 8 & 0xFF); + this.buffer[this.offset++] = (num >>> 0 & 0xFF); + return this; + } + addCString(string) { + //just write a 0 for empty or null strings + if (!string) { + this._ensure(1); + } + else { + var len = Buffer.byteLength(string); + this._ensure(len + 1); //+1 for null terminator + this.buffer.write(string, this.offset, this.encoding); + this.offset += len; + } + this.buffer[this.offset++] = 0; // null terminator + return this; + } + // note: this assumes character is 1 byte - used for writing protocol charcodes + addChar(c) { + this._ensure(1); + this.buffer.write(c, this.offset); + this.offset++; + return this; + } + addString(string = "") { + var len = Buffer.byteLength(string); + this._ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; + } + getByteLength() { + return this.offset - 5; + } + add(otherBuffer) { + this._ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; + } + clear() { + this.offset = 5; + this.headerPosition = 0; + } + //appends a header block to all the written data since the last + //subsequent header or to the beginning if there is only one data block + addHeader(code, last = false) { + var origOffset = this.offset; + this.offset = this.headerPosition; + this.buffer[this.offset++] = code; + //length is everything in this packet minus the code + this.addInt32(origOffset - (this.headerPosition + 1)); + //set next header position + this.headerPosition = origOffset; + //make space for next header + this.offset = origOffset; + if (!last) { + this._ensure(5); + this.offset += 5; + } + } + join(code) { + if (code) { + this.addHeader(code, true); + } + return this.buffer.slice(code ? 
0 : 5, this.offset); + } + flush(code) { + var result = this.join(code); + this.clear(); + return result; + } +} +exports.Writer = Writer; +//# sourceMappingURL=BufferWriter.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/BufferWriter.js.map b/node_modules/pg-protocol/dist/BufferWriter.js.map new file mode 100644 index 00000000..6ff956ea --- /dev/null +++ b/node_modules/pg-protocol/dist/BufferWriter.js.map @@ -0,0 +1 @@ +{"version":3,"file":"BufferWriter.js","sourceRoot":"","sources":["../src/BufferWriter.ts"],"names":[],"mappings":";AAAA,uCAAuC;AACvC,oEAAoE;AACpE,0DAA0D;;AAE1D,MAAa,MAAM;IAKjB,YAAY,OAAe,IAAI;QAHvB,WAAM,GAAW,CAAC,CAAC;QACnB,mBAAc,GAAW,CAAC,CAAC;QAClB,aAAQ,GAAG,OAAO,CAAC;QAElC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,CAAC,CAAA;IACtC,CAAC;IAEO,OAAO,CAAC,IAAY;QAC1B,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC;QACjD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC;YAC5B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAC;YAChE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YACpC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;SAC7B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAChB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,GAAG,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,GAAG,IAAI,CAAC,CAAC;QACjD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAChB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC;QAChD,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,0CAA0C;QAC1C,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;SACjB;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;YACpC,IAAI,CAAC,OAAO,CAAC,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC,wBAAwB;YAC/C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;YACrD,IAAI,CAAC,MAAM,IAAI,GAAG,CAAC;SACpB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,kBAAkB;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,+EAA+E;IACxE,OAAO,CAAC,CAAS;QACtB,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAClC,IAAI,CAAC,MAAM,EAAE,CAAC;QACd,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC;QACpC,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC;QAClB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QACvC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAC;QACnB,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,aAAa;QAClB,OAAO,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;IACzB,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC,MAAM,CAAC,CAAC;QACjC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAC3C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAC;QAClC,OAAO,IAAI,CAAC;IACd,CAAC;IAEM,KAAK;QACV,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC;QAChB,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;IAC1B,CAAC;IAED,+DAA+D;IAC/D,uEAAuE;IAChE,SAAS,CAAC,IAAY,EAAE,OAAgB,KAAK;QAClD,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC;QAC7B,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,cAAc,CAAC;QAClC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,IAAI,CAAC;QAClC,oDAAoD;QACpD,IAAI,CAAC,QAAQ,CAAC,UAAU,GAAG,CAAC,I
AAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAC,CAAC;QACtD,0BAA0B;QAC1B,IAAI,CAAC,cAAc,GAAG,UAAU,CAAC;QACjC,4BAA4B;QAC5B,IAAI,CAAC,MAAM,GAAG,UAAU,CAAC;QACzB,IAAI,CAAC,IAAI,EAAE;YACT,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;YAChB,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC;SAClB;IACH,CAAC;IAEM,IAAI,CAAC,IAAa;QACvB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;SAC5B;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;IACtD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;QAC7B,IAAI,CAAC,KAAK,EAAE,CAAC;QACb,OAAO,MAAM,CAAC;IAChB,CAAC;CACF;AAlHD,wBAkHC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/b.d.ts b/node_modules/pg-protocol/dist/b.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/pg-protocol/dist/b.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/pg-protocol/dist/b.js b/node_modules/pg-protocol/dist/b.js new file mode 100644 index 00000000..5f5efb80 --- /dev/null +++ b/node_modules/pg-protocol/dist/b.js @@ -0,0 +1,25 @@ +"use strict"; +// file for microbenchmarking +Object.defineProperty(exports, "__esModule", { value: true }); +const buffer_writer_1 = require("./buffer-writer"); +const buffer_reader_1 = require("./buffer-reader"); +const LOOPS = 1000; +let count = 0; +let start = Date.now(); +const writer = new buffer_writer_1.Writer(); +const reader = new buffer_reader_1.BufferReader(); +const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]); +const run = () => { + if (count > LOOPS) { + console.log(Date.now() - start); + return; + } + count++; + for (let i = 0; i < LOOPS; i++) { + reader.setBuffer(0, buffer); + reader.cstring(); + } + setImmediate(run); +}; +run(); +//# sourceMappingURL=b.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/b.js.map b/node_modules/pg-protocol/dist/b.js.map new file mode 100644 index 00000000..cddd15e9 --- /dev/null +++ b/node_modules/pg-protocol/dist/b.js.map @@ -0,0 +1 @@ +{"version":3,"file":"b.js","sourceRoot":"","sources":["../src/b.ts"],"names":[],"mappings":";AAAA,6BAA6B;;AAE7B,mDAAwC;AAExC,mDAA8C;AAE9C,MAAM,KAAK,GAAG,IAAI,CAAA;AAClB,IAAI,KAAK,GAAG,CAAC,CAAA;AACb,IAAI,KAAK,GAAG,IAAI,CAAC,GAAG,EAAE,CAAA;AACtB,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,MAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;AACjC,MAAM,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;AAE3D,MAAM,GAAG,GAAG,GAAG,EAAE;IACf,IAAI,KAAK,GAAG,KAAK,EAAE;QACjB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,KAAK,CAAC,CAAA;QAC/B,OAAM;KACP;IACD,KAAK,EAAE,CAAA;IACP,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE;QAC9B,MAAM,CAAC,SAAS,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,EAAE,CAAA;KACjB;IACD,YAAY,CAAC,GAAG,CAAC,CAAA;AACnB,CAAC,CAAA;AAED,GAAG,EAAE,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-reader.d.ts b/node_modules/pg-protocol/dist/buffer-reader.d.ts new file mode 100644 index 00000000..8970d77f --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-reader.d.ts @@ -0,0 +1,14 @@ +/// +export declare class BufferReader { + private offset; + private buffer; + private encoding; + constructor(offset?: number); + setBuffer(offset: number, buffer: Buffer): void; + int16(): number; + byte(): number; + int32(): number; + string(length: number): string; + cstring(): string; + bytes(length: number): Buffer; +} diff --git a/node_modules/pg-protocol/dist/buffer-reader.js 
b/node_modules/pg-protocol/dist/buffer-reader.js new file mode 100644 index 00000000..ba6d37a1 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-reader.js @@ -0,0 +1,49 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const emptyBuffer = Buffer.allocUnsafe(0); +class BufferReader { + constructor(offset = 0) { + this.offset = offset; + this.buffer = emptyBuffer; + // TODO(bmc): support non-utf8 encoding? + this.encoding = 'utf-8'; + } + setBuffer(offset, buffer) { + this.offset = offset; + this.buffer = buffer; + } + int16() { + const result = this.buffer.readInt16BE(this.offset); + this.offset += 2; + return result; + } + byte() { + const result = this.buffer[this.offset]; + this.offset++; + return result; + } + int32() { + const result = this.buffer.readInt32BE(this.offset); + this.offset += 4; + return result; + } + string(length) { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length); + this.offset += length; + return result; + } + cstring() { + const start = this.offset; + let end = start; + while (this.buffer[end++] !== 0) { } + this.offset = end; + return this.buffer.toString(this.encoding, start, end - 1); + } + bytes(length) { + const result = this.buffer.slice(this.offset, this.offset + length); + this.offset += length; + return result; + } +} +exports.BufferReader = BufferReader; +//# sourceMappingURL=buffer-reader.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-reader.js.map b/node_modules/pg-protocol/dist/buffer-reader.js.map new file mode 100644 index 00000000..c7a18aad --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-reader.js.map @@ -0,0 +1 @@ +{"version":3,"file":"buffer-reader.js","sourceRoot":"","sources":["../src/buffer-reader.ts"],"names":[],"mappings":";;AAAA,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAEzC,MAAa,YAAY;IAMvB,YAAoB,SAAiB,CAAC;QAAlB,WAAM,GAAN,MAAM,CAAY;QAL9B,WAAM,GAAW,WAAW,CAAA;QAEpC,wCAAwC;QAChC,aAAQ,GAAW,OAAO,CAAA;IAEO,CAAC;IAEnC,SAAS,CAAC,MAAc,EAAE,MAAc;QAC7C,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;QACpB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAA;IACtB,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,IAAI;QACT,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACvC,IAAI,CAAC,MAAM,EAAE,CAAA;QACb,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,KAAK;QACV,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACnD,IAAI,CAAC,MAAM,IAAI,CAAC,CAAA;QAChB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAc;QAC1B,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACrF,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,OAAO;QACZ,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAA;QACzB,IAAI,GAAG,GAAG,KAAK,CAAA;QACf,OAAO,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,KAAK,CAAC,EAAE,GAAE;QACnC,IAAI,CAAC,MAAM,GAAG,GAAG,CAAA;QACjB,OAAO,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,KAAK,EAAE,GAAG,GAAG,CAAC,CAAC,CAAA;IAC5D,CAAC;IAEM,KAAK,CAAC,MAAc;QACzB,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,IAAI,MAAM,CAAA;QACrB,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlDD,oCAkDC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-writer.d.ts b/node_modules/pg-protocol/dist/buffer-writer.d.ts new file mode 100644 index 00000000..4ac41e69 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-writer.d.ts @@ -0,0 
+1,16 @@ +/// +export declare class Writer { + private size; + private buffer; + private offset; + private headerPosition; + constructor(size?: number); + private ensure; + addInt32(num: number): Writer; + addInt16(num: number): Writer; + addCString(string: string): Writer; + addString(string?: string): Writer; + add(otherBuffer: Buffer): Writer; + private join; + flush(code?: number): Buffer; +} diff --git a/node_modules/pg-protocol/dist/buffer-writer.js b/node_modules/pg-protocol/dist/buffer-writer.js new file mode 100644 index 00000000..520617e5 --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-writer.js @@ -0,0 +1,80 @@ +"use strict"; +//binary data writer tuned for encoding binary specific to the postgres binary protocol +Object.defineProperty(exports, "__esModule", { value: true }); +class Writer { + constructor(size = 256) { + this.size = size; + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.alloc(size); + } + ensure(size) { + var remaining = this.buffer.length - this.offset; + if (remaining < size) { + var oldBuffer = this.buffer; + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size; + this.buffer = Buffer.alloc(newSize); + oldBuffer.copy(this.buffer); + } + } + addInt32(num) { + this.ensure(4); + this.buffer[this.offset++] = (num >>> 24) & 0xff; + this.buffer[this.offset++] = (num >>> 16) & 0xff; + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addInt16(num) { + this.ensure(2); + this.buffer[this.offset++] = (num >>> 8) & 0xff; + this.buffer[this.offset++] = (num >>> 0) & 0xff; + return this; + } + addCString(string) { + if (!string) { + this.ensure(1); + } + else { + var len = Buffer.byteLength(string); + this.ensure(len + 1); // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8'); + this.offset += len; + } + this.buffer[this.offset++] = 0; // null terminator + return this; + } + addString(string = '') { + var len = Buffer.byteLength(string); + this.ensure(len); + this.buffer.write(string, this.offset); + this.offset += len; + return this; + } + add(otherBuffer) { + this.ensure(otherBuffer.length); + otherBuffer.copy(this.buffer, this.offset); + this.offset += otherBuffer.length; + return this; + } + join(code) { + if (code) { + this.buffer[this.headerPosition] = code; + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1); + this.buffer.writeInt32BE(length, this.headerPosition + 1); + } + return this.buffer.slice(code ? 
0 : 5, this.offset); + } + flush(code) { + var result = this.join(code); + this.offset = 5; + this.headerPosition = 0; + this.buffer = Buffer.allocUnsafe(this.size); + return result; + } +} +exports.Writer = Writer; +//# sourceMappingURL=buffer-writer.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/buffer-writer.js.map b/node_modules/pg-protocol/dist/buffer-writer.js.map new file mode 100644 index 00000000..5de0d95c --- /dev/null +++ b/node_modules/pg-protocol/dist/buffer-writer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"buffer-writer.js","sourceRoot":"","sources":["../src/buffer-writer.ts"],"names":[],"mappings":";AAAA,uFAAuF;;AAEvF,MAAa,MAAM;IAIjB,YAAoB,OAAO,GAAG;QAAV,SAAI,GAAJ,IAAI,CAAM;QAFtB,WAAM,GAAW,CAAC,CAAA;QAClB,mBAAc,GAAW,CAAC,CAAA;QAEhC,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAClC,CAAC;IAEO,MAAM,CAAC,IAAY;QACzB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM,CAAA;QAChD,IAAI,SAAS,GAAG,IAAI,EAAE;YACpB,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;YAC3B,4CAA4C;YAC5C,qEAAqE;YACrE,IAAI,OAAO,GAAG,SAAS,CAAC,MAAM,GAAG,CAAC,SAAS,CAAC,MAAM,IAAI,CAAC,CAAC,GAAG,IAAI,CAAA;YAC/D,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,OAAO,CAAC,CAAA;YACnC,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;SAC5B;IACH,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,CAAA;QAChD,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW;QACzB,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACd,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAA;QAC/C,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,UAAU,CAAC,MAAc;QAC9B,IAAI,CAAC,MAAM,EAAE;YACX,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SACf;aAAM;YACL,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;YACnC,IAAI,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA,CAAC,yBAAyB;YAC9C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;YAC/C,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;SACnB;QAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAA,CAAC,kBAAkB;QACjD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,SAAS,CAAC,SAAiB,EAAE;QAClC,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;QACnC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;QAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACtC,IAAI,CAAC,MAAM,IAAI,GAAG,CAAA;QAClB,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,GAAG,CAAC,WAAmB;QAC5B,IAAI,CAAC,MAAM,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QAC/B,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QAC1C,IAAI,CAAC,MAAM,IAAI,WAAW,CAAC,MAAM,CAAA;QACjC,OAAO,IAAI,CAAA;IACb,CAAC;IAEO,IAAI,CAAC,IAAa;QACxB,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,cAAc,CAAC,GAAG,IAAI,CAAA;YACvC,oDAAoD;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;YACtD,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC,CAAA;SAC1D;QACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;IACrD,CAAC;IAEM,KAAK,CAAC,IAAa;QACxB,IAAI,MAAM,GAAG,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACf,IAAI,CAAC,cAAc,GAAG,CAAC,CAAA;QACvB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,IAAI,CAA
C,IAAI,CAAC,CAAA;QAC3C,OAAO,MAAM,CAAA;IACf,CAAC;CACF;AAlFD,wBAkFC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/connection.d.ts b/node_modules/pg-protocol/dist/connection.d.ts new file mode 100644 index 00000000..ab5919a0 --- /dev/null +++ b/node_modules/pg-protocol/dist/connection.d.ts @@ -0,0 +1,22 @@ +/** + * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com) + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * README.md file in the root directory of this source tree. + */ +/// +declare var net: any; +declare var EventEmitter: any; +declare var util: any; +declare var Writer: any; +declare const parse: any; +declare var warnDeprecation: any; +declare var TEXT_MODE: number; +declare class Connection extends EventEmitter { + constructor(config: any); +} +declare var emptyBuffer: Buffer; +declare const flushBuffer: Buffer; +declare const syncBuffer: Buffer; +declare const END_BUFFER: Buffer; diff --git a/node_modules/pg-protocol/dist/connection.js b/node_modules/pg-protocol/dist/connection.js new file mode 100644 index 00000000..811375da --- /dev/null +++ b/node_modules/pg-protocol/dist/connection.js @@ -0,0 +1,311 @@ +"use strict"; +/** + * Copyright (c) 2010-2017 Brian Carlson (brian.m.carlson@gmail.com) + * All rights reserved. + * + * This source code is licensed under the MIT license found in the + * README.md file in the root directory of this source tree. + */ +var net = require('net'); +var EventEmitter = require('events').EventEmitter; +var util = require('util'); +var Writer = require('buffer-writer'); +// eslint-disable-next-line +const { parse } = require('pg-packet-stream'); +var warnDeprecation = require('./compat/warn-deprecation'); +var TEXT_MODE = 0; +class Connection extends EventEmitter { + constructor(config) { + super(); + config = config || {}; + this.stream = config.stream || new net.Socket(); + this.stream.setNoDelay(true); + this._keepAlive = config.keepAlive; + this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis; + this.lastBuffer = false; + this.lastOffset = 0; + this.buffer = null; + this.offset = null; + this.encoding = config.encoding || 'utf8'; + this.parsedStatements = {}; + this.writer = new Writer(); + this.ssl = config.ssl || false; + this._ending = false; + this._mode = TEXT_MODE; + this._emitMessage = false; + var self = this; + this.on('newListener', function (eventName) { + if (eventName === 'message') { + self._emitMessage = true; + } + }); + } +} +Connection.prototype.connect = function (port, host) { + var self = this; + if (this.stream.readyState === 'closed') { + this.stream.connect(port, host); + } + else if (this.stream.readyState === 'open') { + this.emit('connect'); + } + this.stream.on('connect', function () { + if (self._keepAlive) { + self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis); + } + self.emit('connect'); + }); + const reportStreamError = function (error) { + // errors about disconnections should be ignored during disconnect + if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) { + return; + } + self.emit('error', error); + }; + this.stream.on('error', reportStreamError); + this.stream.on('close', function () { + self.emit('end'); + }); + if (!this.ssl) { + return this.attachListeners(this.stream); + } + this.stream.once('data', function (buffer) { + var responseCode = buffer.toString('utf8'); + switch (responseCode) { + case 'N': // Server does not support SSL connections + return 
self.emit('error', new Error('The server does not support SSL connections')); + case 'S': // Server supports SSL connections, continue with a secure connection + break; + default: + // Any other response byte, including 'E' (ErrorResponse) indicating a server error + return self.emit('error', new Error('There was an error establishing an SSL connection')); + } + var tls = require('tls'); + const options = Object.apply({ + socket: self.stream, + }, self.ssl); + if (net.isIP(host) === 0) { + options.servername = host; + } + self.stream = tls.connect(options); + self.attachListeners(self.stream); + self.stream.on('error', reportStreamError); + self.emit('sslconnect'); + }); +}; +Connection.prototype.attachListeners = function (stream) { + // TODO(bmc): support binary + const mode = this._mode === TEXT_MODE ? 'text' : 'binary'; + parse(this.stream, (msg) => { + var eventName = msg.name === 'error' ? 'errorMessage' : msg.name; + this.emit(eventName, msg); + }); + this.stream.on('end', () => this.emit('end')); +}; +Connection.prototype.requestSsl = function () { + var bodyBuffer = this.writer + .addInt16(0x04d2) + .addInt16(0x162f) + .flush(); + var length = bodyBuffer.length + 4; + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join(); + this.stream.write(buffer); +}; +Connection.prototype.startup = function (config) { + var writer = this.writer.addInt16(3).addInt16(0); + Object.keys(config).forEach(function (key) { + var val = config[key]; + writer.addCString(key).addCString(val); + }); + writer.addCString('client_encoding').addCString("'utf-8'"); + var bodyBuffer = writer.addCString('').flush(); + // this message is sent without a code + var length = bodyBuffer.length + 4; + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join(); + this.stream.write(buffer); +}; +Connection.prototype.cancel = function (processID, secretKey) { + var bodyBuffer = this.writer + .addInt16(1234) + .addInt16(5678) + .addInt32(processID) + .addInt32(secretKey) + .flush(); + var length = bodyBuffer.length + 4; + var buffer = new Writer() + .addInt32(length) + .add(bodyBuffer) + .join(); + this.stream.write(buffer); +}; +Connection.prototype.password = function (password) { + // 0x70 = 'p' + this._send(0x70, this.writer.addCString(password)); +}; +Connection.prototype.sendSASLInitialResponseMessage = function (mechanism, initialResponse) { + // 0x70 = 'p' + this.writer + .addCString(mechanism) + .addInt32(Buffer.byteLength(initialResponse)) + .addString(initialResponse); + this._send(0x70); +}; +Connection.prototype.sendSCRAMClientFinalMessage = function (additionalData) { + // 0x70 = 'p' + this.writer.addString(additionalData); + this._send(0x70); +}; +Connection.prototype._send = function (code, more) { + if (!this.stream.writable) { + return false; + } + return this.stream.write(this.writer.flush(code)); +}; +Connection.prototype.query = function (text) { + // 0x51 = Q + this.stream.write(this.writer.addCString(text).flush(0x51)); +}; +// send parse message +Connection.prototype.parse = function (query) { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + // normalize missing query names to allow for null + query.name = query.name || ''; + if (query.name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.'); + console.error('You supplied %s (%s)', query.name, query.name.length); + console.error('This can cause conflicts and silent errors executing queries'); + /* eslint-enable no-console */ + } + // normalize null type array + query.types = query.types || []; + var len = query.types.length; + var buffer = this.writer + .addCString(query.name) // name of query + .addCString(query.text) // actual query text + .addInt16(len); + for (var i = 0; i < len; i++) { + buffer.addInt32(query.types[i]); + } + var code = 0x50; + this._send(code); + this.flush(); +}; +// send bind message +// "more" === true to buffer the message until flush() is called +Connection.prototype.bind = function (config) { + // normalize config + config = config || {}; + config.portal = config.portal || ''; + config.statement = config.statement || ''; + config.binary = config.binary || false; + var values = config.values || []; + var len = values.length; + var useBinary = false; + for (var j = 0; j < len; j++) { + useBinary = useBinary || values[j] instanceof Buffer; + } + var buffer = this.writer.addCString(config.portal).addCString(config.statement); + if (!useBinary) { + buffer.addInt16(0); + } + else { + buffer.addInt16(len); + for (j = 0; j < len; j++) { + buffer.addInt16(values[j] instanceof Buffer); + } + } + buffer.addInt16(len); + for (var i = 0; i < len; i++) { + var val = values[i]; + if (val === null || typeof val === 'undefined') { + buffer.addInt32(-1); + } + else if (val instanceof Buffer) { + buffer.addInt32(val.length); + buffer.add(val); + } + else { + buffer.addInt32(Buffer.byteLength(val)); + buffer.addString(val); + } + } + if (config.binary) { + buffer.addInt16(1); // format codes to use binary + buffer.addInt16(1); + } + else { + buffer.addInt16(0); // format codes to use text + } + // 0x42 = 'B' + this._send(0x42); + this.flush(); +}; +// send execute message +// "more" === true to buffer the message until flush() is called +Connection.prototype.execute = function (config) { + config = config || {}; + config.portal = config.portal || ''; + config.rows = config.rows || ''; + this.writer.addCString(config.portal).addInt32(config.rows); + // 0x45 = 'E' + this._send(0x45); + this.flush(); +}; +var emptyBuffer = Buffer.alloc(0); +const flushBuffer = Buffer.from([0x48, 0x00, 0x00, 0x00, 0x04]); +Connection.prototype.flush = function () { + if (this.stream.writable) { + this.stream.write(flushBuffer); + } +}; +const syncBuffer = Buffer.from([0x53, 0x00, 0x00, 0x00, 0x04]); +Connection.prototype.sync = function () { + this._ending = true; + // clear out any pending data in the writer + this.writer.clear(); + if (this.stream.writable) { + this.stream.write(syncBuffer); + this.stream.write(flushBuffer); + } +}; +const END_BUFFER = Buffer.from([0x58, 0x00, 0x00, 0x00, 0x04]); +Connection.prototype.end = function () { + // 0x58 = 'X' + this.writer.clear(); + this._ending = true; + return this.stream.write(END_BUFFER, () => { + this.stream.end(); + }); +}; +Connection.prototype.close = function (msg) { + this.writer.addCString(msg.type + (msg.name || '')); + this._send(0x43); +}; +Connection.prototype.describe = function (msg) { + this.writer.addCString(msg.type + (msg.name || '')); + this._send(0x44); + this.flush(); +}; +Connection.prototype.sendCopyFromChunk = function (chunk) { + this.stream.write(this.writer.add(chunk).flush(0x64)); +}; +Connection.prototype.endCopyFrom = function () { + this.stream.write(this.writer.add(emptyBuffer).flush(0x63)); +}; 
+Connection.prototype.sendCopyFail = function (msg) { + // this.stream.write(this.writer.add(emptyBuffer).flush(0x66)); + this.writer.addCString(msg); + this._send(0x66); +}; +module.exports = Connection; +//# sourceMappingURL=connection.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/connection.js.map b/node_modules/pg-protocol/dist/connection.js.map new file mode 100644 index 00000000..d4bdbbf3 --- /dev/null +++ b/node_modules/pg-protocol/dist/connection.js.map @@ -0,0 +1 @@ +{"version":3,"file":"connection.js","sourceRoot":"","sources":["../src/connection.ts"],"names":[],"mappings":";AAAA;;;;;;GAMG;AAEH,IAAI,GAAG,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;AACxB,IAAI,YAAY,GAAG,OAAO,CAAC,QAAQ,CAAC,CAAC,YAAY,CAAA;AACjD,IAAI,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;AAE1B,IAAI,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAAA;AACrC,2BAA2B;AAC3B,MAAM,EAAE,KAAK,EAAE,GAAG,OAAO,CAAC,kBAAkB,CAAC,CAAA;AAE7C,IAAI,eAAe,GAAG,OAAO,CAAC,2BAA2B,CAAC,CAAA;AAE1D,IAAI,SAAS,GAAG,CAAC,CAAA;AAEjB,MAAM,UAAW,SAAQ,YAAY;IACnC,YAAY,MAAW;QACrB,KAAK,EAAE,CAAA;QACP,MAAM,GAAG,MAAM,IAAI,EAAE,CAAA;QACrB,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,IAAI,GAAG,CAAC,MAAM,EAAE,CAAA;QAC/C,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,SAAS,CAAA;QAClC,IAAI,CAAC,4BAA4B,GAAG,MAAM,CAAC,2BAA2B,CAAA;QACtE,IAAI,CAAC,UAAU,GAAG,KAAK,CAAA;QACvB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAA;QACnB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAA;QAClB,IAAI,CAAC,MAAM,GAAG,IAAI,CAAA;QAClB,IAAI,CAAC,QAAQ,GAAG,MAAM,CAAC,QAAQ,IAAI,MAAM,CAAA;QACzC,IAAI,CAAC,gBAAgB,GAAG,EAAE,CAAA;QAC1B,IAAI,CAAC,MAAM,GAAG,IAAI,MAAM,EAAE,CAAA;QAC1B,IAAI,CAAC,GAAG,GAAG,MAAM,CAAC,GAAG,IAAI,KAAK,CAAA;QAC9B,IAAI,CAAC,OAAO,GAAG,KAAK,CAAA;QACpB,IAAI,CAAC,KAAK,GAAG,SAAS,CAAA;QACtB,IAAI,CAAC,YAAY,GAAG,KAAK,CAAA;QACzB,IAAI,IAAI,GAAG,IAAI,CAAA;QACf,IAAI,CAAC,EAAE,CAAC,aAAa,EAAE,UAAU,SAAS;YACxC,IAAI,SAAS,KAAK,SAAS,EAAE;gBAC3B,IAAI,CAAC,YAAY,GAAG,IAAI,CAAA;aACzB;QACH,CAAC,CAAC,CAAA;IACJ,CAAC;CACF;AAED,UAAU,CAAC,SAAS,CAAC,OAAO,GAAG,UAAU,IAAI,EAAE,IAAI;IACjD,IAAI,IAAI,GAAG,IAAI,CAAA;IAEf,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,KAAK,QAAQ,EAAE;QACvC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAChC;SAAM,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,KAAK,MAAM,EAAE;QAC5C,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;KACrB;IAED,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,SAAS,EAAE;QACxB,IAAI,IAAI,CAAC,UAAU,EAAE;YACnB,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,4BAA4B,CAAC,CAAA;SAClE;QACD,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;IACtB,CAAC,CAAC,CAAA;IAEF,MAAM,iBAAiB,GAAG,UAAU,KAAK;QACvC,kEAAkE;QAClE,IAAI,IAAI,CAAC,OAAO,IAAI,CAAC,KAAK,CAAC,IAAI,KAAK,YAAY,IAAI,KAAK,CAAC,IAAI,KAAK,OAAO,CAAC,EAAE;YAC3E,OAAM;SACP;QACD,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,KAAK,CAAC,CAAA;IAC3B,CAAC,CAAA;IACD,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,iBAAiB,CAAC,CAAA;IAE1C,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE;QACtB,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAA;IAClB,CAAC,CAAC,CAAA;IAEF,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;QACb,OAAO,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;KACzC;IAED,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,UAAU,MAAM;QACvC,IAAI,YAAY,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;QAC1C,QAAQ,YAAY,EAAE;YACpB,KAAK,GAAG,EAAE,0CAA0C;gBAClD,OAAO,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,6CAA6C,CAAC,CAAC,CAAA;YACrF,KAAK,GAAG,EAAE,qEAAqE;gBAC7E,MAAK;YACP;gBACE,mFAAmF;gBACnF,OAAO,IAAI,CAAC,IAAI,CAAC,OAAO,EAAE,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAC,CAAA;SAC5F;QACD,IAAI,GAAG,GAAG,OAAO,CAAC,KAAK,CAAC,CAAA;QACxB,MAAM,OAAO,GAAG,MAAM,CAAC,KAAK,CAAC;YAC3B,MAAM,EAAE,IAAI,CAAC,MAAM;SACpB,EAAE,IAAI,CAAC,GAAG,CAAC,CAAA;QACZ,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,EAAE;YACxB,OAAO,CAAC,UAA
U,GAAG,IAAI,CAAA;SAC1B;QACD,IAAI,CAAC,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,CAAA;QAClC,IAAI,CAAC,eAAe,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACjC,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,iBAAiB,CAAC,CAAA;QAE1C,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,CAAA;IACzB,CAAC,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,eAAe,GAAG,UAAU,MAAM;IACrD,4BAA4B;IAC5B,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,KAAK,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAA;IACzD,KAAK,CAAC,IAAI,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE;QACzB,IAAI,SAAS,GAAG,GAAG,CAAC,IAAI,KAAK,OAAO,CAAC,CAAC,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAA;QAChE,IAAI,CAAC,IAAI,CAAC,SAAS,EAAE,GAAG,CAAC,CAAA;IAC3B,CAAC,CAAC,CAAA;IACF,IAAI,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAA;AAC/C,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,UAAU,GAAG;IAChC,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM;SACzB,QAAQ,CAAC,MAAM,CAAC;SAChB,QAAQ,CAAC,MAAM,CAAC;SAChB,KAAK,EAAE,CAAA;IAEV,IAAI,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAElC,IAAI,MAAM,GAAG,IAAI,MAAM,EAAE;SACtB,QAAQ,CAAC,MAAM,CAAC;SAChB,GAAG,CAAC,UAAU,CAAC;SACf,IAAI,EAAE,CAAA;IACT,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;AAC3B,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,OAAO,GAAG,UAAU,MAAM;IAC7C,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;IAEhD,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,UAAU,GAAG;QACvC,IAAI,GAAG,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;QACrB,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IACxC,CAAC,CAAC,CAAA;IAEF,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAA;IAE1D,IAAI,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAC9C,sCAAsC;IAEtC,IAAI,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAElC,IAAI,MAAM,GAAG,IAAI,MAAM,EAAE;SACtB,QAAQ,CAAC,MAAM,CAAC;SAChB,GAAG,CAAC,UAAU,CAAC;SACf,IAAI,EAAE,CAAA;IACT,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;AAC3B,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,MAAM,GAAG,UAAU,SAAS,EAAE,SAAS;IAC1D,IAAI,UAAU,GAAG,IAAI,CAAC,MAAM;SACzB,QAAQ,CAAC,IAAI,CAAC;SACd,QAAQ,CAAC,IAAI,CAAC;SACd,QAAQ,CAAC,SAAS,CAAC;SACnB,QAAQ,CAAC,SAAS,CAAC;SACnB,KAAK,EAAE,CAAA;IAEV,IAAI,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAElC,IAAI,MAAM,GAAG,IAAI,MAAM,EAAE;SACtB,QAAQ,CAAC,MAAM,CAAC;SAChB,GAAG,CAAC,UAAU,CAAC;SACf,IAAI,EAAE,CAAA;IACT,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;AAC3B,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,QAAQ,GAAG,UAAU,QAAQ;IAChD,aAAa;IACb,IAAI,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,CAAA;AACpD,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,8BAA8B,GAAG,UAAU,SAAS,EAAE,eAAe;IACxF,aAAa;IACb,IAAI,CAAC,MAAM;SACR,UAAU,CAAC,SAAS,CAAC;SACrB,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC;SAC5C,SAAS,CAAC,eAAe,CAAC,CAAA;IAE7B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,2BAA2B,GAAG,UAAU,cAAsB;IACjF,aAAa;IACb,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,CAAA;IAErC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,KAAK,GAAG,UAAU,IAAI,EAAE,IAAI;IAC/C,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACzB,OAAO,KAAK,CAAA;KACb;IACD,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;AACnD,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,KAAK,GAAG,UAAU,IAAI;IACzC,WAAW;IACX,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;AAC7D,CAAC,CAAA;AAED,qBAAqB;AACrB,UAAU,CAAC,SAAS,CAAC,KAAK,GAAG,UAAU,KAAK;IAC1C,8BAA8B;IAC9B,uBAAuB;IACvB,gCAAgC;IAChC,8BAA8B;IAE9B,kDAAkD;IAClD,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,EAAE,CAAA;IAC7B,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;QAC1B,+BAA+B;QAC/B,OAAO,CAAC,KAAK,CAAC,gEAAgE,CAAC,CAAA
;QAC/E,OAAO,CAAC,KAAK,CAAC,sBAAsB,EAAE,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QACpE,OAAO,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAA;QAC7E,8BAA8B;KAC/B;IACD,4BAA4B;IAC5B,KAAK,CAAC,KAAK,GAAG,KAAK,CAAC,KAAK,IAAI,EAAE,CAAA;IAC/B,IAAI,GAAG,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,CAAA;IAC5B,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM;SACrB,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,gBAAgB;SACvC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,oBAAoB;SAC3C,QAAQ,CAAC,GAAG,CAAC,CAAA;IAChB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;KAChC;IAED,IAAI,IAAI,GAAG,IAAI,CAAA;IACf,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAChB,IAAI,CAAC,KAAK,EAAE,CAAA;AACd,CAAC,CAAA;AAED,oBAAoB;AACpB,gEAAgE;AAChE,UAAU,CAAC,SAAS,CAAC,IAAI,GAAG,UAAU,MAAM;IAC1C,mBAAmB;IACnB,MAAM,GAAG,MAAM,IAAI,EAAE,CAAA;IACrB,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IACnC,MAAM,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,EAAE,CAAA;IACzC,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAA;IACtC,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAChC,IAAI,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;IACvB,IAAI,SAAS,GAAG,KAAK,CAAA;IACrB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,SAAS,GAAG,SAAS,IAAI,MAAM,CAAC,CAAC,CAAC,YAAY,MAAM,CAAA;KACrD;IACD,IAAI,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,SAAS,CAAC,CAAA;IAC/E,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;KACnB;SAAM;QACL,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;QACpB,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;YACxB,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,MAAM,CAAC,CAAA;SAC7C;KACF;IACD,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAA;QACnB,IAAI,GAAG,KAAK,IAAI,IAAI,OAAO,GAAG,KAAK,WAAW,EAAE;YAC9C,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;SACpB;aAAM,IAAI,GAAG,YAAY,MAAM,EAAE;YAChC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;YAC3B,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;SAChB;aAAM;YACL,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAA;YACvC,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAA;SACtB;KACF;IAED,IAAI,MAAM,CAAC,MAAM,EAAE;QACjB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,6BAA6B;QAChD,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;KACnB;SAAM;QACL,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,2BAA2B;KAC/C;IACD,aAAa;IACb,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAChB,IAAI,CAAC,KAAK,EAAE,CAAA;AACd,CAAC,CAAA;AAED,uBAAuB;AACvB,gEAAgE;AAChE,UAAU,CAAC,SAAS,CAAC,OAAO,GAAG,UAAU,MAAM;IAC7C,MAAM,GAAG,MAAM,IAAI,EAAE,CAAA;IACrB,MAAM,CAAC,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IACnC,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,EAAE,CAAA;IAC/B,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAA;IAE3D,aAAa;IACb,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAChB,IAAI,CAAC,KAAK,EAAE,CAAA;AACd,CAAC,CAAA;AAED,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;AAEjC,MAAM,WAAW,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAC/D,UAAU,CAAC,SAAS,CAAC,KAAK,GAAG;IAC3B,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACxB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,CAAA;KAC/B;AACH,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAC9D,UAAU,CAAC,SAAS,CAAC,IAAI,GAAG;IAC1B,IAAI,CAAC,OAAO,GAAG,IAAI,CAAA;IACnB,2CAA2C;IAC3C,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;IACnB,IAAI,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;QACxB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,CAAA;QAC7B,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,WAAW,CAAC,CAAA;KAC/B;AACH,CAAC,CAAA;AAED,MAAM,UAAU,GAA
G,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAE9D,UAAU,CAAC,SAAS,CAAC,GAAG,GAAG;IACzB,aAAa;IACb,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;IACnB,IAAI,CAAC,OAAO,GAAG,IAAI,CAAA;IACnB,OAAO,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,UAAU,EAAE,GAAG,EAAE;QACxC,IAAI,CAAC,MAAM,CAAC,GAAG,EAAE,CAAA;IACnB,CAAC,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,KAAK,GAAG,UAAU,GAAG;IACxC,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,CAAA;IACnD,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,QAAQ,GAAG,UAAU,GAAG;IAC3C,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,GAAG,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC,CAAA;IACnD,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;IAChB,IAAI,CAAC,KAAK,EAAE,CAAA;AACd,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,iBAAiB,GAAG,UAAU,KAAK;IACtD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;AACvD,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,WAAW,GAAG;IACjC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;AAC7D,CAAC,CAAA;AAED,UAAU,CAAC,SAAS,CAAC,YAAY,GAAG,UAAU,GAAG;IAC/C,+DAA+D;IAC/D,IAAI,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;AAClB,CAAC,CAAA;AAED,MAAM,CAAC,OAAO,GAAG,UAAU,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/inbound-parser.test.d.ts b/node_modules/pg-protocol/dist/inbound-parser.test.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/pg-protocol/dist/inbound-parser.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/pg-protocol/dist/inbound-parser.test.js b/node_modules/pg-protocol/dist/inbound-parser.test.js new file mode 100644 index 00000000..6950daac --- /dev/null +++ b/node_modules/pg-protocol/dist/inbound-parser.test.js @@ -0,0 +1,483 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const test_buffers_1 = __importDefault(require("./testing/test-buffers")); +const buffer_list_1 = __importDefault(require("./testing/buffer-list")); +const _1 = require("."); +const assert_1 = __importDefault(require("assert")); +const stream_1 = require("stream"); +var authOkBuffer = test_buffers_1.default.authenticationOk(); +var paramStatusBuffer = test_buffers_1.default.parameterStatus('client_encoding', 'UTF8'); +var readyForQueryBuffer = test_buffers_1.default.readyForQuery(); +var backendKeyDataBuffer = test_buffers_1.default.backendKeyData(1, 2); +var commandCompleteBuffer = test_buffers_1.default.commandComplete('SELECT 3'); +var parseCompleteBuffer = test_buffers_1.default.parseComplete(); +var bindCompleteBuffer = test_buffers_1.default.bindComplete(); +var portalSuspendedBuffer = test_buffers_1.default.portalSuspended(); +var addRow = function (bufferList, name, offset) { + return bufferList + .addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0); // format code, 0 => text +}; +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0, +}; +var oneRowDescBuff = test_buffers_1.default.rowDescription([row1]); +row1.name = 'bang'; +var twoRowBuf = test_buffers_1.default.rowDescription([ + row1, + { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0, + }, +]); +var emptyRowFieldBuf = new buffer_list_1.default().addInt16(0).join(true, 'D'); +var emptyRowFieldBuf = test_buffers_1.default.dataRow([]); +var oneFieldBuf = new buffer_list_1.default() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D'); +var oneFieldBuf = test_buffers_1.default.dataRow(['test']); +var expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8, +}; +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25, +}; +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2, +}; +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I', +}; +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3', +}; +var emptyRowDescriptionBuffer = new buffer_list_1.default() + .addInt16(0) // number of fields + .join(true, 'T'); +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +}; +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [ + { + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + ], +}; +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [ + { + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + { + name: 'whoah', + tableID: 10, + columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text', + }, + ], +}; +var testForMessage = function (buffer, 
expectedMessage) { + it('recieves and parses ' + expectedMessage.name, () => __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([buffer]); + const [lastMessage] = messages; + for (const key in expectedMessage) { + assert_1.default.deepEqual(lastMessage[key], expectedMessage[key]); + } + })); +}; +var plainPasswordBuffer = test_buffers_1.default.authenticationCleartextPassword(); +var md5PasswordBuffer = test_buffers_1.default.authenticationMD5Password(); +var SASLBuffer = test_buffers_1.default.authenticationSASL(); +var SASLContinueBuffer = test_buffers_1.default.authenticationSASLContinue(); +var SASLFinalBuffer = test_buffers_1.default.authenticationSASLFinal(); +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword', +}; +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]), +}; +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'], +}; +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +}; +var expectedSASLFinalMessage = { + name: 'authenticationSASLFinal', + data: 'data', +}; +var notificationResponseBuffer = test_buffers_1.default.notification(4, 'hi', 'boom'); +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom', +}; +const parseBuffers = (buffers) => __awaiter(void 0, void 0, void 0, function* () { + const stream = new stream_1.PassThrough(); + for (const buffer of buffers) { + stream.write(buffer); + } + stream.end(); + const msgs = []; + yield _1.parse(stream, (msg) => msgs.push(msg)); + return msgs; +}); +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage); + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage); + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage); + testForMessage(SASLBuffer, expectedSASLMessage); + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage); + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]); + testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage); + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage); + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]); + testForMessage(extendedSASLFinalBuffer, expectedSASLFinalMessage); + testForMessage(paramStatusBuffer, expectedParameterStatusMessage); + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage); + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage); + testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage); + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage); + testForMessage(test_buffers_1.default.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }); + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData', + }); + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, 
expectedEmptyRowDescriptionMessage); + testForMessage(oneRowDescBuff, expectedOneRowMessage); + testForMessage(twoRowBuf, expectedTwoRowMessage); + }); + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0, + }); + }); + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'], + }); + }); + }); + describe('notice message', function () { + // this uses the same logic as error message + var buff = test_buffers_1.default.notice([{ type: 'C', value: 'code' }]); + testForMessage(buff, { + name: 'notice', + code: 'code', + }); + }); + testForMessage(test_buffers_1.default.error([]), { + name: 'error', + }); + describe('with all the fields', function () { + var buffer = test_buffers_1.default.error([ + { + type: 'S', + value: 'ERROR', + }, + { + type: 'C', + value: 'code', + }, + { + type: 'M', + value: 'message', + }, + { + type: 'D', + value: 'details', + }, + { + type: 'H', + value: 'hint', + }, + { + type: 'P', + value: '100', + }, + { + type: 'p', + value: '101', + }, + { + type: 'q', + value: 'query', + }, + { + type: 'W', + value: 'where', + }, + { + type: 'F', + value: 'file', + }, + { + type: 'L', + value: 'line', + }, + { + type: 'R', + value: 'routine', + }, + { + type: 'Z', + value: 'alsdkf', + }, + ]); + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 'line', + routine: 'routine', + }); + }); + testForMessage(parseCompleteBuffer, { + name: 'parseComplete', + }); + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }); + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }); + testForMessage(test_buffers_1.default.closeComplete(), { + name: 'closeComplete', + }); + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended', + }); + }); + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4, + }); + }); + describe('copy', () => { + testForMessage(test_buffers_1.default.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [], + }); + testForMessage(test_buffers_1.default.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1], + }); + testForMessage(test_buffers_1.default.copyOut(0), { + name: 'copyOutResponse', + length: 7, + binary: false, + columnTypes: [], + }); + testForMessage(test_buffers_1.default.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2], + }); + testForMessage(test_buffers_1.default.copyDone(), { + name: 'copyDone', + length: 4, + }); + testForMessage(test_buffers_1.default.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]), + }); + }); + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = test_buffers_1.default.dataRow([null, 'bang', 'zug zug', null, '!']); + it('parses when full buffer comes in', function () { + return 
__awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([fullBuffer]); + const message = messages[0]; + assert_1.default.equal(message.fields.length, 5); + assert_1.default.equal(message.fields[0], null); + assert_1.default.equal(message.fields[1], 'bang'); + assert_1.default.equal(message.fields[2], 'zug zug'); + assert_1.default.equal(message.fields[3], null); + assert_1.default.equal(message.fields[4], '!'); + }); + }); + var testMessageRecievedAfterSpiltAt = function (split) { + return __awaiter(this, void 0, void 0, function* () { + var firstBuffer = Buffer.alloc(fullBuffer.length - split); + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length); + fullBuffer.copy(firstBuffer, 0, 0); + fullBuffer.copy(secondBuffer, 0, firstBuffer.length); + const messages = yield parseBuffers([fullBuffer]); + const message = messages[0]; + assert_1.default.equal(message.fields.length, 5); + assert_1.default.equal(message.fields[0], null); + assert_1.default.equal(message.fields[1], 'bang'); + assert_1.default.equal(message.fields[2], 'zug zug'); + assert_1.default.equal(message.fields[3], null); + assert_1.default.equal(message.fields[4], '!'); + }); + }; + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6); + }); + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2); + }); + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2); + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1); + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5); + }); + }); + describe('split buffer, multiple message parsing', function () { + var dataRowBuffer = test_buffers_1.default.dataRow(['!']); + var readyForQueryBuffer = test_buffers_1.default.readyForQuery(); + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length); + dataRowBuffer.copy(fullBuffer, 0, 0); + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0); + var verifyMessages = function (messages) { + assert_1.default.strictEqual(messages.length, 2); + assert_1.default.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'], + }); + assert_1.default.equal(messages[0].fields[0], '!'); + assert_1.default.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I', + }); + }; + // sanity check + it('recieves both messages when packet is not split', function () { + return __awaiter(this, void 0, void 0, function* () { + const messages = yield parseBuffers([fullBuffer]); + verifyMessages(messages); + }); + }); + var splitAndVerifyTwoMessages = function (split) { + return __awaiter(this, void 0, void 0, function* () { + var firstBuffer = Buffer.alloc(fullBuffer.length - split); + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length); + fullBuffer.copy(firstBuffer, 0, 0); + fullBuffer.copy(secondBuffer, 0, firstBuffer.length); + const messages = yield parseBuffers([firstBuffer, secondBuffer]); + verifyMessages(messages); + }); + }; + describe('recieves both messages when packet is split', function () { + it('in the middle', function () { + return splitAndVerifyTwoMessages(11); + }); + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6), + ]); + }); + it('at the end', function () { + return Promise.all([splitAndVerifyTwoMessages(8), 
splitAndVerifyTwoMessages(1)]); + }); + }); + }); +}); +//# sourceMappingURL=inbound-parser.test.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/inbound-parser.test.js.map b/node_modules/pg-protocol/dist/inbound-parser.test.js.map new file mode 100644 index 00000000..06d6e5d9 --- /dev/null +++ b/node_modules/pg-protocol/dist/inbound-parser.test.js.map @@ -0,0 +1 @@ +{"version":3,"file":"inbound-parser.test.js","sourceRoot":"","sources":["../src/inbound-parser.test.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,0EAA4C;AAC5C,wEAA8C;AAC9C,wBAAyB;AACzB,oDAA2B;AAC3B,mCAAoC;AAGpC,IAAI,YAAY,GAAG,sBAAO,CAAC,gBAAgB,EAAE,CAAA;AAC7C,IAAI,iBAAiB,GAAG,sBAAO,CAAC,eAAe,CAAC,iBAAiB,EAAE,MAAM,CAAC,CAAA;AAC1E,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACjD,IAAI,oBAAoB,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;AACvD,IAAI,qBAAqB,GAAG,sBAAO,CAAC,eAAe,CAAC,UAAU,CAAC,CAAA;AAC/D,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;AACjD,IAAI,kBAAkB,GAAG,sBAAO,CAAC,YAAY,EAAE,CAAA;AAC/C,IAAI,qBAAqB,GAAG,sBAAO,CAAC,eAAe,EAAE,CAAA;AAErD,IAAI,MAAM,GAAG,UAAU,UAAsB,EAAE,IAAY,EAAE,MAAc;IACzE,OAAO,UAAU;SACd,UAAU,CAAC,IAAI,CAAC,CAAC,aAAa;SAC9B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,WAAW;SAC9B,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,6BAA6B;SAChD,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gCAAgC;SACnD,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gBAAgB;SACnC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,gBAAgB;SACnC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,yBAAyB;AAC1C,CAAC,CAAA;AAED,IAAI,IAAI,GAAG;IACT,IAAI,EAAE,IAAI;IACV,OAAO,EAAE,CAAC;IACV,eAAe,EAAE,CAAC;IAClB,UAAU,EAAE,CAAC;IACb,YAAY,EAAE,CAAC;IACf,YAAY,EAAE,CAAC;IACf,UAAU,EAAE,CAAC;CACd,CAAA;AACD,IAAI,cAAc,GAAG,sBAAO,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC,CAAC,CAAA;AACnD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAA;AAElB,IAAI,SAAS,GAAG,sBAAO,CAAC,cAAc,CAAC;IACrC,IAAI;IACJ;QACE,IAAI,EAAE,OAAO;QACb,OAAO,EAAE,EAAE;QACX,eAAe,EAAE,EAAE;QACnB,UAAU,EAAE,EAAE;QACd,YAAY,EAAE,EAAE;QAChB,YAAY,EAAE,EAAE;QAChB,UAAU,EAAE,CAAC;KACd;CACF,CAAC,CAAA;AAEF,IAAI,gBAAgB,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAEnE,IAAI,gBAAgB,GAAG,sBAAO,CAAC,OAAO,CAAC,EAAE,CAAC,CAAA;AAE1C,IAAI,WAAW,GAAG,IAAI,qBAAU,EAAE;KAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,QAAQ,CAAC,CAAC,CAAC,CAAC,4BAA4B;KACxC,UAAU,CAAC,MAAM,CAAC;KAClB,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,WAAW,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;AAE3C,IAAI,iCAAiC,GAAG;IACtC,IAAI,EAAE,kBAAkB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAED,IAAI,8BAA8B,GAAG;IACnC,IAAI,EAAE,iBAAiB;IACvB,aAAa,EAAE,iBAAiB;IAChC,cAAc,EAAE,MAAM;IACtB,MAAM,EAAE,EAAE;CACX,CAAA;AAED,IAAI,6BAA6B,GAAG;IAClC,IAAI,EAAE,gBAAgB;IACtB,SAAS,EAAE,CAAC;IACZ,SAAS,EAAE,CAAC;CACb,CAAA;AAED,IAAI,4BAA4B,GAAG;IACjC,IAAI,EAAE,eAAe;IACrB,MAAM,EAAE,CAAC;IACT,MAAM,EAAE,GAAG;CACZ,CAAA;AAED,IAAI,8BAA8B,GAAG;IACnC,IAAI,EAAE,iBAAiB;IACvB,MAAM,EAAE,EAAE;IACV,IAAI,EAAE,UAAU;CACjB,CAAA;AACD,IAAI,yBAAyB,GAAG,IAAI,qBAAU,EAAE;KAC7C,QAAQ,CAAC,CAAC,CAAC,CAAC,mBAAmB;KAC/B,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;AAElB,IAAI,kCAAkC,GAAG;IACvC,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,CAAC;IACT,UAAU,EAAE,CAAC;IACb,MAAM,EAAE,EAAE;CACX,CAAA;AACD,IAAI,qBAAqB,GAAG;IAC1B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,IAAI;YACV,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,IAAI,qBAAqB,GAAG;IAC1B,IAAI,EAAE,gBAAgB;IACtB,MAAM,EAAE,EAAE;IACV,UAAU,EAAE,CAAC;IACb,MAAM,EAAE;QACN;YACE,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,CAAC;YACV,QAAQ,EAAE,CAAC;YACX,UAAU,EAAE,CAAC;YACb,YAAY,EAAE,CAAC;YACf,gBAAgB,EAAE,CAAC;YACnB,MAAM,EA
AE,MAAM;SACf;QACD;YACE,IAAI,EAAE,OAAO;YACb,OAAO,EAAE,EAAE;YACX,QAAQ,EAAE,EAAE;YACZ,UAAU,EAAE,EAAE;YACd,YAAY,EAAE,EAAE;YAChB,gBAAgB,EAAE,EAAE;YACpB,MAAM,EAAE,MAAM;SACf;KACF;CACF,CAAA;AAED,IAAI,cAAc,GAAG,UAAU,MAAc,EAAE,eAAoB;IACjE,EAAE,CAAC,sBAAsB,GAAG,eAAe,CAAC,IAAI,EAAE,GAAS,EAAE;QAC3D,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,MAAM,CAAC,CAAC,CAAA;QAC7C,MAAM,CAAC,WAAW,CAAC,GAAG,QAAQ,CAAA;QAE9B,KAAK,MAAM,GAAG,IAAI,eAAe,EAAE;YACjC,gBAAM,CAAC,SAAS,CAAE,WAAmB,CAAC,GAAG,CAAC,EAAE,eAAe,CAAC,GAAG,CAAC,CAAC,CAAA;SAClE;IACH,CAAC,CAAA,CAAC,CAAA;AACJ,CAAC,CAAA;AAED,IAAI,mBAAmB,GAAG,sBAAO,CAAC,+BAA+B,EAAE,CAAA;AACnE,IAAI,iBAAiB,GAAG,sBAAO,CAAC,yBAAyB,EAAE,CAAA;AAC3D,IAAI,UAAU,GAAG,sBAAO,CAAC,kBAAkB,EAAE,CAAA;AAC7C,IAAI,kBAAkB,GAAG,sBAAO,CAAC,0BAA0B,EAAE,CAAA;AAC7D,IAAI,eAAe,GAAG,sBAAO,CAAC,uBAAuB,EAAE,CAAA;AAEvD,IAAI,4BAA4B,GAAG;IACjC,IAAI,EAAE,iCAAiC;CACxC,CAAA;AAED,IAAI,0BAA0B,GAAG;IAC/B,IAAI,EAAE,2BAA2B;IACjC,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;CAChC,CAAA;AAED,IAAI,mBAAmB,GAAG;IACxB,IAAI,EAAE,oBAAoB;IAC1B,UAAU,EAAE,CAAC,eAAe,CAAC;CAC9B,CAAA;AAED,IAAI,2BAA2B,GAAG;IAChC,IAAI,EAAE,4BAA4B;IAClC,IAAI,EAAE,MAAM;CACb,CAAA;AAED,IAAI,wBAAwB,GAAG;IAC7B,IAAI,EAAE,yBAAyB;IAC/B,IAAI,EAAE,MAAM;CACb,CAAA;AAED,IAAI,0BAA0B,GAAG,sBAAO,CAAC,YAAY,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,CAAC,CAAA;AACtE,IAAI,mCAAmC,GAAG;IACxC,IAAI,EAAE,cAAc;IACpB,SAAS,EAAE,CAAC;IACZ,OAAO,EAAE,IAAI;IACb,OAAO,EAAE,MAAM;CAChB,CAAA;AAED,MAAM,YAAY,GAAG,CAAO,OAAiB,EAA6B,EAAE;IAC1E,MAAM,MAAM,GAAG,IAAI,oBAAW,EAAE,CAAA;IAChC,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE;QAC5B,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;KACrB;IACD,MAAM,CAAC,GAAG,EAAE,CAAA;IACZ,MAAM,IAAI,GAAqB,EAAE,CAAA;IACjC,MAAM,QAAK,CAAC,MAAM,EAAE,CAAC,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;IAC5C,OAAO,IAAI,CAAA;AACb,CAAC,CAAA,CAAA;AAED,QAAQ,CAAC,gBAAgB,EAAE;IACzB,cAAc,CAAC,YAAY,EAAE,iCAAiC,CAAC,CAAA;IAC/D,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,iBAAiB,EAAE,0BAA0B,CAAC,CAAA;IAC7D,cAAc,CAAC,UAAU,EAAE,mBAAmB,CAAC,CAAA;IAC/C,cAAc,CAAC,kBAAkB,EAAE,2BAA2B,CAAC,CAAA;IAE/D,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,0BAA0B,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,kBAAkB,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IACjG,cAAc,CAAC,0BAA0B,EAAE,2BAA2B,CAAC,CAAA;IAEvE,cAAc,CAAC,eAAe,EAAE,wBAAwB,CAAC,CAAA;IAEzD,4CAA4C;IAC5C,2EAA2E;IAC3E,yFAAyF;IACzF,MAAM,uBAAuB,GAAG,MAAM,CAAC,MAAM,CAAC,CAAC,eAAe,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;IAC3F,cAAc,CAAC,uBAAuB,EAAE,wBAAwB,CAAC,CAAA;IAEjE,cAAc,CAAC,iBAAiB,EAAE,8BAA8B,CAAC,CAAA;IACjE,cAAc,CAAC,oBAAoB,EAAE,6BAA6B,CAAC,CAAA;IACnE,cAAc,CAAC,mBAAmB,EAAE,4BAA4B,CAAC,CAAA;IACjE,cAAc,CAAC,qBAAqB,EAAE,8BAA8B,CAAC,CAAA;IACrE,cAAc,CAAC,0BAA0B,EAAE,mCAAmC,CAAC,CAAA;IAC/E,cAAc,CAAC,sBAAO,CAAC,UAAU,EAAE,EAAE;QACnC,IAAI,EAAE,YAAY;QAClB,MAAM,EAAE,CAAC;KACV,CAAC,CAAA;IAEF,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE;QAC9C,IAAI,EAAE,QAAQ;KACf,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,cAAc,CAAC,yBAAyB,EAAE,kCAAkC,CAAC,CAAA;QAC7E,cAAc,CAAC,cAAc,EAAE,qBAAqB,CAAC,CAAA;QACrD,cAAc,CAAC,SAAS,EAAE,qBAAqB,CAAC,CAAA;IAClD,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,cAAc,EAAE;QACvB,QAAQ,CAAC,mBAAmB,EAAE;YAC5B,cAAc,CAAC,gBAAgB,EAAE;gBAC/B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;aACd,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QAEF,QAAQ,CAAC,8BAA8B,EAAE;YACvC,cAAc,CAAC,WAAW,EAAE;gBAC1B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,CAAC,MAAM,CAAC;aACjB,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,gBAAgB,EAAE;QACzB
,4CAA4C;QAC5C,IAAI,IAAI,GAAG,sBAAO,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,CAAC,CAAA;QACzD,cAAc,CAAC,IAAI,EAAE;YACnB,IAAI,EAAE,QAAQ;YACd,IAAI,EAAE,MAAM;SACb,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;QAChC,IAAI,EAAE,OAAO;KACd,CAAC,CAAA;IAEF,QAAQ,CAAC,qBAAqB,EAAE;QAC9B,IAAI,MAAM,GAAG,sBAAO,CAAC,KAAK,CAAC;YACzB;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,KAAK;aACb;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,OAAO;aACf;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,MAAM;aACd;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,SAAS;aACjB;YACD;gBACE,IAAI,EAAE,GAAG;gBACT,KAAK,EAAE,QAAQ;aAChB;SACF,CAAC,CAAA;QAEF,cAAc,CAAC,MAAM,EAAE;YACrB,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,OAAO;YACjB,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;YAClB,MAAM,EAAE,SAAS;YACjB,IAAI,EAAE,MAAM;YACZ,QAAQ,EAAE,KAAK;YACf,gBAAgB,EAAE,KAAK;YACvB,aAAa,EAAE,OAAO;YACtB,KAAK,EAAE,OAAO;YACd,IAAI,EAAE,MAAM;YACZ,IAAI,EAAE,MAAM;YACZ,OAAO,EAAE,SAAS;SACnB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,cAAc,CAAC,mBAAmB,EAAE;QAClC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,kBAAkB,EAAE;QACjC,IAAI,EAAE,cAAc;KACrB,CAAC,CAAA;IAEF,cAAc,CAAC,sBAAO,CAAC,aAAa,EAAE,EAAE;QACtC,IAAI,EAAE,eAAe;KACtB,CAAC,CAAA;IAEF,QAAQ,CAAC,iCAAiC,EAAE;QAC1C,cAAc,CAAC,qBAAqB,EAAE;YACpC,IAAI,EAAE,iBAAiB;SACxB,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,kCAAkC,EAAE;QAC3C,cAAc,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,EAAE;YAC1D,IAAI,EAAE,kBAAkB;YACxB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,MAAM,EAAE,GAAG,EAAE;QACpB,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE;YAChC,IAAI,EAAE,gBAAgB;YACtB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC;SACpB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,CAAC;YACT,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,EAAE;SAChB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE;YACjC,IAAI,EAAE,iBAAiB;YACvB,MAAM,EAAE,EAAE;YACV,MAAM,EAAE,KAAK;YACb,WAAW,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;SACvB,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,EAAE,EAAE;YACjC,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;SACV,CAAC,CAAA;QAEF,cAAc,CAAC,sBAAO,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE;YACvD,IAAI,EAAE,UAAU;YAChB,MAAM,EAAE,CAAC;YACT,KAAK,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;SAC9B,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,uEAAuE;IACvE,uEAAuE;IACvE,yBAAyB;IACzB,QAAQ,CAAC,sCAAsC,EAAE;QAC/C,IAAI,UAAU,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,SAAS,EAAE,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAEtE,EAAE,CAAC,kCAAkC,EAAE;;gBACrC,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC
,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAC,CAAA;QAEF,IAAI,+BAA+B,GAAG,UAAgB,KAAa;;gBACjE,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBACzD,IAAI,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACvE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,MAAM,OAAO,GAAG,QAAQ,CAAC,CAAC,CAAQ,CAAA;gBAClC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;gBACtC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,CAAC,CAAA;gBACvC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,SAAS,CAAC,CAAA;gBAC1C,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,CAAA;gBACrC,gBAAM,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACtC,CAAC;SAAA,CAAA;QAED,EAAE,CAAC,iCAAiC,EAAE;YACpC,+BAA+B,CAAC,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,+BAA+B,CAAC,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,gCAAgC,EAAE;YACnC,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;YACtD,+BAA+B,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;QACxD,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wCAAwC,EAAE;QACjD,IAAI,aAAa,GAAG,sBAAO,CAAC,OAAO,CAAC,CAAC,GAAG,CAAC,CAAC,CAAA;QAC1C,IAAI,mBAAmB,GAAG,sBAAO,CAAC,aAAa,EAAE,CAAA;QACjD,IAAI,UAAU,GAAG,MAAM,CAAC,KAAK,CAAC,aAAa,CAAC,MAAM,GAAG,mBAAmB,CAAC,MAAM,CAAC,CAAA;QAChF,aAAa,CAAC,IAAI,CAAC,UAAU,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QACpC,mBAAmB,CAAC,IAAI,CAAC,UAAU,EAAE,aAAa,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;QAE7D,IAAI,cAAc,GAAG,UAAU,QAAe;YAC5C,gBAAM,CAAC,WAAW,CAAC,QAAQ,CAAC,MAAM,EAAE,CAAC,CAAC,CAAA;YACtC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,SAAS;gBACf,UAAU,EAAE,CAAC;gBACb,MAAM,EAAE,EAAE;gBACV,MAAM,EAAE,CAAC,GAAG,CAAC;aACd,CAAC,CAAA;YACF,gBAAM,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA;YACxC,gBAAM,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,EAAE;gBAC5B,IAAI,EAAE,eAAe;gBACrB,MAAM,EAAE,CAAC;gBACT,MAAM,EAAE,GAAG;aACZ,CAAC,CAAA;QACJ,CAAC,CAAA;QACD,eAAe;QACf,EAAE,CAAC,iDAAiD,EAAE;;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,UAAU,CAAC,CAAC,CAAA;gBACjD,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAC,CAAA;QAEF,IAAI,yBAAyB,GAAG,UAAgB,KAAa;;gBAC3D,IAAI,WAAW,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,KAAK,CAAC,CAAA;gBACzD,IAAI,YAAY,GAAG,MAAM,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,GAAG,WAAW,CAAC,MAAM,CAAC,CAAA;gBACvE,UAAU,CAAC,IAAI,CAAC,WAAW,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;gBAClC,UAAU,CAAC,IAAI,CAAC,YAAY,EAAE,CAAC,EAAE,WAAW,CAAC,MAAM,CAAC,CAAA;gBACpD,MAAM,QAAQ,GAAG,MAAM,YAAY,CAAC,CAAC,WAAW,EAAE,YAAY,CAAC,CAAC,CAAA;gBAChE,cAAc,CAAC,QAAQ,CAAC,CAAA;YAC1B,CAAC;SAAA,CAAA;QAED,QAAQ,CAAC,6CAA6C,EAAE;YACtD,EAAE,CAAC,eAAe,EAAE;gBAClB,OAAO,yBAAyB,CAAC,EAAE,CAAC,CAAA;YACtC,CAAC,CAAC,CAAA;YACF,EAAE,CAAC,cAAc,EAAE;gBACjB,OAAO,OAAO,CAAC,GAAG,CAAC;oBACjB,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;oBAChD,yBAAyB,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,CAAC;iBACjD,CAAC,CAAA;YACJ,CAAC,CAAC,CAAA;YAEF,EAAE,CAAC,YAAY,EAAE;gBACf,OAAO,OAAO,CAAC,GAAG,CAAC,CAAC,yBAAyB,CAAC,CAAC,CAAC,EAAE,yBAAyB,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;YAClF,CA
AC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/index.d.ts b/node_modules/pg-protocol/dist/index.d.ts new file mode 100644 index 00000000..ba49890d --- /dev/null +++ b/node_modules/pg-protocol/dist/index.d.ts @@ -0,0 +1,5 @@ +/// +import { serialize } from './serializer'; +import { MessageCallback } from './parser'; +export declare function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise; +export { serialize }; diff --git a/node_modules/pg-protocol/dist/index.js b/node_modules/pg-protocol/dist/index.js new file mode 100644 index 00000000..eceb852e --- /dev/null +++ b/node_modules/pg-protocol/dist/index.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const serializer_1 = require("./serializer"); +exports.serialize = serializer_1.serialize; +const parser_1 = require("./parser"); +function parse(stream, callback) { + const parser = new parser_1.Parser(); + stream.on('data', (buffer) => parser.parse(buffer, callback)); + return new Promise((resolve) => stream.on('end', () => resolve())); +} +exports.parse = parse; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/index.js.map b/node_modules/pg-protocol/dist/index.js.map new file mode 100644 index 00000000..4c5aef3e --- /dev/null +++ b/node_modules/pg-protocol/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AACA,6CAAwC;AAS/B,oBATA,sBAAS,CASA;AARlB,qCAAkD;AAElD,SAAgB,KAAK,CAAC,MAA6B,EAAE,QAAyB;IAC5E,MAAM,MAAM,GAAG,IAAI,eAAM,EAAE,CAAA;IAC3B,MAAM,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,MAAc,EAAE,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC,CAAA;IACrE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,EAAE,CAAC,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,EAAE,CAAC,OAAO,EAAE,CAAC,CAAC,CAAA;AACpE,CAAC;AAJD,sBAIC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/messages.d.ts b/node_modules/pg-protocol/dist/messages.d.ts new file mode 100644 index 00000000..c5261b50 --- /dev/null +++ b/node_modules/pg-protocol/dist/messages.d.ts @@ -0,0 +1,182 @@ +/// +export declare type Mode = 'text' | 'binary'; +export declare const enum MessageName { + parseComplete = "parseComplete", + bindComplete = "bindComplete", + closeComplete = "closeComplete", + noData = "noData", + portalSuspended = "portalSuspended", + replicationStart = "replicationStart", + emptyQuery = "emptyQuery", + copyDone = "copyDone", + copyData = "copyData", + rowDescription = "rowDescription", + parameterStatus = "parameterStatus", + backendKeyData = "backendKeyData", + notification = "notification", + readyForQuery = "readyForQuery", + commandComplete = "commandComplete", + dataRow = "dataRow", + copyInResponse = "copyInResponse", + copyOutResponse = "copyOutResponse", + authenticationOk = "authenticationOk", + authenticationMD5Password = "authenticationMD5Password", + authenticationCleartextPassword = "authenticationCleartextPassword", + authenticationSASL = "authenticationSASL", + authenticationSASLContinue = "authenticationSASLContinue", + authenticationSASLFinal = "authenticationSASLFinal", + error = "error", + notice = "notice" +} +export interface BackendMessage { + name: MessageName; + length: number; +} +export declare const parseComplete: BackendMessage; +export declare const bindComplete: BackendMessage; +export declare const closeComplete: BackendMessage; +export declare 
const noData: BackendMessage; +export declare const portalSuspended: BackendMessage; +export declare const replicationStart: BackendMessage; +export declare const emptyQuery: BackendMessage; +export declare const copyDone: BackendMessage; +interface NoticeOrError { + message: string | undefined; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; +} +export declare class DatabaseError extends Error implements NoticeOrError { + readonly length: number; + readonly name: MessageName; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; + constructor(message: string, length: number, name: MessageName); +} +export declare class CopyDataMessage { + readonly length: number; + readonly chunk: Buffer; + readonly name = MessageName.copyData; + constructor(length: number, chunk: Buffer); +} +export declare class CopyResponse { + readonly length: number; + readonly name: MessageName; + readonly binary: boolean; + readonly columnTypes: number[]; + constructor(length: number, name: MessageName, binary: boolean, columnCount: number); +} +export declare class Field { + readonly name: string; + readonly tableID: number; + readonly columnID: number; + readonly dataTypeID: number; + readonly dataTypeSize: number; + readonly dataTypeModifier: number; + readonly format: Mode; + constructor(name: string, tableID: number, columnID: number, dataTypeID: number, dataTypeSize: number, dataTypeModifier: number, format: Mode); +} +export declare class RowDescriptionMessage { + readonly length: number; + readonly fieldCount: number; + readonly name: MessageName; + readonly fields: Field[]; + constructor(length: number, fieldCount: number); +} +export declare class ParameterStatusMessage { + readonly length: number; + readonly parameterName: string; + readonly parameterValue: string; + readonly name: MessageName; + constructor(length: number, parameterName: string, parameterValue: string); +} +export declare class AuthenticationMD5Password implements BackendMessage { + readonly length: number; + readonly salt: Buffer; + readonly name: MessageName; + constructor(length: number, salt: Buffer); +} +export declare class BackendKeyDataMessage { + readonly length: number; + readonly processID: number; + readonly secretKey: number; + readonly name: MessageName; + constructor(length: number, processID: number, secretKey: number); +} +export declare class NotificationResponseMessage { + readonly length: number; + readonly processId: number; + readonly channel: string; + readonly payload: string; + readonly name: MessageName; + constructor(length: number, processId: number, channel: string, payload: string); +} +export declare class ReadyForQueryMessage { 
+ readonly length: number; + readonly status: string; + readonly name: MessageName; + constructor(length: number, status: string); +} +export declare class CommandCompleteMessage { + readonly length: number; + readonly text: string; + readonly name: MessageName; + constructor(length: number, text: string); +} +export declare class DataRowMessage { + length: number; + fields: any[]; + readonly fieldCount: number; + readonly name: MessageName; + constructor(length: number, fields: any[]); +} +export declare class NoticeMessage implements BackendMessage, NoticeOrError { + readonly length: number; + readonly message: string | undefined; + constructor(length: number, message: string | undefined); + readonly name = MessageName.notice; + severity: string | undefined; + code: string | undefined; + detail: string | undefined; + hint: string | undefined; + position: string | undefined; + internalPosition: string | undefined; + internalQuery: string | undefined; + where: string | undefined; + schema: string | undefined; + table: string | undefined; + column: string | undefined; + dataType: string | undefined; + constraint: string | undefined; + file: string | undefined; + line: string | undefined; + routine: string | undefined; +} +export {}; diff --git a/node_modules/pg-protocol/dist/messages.js b/node_modules/pg-protocol/dist/messages.js new file mode 100644 index 00000000..bf01d694 --- /dev/null +++ b/node_modules/pg-protocol/dist/messages.js @@ -0,0 +1,150 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseComplete = { + name: "parseComplete" /* parseComplete */, + length: 5, +}; +exports.bindComplete = { + name: "bindComplete" /* bindComplete */, + length: 5, +}; +exports.closeComplete = { + name: "closeComplete" /* closeComplete */, + length: 5, +}; +exports.noData = { + name: "noData" /* noData */, + length: 5, +}; +exports.portalSuspended = { + name: "portalSuspended" /* portalSuspended */, + length: 5, +}; +exports.replicationStart = { + name: "replicationStart" /* replicationStart */, + length: 4, +}; +exports.emptyQuery = { + name: "emptyQuery" /* emptyQuery */, + length: 4, +}; +exports.copyDone = { + name: "copyDone" /* copyDone */, + length: 4, +}; +class DatabaseError extends Error { + constructor(message, length, name) { + super(message); + this.length = length; + this.name = name; + } +} +exports.DatabaseError = DatabaseError; +class CopyDataMessage { + constructor(length, chunk) { + this.length = length; + this.chunk = chunk; + this.name = "copyData" /* copyData */; + } +} +exports.CopyDataMessage = CopyDataMessage; +class CopyResponse { + constructor(length, name, binary, columnCount) { + this.length = length; + this.name = name; + this.binary = binary; + this.columnTypes = new Array(columnCount); + } +} +exports.CopyResponse = CopyResponse; +class Field { + constructor(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, format) { + this.name = name; + this.tableID = tableID; + this.columnID = columnID; + this.dataTypeID = dataTypeID; + this.dataTypeSize = dataTypeSize; + this.dataTypeModifier = dataTypeModifier; + this.format = format; + } +} +exports.Field = Field; +class RowDescriptionMessage { + constructor(length, fieldCount) { + this.length = length; + this.fieldCount = fieldCount; + this.name = "rowDescription" /* rowDescription */; + this.fields = new Array(this.fieldCount); + } +} +exports.RowDescriptionMessage = RowDescriptionMessage; +class ParameterStatusMessage { + constructor(length, parameterName, 
parameterValue) { + this.length = length; + this.parameterName = parameterName; + this.parameterValue = parameterValue; + this.name = "parameterStatus" /* parameterStatus */; + } +} +exports.ParameterStatusMessage = ParameterStatusMessage; +class AuthenticationMD5Password { + constructor(length, salt) { + this.length = length; + this.salt = salt; + this.name = "authenticationMD5Password" /* authenticationMD5Password */; + } +} +exports.AuthenticationMD5Password = AuthenticationMD5Password; +class BackendKeyDataMessage { + constructor(length, processID, secretKey) { + this.length = length; + this.processID = processID; + this.secretKey = secretKey; + this.name = "backendKeyData" /* backendKeyData */; + } +} +exports.BackendKeyDataMessage = BackendKeyDataMessage; +class NotificationResponseMessage { + constructor(length, processId, channel, payload) { + this.length = length; + this.processId = processId; + this.channel = channel; + this.payload = payload; + this.name = "notification" /* notification */; + } +} +exports.NotificationResponseMessage = NotificationResponseMessage; +class ReadyForQueryMessage { + constructor(length, status) { + this.length = length; + this.status = status; + this.name = "readyForQuery" /* readyForQuery */; + } +} +exports.ReadyForQueryMessage = ReadyForQueryMessage; +class CommandCompleteMessage { + constructor(length, text) { + this.length = length; + this.text = text; + this.name = "commandComplete" /* commandComplete */; + } +} +exports.CommandCompleteMessage = CommandCompleteMessage; +class DataRowMessage { + constructor(length, fields) { + this.length = length; + this.fields = fields; + this.name = "dataRow" /* dataRow */; + this.fieldCount = fields.length; + } +} +exports.DataRowMessage = DataRowMessage; +class NoticeMessage { + constructor(length, message) { + this.length = length; + this.message = message; + this.name = "notice" /* notice */; + } +} +exports.NoticeMessage = NoticeMessage; +//# sourceMappingURL=messages.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/messages.js.map b/node_modules/pg-protocol/dist/messages.js.map new file mode 100644 index 00000000..7c536fdc --- /dev/null +++ b/node_modules/pg-protocol/dist/messages.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"messages.js","sourceRoot":"","sources":["../src/messages.ts"],"names":[],"mappings":";;AAoCa,QAAA,aAAa,GAAmB;IAC3C,IAAI,qCAA2B;IAC/B,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,YAAY,GAAmB;IAC1C,IAAI,mCAA0B;IAC9B,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,aAAa,GAAmB;IAC3C,IAAI,qCAA2B;IAC/B,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,MAAM,GAAmB;IACpC,IAAI,uBAAoB;IACxB,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,eAAe,GAAmB;IAC7C,IAAI,yCAA6B;IACjC,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,gBAAgB,GAAmB;IAC9C,IAAI,2CAA8B;IAClC,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,UAAU,GAAmB;IACxC,IAAI,+BAAwB;IAC5B,MAAM,EAAE,CAAC;CACV,CAAA;AAEY,QAAA,QAAQ,GAAmB;IACtC,IAAI,2BAAsB;IAC1B,MAAM,EAAE,CAAC;CACV,CAAA;AAsBD,MAAa,aAAc,SAAQ,KAAK;IAiBtC,YAAY,OAAe,EAAkB,MAAc,EAAkB,IAAiB;QAC5F,KAAK,CAAC,OAAO,CAAC,CAAA;QAD6B,WAAM,GAAN,MAAM,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAa;IAE9F,CAAC;CACF;AApBD,sCAoBC;AAED,MAAa,eAAe;IAE1B,YAA4B,MAAc,EAAkB,KAAa;QAA7C,WAAM,GAAN,MAAM,CAAQ;QAAkB,UAAK,GAAL,KAAK,CAAQ;QADzD,SAAI,6BAAuB;IACiC,CAAC;CAC9E;AAHD,0CAGC;AAED,MAAa,YAAY;IAEvB,YACkB,MAAc,EACd,IAAiB,EACjB,MAAe,EAC/B,WAAmB;QAHH,WAAM,GAAN,MAAM,CAAQ;QACd,SAAI,GAAJ,IAAI,CAAa;QACjB,WAAM,GAAN,MAAM,CAAS;QAG/B,IAAI,CAAC,WAAW,GAAG,IAAI,KAAK,CAAC,WAAW,CAAC,CAAA;IAC3C,CAAC;CACF;AAVD,oCAUC;AAED,MAAa,KAAK;IAChB,YACkB,IAAY,EACZ,OAAe,EACf,QAAgB,EAChB,UAAkB,EAClB,YAAoB,EACpB,gBAAwB,EACxB,MAAY;QANZ,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAQ;QACf,aAAQ,GAAR,QAAQ,CAAQ;QAChB,eAAU,GAAV,UAAU,CAAQ;QAClB,iBAAY,GAAZ,YAAY,CAAQ;QACpB,qBAAgB,GAAhB,gBAAgB,CAAQ;QACxB,WAAM,GAAN,MAAM,CAAM;IAC3B,CAAC;CACL;AAVD,sBAUC;AAED,MAAa,qBAAqB;IAGhC,YAA4B,MAAc,EAAkB,UAAkB;QAAlD,WAAM,GAAN,MAAM,CAAQ;QAAkB,eAAU,GAAV,UAAU,CAAQ;QAF9D,SAAI,yCAA0C;QAG5D,IAAI,CAAC,MAAM,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC1C,CAAC;CACF;AAND,sDAMC;AAED,MAAa,sBAAsB;IAEjC,YACkB,MAAc,EACd,aAAqB,EACrB,cAAsB;QAFtB,WAAM,GAAN,MAAM,CAAQ;QACd,kBAAa,GAAb,aAAa,CAAQ;QACrB,mBAAc,GAAd,cAAc,CAAQ;QAJxB,SAAI,2CAA2C;IAK5D,CAAC;CACL;AAPD,wDAOC;AAED,MAAa,yBAAyB;IAEpC,YAA4B,MAAc,EAAkB,IAAY;QAA5C,WAAM,GAAN,MAAM,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAQ;QADxD,SAAI,+DAAqD;IACE,CAAC;CAC7E;AAHD,8DAGC;AAED,MAAa,qBAAqB;IAEhC,YAA4B,MAAc,EAAkB,SAAiB,EAAkB,SAAiB;QAApF,WAAM,GAAN,MAAM,CAAQ;QAAkB,cAAS,GAAT,SAAS,CAAQ;QAAkB,cAAS,GAAT,SAAS,CAAQ;QADhG,SAAI,yCAA0C;IACqD,CAAC;CACrH;AAHD,sDAGC;AAED,MAAa,2BAA2B;IAEtC,YACkB,MAAc,EACd,SAAiB,EACjB,OAAe,EACf,OAAe;QAHf,WAAM,GAAN,MAAM,CAAQ;QACd,cAAS,GAAT,SAAS,CAAQ;QACjB,YAAO,GAAP,OAAO,CAAQ;QACf,YAAO,GAAP,OAAO,CAAQ;QALjB,SAAI,qCAAwC;IAMzD,CAAC;CACL;AARD,kEAQC;AAED,MAAa,oBAAoB;IAE/B,YAA4B,MAAc,EAAkB,MAAc;QAA9C,WAAM,GAAN,MAAM,CAAQ;QAAkB,WAAM,GAAN,MAAM,CAAQ;QAD1D,SAAI,uCAAyC;IACgB,CAAC;CAC/E;AAHD,oDAGC;AAED,MAAa,sBAAsB;IAEjC,YAA4B,MAAc,EAAkB,IAAY;QAA5C,WAAM,GAAN,MAAM,CAAQ;QAAkB,SAAI,GAAJ,IAAI,CAAQ;QADxD,SAAI,2CAA2C;IACY,CAAC;CAC7E;AAHD,wDAGC;AAED,MAAa,cAAc;IAGzB,YAAmB,MAAc,EAAS,MAAa;QAApC,WAAM,GAAN,MAAM,CAAQ;QAAS,WAAM,GAAN,MAAM,CAAO;QADvC,SAAI,2BAAmC;QAErD,IAAI,CAAC,UAAU,GAAG,MAAM,CAAC,MAAM,CAAA;IACjC,CAAC;CACF;AAND,wCAMC;AAED,MAAa,aAAa;IACxB,YAA4B,MAAc,EAAkB,OAA2B;QAA3D,WAAM,GAAN,MAAM,CAAQ;QAAkB,YAAO,GAAP,OAAO,CAAoB;QACvE,SAAI,yBAAqB;IADiD,CAAC;CAkB5F;AAnBD,sCAmBC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts b/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts new file mode 100644 index 00000000..cb0ff5c3 --- /dev/null +++ b/node_modules/pg-protocol/dist/outbound-serializer.test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/pg-protocol/dist/outbound-serializer.test.js b/node_modules/pg-protocol/dist/outbound-serializer.test.js new file mode 100644 index 
00000000..14e5c66c --- /dev/null +++ b/node_modules/pg-protocol/dist/outbound-serializer.test.js @@ -0,0 +1,220 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __importDefault(require("assert")); +const serializer_1 = require("./serializer"); +const buffer_list_1 = __importDefault(require("./testing/buffer-list")); +describe('serializer', () => { + it('builds startup message', function () { + const actual = serializer_1.serialize.startup({ + user: 'brian', + database: 'bang', + }); + assert_1.default.deepEqual(actual, new buffer_list_1.default() + .addInt16(3) + .addInt16(0) + .addCString('user') + .addCString('brian') + .addCString('database') + .addCString('bang') + .addCString('client_encoding') + .addCString('UTF8') + .addCString('') + .join(true)); + }); + it('builds password message', function () { + const actual = serializer_1.serialize.password('!'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('!').join(true, 'p')); + }); + it('builds request ssl message', function () { + const actual = serializer_1.serialize.requestSsl(); + const expected = new buffer_list_1.default().addInt32(80877103).join(true); + assert_1.default.deepEqual(actual, expected); + }); + it('builds SASLInitialResponseMessage message', function () { + const actual = serializer_1.serialize.sendSASLInitialResponseMessage('mech', 'data'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString('mech').addInt32(4).addString('data').join(true, 'p')); + }); + it('builds SCRAMClientFinalMessage message', function () { + const actual = serializer_1.serialize.sendSCRAMClientFinalMessage('data'); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addString('data').join(true, 'p')); + }); + it('builds query message', function () { + var txt = 'select * from boom'; + const actual = serializer_1.serialize.query(txt); + assert_1.default.deepEqual(actual, new buffer_list_1.default().addCString(txt).join(true, 'Q')); + }); + describe('parse message', () => { + it('builds parse message', function () { + const actual = serializer_1.serialize.parse({ text: '!' 
}); + var expected = new buffer_list_1.default().addCString('').addCString('!').addInt16(0).join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds parse message with named query', function () { + const actual = serializer_1.serialize.parse({ + name: 'boom', + text: 'select * from boom', + types: [], + }); + var expected = new buffer_list_1.default().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + it('with multiple parameters', function () { + const actual = serializer_1.serialize.parse({ + name: 'force', + text: 'select * from bang where name = $1', + types: [1, 2, 3, 4], + }); + var expected = new buffer_list_1.default() + .addCString('force') + .addCString('select * from bang where name = $1') + .addInt16(4) + .addInt32(1) + .addInt32(2) + .addInt32(3) + .addInt32(4) + .join(true, 'P'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('bind messages', function () { + it('with no values', function () { + const actual = serializer_1.serialize.bind(); + var expectedBuffer = new buffer_list_1.default() + .addCString('') + .addCString('') + .addInt16(0) + .addInt16(0) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('with named statement, portal, and values', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(0) + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing')) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + }); + it('with named statement, portal, and buffer value', function () { + const actual = serializer_1.serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, Buffer.from('zing', 'utf8')], + }); + var expectedBuffer = new buffer_list_1.default() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) // value count + .addInt16(0) // string + .addInt16(0) // string + .addInt16(0) // string + .addInt16(1) // binary + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing', 'utf-8')) + .addInt16(0) + .join(true, 'B'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + describe('builds execute message', function () { + it('for unamed portal with no row limit', function () { + const actual = serializer_1.serialize.execute(); + var expectedBuffer = new buffer_list_1.default().addCString('').addInt32(0).join(true, 'E'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + it('for named portal with row limit', function () { + const actual = serializer_1.serialize.execute({ + portal: 'my favorite portal', + rows: 100, + }); + var expectedBuffer = new buffer_list_1.default().addCString('my favorite portal').addInt32(100).join(true, 'E'); + assert_1.default.deepEqual(actual, expectedBuffer); + }); + }); + it('builds flush command', function () { + const actual = serializer_1.serialize.flush(); + var expected = new buffer_list_1.default().join(true, 'H'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds sync command', function () { + const 
actual = serializer_1.serialize.sync(); + var expected = new buffer_list_1.default().join(true, 'S'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds end command', function () { + const actual = serializer_1.serialize.end(); + var expected = Buffer.from([0x58, 0, 0, 0, 4]); + assert_1.default.deepEqual(actual, expected); + }); + describe('builds describe command', function () { + it('describe statement', function () { + const actual = serializer_1.serialize.describe({ type: 'S', name: 'bang' }); + var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'D'); + assert_1.default.deepEqual(actual, expected); + }); + it('describe unnamed portal', function () { + const actual = serializer_1.serialize.describe({ type: 'P' }); + var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'D'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('builds close command', function () { + it('describe statement', function () { + const actual = serializer_1.serialize.close({ type: 'S', name: 'bang' }); + var expected = new buffer_list_1.default().addChar('S').addCString('bang').join(true, 'C'); + assert_1.default.deepEqual(actual, expected); + }); + it('describe unnamed portal', function () { + const actual = serializer_1.serialize.close({ type: 'P' }); + var expected = new buffer_list_1.default().addChar('P').addCString('').join(true, 'C'); + assert_1.default.deepEqual(actual, expected); + }); + }); + describe('copy messages', function () { + it('builds copyFromChunk', () => { + const actual = serializer_1.serialize.copyData(Buffer.from([1, 2, 3])); + const expected = new buffer_list_1.default().add(Buffer.from([1, 2, 3])).join(true, 'd'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds copy fail', () => { + const actual = serializer_1.serialize.copyFail('err!'); + const expected = new buffer_list_1.default().addCString('err!').join(true, 'f'); + assert_1.default.deepEqual(actual, expected); + }); + it('builds copy done', () => { + const actual = serializer_1.serialize.copyDone(); + const expected = new buffer_list_1.default().join(true, 'c'); + assert_1.default.deepEqual(actual, expected); + }); + }); + it('builds cancel message', () => { + const actual = serializer_1.serialize.cancel(3, 4); + const expected = new buffer_list_1.default().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true); + assert_1.default.deepEqual(actual, expected); + }); +}); +//# sourceMappingURL=outbound-serializer.test.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/outbound-serializer.test.js.map b/node_modules/pg-protocol/dist/outbound-serializer.test.js.map new file mode 100644 index 00000000..c8285a59 --- /dev/null +++ b/node_modules/pg-protocol/dist/outbound-serializer.test.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"outbound-serializer.test.js","sourceRoot":"","sources":["../src/outbound-serializer.test.ts"],"names":[],"mappings":";;;;;AAAA,oDAA2B;AAC3B,6CAAwC;AACxC,wEAA8C;AAE9C,QAAQ,CAAC,YAAY,EAAE,GAAG,EAAE;IAC1B,EAAE,CAAC,wBAAwB,EAAE;QAC3B,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;YAC/B,IAAI,EAAE,OAAO;YACb,QAAQ,EAAE,MAAM;SACjB,CAAC,CAAA;QACF,gBAAM,CAAC,SAAS,CACd,MAAM,EACN,IAAI,qBAAU,EAAE;aACb,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,OAAO,CAAC;aACnB,UAAU,CAAC,UAAU,CAAC;aACtB,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,iBAAiB,CAAC;aAC7B,UAAU,CAAC,MAAM,CAAC;aAClB,UAAU,CAAC,EAAE,CAAC;aACd,IAAI,CAAC,IAAI,CAAC,CACd,CAAA;IACH,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,yBAAyB,EAAE;QAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;QACtC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,4BAA4B,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,UAAU,EAAE,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAC/D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,2CAA2C,EAAE;QAC9C,MAAM,MAAM,GAAG,sBAAS,CAAC,8BAA8B,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QACvE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC7G,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,wCAAwC,EAAE;QAC3C,MAAM,MAAM,GAAG,sBAAS,CAAC,2BAA2B,CAAC,MAAM,CAAC,CAAA;QAC5D,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC9E,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,IAAI,GAAG,GAAG,oBAAoB,CAAA;QAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACnC,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IAC5E,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE,GAAG,EAAE;QAC7B,EAAE,CAAC,sBAAsB,EAAE;YACzB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC1F,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,uCAAuC,EAAE;YAC1C,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,MAAM;gBACZ,IAAI,EAAE,oBAAoB;gBAC1B,KAAK,EAAE,EAAE;aACV,CAAC,CAAA;YACF,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/G,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0BAA0B,EAAE;YAC7B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC;gBAC7B,IAAI,EAAE,OAAO;gBACb,IAAI,EAAE,oCAAoC;gBAC1C,KAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC;aACpB,CAAC,CAAA;YACF,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE;iBAC5B,UAAU,CAAC,OAAO,CAAC;iBACnB,UAAU,CAAC,oCAAoC,CAAC;iBAChD,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,gBAAgB,EAAE;YACnB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;YAE/B,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;iBAClC,UAAU,CAAC,EAAE,CAAC;iBACd,UAAU,CAAC,EAAE,CAAC;iBACd,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,
CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,0CAA0C,EAAE;YAC7C,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;gBAC5B,MAAM,EAAE,MAAM;gBACd,SAAS,EAAE,KAAK;gBAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC;aAClC,CAAC,CAAA;YACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;iBAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;iBACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;iBACnC,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;iBACrB,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;iBACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;iBACZ,QAAQ,CAAC,CAAC,CAAC;iBACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;iBACxB,QAAQ,CAAC,CAAC,CAAC;iBACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,gDAAgD,EAAE;QACnD,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,CAAC;YAC5B,MAAM,EAAE,MAAM;YACd,SAAS,EAAE,KAAK;YAChB,MAAM,EAAE,CAAC,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;SACvD,CAAC,CAAA;QACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE;aAClC,UAAU,CAAC,MAAM,CAAC,CAAC,cAAc;aACjC,UAAU,CAAC,KAAK,CAAC,CAAC,iBAAiB;aACnC,QAAQ,CAAC,CAAC,CAAC,CAAC,cAAc;aAC1B,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC,CAAC,SAAS;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aACrB,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aACtB,QAAQ,CAAC,CAAC,CAAC,CAAC;aACZ,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;aACjC,QAAQ,CAAC,CAAC,CAAC;aACX,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAClB,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;IAC1C,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,wBAAwB,EAAE;QACjC,EAAE,CAAC,qCAAqC,EAAE;YACxC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,EAAE,CAAA;YAClC,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAChF,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,iCAAiC,EAAE;YACpC,MAAM,MAAM,GAAG,sBAAS,CAAC,OAAO,CAAC;gBAC/B,MAAM,EAAE,oBAAoB;gBAC5B,IAAI,EAAE,GAAG;aACV,CAAC,CAAA;YACF,IAAI,cAAc,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,oBAAoB,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,cAAc,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,sBAAsB,EAAE;QACzB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,EAAE,CAAA;QAChC,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAC/C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,qBAAqB,EAAE;QACxB,MAAM,MAAM,GAAG,sBAAS,CAAC,IAAI,EAAE,CAAA;QAC/B,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QAC/C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,oBAAoB,EAAE;QACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,GAAG,EAAE,CAAA;QAC9B,IAAI,QAAQ,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;QAC9C,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,yBAAyB,EAAE;QAClC,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC9D,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,EAAE,
IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAChD,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC3E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,sBAAsB,EAAE;QAC/B,EAAE,CAAC,oBAAoB,EAAE;YACvB,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,IAAI,EAAE,MAAM,EAAE,CAAC,CAAA;YAC3D,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC/E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,yBAAyB,EAAE;YAC5B,MAAM,MAAM,GAAG,sBAAS,CAAC,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,CAAC,CAAA;YAC7C,IAAI,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC3E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,QAAQ,CAAC,eAAe,EAAE;QACxB,EAAE,CAAC,sBAAsB,EAAE,GAAG,EAAE;YAC9B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACzD,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YAC7E,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YACzC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACpE,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QAEF,EAAE,CAAC,kBAAkB,EAAE,GAAG,EAAE;YAC1B,MAAM,MAAM,GAAG,sBAAS,CAAC,QAAQ,EAAE,CAAA;YACnC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;YACjD,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;IAEF,EAAE,CAAC,uBAAuB,EAAE,GAAG,EAAE;QAC/B,MAAM,MAAM,GAAG,sBAAS,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;QACrC,MAAM,QAAQ,GAAG,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA;QAClG,gBAAM,CAAC,SAAS,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAA;IACpC,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/parser.d.ts b/node_modules/pg-protocol/dist/parser.d.ts new file mode 100644 index 00000000..b8e1572a --- /dev/null +++ b/node_modules/pg-protocol/dist/parser.d.ts @@ -0,0 +1,37 @@ +/// +import { TransformOptions } from 'stream'; +import { Mode, BackendMessage } from './messages'; +export declare type Packet = { + code: number; + packet: Buffer; +}; +declare type StreamOptions = TransformOptions & { + mode: Mode; +}; +export declare type MessageCallback = (msg: BackendMessage) => void; +export declare class Parser { + private buffer; + private bufferLength; + private bufferOffset; + private reader; + private mode; + constructor(opts?: StreamOptions); + parse(buffer: Buffer, callback: MessageCallback): void; + private mergeBuffer; + private handlePacket; + private parseReadyForQueryMessage; + private parseCommandCompleteMessage; + private parseCopyData; + private parseCopyInMessage; + private parseCopyOutMessage; + private parseCopyMessage; + private parseNotificationMessage; + private parseRowDescriptionMessage; + private parseField; + private parseDataRowMessage; + private parseParameterStatusMessage; + private parseBackendKeyData; + parseAuthenticationResponse(offset: number, length: number, bytes: Buffer): any; + private parseErrorMessage; 
+} +export {}; diff --git a/node_modules/pg-protocol/dist/parser.js b/node_modules/pg-protocol/dist/parser.js new file mode 100644 index 00000000..68d91ea9 --- /dev/null +++ b/node_modules/pg-protocol/dist/parser.js @@ -0,0 +1,299 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const messages_1 = require("./messages"); +const buffer_reader_1 = require("./buffer-reader"); +const assert_1 = __importDefault(require("assert")); +// every message is prefixed with a single bye +const CODE_LENGTH = 1; +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4; +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH; +const emptyBuffer = Buffer.allocUnsafe(0); +class Parser { + constructor(opts) { + var _a, _b; + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + this.reader = new buffer_reader_1.BufferReader(); + if (((_a = opts) === null || _a === void 0 ? void 0 : _a.mode) === 'binary') { + throw new Error('Binary mode not supported yet'); + } + this.mode = ((_b = opts) === null || _b === void 0 ? void 0 : _b.mode) || 'text'; + } + parse(buffer, callback) { + this.mergeBuffer(buffer); + const bufferFullLength = this.bufferOffset + this.bufferLength; + let offset = this.bufferOffset; + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset]; + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH); + const fullMessageLength = CODE_LENGTH + length; + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer); + callback(message); + offset += fullMessageLength; + } + else { + break; + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer; + this.bufferLength = 0; + this.bufferOffset = 0; + } + else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset; + this.bufferOffset = offset; + } + } + mergeBuffer(buffer) { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength; + const newFullLength = newLength + this.bufferOffset; + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer; + if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer; + } + else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2; + while (newLength >= newBufferLength) { + newBufferLength *= 2; + } + newBuffer = Buffer.allocUnsafe(newBufferLength); + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength); + this.buffer = newBuffer; + this.bufferOffset = 0; + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength); + this.bufferLength = newLength; + } + else { + this.buffer = buffer; + this.bufferOffset = 0; + this.bufferLength = buffer.byteLength; + } + } + handlePacket(offset, code, length, bytes) { 
+ switch (code) { + case 50 /* BindComplete */: + return messages_1.bindComplete; + case 49 /* ParseComplete */: + return messages_1.parseComplete; + case 51 /* CloseComplete */: + return messages_1.closeComplete; + case 110 /* NoData */: + return messages_1.noData; + case 115 /* PortalSuspended */: + return messages_1.portalSuspended; + case 99 /* CopyDone */: + return messages_1.copyDone; + case 87 /* ReplicationStart */: + return messages_1.replicationStart; + case 73 /* EmptyQuery */: + return messages_1.emptyQuery; + case 68 /* DataRow */: + return this.parseDataRowMessage(offset, length, bytes); + case 67 /* CommandComplete */: + return this.parseCommandCompleteMessage(offset, length, bytes); + case 90 /* ReadyForQuery */: + return this.parseReadyForQueryMessage(offset, length, bytes); + case 65 /* NotificationResponse */: + return this.parseNotificationMessage(offset, length, bytes); + case 82 /* AuthenticationResponse */: + return this.parseAuthenticationResponse(offset, length, bytes); + case 83 /* ParameterStatus */: + return this.parseParameterStatusMessage(offset, length, bytes); + case 75 /* BackendKeyData */: + return this.parseBackendKeyData(offset, length, bytes); + case 69 /* ErrorMessage */: + return this.parseErrorMessage(offset, length, bytes, "error" /* error */); + case 78 /* NoticeMessage */: + return this.parseErrorMessage(offset, length, bytes, "notice" /* notice */); + case 84 /* RowDescriptionMessage */: + return this.parseRowDescriptionMessage(offset, length, bytes); + case 71 /* CopyIn */: + return this.parseCopyInMessage(offset, length, bytes); + case 72 /* CopyOut */: + return this.parseCopyOutMessage(offset, length, bytes); + case 100 /* CopyData */: + return this.parseCopyData(offset, length, bytes); + default: + assert_1.default.fail(`unknown message code: ${code.toString(16)}`); + } + } + parseReadyForQueryMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const status = this.reader.string(1); + return new messages_1.ReadyForQueryMessage(length, status); + } + parseCommandCompleteMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const text = this.reader.cstring(); + return new messages_1.CommandCompleteMessage(length, text); + } + parseCopyData(offset, length, bytes) { + const chunk = bytes.slice(offset, offset + (length - 4)); + return new messages_1.CopyDataMessage(length, chunk); + } + parseCopyInMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, "copyInResponse" /* copyInResponse */); + } + parseCopyOutMessage(offset, length, bytes) { + return this.parseCopyMessage(offset, length, bytes, "copyOutResponse" /* copyOutResponse */); + } + parseCopyMessage(offset, length, bytes, messageName) { + this.reader.setBuffer(offset, bytes); + const isBinary = this.reader.byte() !== 0; + const columnCount = this.reader.int16(); + const message = new messages_1.CopyResponse(length, messageName, isBinary, columnCount); + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16(); + } + return message; + } + parseNotificationMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processId = this.reader.int32(); + const channel = this.reader.cstring(); + const payload = this.reader.cstring(); + return new messages_1.NotificationResponseMessage(length, processId, channel, payload); + } + parseRowDescriptionMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const 
message = new messages_1.RowDescriptionMessage(length, fieldCount); + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField(); + } + return message; + } + parseField() { + const name = this.reader.cstring(); + const tableID = this.reader.int32(); + const columnID = this.reader.int16(); + const dataTypeID = this.reader.int32(); + const dataTypeSize = this.reader.int16(); + const dataTypeModifier = this.reader.int32(); + const mode = this.reader.int16() === 0 ? 'text' : 'binary'; + return new messages_1.Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode); + } + parseDataRowMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const fieldCount = this.reader.int16(); + const fields = new Array(fieldCount); + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32(); + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len); + } + return new messages_1.DataRowMessage(length, fields); + } + parseParameterStatusMessage(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const name = this.reader.cstring(); + const value = this.reader.cstring(); + return new messages_1.ParameterStatusMessage(length, name, value); + } + parseBackendKeyData(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const processID = this.reader.int32(); + const secretKey = this.reader.int32(); + return new messages_1.BackendKeyDataMessage(length, processID, secretKey); + } + parseAuthenticationResponse(offset, length, bytes) { + this.reader.setBuffer(offset, bytes); + const code = this.reader.int32(); + // TODO(bmc): maybe better types here + const message = { + name: "authenticationOk" /* authenticationOk */, + length, + }; + switch (code) { + case 0: // AuthenticationOk + break; + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = "authenticationCleartextPassword" /* authenticationCleartextPassword */; + } + break; + case 5: // AuthenticationMD5Password + if (message.length === 12) { + message.name = "authenticationMD5Password" /* authenticationMD5Password */; + const salt = this.reader.bytes(4); + return new messages_1.AuthenticationMD5Password(length, salt); + } + break; + case 10: // AuthenticationSASL + message.name = "authenticationSASL" /* authenticationSASL */; + message.mechanisms = []; + let mechanism; + do { + mechanism = this.reader.cstring(); + if (mechanism) { + message.mechanisms.push(mechanism); + } + } while (mechanism); + break; + case 11: // AuthenticationSASLContinue + message.name = "authenticationSASLContinue" /* authenticationSASLContinue */; + message.data = this.reader.string(length - 8); + break; + case 12: // AuthenticationSASLFinal + message.name = "authenticationSASLFinal" /* authenticationSASLFinal */; + message.data = this.reader.string(length - 8); + break; + default: + throw new Error('Unknown authenticationOk message type ' + code); + } + return message; + } + parseErrorMessage(offset, length, bytes, name) { + this.reader.setBuffer(offset, bytes); + const fields = {}; + let fieldType = this.reader.string(1); + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring(); + fieldType = this.reader.string(1); + } + const messageValue = fields.M; + const message = name === "notice" /* notice */ + ? 
new messages_1.NoticeMessage(length, messageValue) + : new messages_1.DatabaseError(messageValue, length, name); + message.severity = fields.S; + message.code = fields.C; + message.detail = fields.D; + message.hint = fields.H; + message.position = fields.P; + message.internalPosition = fields.p; + message.internalQuery = fields.q; + message.where = fields.W; + message.schema = fields.s; + message.table = fields.t; + message.column = fields.c; + message.dataType = fields.d; + message.constraint = fields.n; + message.file = fields.F; + message.line = fields.L; + message.routine = fields.R; + return message; + } +} +exports.Parser = Parser; +//# sourceMappingURL=parser.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/parser.js.map b/node_modules/pg-protocol/dist/parser.js.map new file mode 100644 index 00000000..7e667108 --- /dev/null +++ b/node_modules/pg-protocol/dist/parser.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parser.js","sourceRoot":"","sources":["../src/parser.ts"],"names":[],"mappings":";;;;;AACA,yCAyBmB;AACnB,mDAA8C;AAC9C,oDAA2B;AAE3B,8CAA8C;AAC9C,MAAM,WAAW,GAAG,CAAC,CAAA;AACrB,mEAAmE;AACnE,qCAAqC;AACrC,MAAM,UAAU,GAAG,CAAC,CAAA;AAEpB,MAAM,aAAa,GAAG,WAAW,GAAG,UAAU,CAAA;AAO9C,MAAM,WAAW,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;AAgCzC,MAAa,MAAM;IAOjB,YAAY,IAAoB;;QANxB,WAAM,GAAW,WAAW,CAAA;QAC5B,iBAAY,GAAW,CAAC,CAAA;QACxB,iBAAY,GAAW,CAAC,CAAA;QACxB,WAAM,GAAG,IAAI,4BAAY,EAAE,CAAA;QAIjC,IAAI,OAAA,IAAI,0CAAE,IAAI,MAAK,QAAQ,EAAE;YAC3B,MAAM,IAAI,KAAK,CAAC,+BAA+B,CAAC,CAAA;SACjD;QACD,IAAI,CAAC,IAAI,GAAG,OAAA,IAAI,0CAAE,IAAI,KAAI,MAAM,CAAA;IAClC,CAAC;IAEM,KAAK,CAAC,MAAc,EAAE,QAAyB;QACpD,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,CAAA;QACxB,MAAM,gBAAgB,GAAG,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9D,IAAI,MAAM,GAAG,IAAI,CAAC,YAAY,CAAA;QAC9B,OAAO,MAAM,GAAG,aAAa,IAAI,gBAAgB,EAAE;YACjD,uDAAuD;YACvD,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;YAChC,4EAA4E;YAC5E,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,MAAM,GAAG,WAAW,CAAC,CAAA;YAC7D,MAAM,iBAAiB,GAAG,WAAW,GAAG,MAAM,CAAA;YAC9C,IAAI,iBAAiB,GAAG,MAAM,IAAI,gBAAgB,EAAE;gBAClD,MAAM,OAAO,GAAG,IAAI,CAAC,YAAY,CAAC,MAAM,GAAG,aAAa,EAAE,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;gBACpF,QAAQ,CAAC,OAAO,CAAC,CAAA;gBACjB,MAAM,IAAI,iBAAiB,CAAA;aAC5B;iBAAM;gBACL,MAAK;aACN;SACF;QACD,IAAI,MAAM,KAAK,gBAAgB,EAAE;YAC/B,6BAA6B;YAC7B,IAAI,CAAC,MAAM,GAAG,WAAW,CAAA;YACzB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;SACtB;aAAM;YACL,wCAAwC;YACxC,IAAI,CAAC,YAAY,GAAG,gBAAgB,GAAG,MAAM,CAAA;YAC7C,IAAI,CAAC,YAAY,GAAG,MAAM,CAAA;SAC3B;IACH,CAAC;IAEO,WAAW,CAAC,MAAc;QAChC,IAAI,IAAI,CAAC,YAAY,GAAG,CAAC,EAAE;YACzB,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;YACvD,MAAM,aAAa,GAAG,SAAS,GAAG,IAAI,CAAC,YAAY,CAAA;YACnD,IAAI,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,EAAE;gBAC1C,wDAAwD;gBACxD,IAAI,SAAiB,CAAA;gBACrB,IAAI,SAAS,IAAI,IAAI,CAAC,MAAM,CAAC,UAAU,IAAI,IAAI,CAAC,YAAY,IAAI,IAAI,CAAC,YAAY,EAAE;oBACjF,kGAAkG;oBAClG,SAAS,GAAG,IAAI,CAAC,MAAM,CAAA;iBACxB;qBAAM;oBACL,+BAA+B;oBAC/B,IAAI,eAAe,GAAG,IAAI,CAAC,MAAM,CAAC,UAAU,GAAG,CAAC,CAAA;oBAChD,OAAO,SAAS,IAAI,eAAe,EAAE;wBACnC,eAAe,IAAI,CAAC,CAAA;qBACrB;oBACD,SAAS,GAAG,MAAM,CAAC,WAAW,CAAC,eAAe,CAAC,CAAA;iBAChD;gBACD,2CAA2C;gBAC3C,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,EAAE,IAAI,CAAC,YAAY,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;gBACxF,IAAI,CAAC,MAAM,GAAG,SAAS,CAAA;gBACvB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;aACtB;YACD,+CAA+C;YAC/C,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,GAAG,IAAI,CAAC,YAAY,CAAC,CAAA;YAC/D,IAAI,CAAC,YAAY,GAAG,SAAS,CAAA;SAC9B;aAAM;YACL,IAAI,CAAC,MAAM,GAAG,MAAM,
CAAA;YACpB,IAAI,CAAC,YAAY,GAAG,CAAC,CAAA;YACrB,IAAI,CAAC,YAAY,GAAG,MAAM,CAAC,UAAU,CAAA;SACtC;IACH,CAAC;IAEO,YAAY,CAAC,MAAc,EAAE,IAAY,EAAE,MAAc,EAAE,KAAa;QAC9E,QAAQ,IAAI,EAAE;YACZ;gBACE,OAAO,uBAAY,CAAA;YACrB;gBACE,OAAO,wBAAa,CAAA;YACtB;gBACE,OAAO,wBAAa,CAAA;YACtB;gBACE,OAAO,iBAAM,CAAA;YACf;gBACE,OAAO,0BAAe,CAAA;YACxB;gBACE,OAAO,mBAAQ,CAAA;YACjB;gBACE,OAAO,2BAAgB,CAAA;YACzB;gBACE,OAAO,qBAAU,CAAA;YACnB;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,yBAAyB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC9D;gBACE,OAAO,IAAI,CAAC,wBAAwB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC7D;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,2BAA2B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAChE;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,sBAAoB,CAAA;YACzE;gBACE,OAAO,IAAI,CAAC,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,wBAAqB,CAAA;YAC1E;gBACE,OAAO,IAAI,CAAC,0BAA0B,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAC/D;gBACE,OAAO,IAAI,CAAC,kBAAkB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACvD;gBACE,OAAO,IAAI,CAAC,mBAAmB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YACxD;gBACE,OAAO,IAAI,CAAC,aAAa,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,CAAC,CAAA;YAClD;gBACE,gBAAM,CAAC,IAAI,CAAC,yBAAyB,IAAI,CAAC,QAAQ,CAAC,EAAE,CAAC,EAAE,CAAC,CAAA;SAC5D;IACH,CAAC;IAEO,yBAAyB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC7E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,MAAM,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACpC,OAAO,IAAI,+BAAoB,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;IAEO,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC/E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,OAAO,IAAI,iCAAsB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;IACjD,CAAC;IAEO,aAAa,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACjE,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,CAAC,MAAM,EAAE,MAAM,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,0BAAe,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAC3C,CAAC;IAEO,kBAAkB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACtE,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,wCAA6B,CAAA;IACjF,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,OAAO,IAAI,CAAC,gBAAgB,CAAC,MAAM,EAAE,MAAM,EAAE,KAAK,0CAA8B,CAAA;IAClF,CAAC;IAEO,gBAAgB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa,EAAE,WAAwB;QAC9F,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;QACzC,MAAM,WAAW,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACvC,MAAM,OAAO,GAAG,IAAI,uBAAY,CAAC,MAAM,EAAE,WAAW,EAAE,QAAQ,EAAE,WAAW,CAAC,CAAA;QAC5E,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,WAAW,EAAE,CAAC,EAAE,EAAE;YACpC,OAAO,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;SAC7C;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,wBAAwB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC5E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACrC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACrC,OAAO,IAAI,sCAA2B,CAAC,MAAM,EAAE,SAAS,EAAE,OAAO,EAAE,OAAO,CAAC,CAAA;IAC7E,CAAC;IAEO,0BAA0B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC9E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,OAAO,GAAG,IAAI,gCAAqB,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;QAC7D,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,I
AAI,CAAC,UAAU,EAAE,CAAA;SACtC;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,UAAU;QAChB,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,MAAM,OAAO,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACnC,MAAM,QAAQ,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,YAAY,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACxC,MAAM,gBAAgB,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAC5C,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAA;QAC1D,OAAO,IAAI,gBAAK,CAAC,IAAI,EAAE,OAAO,EAAE,QAAQ,EAAE,UAAU,EAAE,YAAY,EAAE,gBAAgB,EAAE,IAAI,CAAC,CAAA;IAC7F,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,UAAU,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACtC,MAAM,MAAM,GAAU,IAAI,KAAK,CAAC,UAAU,CAAC,CAAA;QAC3C,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,UAAU,EAAE,CAAC,EAAE,EAAE;YACnC,MAAM,GAAG,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;YAC/B,uDAAuD;YACvD,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAA;SACxD;QACD,OAAO,IAAI,yBAAc,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;IAC3C,CAAC;IAEO,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC/E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAClC,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACnC,OAAO,IAAI,iCAAsB,CAAC,MAAM,EAAE,IAAI,EAAE,KAAK,CAAC,CAAA;IACxD,CAAC;IAEO,mBAAmB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QACvE,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,MAAM,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QACrC,OAAO,IAAI,gCAAqB,CAAC,MAAM,EAAE,SAAS,EAAE,SAAS,CAAC,CAAA;IAChE,CAAC;IAEM,2BAA2B,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa;QAC9E,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,EAAE,CAAA;QAChC,qCAAqC;QACrC,MAAM,OAAO,GAAyB;YACpC,IAAI,2CAA8B;YAClC,MAAM;SACP,CAAA;QAED,QAAQ,IAAI,EAAE;YACZ,KAAK,CAAC,EAAE,mBAAmB;gBACzB,MAAK;YACP,KAAK,CAAC,EAAE,kCAAkC;gBACxC,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;oBACxB,OAAO,CAAC,IAAI,0EAA8C,CAAA;iBAC3D;gBACD,MAAK;YACP,KAAK,CAAC,EAAE,4BAA4B;gBAClC,IAAI,OAAO,CAAC,MAAM,KAAK,EAAE,EAAE;oBACzB,OAAO,CAAC,IAAI,8DAAwC,CAAA;oBACpD,MAAM,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;oBACjC,OAAO,IAAI,oCAAyB,CAAC,MAAM,EAAE,IAAI,CAAC,CAAA;iBACnD;gBACD,MAAK;YACP,KAAK,EAAE,EAAE,qBAAqB;gBAC5B,OAAO,CAAC,IAAI,gDAAiC,CAAA;gBAC7C,OAAO,CAAC,UAAU,GAAG,EAAE,CAAA;gBACvB,IAAI,SAAiB,CAAA;gBACrB,GAAG;oBACD,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;oBAEjC,IAAI,SAAS,EAAE;wBACb,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;qBACnC;iBACF,QAAQ,SAAS,EAAC;gBACnB,MAAK;YACP,KAAK,EAAE,EAAE,6BAA6B;gBACpC,OAAO,CAAC,IAAI,gEAAyC,CAAA;gBACrD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC7C,MAAK;YACP,KAAK,EAAE,EAAE,0BAA0B;gBACjC,OAAO,CAAC,IAAI,0DAAsC,CAAA;gBAClD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;gBAC7C,MAAK;YACP;gBACE,MAAM,IAAI,KAAK,CAAC,wCAAwC,GAAG,IAAI,CAAC,CAAA;SACnE;QACD,OAAO,OAAO,CAAA;IAChB,CAAC;IAEO,iBAAiB,CAAC,MAAc,EAAE,MAAc,EAAE,KAAa,EAAE,IAAiB;QACxF,IAAI,CAAC,MAAM,CAAC,SAAS,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;QACpC,MAAM,MAAM,GAA2B,EAAE,CAAA;QACzC,IAAI,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;QACrC,OAAO,SAAS,KAAK,IAAI,EAAE;YACzB,MAAM,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;YACzC,SAAS,GAAG,IAAI,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA;SAClC;QAED,MAAM,YAAY,GAAG,MAAM,CAAC,CAAC,CAAA;QAE7B,MAAM,OAAO,GACX,IAAI
,0BAAuB;YACzB,CAAC,CAAC,IAAI,wBAAa,CAAC,MAAM,EAAE,YAAY,CAAC;YACzC,CAAC,CAAC,IAAI,wBAAa,CAAC,YAAY,EAAE,MAAM,EAAE,IAAI,CAAC,CAAA;QAEnD,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,gBAAgB,GAAG,MAAM,CAAC,CAAC,CAAA;QACnC,OAAO,CAAC,aAAa,GAAG,MAAM,CAAC,CAAC,CAAA;QAChC,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;QACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,KAAK,GAAG,MAAM,CAAC,CAAC,CAAA;QACxB,OAAO,CAAC,MAAM,GAAG,MAAM,CAAC,CAAC,CAAA;QACzB,OAAO,CAAC,QAAQ,GAAG,MAAM,CAAC,CAAC,CAAA;QAC3B,OAAO,CAAC,UAAU,GAAG,MAAM,CAAC,CAAC,CAAA;QAC7B,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,IAAI,GAAG,MAAM,CAAC,CAAC,CAAA;QACvB,OAAO,CAAC,OAAO,GAAG,MAAM,CAAC,CAAC,CAAA;QAC1B,OAAO,OAAO,CAAA;IAChB,CAAC;CACF;AA7SD,wBA6SC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/serializer.d.ts b/node_modules/pg-protocol/dist/serializer.d.ts new file mode 100644 index 00000000..fa64144b --- /dev/null +++ b/node_modules/pg-protocol/dist/serializer.d.ts @@ -0,0 +1,41 @@ +/// +declare type ParseOpts = { + name?: string; + types?: number[]; + text: string; +}; +declare type BindOpts = { + portal?: string; + binary?: boolean; + statement?: string; + values?: any[]; +}; +declare type ExecOpts = { + portal?: string; + rows?: number; +}; +declare type PortalOpts = { + type: 'S' | 'P'; + name?: string; +}; +declare const serialize: { + startup: (opts: Record) => Buffer; + password: (password: string) => Buffer; + requestSsl: () => Buffer; + sendSASLInitialResponseMessage: (mechanism: string, initialResponse: string) => Buffer; + sendSCRAMClientFinalMessage: (additionalData: string) => Buffer; + query: (text: string) => Buffer; + parse: (query: ParseOpts) => Buffer; + bind: (config?: BindOpts) => Buffer; + execute: (config?: ExecOpts | undefined) => Buffer; + describe: (msg: PortalOpts) => Buffer; + close: (msg: PortalOpts) => Buffer; + flush: () => Buffer; + sync: () => Buffer; + end: () => Buffer; + copyData: (chunk: Buffer) => Buffer; + copyDone: () => Buffer; + copyFail: (message: string) => Buffer; + cancel: (processID: number, secretKey: number) => Buffer; +}; +export { serialize }; diff --git a/node_modules/pg-protocol/dist/serializer.js b/node_modules/pg-protocol/dist/serializer.js new file mode 100644 index 00000000..7b8b2727 --- /dev/null +++ b/node_modules/pg-protocol/dist/serializer.js @@ -0,0 +1,193 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const buffer_writer_1 = require("./buffer-writer"); +const writer = new buffer_writer_1.Writer(); +const startup = (opts) => { + // protocol version + writer.addInt16(3).addInt16(0); + for (const key of Object.keys(opts)) { + writer.addCString(key).addCString(opts[key]); + } + writer.addCString('client_encoding').addCString('UTF8'); + var bodyBuffer = writer.addCString('').flush(); + // this message is sent without a code + var length = bodyBuffer.length + 4; + return new buffer_writer_1.Writer().addInt32(length).add(bodyBuffer).flush(); +}; +const requestSsl = () => { + const response = Buffer.allocUnsafe(8); + response.writeInt32BE(8, 0); + response.writeInt32BE(80877103, 4); + return response; +}; +const password = (password) => { + return writer.addCString(password).flush(112 /* startup */); +}; +const sendSASLInitialResponseMessage = function (mechanism, initialResponse) { + // 0x70 = 'p' + 
writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse); + return writer.flush(112 /* startup */); +}; +const sendSCRAMClientFinalMessage = function (additionalData) { + return writer.addString(additionalData).flush(112 /* startup */); +}; +const query = (text) => { + return writer.addCString(text).flush(81 /* query */); +}; +const emptyArray = []; +const parse = (query) => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + // normalize missing query names to allow for null + const name = query.name || ''; + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! Postgres only supports 63 characters for query names.'); + console.error('You supplied %s (%s)', name, name.length); + console.error('This can cause conflicts and silent errors executing queries'); + /* eslint-enable no-console */ + } + const types = query.types || emptyArray; + var len = types.length; + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len); + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]); + } + return writer.flush(80 /* parse */); +}; +const bind = (config = {}) => { + // normalize config + const portal = config.portal || ''; + const statement = config.statement || ''; + const binary = config.binary || false; + var values = config.values || emptyArray; + var len = values.length; + var useBinary = false; + // TODO(bmc): all the loops in here aren't nice, we can do better + for (var j = 0; j < len; j++) { + useBinary = useBinary || values[j] instanceof Buffer; + } + var buffer = writer.addCString(portal).addCString(statement); + if (!useBinary) { + buffer.addInt16(0); + } + else { + buffer.addInt16(len); + for (j = 0; j < len; j++) { + buffer.addInt16(values[j] instanceof Buffer ? 
1 : 0); + } + } + buffer.addInt16(len); + for (var i = 0; i < len; i++) { + var val = values[i]; + if (val === null || typeof val === 'undefined') { + buffer.addInt32(-1); + } + else if (val instanceof Buffer) { + buffer.addInt32(val.length); + buffer.add(val); + } + else { + buffer.addInt32(Buffer.byteLength(val)); + buffer.addString(val); + } + } + if (binary) { + buffer.addInt16(1); // format codes to use binary + buffer.addInt16(1); + } + else { + buffer.addInt16(0); // format codes to use text + } + return writer.flush(66 /* bind */); +}; +const emptyExecute = Buffer.from([69 /* execute */, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]); +const execute = (config) => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute; + } + const portal = config.portal || ''; + const rows = config.rows || 0; + const portalLength = Buffer.byteLength(portal); + const len = 4 + portalLength + 1 + 4; + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len); + buff[0] = 69 /* execute */; + buff.writeInt32BE(len, 1); + buff.write(portal, 5, 'utf-8'); + buff[portalLength + 5] = 0; // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4); + return buff; +}; +const cancel = (processID, secretKey) => { + const buffer = Buffer.allocUnsafe(16); + buffer.writeInt32BE(16, 0); + buffer.writeInt16BE(1234, 4); + buffer.writeInt16BE(5678, 6); + buffer.writeInt32BE(processID, 8); + buffer.writeInt32BE(secretKey, 12); + return buffer; +}; +const cstringMessage = (code, string) => { + const stringLen = Buffer.byteLength(string); + const len = 4 + stringLen + 1; + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len); + buffer[0] = code; + buffer.writeInt32BE(len, 1); + buffer.write(string, 5, 'utf-8'); + buffer[len] = 0; // null terminate cString + return buffer; +}; +const emptyDescribePortal = writer.addCString('P').flush(68 /* describe */); +const emptyDescribeStatement = writer.addCString('S').flush(68 /* describe */); +const describe = (msg) => { + return msg.name + ? cstringMessage(68 /* describe */, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? 
emptyDescribePortal + : emptyDescribeStatement; +}; +const close = (msg) => { + const text = `${msg.type}${msg.name || ''}`; + return cstringMessage(67 /* close */, text); +}; +const copyData = (chunk) => { + return writer.add(chunk).flush(100 /* copyFromChunk */); +}; +const copyFail = (message) => { + return cstringMessage(102 /* copyFail */, message); +}; +const codeOnlyBuffer = (code) => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]); +const flushBuffer = codeOnlyBuffer(72 /* flush */); +const syncBuffer = codeOnlyBuffer(83 /* sync */); +const endBuffer = codeOnlyBuffer(88 /* end */); +const copyDoneBuffer = codeOnlyBuffer(99 /* copyDone */); +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +}; +exports.serialize = serialize; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/serializer.js.map b/node_modules/pg-protocol/dist/serializer.js.map new file mode 100644 index 00000000..28305c5e --- /dev/null +++ b/node_modules/pg-protocol/dist/serializer.js.map @@ -0,0 +1 @@ +{"version":3,"file":"serializer.js","sourceRoot":"","sources":["../src/serializer.ts"],"names":[],"mappings":";;AAAA,mDAAwC;AAkBxC,MAAM,MAAM,GAAG,IAAI,sBAAM,EAAE,CAAA;AAE3B,MAAM,OAAO,GAAG,CAAC,IAA4B,EAAU,EAAE;IACvD,mBAAmB;IACnB,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;KAC7C;IAED,MAAM,CAAC,UAAU,CAAC,iBAAiB,CAAC,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAEvD,IAAI,UAAU,GAAG,MAAM,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,KAAK,EAAE,CAAA;IAC9C,sCAAsC;IAEtC,IAAI,MAAM,GAAG,UAAU,CAAC,MAAM,GAAG,CAAC,CAAA;IAElC,OAAO,IAAI,sBAAM,EAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,UAAU,CAAC,CAAC,KAAK,EAAE,CAAA;AAC9D,CAAC,CAAA;AAED,MAAM,UAAU,GAAG,GAAW,EAAE;IAC9B,MAAM,QAAQ,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,CAAC,CAAA;IACtC,QAAQ,CAAC,YAAY,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;IAC3B,QAAQ,CAAC,YAAY,CAAC,QAAQ,EAAE,CAAC,CAAC,CAAA;IAClC,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,QAAgB,EAAU,EAAE;IAC5C,OAAO,MAAM,CAAC,UAAU,CAAC,QAAQ,CAAC,CAAC,KAAK,mBAAc,CAAA;AACxD,CAAC,CAAA;AAED,MAAM,8BAA8B,GAAG,UAAU,SAAiB,EAAE,eAAuB;IACzF,aAAa;IACb,MAAM,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC,CAAC,SAAS,CAAC,eAAe,CAAC,CAAA;IAEpG,OAAO,MAAM,CAAC,KAAK,mBAAc,CAAA;AACnC,CAAC,CAAA;AAED,MAAM,2BAA2B,GAAG,UAAU,cAAsB;IAClE,OAAO,MAAM,CAAC,SAAS,CAAC,cAAc,CAAC,CAAC,KAAK,mBAAc,CAAA;AAC7D,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAU,EAAE;IACrC,OAAO,MAAM,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,KAAK,gBAAY,CAAA;AAClD,CAAC,CAAA;AAQD,MAAM,UAAU,GAAU,EAAE,CAAA;AAE5B,MAAM,KAAK,GAAG,CAAC,KAAgB,EAAU,EAAE;IACzC,8BAA8B;IAC9B,uBAAuB;IACvB,gCAAgC;IAChC,8BAA8B;IAE9B,kDAAkD;IAClD,MAAM,IAAI,GAAG,KAAK,CAAC,IAAI,IAAI,EAAE,CAAA;IAC7B,IAAI,IAAI,CAAC,MAAM,GAAG,EAAE,EAAE;QACpB,+BAA+B;QAC/B,OAAO,CAAC,KAAK,CAAC,gEAAgE,CAAC,CAAA;QAC/E,OAAO,CAAC,KAAK,CAAC,sBAAsB,EAAE,IAAI,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACxD,OAAO,CAAC,KAAK,CAAC,8DAA8D,CAAC,CAAA;QAC7E,8BAA8B;KAC/B;IAED,MAAM,KAAK,GAAG,KAAK,CAAC,KAAK,IAAI,UAAU,CAAA;IAEvC,IAAI,GAAG,GAAG,KAAK,CAAC,MAAM,CAAA;IAEtB,IAAI,MAAM,GAAG,MAAM;SAChB,UAAU,CAAC,IAAI,CAAC,CAAC,gBAAgB;SACjC,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,oBAAoB;SAC3C,QAAQ,CAAC,GAAG,CAAC,CAAA;IAEhB,KAAK,IAAI,CAAC,GAAG,CAAC,
EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,MAAM,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAA;KAC1B;IAED,OAAO,MAAM,CAAC,KAAK,gBAAY,CAAA;AACjC,CAAC,CAAA;AASD,MAAM,IAAI,GAAG,CAAC,SAAmB,EAAE,EAAU,EAAE;IAC7C,mBAAmB;IACnB,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,EAAE,CAAA;IACxC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,KAAK,CAAA;IACrC,IAAI,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,UAAU,CAAA;IACxC,IAAI,GAAG,GAAG,MAAM,CAAC,MAAM,CAAA;IAEvB,IAAI,SAAS,GAAG,KAAK,CAAA;IACrB,iEAAiE;IACjE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,SAAS,GAAG,SAAS,IAAI,MAAM,CAAC,CAAC,CAAC,YAAY,MAAM,CAAA;KACrD;IAED,IAAI,MAAM,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,UAAU,CAAC,SAAS,CAAC,CAAA;IAC5D,IAAI,CAAC,SAAS,EAAE;QACd,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;KACnB;SAAM;QACL,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;QACpB,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;YACxB,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,YAAY,MAAM,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA;SACrD;KACF;IACD,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAA;IACpB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,EAAE,CAAC,EAAE,EAAE;QAC5B,IAAI,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAA;QACnB,IAAI,GAAG,KAAK,IAAI,IAAI,OAAO,GAAG,KAAK,WAAW,EAAE;YAC9C,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;SACpB;aAAM,IAAI,GAAG,YAAY,MAAM,EAAE;YAChC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;YAC3B,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;SAChB;aAAM;YACL,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,CAAA;YACvC,MAAM,CAAC,SAAS,CAAC,GAAG,CAAC,CAAA;SACtB;KACF;IAED,IAAI,MAAM,EAAE;QACV,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,6BAA6B;QAChD,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;KACnB;SAAM;QACL,MAAM,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA,CAAC,2BAA2B;KAC/C;IACD,OAAO,MAAM,CAAC,KAAK,eAAW,CAAA;AAChC,CAAC,CAAA;AAOD,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,mBAAe,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAEtG,MAAM,OAAO,GAAG,CAAC,MAAiB,EAAU,EAAE;IAC5C,0CAA0C;IAC1C,IAAI,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,CAAC,MAAM,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,EAAE;QAC/C,OAAO,YAAY,CAAA;KACpB;IAED,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,IAAI,EAAE,CAAA;IAClC,MAAM,IAAI,GAAG,MAAM,CAAC,IAAI,IAAI,CAAC,CAAA;IAE7B,MAAM,YAAY,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC9C,MAAM,GAAG,GAAG,CAAC,GAAG,YAAY,GAAG,CAAC,GAAG,CAAC,CAAA;IACpC,yBAAyB;IACzB,MAAM,IAAI,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IACxC,IAAI,CAAC,CAAC,CAAC,mBAAe,CAAA;IACtB,IAAI,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IACzB,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9B,IAAI,CAAC,YAAY,GAAG,CAAC,CAAC,GAAG,CAAC,CAAA,CAAC,gCAAgC;IAC3D,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,IAAI,CAAC,MAAM,GAAG,CAAC,CAAC,CAAA;IACzC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,MAAM,GAAG,CAAC,SAAiB,EAAE,SAAiB,EAAU,EAAE;IAC9D,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,EAAE,CAAC,CAAA;IACrC,MAAM,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;IAC1B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAA;IAC5B,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,CAAC,CAAC,CAAA;IACjC,MAAM,CAAC,YAAY,CAAC,SAAS,EAAE,EAAE,CAAC,CAAA;IAClC,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAOD,MAAM,cAAc,GAAG,CAAC,IAAU,EAAE,MAAc,EAAU,EAAE;IAC5D,MAAM,SAAS,GAAG,MAAM,CAAC,UAAU,CAAC,MAAM,CAAC,CAAA;IAC3C,MAAM,GAAG,GAAG,CAAC,GAAG,SAAS,GAAG,CAAC,CAAA;IAC7B,yBAAyB;IACzB,MAAM,MAAM,GAAG,MAAM,CAAC,WAAW,CAAC,CAAC,GAAG,GAAG,CAAC,CAAA;IAC1C,MAAM,CAAC,CAAC,CAAC,GAAG,IAAI,CAAA;IAChB,MAAM,CAAC,YAAY,CAAC,GAAG,EAAE,CAAC,CAAC,CAAA;IAC3B,MAAM,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC,EAAE,OAAO,CAAC,CAAA;IAChC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA,CAAC,yBAAyB;IACzC,OAAO,MAAM,
CAAA;AACf,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,mBAAe,CAAA;AACvE,MAAM,sBAAsB,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC,KAAK,mBAAe,CAAA;AAE1E,MAAM,QAAQ,GAAG,CAAC,GAAe,EAAU,EAAE;IAC3C,OAAO,GAAG,CAAC,IAAI;QACb,CAAC,CAAC,cAAc,oBAAgB,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAC;QAC/D,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,GAAG;YAClB,CAAC,CAAC,mBAAmB;YACrB,CAAC,CAAC,sBAAsB,CAAA;AAC5B,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,GAAe,EAAU,EAAE;IACxC,MAAM,IAAI,GAAG,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAC,IAAI,IAAI,EAAE,EAAE,CAAA;IAC3C,OAAO,cAAc,iBAAa,IAAI,CAAC,CAAA;AACzC,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,KAAa,EAAU,EAAE;IACzC,OAAO,MAAM,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,KAAK,yBAAoB,CAAA;AACpD,CAAC,CAAA;AAED,MAAM,QAAQ,GAAG,CAAC,OAAe,EAAU,EAAE;IAC3C,OAAO,cAAc,qBAAgB,OAAO,CAAC,CAAA;AAC/C,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,CAAC,IAAU,EAAU,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;AAE1F,MAAM,WAAW,GAAG,cAAc,gBAAY,CAAA;AAC9C,MAAM,UAAU,GAAG,cAAc,eAAW,CAAA;AAC5C,MAAM,SAAS,GAAG,cAAc,cAAU,CAAA;AAC1C,MAAM,cAAc,GAAG,cAAc,mBAAe,CAAA;AAEpD,MAAM,SAAS,GAAG;IAChB,OAAO;IACP,QAAQ;IACR,UAAU;IACV,8BAA8B;IAC9B,2BAA2B;IAC3B,KAAK;IACL,KAAK;IACL,IAAI;IACJ,OAAO;IACP,QAAQ;IACR,KAAK;IACL,KAAK,EAAE,GAAG,EAAE,CAAC,WAAW;IACxB,IAAI,EAAE,GAAG,EAAE,CAAC,UAAU;IACtB,GAAG,EAAE,GAAG,EAAE,CAAC,SAAS;IACpB,QAAQ;IACR,QAAQ,EAAE,GAAG,EAAE,CAAC,cAAc;IAC9B,QAAQ;IACR,MAAM;CACP,CAAA;AAEQ,8BAAS"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/testing/buffer-list.d.ts b/node_modules/pg-protocol/dist/testing/buffer-list.d.ts new file mode 100644 index 00000000..8824112d --- /dev/null +++ b/node_modules/pg-protocol/dist/testing/buffer-list.d.ts @@ -0,0 +1,15 @@ +/// +export default class BufferList { + buffers: Buffer[]; + constructor(buffers?: Buffer[]); + add(buffer: Buffer, front?: boolean): this; + addInt16(val: number, front?: boolean): this; + getByteLength(initial?: number): number; + addInt32(val: number, first?: boolean): this; + addCString(val: string, front?: boolean): this; + addString(val: string, front?: boolean): this; + addChar(char: string, first?: boolean): this; + addByte(byte: number): this; + join(appendLength?: boolean, char?: string): Buffer; + static concat(): Buffer; +} diff --git a/node_modules/pg-protocol/dist/testing/buffer-list.js b/node_modules/pg-protocol/dist/testing/buffer-list.js new file mode 100644 index 00000000..feb0414a --- /dev/null +++ b/node_modules/pg-protocol/dist/testing/buffer-list.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +class BufferList { + constructor(buffers = []) { + this.buffers = buffers; + } + add(buffer, front) { + this.buffers[front ? 
'unshift' : 'push'](buffer); + return this; + } + addInt16(val, front) { + return this.add(Buffer.from([val >>> 8, val >>> 0]), front); + } + getByteLength(initial) { + return this.buffers.reduce(function (previous, current) { + return previous + current.length; + }, initial || 0); + } + addInt32(val, first) { + return this.add(Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]), first); + } + addCString(val, front) { + var len = Buffer.byteLength(val); + var buffer = Buffer.alloc(len + 1); + buffer.write(val); + buffer[len] = 0; + return this.add(buffer, front); + } + addString(val, front) { + var len = Buffer.byteLength(val); + var buffer = Buffer.alloc(len); + buffer.write(val); + return this.add(buffer, front); + } + addChar(char, first) { + return this.add(Buffer.from(char, 'utf8'), first); + } + addByte(byte) { + return this.add(Buffer.from([byte])); + } + join(appendLength, char) { + var length = this.getByteLength(); + if (appendLength) { + this.addInt32(length + 4, true); + return this.join(false, char); + } + if (char) { + this.addChar(char, true); + length++; + } + var result = Buffer.alloc(length); + var index = 0; + this.buffers.forEach(function (buffer) { + buffer.copy(result, index, 0); + index += buffer.length; + }); + return result; + } + static concat() { + var total = new BufferList(); + for (var i = 0; i < arguments.length; i++) { + total.add(arguments[i]); + } + return total.join(); + } +} +exports.default = BufferList; +//# sourceMappingURL=buffer-list.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/testing/buffer-list.js.map b/node_modules/pg-protocol/dist/testing/buffer-list.js.map new file mode 100644 index 00000000..0e4cb023 --- /dev/null +++ b/node_modules/pg-protocol/dist/testing/buffer-list.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"buffer-list.js","sourceRoot":"","sources":["../../src/testing/buffer-list.ts"],"names":[],"mappings":";;AAAA,MAAqB,UAAU;IAC7B,YAAmB,UAAoB,EAAE;QAAtB,YAAO,GAAP,OAAO,CAAe;IAAG,CAAC;IAEtC,GAAG,CAAC,MAAc,EAAE,KAAe;QACxC,IAAI,CAAC,OAAO,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAEM,QAAQ,CAAC,GAAW,EAAE,KAAe;QAC1C,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,GAAG,KAAK,CAAC,EAAE,GAAG,KAAK,CAAC,CAAC,CAAC,EAAE,KAAK,CAAC,CAAA;IAC7D,CAAC;IAEM,aAAa,CAAC,OAAgB;QACnC,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC,UAAU,QAAQ,EAAE,OAAO;YACpD,OAAO,QAAQ,GAAG,OAAO,CAAC,MAAM,CAAA;QAClC,CAAC,EAAE,OAAO,IAAI,CAAC,CAAC,CAAA;IAClB,CAAC;IAEM,QAAQ,CAAC,GAAW,EAAE,KAAe;QAC1C,OAAO,IAAI,CAAC,GAAG,CACb,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,EAAE,CAAC,GAAG,KAAK,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC,EAC/F,KAAK,CACN,CAAA;IACH,CAAC;IAEM,UAAU,CAAC,GAAW,EAAE,KAAe;QAC5C,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;QAChC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,GAAG,CAAC,CAAC,CAAA;QAClC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACjB,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAChC,CAAC;IAEM,SAAS,CAAC,GAAW,EAAE,KAAe;QAC3C,IAAI,GAAG,GAAG,MAAM,CAAC,UAAU,CAAC,GAAG,CAAC,CAAA;QAChC,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QAC9B,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;QACjB,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAA;IAChC,CAAC;IAEM,OAAO,CAAC,IAAY,EAAE,KAAe;QAC1C,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,EAAE,MAAM,CAAC,EAAE,KAAK,CAAC,CAAA;IACnD,CAAC;IAEM,OAAO,CAAC,IAAY;QACzB,OAAO,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACtC,CAAC;IAEM,IAAI,CAAC,YAAsB,EAAE,IAAa;QAC/C,IAAI,MAAM,GAAG,IAAI,CAAC,aAAa,EAAE,CAAA;QACjC,IAAI,YAAY,EAAE;YAChB,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE,IAAI,CAAC,CAAA;YAC/B,OAAO,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,IAAI,CAAC,CAAA;SAC9B;QACD,IAAI,IAAI,EAAE;YACR,IAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;YACxB,MAAM,EAAE,CAAA;SACT;QACD,IAAI,MAAM,GAAG,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,CAAA;QACjC,IAAI,KAAK,GAAG,CAAC,CAAA;QACb,IAAI,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,MAAM;YACnC,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,KAAK,EAAE,CAAC,CAAC,CAAA;YAC7B,KAAK,IAAI,MAAM,CAAC,MAAM,CAAA;QACxB,CAAC,CAAC,CAAA;QACF,OAAO,MAAM,CAAA;IACf,CAAC;IAEM,MAAM,CAAC,MAAM;QAClB,IAAI,KAAK,GAAG,IAAI,UAAU,EAAE,CAAA;QAC5B,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACzC,KAAK,CAAC,GAAG,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAA;SACxB;QACD,OAAO,KAAK,CAAC,IAAI,EAAE,CAAA;IACrB,CAAC;CACF;AA1ED,6BA0EC"} \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/testing/test-buffers.d.ts b/node_modules/pg-protocol/dist/testing/test-buffers.d.ts new file mode 100644 index 00000000..779ae70f --- /dev/null +++ b/node_modules/pg-protocol/dist/testing/test-buffers.d.ts @@ -0,0 +1,30 @@ +/// +import BufferList from './buffer-list'; +declare const buffers: { + readyForQuery: () => Buffer; + authenticationOk: () => Buffer; + authenticationCleartextPassword: () => Buffer; + authenticationMD5Password: () => Buffer; + authenticationSASL: () => Buffer; + authenticationSASLContinue: () => Buffer; + authenticationSASLFinal: () => Buffer; + parameterStatus: (name: string, value: string) => Buffer; + backendKeyData: (processID: number, secretKey: number) => Buffer; + commandComplete: (string: string) => Buffer; + rowDescription: (fields: any[]) => Buffer; + dataRow: (columns: any[]) => Buffer; + error: 
(fields: any) => Buffer; + notice: (fields: any) => Buffer; + errorOrNotice: (fields: any) => BufferList; + parseComplete: () => Buffer; + bindComplete: () => Buffer; + notification: (id: number, channel: string, payload: string) => Buffer; + emptyQuery: () => Buffer; + portalSuspended: () => Buffer; + closeComplete: () => Buffer; + copyIn: (cols: number) => Buffer; + copyOut: (cols: number) => Buffer; + copyData: (bytes: Buffer) => Buffer; + copyDone: () => Buffer; +}; +export default buffers; diff --git a/node_modules/pg-protocol/dist/testing/test-buffers.js b/node_modules/pg-protocol/dist/testing/test-buffers.js new file mode 100644 index 00000000..72ce8a92 --- /dev/null +++ b/node_modules/pg-protocol/dist/testing/test-buffers.js @@ -0,0 +1,137 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// http://developer.postgresql.org/pgdocs/postgres/protocol-message-formats.html +const buffer_list_1 = __importDefault(require("./buffer-list")); +const buffers = { + readyForQuery: function () { + return new buffer_list_1.default().add(Buffer.from('I')).join(true, 'Z'); + }, + authenticationOk: function () { + return new buffer_list_1.default().addInt32(0).join(true, 'R'); + }, + authenticationCleartextPassword: function () { + return new buffer_list_1.default().addInt32(3).join(true, 'R'); + }, + authenticationMD5Password: function () { + return new buffer_list_1.default() + .addInt32(5) + .add(Buffer.from([1, 2, 3, 4])) + .join(true, 'R'); + }, + authenticationSASL: function () { + return new buffer_list_1.default().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R'); + }, + authenticationSASLContinue: function () { + return new buffer_list_1.default().addInt32(11).addString('data').join(true, 'R'); + }, + authenticationSASLFinal: function () { + return new buffer_list_1.default().addInt32(12).addString('data').join(true, 'R'); + }, + parameterStatus: function (name, value) { + return new buffer_list_1.default().addCString(name).addCString(value).join(true, 'S'); + }, + backendKeyData: function (processID, secretKey) { + return new buffer_list_1.default().addInt32(processID).addInt32(secretKey).join(true, 'K'); + }, + commandComplete: function (string) { + return new buffer_list_1.default().addCString(string).join(true, 'C'); + }, + rowDescription: function (fields) { + fields = fields || []; + var buf = new buffer_list_1.default(); + buf.addInt16(fields.length); + fields.forEach(function (field) { + buf + .addCString(field.name) + .addInt32(field.tableID || 0) + .addInt16(field.attributeNumber || 0) + .addInt32(field.dataTypeID || 0) + .addInt16(field.dataTypeSize || 0) + .addInt32(field.typeModifier || 0) + .addInt16(field.formatCode || 0); + }); + return buf.join(true, 'T'); + }, + dataRow: function (columns) { + columns = columns || []; + var buf = new buffer_list_1.default(); + buf.addInt16(columns.length); + columns.forEach(function (col) { + if (col == null) { + buf.addInt32(-1); + } + else { + var strBuf = Buffer.from(col, 'utf8'); + buf.addInt32(strBuf.length); + buf.add(strBuf); + } + }); + return buf.join(true, 'D'); + }, + error: function (fields) { + return buffers.errorOrNotice(fields).join(true, 'E'); + }, + notice: function (fields) { + return buffers.errorOrNotice(fields).join(true, 'N'); + }, + errorOrNotice: function (fields) { + fields = fields || []; + var buf = new 
buffer_list_1.default(); + fields.forEach(function (field) { + buf.addChar(field.type); + buf.addCString(field.value); + }); + return buf.add(Buffer.from([0])); // terminator + }, + parseComplete: function () { + return new buffer_list_1.default().join(true, '1'); + }, + bindComplete: function () { + return new buffer_list_1.default().join(true, '2'); + }, + notification: function (id, channel, payload) { + return new buffer_list_1.default().addInt32(id).addCString(channel).addCString(payload).join(true, 'A'); + }, + emptyQuery: function () { + return new buffer_list_1.default().join(true, 'I'); + }, + portalSuspended: function () { + return new buffer_list_1.default().join(true, 's'); + }, + closeComplete: function () { + return new buffer_list_1.default().join(true, '3'); + }, + copyIn: function (cols) { + const list = new buffer_list_1.default() + // text mode + .addByte(0) + // column count + .addInt16(cols); + for (let i = 0; i < cols; i++) { + list.addInt16(i); + } + return list.join(true, 'G'); + }, + copyOut: function (cols) { + const list = new buffer_list_1.default() + // text mode + .addByte(0) + // column count + .addInt16(cols); + for (let i = 0; i < cols; i++) { + list.addInt16(i); + } + return list.join(true, 'H'); + }, + copyData: function (bytes) { + return new buffer_list_1.default().add(bytes).join(true, 'd'); + }, + copyDone: function () { + return new buffer_list_1.default().join(true, 'c'); + }, +}; +exports.default = buffers; +//# sourceMappingURL=test-buffers.js.map \ No newline at end of file diff --git a/node_modules/pg-protocol/dist/testing/test-buffers.js.map b/node_modules/pg-protocol/dist/testing/test-buffers.js.map new file mode 100644 index 00000000..7dacbead --- /dev/null +++ b/node_modules/pg-protocol/dist/testing/test-buffers.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"test-buffers.js","sourceRoot":"","sources":["../../src/testing/test-buffers.ts"],"names":[],"mappings":";;;;;AAAA,gFAAgF;AAChF,gEAAsC;AAEtC,MAAM,OAAO,GAAG;IACd,aAAa,EAAE;QACb,OAAO,IAAI,qBAAU,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC/D,CAAC;IAED,gBAAgB,EAAE;QAChB,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACrD,CAAC;IAED,+BAA+B,EAAE;QAC/B,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACrD,CAAC;IAED,yBAAyB,EAAE;QACzB,OAAO,IAAI,qBAAU,EAAE;aACpB,QAAQ,CAAC,CAAC,CAAC;aACX,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;aAC9B,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACpB,CAAC;IAED,kBAAkB,EAAE;QAClB,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,eAAe,CAAC,CAAC,UAAU,CAAC,EAAE,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACjG,CAAC;IAED,0BAA0B,EAAE;QAC1B,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACxE,CAAC;IAED,uBAAuB,EAAE;QACvB,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACxE,CAAC;IAED,eAAe,EAAE,UAAU,IAAY,EAAE,KAAa;QACpD,OAAO,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC5E,CAAC;IAED,cAAc,EAAE,UAAU,SAAiB,EAAE,SAAiB;QAC5D,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,QAAQ,CAAC,SAAS,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACjF,CAAC;IAED,eAAe,EAAE,UAAU,MAAc;QACvC,OAAO,IAAI,qBAAU,EAAE,CAAC,UAAU,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC5D,CAAC;IAED,cAAc,EAAE,UAAU,MAAa;QACrC,MAAM,GAAG,MAAM,IAAI,EAAE,CAAA;QACrB,IAAI,GAAG,GAAG,IAAI,qBAAU,EAAE,CAAA;QAC1B,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;QAC3B,MAAM,CAAC,OAAO,CAAC,UAAU,KAAK;YAC5B,GAAG;iBACA,UAAU,CAAC,KAAK,CAAC,IAAI,CAAC;iBACtB,QAAQ,CAAC,KAAK,CAAC,OAAO,IAAI,CAAC,CAAC;iBAC5B,QAAQ,CAAC,KAAK,CAAC,eAAe,IAAI,CAAC,CAAC;iBACpC,QAAQ,CAAC,KAAK,CAAC,UAAU,IAAI,CAAC,CAAC;iBAC/B,QAAQ,CAAC,KAAK,CAAC,YAAY,IAAI,CAAC,CAAC;iBACjC,QAAQ,CAAC,KAAK,CAAC,YAAY,IAAI,CAAC,CAAC;iBACjC,QAAQ,CAAC,KAAK,CAAC,UAAU,IAAI,CAAC,CAAC,CAAA;QACpC,CAAC,CAAC,CAAA;QACF,OAAO,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC5B,CAAC;IAED,OAAO,EAAE,UAAU,OAAc;QAC/B,OAAO,GAAG,OAAO,IAAI,EAAE,CAAA;QACvB,IAAI,GAAG,GAAG,IAAI,qBAAU,EAAE,CAAA;QAC1B,GAAG,CAAC,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CAAA;QAC5B,OAAO,CAAC,OAAO,CAAC,UAAU,GAAG;YAC3B,IAAI,GAAG,IAAI,IAAI,EAAE;gBACf,GAAG,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAA;aACjB;iBAAM;gBACL,IAAI,MAAM,GAAG,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;gBACrC,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,MAAM,CAAC,CAAA;gBAC3B,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;aAChB;QACH,CAAC,CAAC,CAAA;QACF,OAAO,GAAG,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC5B,CAAC;IAED,KAAK,EAAE,UAAU,MAAW;QAC1B,OAAO,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACtD,CAAC;IAED,MAAM,EAAE,UAAU,MAAW;QAC3B,OAAO,OAAO,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACtD,CAAC;IAED,aAAa,EAAE,UAAU,MAAW;QAClC,MAAM,GAAG,MAAM,IAAI,EAAE,CAAA;QACrB,IAAI,GAAG,GAAG,IAAI,qBAAU,EAAE,CAAA;QAC1B,MAAM,CAAC,OAAO,CAAC,UAAU,KAAU;YACjC,GAAG,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,CAAA;YACvB,GAAG,CAAC,UAAU,CAAC,KAAK,CAAC,KAAK,CAAC,CAAA;QAC7B,CAAC,CAAC,CAAA;QACF,OAAO,GAAG,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAA,CAAC,aAAa;IAChD,CAAC;IAED,aAAa,EAAE;QACb,OAAO,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzC,CAAC;IAED,YAAY,EAAE;QACZ,OAAO,IAAI,qBAAU,E
AAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzC,CAAC;IAED,YAAY,EAAE,UAAU,EAAU,EAAE,OAAe,EAAE,OAAe;QAClE,OAAO,IAAI,qBAAU,EAAE,CAAC,QAAQ,CAAC,EAAE,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC9F,CAAC;IAED,UAAU,EAAE;QACV,OAAO,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzC,CAAC;IAED,eAAe,EAAE;QACf,OAAO,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzC,CAAC;IAED,aAAa,EAAE;QACb,OAAO,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzC,CAAC;IAED,MAAM,EAAE,UAAU,IAAY;QAC5B,MAAM,IAAI,GAAG,IAAI,qBAAU,EAAE;YAC3B,YAAY;aACX,OAAO,CAAC,CAAC,CAAC;YACX,eAAe;aACd,QAAQ,CAAC,IAAI,CAAC,CAAA;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;YAC7B,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;SACjB;QACD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC7B,CAAC;IAED,OAAO,EAAE,UAAU,IAAY;QAC7B,MAAM,IAAI,GAAG,IAAI,qBAAU,EAAE;YAC3B,YAAY;aACX,OAAO,CAAC,CAAC,CAAC;YACX,eAAe;aACd,QAAQ,CAAC,IAAI,CAAC,CAAA;QACjB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,EAAE,CAAC,EAAE,EAAE;YAC7B,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAA;SACjB;QACD,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAC7B,CAAC;IAED,QAAQ,EAAE,UAAU,KAAa;QAC/B,OAAO,IAAI,qBAAU,EAAE,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACpD,CAAC;IAED,QAAQ,EAAE;QACR,OAAO,IAAI,qBAAU,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IACzC,CAAC;CACF,CAAA;AAED,kBAAe,OAAO,CAAA"} \ No newline at end of file diff --git a/node_modules/pg-protocol/package.json b/node_modules/pg-protocol/package.json new file mode 100644 index 00000000..48bd9262 --- /dev/null +++ b/node_modules/pg-protocol/package.json @@ -0,0 +1,51 @@ +{ + "_from": "pg-protocol@^1.2.5", + "_id": "pg-protocol@1.2.5", + "_inBundle": false, + "_integrity": "sha512-1uYCckkuTfzz/FCefvavRywkowa6M5FohNMF5OjKrqo9PSR8gYc8poVmwwYQaBxhmQdBjhtP514eXy9/Us2xKg==", + "_location": "/pg-protocol", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-protocol@^1.2.5", + "name": "pg-protocol", + "escapedName": "pg-protocol", + "rawSpec": "^1.2.5", + "saveSpec": null, + "fetchSpec": "^1.2.5" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.2.5.tgz", + "_shasum": "28a1492cde11646ff2d2d06bdee42a3ba05f126c", + "_spec": "pg-protocol@^1.2.5", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "bundleDependencies": false, + "deprecated": false, + "description": "The postgres client/server binary protocol, implemented in TypeScript", + "devDependencies": { + "@types/chai": "^4.2.7", + "@types/mocha": "^5.2.7", + "@types/node": "^12.12.21", + "chai": "^4.2.0", + "chunky": "^0.0.0", + "mocha": "^7.1.2", + "ts-node": "^8.5.4", + "typescript": "^3.7.3" + }, + "gitHead": "dec892ed015af8844f1aa6a9475832c88693b464", + "license": "MIT", + "main": "dist/index.js", + "name": "pg-protocol", + "scripts": { + "build": "tsc", + "build:watch": "tsc --watch", + "prepublish": "yarn build", + "pretest": "yarn build", + "test": "mocha dist/**/*.test.js" + }, + "types": "dist/index.d.ts", + "version": "1.2.5" +} diff --git a/node_modules/pg-protocol/src/b.ts b/node_modules/pg-protocol/src/b.ts new file mode 100644 index 00000000..028b7639 --- /dev/null +++ b/node_modules/pg-protocol/src/b.ts @@ -0,0 +1,28 @@ +// file for microbenchmarking + +import { Writer } from './buffer-writer' +import { serialize } from './index' +import { BufferReader } from './buffer-reader' + +const LOOPS = 1000 +let count = 0 +let start = Date.now() +const 
writer = new Writer() + +const reader = new BufferReader() +const buffer = Buffer.from([33, 33, 33, 33, 33, 33, 33, 0]) + +const run = () => { + if (count > LOOPS) { + console.log(Date.now() - start) + return + } + count++ + for (let i = 0; i < LOOPS; i++) { + reader.setBuffer(0, buffer) + reader.cstring() + } + setImmediate(run) +} + +run() diff --git a/node_modules/pg-protocol/src/buffer-reader.ts b/node_modules/pg-protocol/src/buffer-reader.ts new file mode 100644 index 00000000..2305e130 --- /dev/null +++ b/node_modules/pg-protocol/src/buffer-reader.ts @@ -0,0 +1,53 @@ +const emptyBuffer = Buffer.allocUnsafe(0) + +export class BufferReader { + private buffer: Buffer = emptyBuffer + + // TODO(bmc): support non-utf8 encoding? + private encoding: string = 'utf-8' + + constructor(private offset: number = 0) {} + + public setBuffer(offset: number, buffer: Buffer): void { + this.offset = offset + this.buffer = buffer + } + + public int16(): number { + const result = this.buffer.readInt16BE(this.offset) + this.offset += 2 + return result + } + + public byte(): number { + const result = this.buffer[this.offset] + this.offset++ + return result + } + + public int32(): number { + const result = this.buffer.readInt32BE(this.offset) + this.offset += 4 + return result + } + + public string(length: number): string { + const result = this.buffer.toString(this.encoding, this.offset, this.offset + length) + this.offset += length + return result + } + + public cstring(): string { + const start = this.offset + let end = start + while (this.buffer[end++] !== 0) {} + this.offset = end + return this.buffer.toString(this.encoding, start, end - 1) + } + + public bytes(length: number): Buffer { + const result = this.buffer.slice(this.offset, this.offset + length) + this.offset += length + return result + } +} diff --git a/node_modules/pg-protocol/src/buffer-writer.ts b/node_modules/pg-protocol/src/buffer-writer.ts new file mode 100644 index 00000000..3a8d80b3 --- /dev/null +++ b/node_modules/pg-protocol/src/buffer-writer.ts @@ -0,0 +1,85 @@ +//binary data writer tuned for encoding binary specific to the postgres binary protocol + +export class Writer { + private buffer: Buffer + private offset: number = 5 + private headerPosition: number = 0 + constructor(private size = 256) { + this.buffer = Buffer.alloc(size) + } + + private ensure(size: number): void { + var remaining = this.buffer.length - this.offset + if (remaining < size) { + var oldBuffer = this.buffer + // exponential growth factor of around ~ 1.5 + // https://stackoverflow.com/questions/2269063/buffer-growth-strategy + var newSize = oldBuffer.length + (oldBuffer.length >> 1) + size + this.buffer = Buffer.alloc(newSize) + oldBuffer.copy(this.buffer) + } + } + + public addInt32(num: number): Writer { + this.ensure(4) + this.buffer[this.offset++] = (num >>> 24) & 0xff + this.buffer[this.offset++] = (num >>> 16) & 0xff + this.buffer[this.offset++] = (num >>> 8) & 0xff + this.buffer[this.offset++] = (num >>> 0) & 0xff + return this + } + + public addInt16(num: number): Writer { + this.ensure(2) + this.buffer[this.offset++] = (num >>> 8) & 0xff + this.buffer[this.offset++] = (num >>> 0) & 0xff + return this + } + + public addCString(string: string): Writer { + if (!string) { + this.ensure(1) + } else { + var len = Buffer.byteLength(string) + this.ensure(len + 1) // +1 for null terminator + this.buffer.write(string, this.offset, 'utf-8') + this.offset += len + } + + this.buffer[this.offset++] = 0 // null terminator + return this + } + + public 
addString(string: string = ''): Writer { + var len = Buffer.byteLength(string) + this.ensure(len) + this.buffer.write(string, this.offset) + this.offset += len + return this + } + + public add(otherBuffer: Buffer): Writer { + this.ensure(otherBuffer.length) + otherBuffer.copy(this.buffer, this.offset) + this.offset += otherBuffer.length + return this + } + + private join(code?: number): Buffer { + if (code) { + this.buffer[this.headerPosition] = code + //length is everything in this packet minus the code + const length = this.offset - (this.headerPosition + 1) + this.buffer.writeInt32BE(length, this.headerPosition + 1) + } + return this.buffer.slice(code ? 0 : 5, this.offset) + } + + public flush(code?: number): Buffer { + var result = this.join(code) + this.offset = 5 + this.headerPosition = 0 + this.buffer = Buffer.allocUnsafe(this.size) + return result + } +} diff --git a/node_modules/pg-protocol/src/inbound-parser.test.ts b/node_modules/pg-protocol/src/inbound-parser.test.ts new file mode 100644 index 00000000..3fcbe410 --- /dev/null +++ b/node_modules/pg-protocol/src/inbound-parser.test.ts @@ -0,0 +1,522 @@ +import buffers from './testing/test-buffers' +import BufferList from './testing/buffer-list' +import { parse } from '.' +import assert from 'assert' +import { PassThrough } from 'stream' +import { BackendMessage } from './messages' + +var authOkBuffer = buffers.authenticationOk() +var paramStatusBuffer = buffers.parameterStatus('client_encoding', 'UTF8') +var readyForQueryBuffer = buffers.readyForQuery() +var backendKeyDataBuffer = buffers.backendKeyData(1, 2) +var commandCompleteBuffer = buffers.commandComplete('SELECT 3') +var parseCompleteBuffer = buffers.parseComplete() +var bindCompleteBuffer = buffers.bindComplete() +var portalSuspendedBuffer = buffers.portalSuspended() + +var addRow = function (bufferList: BufferList, name: string, offset: number) { + return bufferList + .addCString(name) // field name + .addInt32(offset++) // table id + .addInt16(offset++) // attribute of column number + .addInt32(offset++) // objectId of field's data type + .addInt16(offset++) // datatype size + .addInt32(offset++) // type modifier + .addInt16(0) // format code, 0 => text +} + +var row1 = { + name: 'id', + tableID: 1, + attributeNumber: 2, + dataTypeID: 3, + dataTypeSize: 4, + typeModifier: 5, + formatCode: 0, +} +var oneRowDescBuff = buffers.rowDescription([row1]) +row1.name = 'bang' + +var twoRowBuf = buffers.rowDescription([ + row1, + { + name: 'whoah', + tableID: 10, + attributeNumber: 11, + dataTypeID: 12, + dataTypeSize: 13, + typeModifier: 14, + formatCode: 0, + }, +]) + +var emptyRowFieldBuf = new BufferList().addInt16(0).join(true, 'D') + +var emptyRowFieldBuf = buffers.dataRow([]) + +var oneFieldBuf = new BufferList() + .addInt16(1) // number of fields + .addInt32(5) // length of bytes of fields + .addCString('test') + .join(true, 'D') + +var oneFieldBuf = buffers.dataRow(['test']) + +var expectedAuthenticationOkayMessage = { + name: 'authenticationOk', + length: 8, +} + +var expectedParameterStatusMessage = { + name: 'parameterStatus', + parameterName: 'client_encoding', + parameterValue: 'UTF8', + length: 25, +} + +var expectedBackendKeyDataMessage = { + name: 'backendKeyData', + processID: 1, + secretKey: 2, +} + +var expectedReadyForQueryMessage = { + name: 'readyForQuery', + length: 5, + status: 'I', +} + +var expectedCommandCompleteMessage = { + name: 'commandComplete', + length: 13, + text: 'SELECT 3', +} +var emptyRowDescriptionBuffer = new BufferList() + .addInt16(0) // 
number of fields + .join(true, 'T') + +var expectedEmptyRowDescriptionMessage = { + name: 'rowDescription', + length: 6, + fieldCount: 0, + fields: [], +} +var expectedOneRowMessage = { + name: 'rowDescription', + length: 27, + fieldCount: 1, + fields: [ + { + name: 'id', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + ], +} + +var expectedTwoRowMessage = { + name: 'rowDescription', + length: 53, + fieldCount: 2, + fields: [ + { + name: 'bang', + tableID: 1, + columnID: 2, + dataTypeID: 3, + dataTypeSize: 4, + dataTypeModifier: 5, + format: 'text', + }, + { + name: 'whoah', + tableID: 10, + columnID: 11, + dataTypeID: 12, + dataTypeSize: 13, + dataTypeModifier: 14, + format: 'text', + }, + ], +} + +var testForMessage = function (buffer: Buffer, expectedMessage: any) { + it('recieves and parses ' + expectedMessage.name, async () => { + const messages = await parseBuffers([buffer]) + const [lastMessage] = messages + + for (const key in expectedMessage) { + assert.deepEqual((lastMessage as any)[key], expectedMessage[key]) + } + }) +} + +var plainPasswordBuffer = buffers.authenticationCleartextPassword() +var md5PasswordBuffer = buffers.authenticationMD5Password() +var SASLBuffer = buffers.authenticationSASL() +var SASLContinueBuffer = buffers.authenticationSASLContinue() +var SASLFinalBuffer = buffers.authenticationSASLFinal() + +var expectedPlainPasswordMessage = { + name: 'authenticationCleartextPassword', +} + +var expectedMD5PasswordMessage = { + name: 'authenticationMD5Password', + salt: Buffer.from([1, 2, 3, 4]), +} + +var expectedSASLMessage = { + name: 'authenticationSASL', + mechanisms: ['SCRAM-SHA-256'], +} + +var expectedSASLContinueMessage = { + name: 'authenticationSASLContinue', + data: 'data', +} + +var expectedSASLFinalMessage = { + name: 'authenticationSASLFinal', + data: 'data', +} + +var notificationResponseBuffer = buffers.notification(4, 'hi', 'boom') +var expectedNotificationResponseMessage = { + name: 'notification', + processId: 4, + channel: 'hi', + payload: 'boom', +} + +const parseBuffers = async (buffers: Buffer[]): Promise => { + const stream = new PassThrough() + for (const buffer of buffers) { + stream.write(buffer) + } + stream.end() + const msgs: BackendMessage[] = [] + await parse(stream, (msg) => msgs.push(msg)) + return msgs +} + +describe('PgPacketStream', function () { + testForMessage(authOkBuffer, expectedAuthenticationOkayMessage) + testForMessage(plainPasswordBuffer, expectedPlainPasswordMessage) + testForMessage(md5PasswordBuffer, expectedMD5PasswordMessage) + testForMessage(SASLBuffer, expectedSASLMessage) + testForMessage(SASLContinueBuffer, expectedSASLContinueMessage) + + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLContinueBuffer = Buffer.concat([SASLContinueBuffer, Buffer.from([1, 2, 3, 4])]) + testForMessage(extendedSASLContinueBuffer, expectedSASLContinueMessage) + + testForMessage(SASLFinalBuffer, expectedSASLFinalMessage) + + // this exercises a found bug in the parser: + // https://github.com/brianc/node-postgres/pull/2210#issuecomment-627626084 + // and adds a test which is deterministic, rather than relying on network packet chunking + const extendedSASLFinalBuffer = Buffer.concat([SASLFinalBuffer, Buffer.from([1, 2, 4, 5])]) + testForMessage(extendedSASLFinalBuffer, 
expectedSASLFinalMessage) + + testForMessage(paramStatusBuffer, expectedParameterStatusMessage) + testForMessage(backendKeyDataBuffer, expectedBackendKeyDataMessage) + testForMessage(readyForQueryBuffer, expectedReadyForQueryMessage) + testForMessage(commandCompleteBuffer, expectedCommandCompleteMessage) + testForMessage(notificationResponseBuffer, expectedNotificationResponseMessage) + testForMessage(buffers.emptyQuery(), { + name: 'emptyQuery', + length: 4, + }) + + testForMessage(Buffer.from([0x6e, 0, 0, 0, 4]), { + name: 'noData', + }) + + describe('rowDescription messages', function () { + testForMessage(emptyRowDescriptionBuffer, expectedEmptyRowDescriptionMessage) + testForMessage(oneRowDescBuff, expectedOneRowMessage) + testForMessage(twoRowBuf, expectedTwoRowMessage) + }) + + describe('parsing rows', function () { + describe('parsing empty row', function () { + testForMessage(emptyRowFieldBuf, { + name: 'dataRow', + fieldCount: 0, + }) + }) + + describe('parsing data row with fields', function () { + testForMessage(oneFieldBuf, { + name: 'dataRow', + fieldCount: 1, + fields: ['test'], + }) + }) + }) + + describe('notice message', function () { + // this uses the same logic as error message + var buff = buffers.notice([{ type: 'C', value: 'code' }]) + testForMessage(buff, { + name: 'notice', + code: 'code', + }) + }) + + testForMessage(buffers.error([]), { + name: 'error', + }) + + describe('with all the fields', function () { + var buffer = buffers.error([ + { + type: 'S', + value: 'ERROR', + }, + { + type: 'C', + value: 'code', + }, + { + type: 'M', + value: 'message', + }, + { + type: 'D', + value: 'details', + }, + { + type: 'H', + value: 'hint', + }, + { + type: 'P', + value: '100', + }, + { + type: 'p', + value: '101', + }, + { + type: 'q', + value: 'query', + }, + { + type: 'W', + value: 'where', + }, + { + type: 'F', + value: 'file', + }, + { + type: 'L', + value: 'line', + }, + { + type: 'R', + value: 'routine', + }, + { + type: 'Z', // ignored + value: 'alsdkf', + }, + ]) + + testForMessage(buffer, { + name: 'error', + severity: 'ERROR', + code: 'code', + message: 'message', + detail: 'details', + hint: 'hint', + position: '100', + internalPosition: '101', + internalQuery: 'query', + where: 'where', + file: 'file', + line: 'line', + routine: 'routine', + }) + }) + + testForMessage(parseCompleteBuffer, { + name: 'parseComplete', + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }) + + testForMessage(bindCompleteBuffer, { + name: 'bindComplete', + }) + + testForMessage(buffers.closeComplete(), { + name: 'closeComplete', + }) + + describe('parses portal suspended message', function () { + testForMessage(portalSuspendedBuffer, { + name: 'portalSuspended', + }) + }) + + describe('parses replication start message', function () { + testForMessage(Buffer.from([0x57, 0x00, 0x00, 0x00, 0x04]), { + name: 'replicationStart', + length: 4, + }) + }) + + describe('copy', () => { + testForMessage(buffers.copyIn(0), { + name: 'copyInResponse', + length: 7, + binary: false, + columnTypes: [], + }) + + testForMessage(buffers.copyIn(2), { + name: 'copyInResponse', + length: 11, + binary: false, + columnTypes: [0, 1], + }) + + testForMessage(buffers.copyOut(0), { + name: 'copyOutResponse', + length: 7, + binary: false, + columnTypes: [], + }) + + testForMessage(buffers.copyOut(3), { + name: 'copyOutResponse', + length: 13, + binary: false, + columnTypes: [0, 1, 2], + }) + + testForMessage(buffers.copyDone(), { + name: 'copyDone', + length: 4, + }) + + 
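+    // the expected length of 7 in the copyData test below is the 4-byte length
+    // field plus the 3-byte chunk; the leading 'd' code byte is not counted,
+    // per the framing rule noted in parser.ts (the int32 length includes itself
+    // but excludes the message code)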
testForMessage(buffers.copyData(Buffer.from([5, 6, 7])), { + name: 'copyData', + length: 7, + chunk: Buffer.from([5, 6, 7]), + }) + }) + + // since the data message on a stream can randomly divide the incomming + // tcp packets anywhere, we need to make sure we can parse every single + // split on a tcp message + describe('split buffer, single message parsing', function () { + var fullBuffer = buffers.dataRow([null, 'bang', 'zug zug', null, '!']) + + it('parses when full buffer comes in', async function () { + const messages = await parseBuffers([fullBuffer]) + const message = messages[0] as any + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + }) + + var testMessageRecievedAfterSpiltAt = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parseBuffers([fullBuffer]) + const message = messages[0] as any + assert.equal(message.fields.length, 5) + assert.equal(message.fields[0], null) + assert.equal(message.fields[1], 'bang') + assert.equal(message.fields[2], 'zug zug') + assert.equal(message.fields[3], null) + assert.equal(message.fields[4], '!') + } + + it('parses when split in the middle', function () { + testMessageRecievedAfterSpiltAt(6) + }) + + it('parses when split at end', function () { + testMessageRecievedAfterSpiltAt(2) + }) + + it('parses when split at beginning', function () { + testMessageRecievedAfterSpiltAt(fullBuffer.length - 2) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 1) + testMessageRecievedAfterSpiltAt(fullBuffer.length - 5) + }) + }) + + describe('split buffer, multiple message parsing', function () { + var dataRowBuffer = buffers.dataRow(['!']) + var readyForQueryBuffer = buffers.readyForQuery() + var fullBuffer = Buffer.alloc(dataRowBuffer.length + readyForQueryBuffer.length) + dataRowBuffer.copy(fullBuffer, 0, 0) + readyForQueryBuffer.copy(fullBuffer, dataRowBuffer.length, 0) + + var verifyMessages = function (messages: any[]) { + assert.strictEqual(messages.length, 2) + assert.deepEqual(messages[0], { + name: 'dataRow', + fieldCount: 1, + length: 11, + fields: ['!'], + }) + assert.equal(messages[0].fields[0], '!') + assert.deepEqual(messages[1], { + name: 'readyForQuery', + length: 5, + status: 'I', + }) + } + // sanity check + it('recieves both messages when packet is not split', async function () { + const messages = await parseBuffers([fullBuffer]) + verifyMessages(messages) + }) + + var splitAndVerifyTwoMessages = async function (split: number) { + var firstBuffer = Buffer.alloc(fullBuffer.length - split) + var secondBuffer = Buffer.alloc(fullBuffer.length - firstBuffer.length) + fullBuffer.copy(firstBuffer, 0, 0) + fullBuffer.copy(secondBuffer, 0, firstBuffer.length) + const messages = await parseBuffers([firstBuffer, secondBuffer]) + verifyMessages(messages) + } + + describe('recieves both messages when packet is split', function () { + it('in the middle', function () { + return splitAndVerifyTwoMessages(11) + }) + it('at the front', function () { + return Promise.all([ + splitAndVerifyTwoMessages(fullBuffer.length - 1), + splitAndVerifyTwoMessages(fullBuffer.length - 4), + splitAndVerifyTwoMessages(fullBuffer.length - 6), + 
]) + }) + + it('at the end', function () { + return Promise.all([splitAndVerifyTwoMessages(8), splitAndVerifyTwoMessages(1)]) + }) + }) + }) +}) diff --git a/node_modules/pg-protocol/src/index.ts b/node_modules/pg-protocol/src/index.ts new file mode 100644 index 00000000..486f79c8 --- /dev/null +++ b/node_modules/pg-protocol/src/index.ts @@ -0,0 +1,11 @@ +import { BackendMessage } from './messages' +import { serialize } from './serializer' +import { Parser, MessageCallback } from './parser' + +export function parse(stream: NodeJS.ReadableStream, callback: MessageCallback): Promise { + const parser = new Parser() + stream.on('data', (buffer: Buffer) => parser.parse(buffer, callback)) + return new Promise((resolve) => stream.on('end', () => resolve())) +} + +export { serialize } diff --git a/node_modules/pg-protocol/src/messages.ts b/node_modules/pg-protocol/src/messages.ts new file mode 100644 index 00000000..03c2f61e --- /dev/null +++ b/node_modules/pg-protocol/src/messages.ts @@ -0,0 +1,222 @@ +export type Mode = 'text' | 'binary' + +export const enum MessageName { + parseComplete = 'parseComplete', + bindComplete = 'bindComplete', + closeComplete = 'closeComplete', + noData = 'noData', + portalSuspended = 'portalSuspended', + replicationStart = 'replicationStart', + emptyQuery = 'emptyQuery', + copyDone = 'copyDone', + copyData = 'copyData', + rowDescription = 'rowDescription', + parameterStatus = 'parameterStatus', + backendKeyData = 'backendKeyData', + notification = 'notification', + readyForQuery = 'readyForQuery', + commandComplete = 'commandComplete', + dataRow = 'dataRow', + copyInResponse = 'copyInResponse', + copyOutResponse = 'copyOutResponse', + authenticationOk = 'authenticationOk', + authenticationMD5Password = 'authenticationMD5Password', + authenticationCleartextPassword = 'authenticationCleartextPassword', + authenticationSASL = 'authenticationSASL', + authenticationSASLContinue = 'authenticationSASLContinue', + authenticationSASLFinal = 'authenticationSASLFinal', + error = 'error', + notice = 'notice', +} + +export interface BackendMessage { + name: MessageName + length: number +} + +export const parseComplete: BackendMessage = { + name: MessageName.parseComplete, + length: 5, +} + +export const bindComplete: BackendMessage = { + name: MessageName.bindComplete, + length: 5, +} + +export const closeComplete: BackendMessage = { + name: MessageName.closeComplete, + length: 5, +} + +export const noData: BackendMessage = { + name: MessageName.noData, + length: 5, +} + +export const portalSuspended: BackendMessage = { + name: MessageName.portalSuspended, + length: 5, +} + +export const replicationStart: BackendMessage = { + name: MessageName.replicationStart, + length: 4, +} + +export const emptyQuery: BackendMessage = { + name: MessageName.emptyQuery, + length: 4, +} + +export const copyDone: BackendMessage = { + name: MessageName.copyDone, + length: 4, +} + +interface NoticeOrError { + message: string | undefined + severity: string | undefined + code: string | undefined + detail: string | undefined + hint: string | undefined + position: string | undefined + internalPosition: string | undefined + internalQuery: string | undefined + where: string | undefined + schema: string | undefined + table: string | undefined + column: string | undefined + dataType: string | undefined + constraint: string | undefined + file: string | undefined + line: string | undefined + routine: string | undefined +} + +export class DatabaseError extends Error implements NoticeOrError { + public 
severity: string | undefined + public code: string | undefined + public detail: string | undefined + public hint: string | undefined + public position: string | undefined + public internalPosition: string | undefined + public internalQuery: string | undefined + public where: string | undefined + public schema: string | undefined + public table: string | undefined + public column: string | undefined + public dataType: string | undefined + public constraint: string | undefined + public file: string | undefined + public line: string | undefined + public routine: string | undefined + constructor(message: string, public readonly length: number, public readonly name: MessageName) { + super(message) + } +} + +export class CopyDataMessage { + public readonly name = MessageName.copyData + constructor(public readonly length: number, public readonly chunk: Buffer) {} +} + +export class CopyResponse { + public readonly columnTypes: number[] + constructor( + public readonly length: number, + public readonly name: MessageName, + public readonly binary: boolean, + columnCount: number + ) { + this.columnTypes = new Array(columnCount) + } +} + +export class Field { + constructor( + public readonly name: string, + public readonly tableID: number, + public readonly columnID: number, + public readonly dataTypeID: number, + public readonly dataTypeSize: number, + public readonly dataTypeModifier: number, + public readonly format: Mode + ) {} +} + +export class RowDescriptionMessage { + public readonly name: MessageName = MessageName.rowDescription + public readonly fields: Field[] + constructor(public readonly length: number, public readonly fieldCount: number) { + this.fields = new Array(this.fieldCount) + } +} + +export class ParameterStatusMessage { + public readonly name: MessageName = MessageName.parameterStatus + constructor( + public readonly length: number, + public readonly parameterName: string, + public readonly parameterValue: string + ) {} +} + +export class AuthenticationMD5Password implements BackendMessage { + public readonly name: MessageName = MessageName.authenticationMD5Password + constructor(public readonly length: number, public readonly salt: Buffer) {} +} + +export class BackendKeyDataMessage { + public readonly name: MessageName = MessageName.backendKeyData + constructor(public readonly length: number, public readonly processID: number, public readonly secretKey: number) {} +} + +export class NotificationResponseMessage { + public readonly name: MessageName = MessageName.notification + constructor( + public readonly length: number, + public readonly processId: number, + public readonly channel: string, + public readonly payload: string + ) {} +} + +export class ReadyForQueryMessage { + public readonly name: MessageName = MessageName.readyForQuery + constructor(public readonly length: number, public readonly status: string) {} +} + +export class CommandCompleteMessage { + public readonly name: MessageName = MessageName.commandComplete + constructor(public readonly length: number, public readonly text: string) {} +} + +export class DataRowMessage { + public readonly fieldCount: number + public readonly name: MessageName = MessageName.dataRow + constructor(public length: number, public fields: any[]) { + this.fieldCount = fields.length + } +} + +export class NoticeMessage implements BackendMessage, NoticeOrError { + constructor(public readonly length: number, public readonly message: string | undefined) {} + public readonly name = MessageName.notice + public severity: string | undefined + 
public code: string | undefined + public detail: string | undefined + public hint: string | undefined + public position: string | undefined + public internalPosition: string | undefined + public internalQuery: string | undefined + public where: string | undefined + public schema: string | undefined + public table: string | undefined + public column: string | undefined + public dataType: string | undefined + public constraint: string | undefined + public file: string | undefined + public line: string | undefined + public routine: string | undefined +} diff --git a/node_modules/pg-protocol/src/outbound-serializer.test.ts b/node_modules/pg-protocol/src/outbound-serializer.test.ts new file mode 100644 index 00000000..06f20cf9 --- /dev/null +++ b/node_modules/pg-protocol/src/outbound-serializer.test.ts @@ -0,0 +1,243 @@ +import assert from 'assert' +import { serialize } from './serializer' +import BufferList from './testing/buffer-list' + +describe('serializer', () => { + it('builds startup message', function () { + const actual = serialize.startup({ + user: 'brian', + database: 'bang', + }) + assert.deepEqual( + actual, + new BufferList() + .addInt16(3) + .addInt16(0) + .addCString('user') + .addCString('brian') + .addCString('database') + .addCString('bang') + .addCString('client_encoding') + .addCString('UTF8') + .addCString('') + .join(true) + ) + }) + + it('builds password message', function () { + const actual = serialize.password('!') + assert.deepEqual(actual, new BufferList().addCString('!').join(true, 'p')) + }) + + it('builds request ssl message', function () { + const actual = serialize.requestSsl() + const expected = new BufferList().addInt32(80877103).join(true) + assert.deepEqual(actual, expected) + }) + + it('builds SASLInitialResponseMessage message', function () { + const actual = serialize.sendSASLInitialResponseMessage('mech', 'data') + assert.deepEqual(actual, new BufferList().addCString('mech').addInt32(4).addString('data').join(true, 'p')) + }) + + it('builds SCRAMClientFinalMessage message', function () { + const actual = serialize.sendSCRAMClientFinalMessage('data') + assert.deepEqual(actual, new BufferList().addString('data').join(true, 'p')) + }) + + it('builds query message', function () { + var txt = 'select * from boom' + const actual = serialize.query(txt) + assert.deepEqual(actual, new BufferList().addCString(txt).join(true, 'Q')) + }) + + describe('parse message', () => { + it('builds parse message', function () { + const actual = serialize.parse({ text: '!' 
}) + var expected = new BufferList().addCString('').addCString('!').addInt16(0).join(true, 'P') + assert.deepEqual(actual, expected) + }) + + it('builds parse message with named query', function () { + const actual = serialize.parse({ + name: 'boom', + text: 'select * from boom', + types: [], + }) + var expected = new BufferList().addCString('boom').addCString('select * from boom').addInt16(0).join(true, 'P') + assert.deepEqual(actual, expected) + }) + + it('with multiple parameters', function () { + const actual = serialize.parse({ + name: 'force', + text: 'select * from bang where name = $1', + types: [1, 2, 3, 4], + }) + var expected = new BufferList() + .addCString('force') + .addCString('select * from bang where name = $1') + .addInt16(4) + .addInt32(1) + .addInt32(2) + .addInt32(3) + .addInt32(4) + .join(true, 'P') + assert.deepEqual(actual, expected) + }) + }) + + describe('bind messages', function () { + it('with no values', function () { + const actual = serialize.bind() + + var expectedBuffer = new BufferList() + .addCString('') + .addCString('') + .addInt16(0) + .addInt16(0) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + it('with named statement, portal, and values', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, 'zing'], + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(0) + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing')) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + }) + + it('with named statement, portal, and buffer value', function () { + const actual = serialize.bind({ + portal: 'bang', + statement: 'woo', + values: ['1', 'hi', null, Buffer.from('zing', 'utf8')], + }) + var expectedBuffer = new BufferList() + .addCString('bang') // portal name + .addCString('woo') // statement name + .addInt16(4) // value count + .addInt16(0) // string + .addInt16(0) // string + .addInt16(0) // string + .addInt16(1) // binary + .addInt16(4) + .addInt32(1) + .add(Buffer.from('1')) + .addInt32(2) + .add(Buffer.from('hi')) + .addInt32(-1) + .addInt32(4) + .add(Buffer.from('zing', 'utf-8')) + .addInt16(0) + .join(true, 'B') + assert.deepEqual(actual, expectedBuffer) + }) + + describe('builds execute message', function () { + it('for unamed portal with no row limit', function () { + const actual = serialize.execute() + var expectedBuffer = new BufferList().addCString('').addInt32(0).join(true, 'E') + assert.deepEqual(actual, expectedBuffer) + }) + + it('for named portal with row limit', function () { + const actual = serialize.execute({ + portal: 'my favorite portal', + rows: 100, + }) + var expectedBuffer = new BufferList().addCString('my favorite portal').addInt32(100).join(true, 'E') + assert.deepEqual(actual, expectedBuffer) + }) + }) + + it('builds flush command', function () { + const actual = serialize.flush() + var expected = new BufferList().join(true, 'H') + assert.deepEqual(actual, expected) + }) + + it('builds sync command', function () { + const actual = serialize.sync() + var expected = new BufferList().join(true, 'S') + assert.deepEqual(actual, expected) + }) + + it('builds end command', function () { + const actual = serialize.end() + var expected = Buffer.from([0x58, 0, 0, 0, 4]) + assert.deepEqual(actual, expected) + }) + + describe('builds describe 
command', function () { + it('describe statement', function () { + const actual = serialize.describe({ type: 'S', name: 'bang' }) + var expected = new BufferList().addChar('S').addCString('bang').join(true, 'D') + assert.deepEqual(actual, expected) + }) + + it('describe unnamed portal', function () { + const actual = serialize.describe({ type: 'P' }) + var expected = new BufferList().addChar('P').addCString('').join(true, 'D') + assert.deepEqual(actual, expected) + }) + }) + + describe('builds close command', function () { + it('describe statement', function () { + const actual = serialize.close({ type: 'S', name: 'bang' }) + var expected = new BufferList().addChar('S').addCString('bang').join(true, 'C') + assert.deepEqual(actual, expected) + }) + + it('describe unnamed portal', function () { + const actual = serialize.close({ type: 'P' }) + var expected = new BufferList().addChar('P').addCString('').join(true, 'C') + assert.deepEqual(actual, expected) + }) + }) + + describe('copy messages', function () { + it('builds copyFromChunk', () => { + const actual = serialize.copyData(Buffer.from([1, 2, 3])) + const expected = new BufferList().add(Buffer.from([1, 2, 3])).join(true, 'd') + assert.deepEqual(actual, expected) + }) + + it('builds copy fail', () => { + const actual = serialize.copyFail('err!') + const expected = new BufferList().addCString('err!').join(true, 'f') + assert.deepEqual(actual, expected) + }) + + it('builds copy done', () => { + const actual = serialize.copyDone() + const expected = new BufferList().join(true, 'c') + assert.deepEqual(actual, expected) + }) + }) + + it('builds cancel message', () => { + const actual = serialize.cancel(3, 4) + const expected = new BufferList().addInt16(1234).addInt16(5678).addInt32(3).addInt32(4).join(true) + assert.deepEqual(actual, expected) + }) +}) diff --git a/node_modules/pg-protocol/src/parser.ts b/node_modules/pg-protocol/src/parser.ts new file mode 100644 index 00000000..a00dabec --- /dev/null +++ b/node_modules/pg-protocol/src/parser.ts @@ -0,0 +1,377 @@ +import { TransformOptions } from 'stream' +import { + Mode, + bindComplete, + parseComplete, + closeComplete, + noData, + portalSuspended, + copyDone, + replicationStart, + emptyQuery, + ReadyForQueryMessage, + CommandCompleteMessage, + CopyDataMessage, + CopyResponse, + NotificationResponseMessage, + RowDescriptionMessage, + Field, + DataRowMessage, + ParameterStatusMessage, + BackendKeyDataMessage, + DatabaseError, + BackendMessage, + MessageName, + AuthenticationMD5Password, + NoticeMessage, +} from './messages' +import { BufferReader } from './buffer-reader' +import assert from 'assert' + +// every message is prefixed with a single bye +const CODE_LENGTH = 1 +// every message has an int32 length which includes itself but does +// NOT include the code in the length +const LEN_LENGTH = 4 + +const HEADER_LENGTH = CODE_LENGTH + LEN_LENGTH + +export type Packet = { + code: number + packet: Buffer +} + +const emptyBuffer = Buffer.allocUnsafe(0) + +type StreamOptions = TransformOptions & { + mode: Mode +} + +const enum MessageCodes { + DataRow = 0x44, // D + ParseComplete = 0x31, // 1 + BindComplete = 0x32, // 2 + CloseComplete = 0x33, // 3 + CommandComplete = 0x43, // C + ReadyForQuery = 0x5a, // Z + NoData = 0x6e, // n + NotificationResponse = 0x41, // A + AuthenticationResponse = 0x52, // R + ParameterStatus = 0x53, // S + BackendKeyData = 0x4b, // K + ErrorMessage = 0x45, // E + NoticeMessage = 0x4e, // N + RowDescriptionMessage = 0x54, // T + PortalSuspended = 0x73, // s + 
ReplicationStart = 0x57, // W + EmptyQuery = 0x49, // I + CopyIn = 0x47, // G + CopyOut = 0x48, // H + CopyDone = 0x63, // c + CopyData = 0x64, // d +} + +export type MessageCallback = (msg: BackendMessage) => void + +export class Parser { + private buffer: Buffer = emptyBuffer + private bufferLength: number = 0 + private bufferOffset: number = 0 + private reader = new BufferReader() + private mode: Mode + + constructor(opts?: StreamOptions) { + if (opts?.mode === 'binary') { + throw new Error('Binary mode not supported yet') + } + this.mode = opts?.mode || 'text' + } + + public parse(buffer: Buffer, callback: MessageCallback) { + this.mergeBuffer(buffer) + const bufferFullLength = this.bufferOffset + this.bufferLength + let offset = this.bufferOffset + while (offset + HEADER_LENGTH <= bufferFullLength) { + // code is 1 byte long - it identifies the message type + const code = this.buffer[offset] + // length is 1 Uint32BE - it is the length of the message EXCLUDING the code + const length = this.buffer.readUInt32BE(offset + CODE_LENGTH) + const fullMessageLength = CODE_LENGTH + length + if (fullMessageLength + offset <= bufferFullLength) { + const message = this.handlePacket(offset + HEADER_LENGTH, code, length, this.buffer) + callback(message) + offset += fullMessageLength + } else { + break + } + } + if (offset === bufferFullLength) { + // No more use for the buffer + this.buffer = emptyBuffer + this.bufferLength = 0 + this.bufferOffset = 0 + } else { + // Adjust the cursors of remainingBuffer + this.bufferLength = bufferFullLength - offset + this.bufferOffset = offset + } + } + + private mergeBuffer(buffer: Buffer): void { + if (this.bufferLength > 0) { + const newLength = this.bufferLength + buffer.byteLength + const newFullLength = newLength + this.bufferOffset + if (newFullLength > this.buffer.byteLength) { + // We can't concat the new buffer with the remaining one + let newBuffer: Buffer + if (newLength <= this.buffer.byteLength && this.bufferOffset >= this.bufferLength) { + // We can move the relevant part to the beginning of the buffer instead of allocating a new buffer + newBuffer = this.buffer + } else { + // Allocate a new larger buffer + let newBufferLength = this.buffer.byteLength * 2 + while (newLength >= newBufferLength) { + newBufferLength *= 2 + } + newBuffer = Buffer.allocUnsafe(newBufferLength) + } + // Move the remaining buffer to the new one + this.buffer.copy(newBuffer, 0, this.bufferOffset, this.bufferOffset + this.bufferLength) + this.buffer = newBuffer + this.bufferOffset = 0 + } + // Concat the new buffer with the remaining one + buffer.copy(this.buffer, this.bufferOffset + this.bufferLength) + this.bufferLength = newLength + } else { + this.buffer = buffer + this.bufferOffset = 0 + this.bufferLength = buffer.byteLength + } + } + + private handlePacket(offset: number, code: number, length: number, bytes: Buffer): BackendMessage { + switch (code) { + case MessageCodes.BindComplete: + return bindComplete + case MessageCodes.ParseComplete: + return parseComplete + case MessageCodes.CloseComplete: + return closeComplete + case MessageCodes.NoData: + return noData + case MessageCodes.PortalSuspended: + return portalSuspended + case MessageCodes.CopyDone: + return copyDone + case MessageCodes.ReplicationStart: + return replicationStart + case MessageCodes.EmptyQuery: + return emptyQuery + case MessageCodes.DataRow: + return this.parseDataRowMessage(offset, length, bytes) + case MessageCodes.CommandComplete: + return this.parseCommandCompleteMessage(offset, length, 
bytes) + case MessageCodes.ReadyForQuery: + return this.parseReadyForQueryMessage(offset, length, bytes) + case MessageCodes.NotificationResponse: + return this.parseNotificationMessage(offset, length, bytes) + case MessageCodes.AuthenticationResponse: + return this.parseAuthenticationResponse(offset, length, bytes) + case MessageCodes.ParameterStatus: + return this.parseParameterStatusMessage(offset, length, bytes) + case MessageCodes.BackendKeyData: + return this.parseBackendKeyData(offset, length, bytes) + case MessageCodes.ErrorMessage: + return this.parseErrorMessage(offset, length, bytes, MessageName.error) + case MessageCodes.NoticeMessage: + return this.parseErrorMessage(offset, length, bytes, MessageName.notice) + case MessageCodes.RowDescriptionMessage: + return this.parseRowDescriptionMessage(offset, length, bytes) + case MessageCodes.CopyIn: + return this.parseCopyInMessage(offset, length, bytes) + case MessageCodes.CopyOut: + return this.parseCopyOutMessage(offset, length, bytes) + case MessageCodes.CopyData: + return this.parseCopyData(offset, length, bytes) + default: + assert.fail(`unknown message code: ${code.toString(16)}`) + } + } + + private parseReadyForQueryMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const status = this.reader.string(1) + return new ReadyForQueryMessage(length, status) + } + + private parseCommandCompleteMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const text = this.reader.cstring() + return new CommandCompleteMessage(length, text) + } + + private parseCopyData(offset: number, length: number, bytes: Buffer) { + const chunk = bytes.slice(offset, offset + (length - 4)) + return new CopyDataMessage(length, chunk) + } + + private parseCopyInMessage(offset: number, length: number, bytes: Buffer) { + return this.parseCopyMessage(offset, length, bytes, MessageName.copyInResponse) + } + + private parseCopyOutMessage(offset: number, length: number, bytes: Buffer) { + return this.parseCopyMessage(offset, length, bytes, MessageName.copyOutResponse) + } + + private parseCopyMessage(offset: number, length: number, bytes: Buffer, messageName: MessageName) { + this.reader.setBuffer(offset, bytes) + const isBinary = this.reader.byte() !== 0 + const columnCount = this.reader.int16() + const message = new CopyResponse(length, messageName, isBinary, columnCount) + for (let i = 0; i < columnCount; i++) { + message.columnTypes[i] = this.reader.int16() + } + return message + } + + private parseNotificationMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const processId = this.reader.int32() + const channel = this.reader.cstring() + const payload = this.reader.cstring() + return new NotificationResponseMessage(length, processId, channel, payload) + } + + private parseRowDescriptionMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const fieldCount = this.reader.int16() + const message = new RowDescriptionMessage(length, fieldCount) + for (let i = 0; i < fieldCount; i++) { + message.fields[i] = this.parseField() + } + return message + } + + private parseField(): Field { + const name = this.reader.cstring() + const tableID = this.reader.int32() + const columnID = this.reader.int16() + const dataTypeID = this.reader.int32() + const dataTypeSize = this.reader.int16() + const dataTypeModifier = this.reader.int32() + const mode = this.reader.int16() === 0 ? 
'text' : 'binary' + return new Field(name, tableID, columnID, dataTypeID, dataTypeSize, dataTypeModifier, mode) + } + + private parseDataRowMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const fieldCount = this.reader.int16() + const fields: any[] = new Array(fieldCount) + for (let i = 0; i < fieldCount; i++) { + const len = this.reader.int32() + // a -1 for length means the value of the field is null + fields[i] = len === -1 ? null : this.reader.string(len) + } + return new DataRowMessage(length, fields) + } + + private parseParameterStatusMessage(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const name = this.reader.cstring() + const value = this.reader.cstring() + return new ParameterStatusMessage(length, name, value) + } + + private parseBackendKeyData(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const processID = this.reader.int32() + const secretKey = this.reader.int32() + return new BackendKeyDataMessage(length, processID, secretKey) + } + + public parseAuthenticationResponse(offset: number, length: number, bytes: Buffer) { + this.reader.setBuffer(offset, bytes) + const code = this.reader.int32() + // TODO(bmc): maybe better types here + const message: BackendMessage & any = { + name: MessageName.authenticationOk, + length, + } + + switch (code) { + case 0: // AuthenticationOk + break + case 3: // AuthenticationCleartextPassword + if (message.length === 8) { + message.name = MessageName.authenticationCleartextPassword + } + break + case 5: // AuthenticationMD5Password + if (message.length === 12) { + message.name = MessageName.authenticationMD5Password + const salt = this.reader.bytes(4) + return new AuthenticationMD5Password(length, salt) + } + break + case 10: // AuthenticationSASL + message.name = MessageName.authenticationSASL + message.mechanisms = [] + let mechanism: string + do { + mechanism = this.reader.cstring() + + if (mechanism) { + message.mechanisms.push(mechanism) + } + } while (mechanism) + break + case 11: // AuthenticationSASLContinue + message.name = MessageName.authenticationSASLContinue + message.data = this.reader.string(length - 8) + break + case 12: // AuthenticationSASLFinal + message.name = MessageName.authenticationSASLFinal + message.data = this.reader.string(length - 8) + break + default: + throw new Error('Unknown authenticationOk message type ' + code) + } + return message + } + + private parseErrorMessage(offset: number, length: number, bytes: Buffer, name: MessageName) { + this.reader.setBuffer(offset, bytes) + const fields: Record = {} + let fieldType = this.reader.string(1) + while (fieldType !== '\0') { + fields[fieldType] = this.reader.cstring() + fieldType = this.reader.string(1) + } + + const messageValue = fields.M + + const message = + name === MessageName.notice + ? 
new NoticeMessage(length, messageValue) + : new DatabaseError(messageValue, length, name) + + message.severity = fields.S + message.code = fields.C + message.detail = fields.D + message.hint = fields.H + message.position = fields.P + message.internalPosition = fields.p + message.internalQuery = fields.q + message.where = fields.W + message.schema = fields.s + message.table = fields.t + message.column = fields.c + message.dataType = fields.d + message.constraint = fields.n + message.file = fields.F + message.line = fields.L + message.routine = fields.R + return message + } +} diff --git a/node_modules/pg-protocol/src/serializer.ts b/node_modules/pg-protocol/src/serializer.ts new file mode 100644 index 00000000..bff2fd33 --- /dev/null +++ b/node_modules/pg-protocol/src/serializer.ts @@ -0,0 +1,264 @@ +import { Writer } from './buffer-writer' + +const enum code { + startup = 0x70, + query = 0x51, + parse = 0x50, + bind = 0x42, + execute = 0x45, + flush = 0x48, + sync = 0x53, + end = 0x58, + close = 0x43, + describe = 0x44, + copyFromChunk = 0x64, + copyDone = 0x63, + copyFail = 0x66, +} + +const writer = new Writer() + +const startup = (opts: Record): Buffer => { + // protocol version + writer.addInt16(3).addInt16(0) + for (const key of Object.keys(opts)) { + writer.addCString(key).addCString(opts[key]) + } + + writer.addCString('client_encoding').addCString('UTF8') + + var bodyBuffer = writer.addCString('').flush() + // this message is sent without a code + + var length = bodyBuffer.length + 4 + + return new Writer().addInt32(length).add(bodyBuffer).flush() +} + +const requestSsl = (): Buffer => { + const response = Buffer.allocUnsafe(8) + response.writeInt32BE(8, 0) + response.writeInt32BE(80877103, 4) + return response +} + +const password = (password: string): Buffer => { + return writer.addCString(password).flush(code.startup) +} + +const sendSASLInitialResponseMessage = function (mechanism: string, initialResponse: string): Buffer { + // 0x70 = 'p' + writer.addCString(mechanism).addInt32(Buffer.byteLength(initialResponse)).addString(initialResponse) + + return writer.flush(code.startup) +} + +const sendSCRAMClientFinalMessage = function (additionalData: string): Buffer { + return writer.addString(additionalData).flush(code.startup) +} + +const query = (text: string): Buffer => { + return writer.addCString(text).flush(code.query) +} + +type ParseOpts = { + name?: string + types?: number[] + text: string +} + +const emptyArray: any[] = [] + +const parse = (query: ParseOpts): Buffer => { + // expect something like this: + // { name: 'queryName', + // text: 'select * from blah', + // types: ['int8', 'bool'] } + + // normalize missing query names to allow for null + const name = query.name || '' + if (name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', name, name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + + const types = query.types || emptyArray + + var len = types.length + + var buffer = writer + .addCString(name) // name of query + .addCString(query.text) // actual query text + .addInt16(len) + + for (var i = 0; i < len; i++) { + buffer.addInt32(types[i]) + } + + return writer.flush(code.parse) +} + +type BindOpts = { + portal?: string + binary?: boolean + statement?: string + values?: any[] +} + +const bind = (config: BindOpts = {}): Buffer => { + // normalize config + const portal = config.portal || '' + const statement = config.statement || '' + const binary = config.binary || false + var values = config.values || emptyArray + var len = values.length + + var useBinary = false + // TODO(bmc): all the loops in here aren't nice, we can do better + for (var j = 0; j < len; j++) { + useBinary = useBinary || values[j] instanceof Buffer + } + + var buffer = writer.addCString(portal).addCString(statement) + if (!useBinary) { + buffer.addInt16(0) + } else { + buffer.addInt16(len) + for (j = 0; j < len; j++) { + buffer.addInt16(values[j] instanceof Buffer ? 1 : 0) + } + } + buffer.addInt16(len) + for (var i = 0; i < len; i++) { + var val = values[i] + if (val === null || typeof val === 'undefined') { + buffer.addInt32(-1) + } else if (val instanceof Buffer) { + buffer.addInt32(val.length) + buffer.add(val) + } else { + buffer.addInt32(Buffer.byteLength(val)) + buffer.addString(val) + } + } + + if (binary) { + buffer.addInt16(1) // format codes to use binary + buffer.addInt16(1) + } else { + buffer.addInt16(0) // format codes to use text + } + return writer.flush(code.bind) +} + +type ExecOpts = { + portal?: string + rows?: number +} + +const emptyExecute = Buffer.from([code.execute, 0x00, 0x00, 0x00, 0x09, 0x00, 0x00, 0x00, 0x00, 0x00]) + +const execute = (config?: ExecOpts): Buffer => { + // this is the happy path for most queries + if (!config || (!config.portal && !config.rows)) { + return emptyExecute + } + + const portal = config.portal || '' + const rows = config.rows || 0 + + const portalLength = Buffer.byteLength(portal) + const len = 4 + portalLength + 1 + 4 + // one extra bit for code + const buff = Buffer.allocUnsafe(1 + len) + buff[0] = code.execute + buff.writeInt32BE(len, 1) + buff.write(portal, 5, 'utf-8') + buff[portalLength + 5] = 0 // null terminate portal cString + buff.writeUInt32BE(rows, buff.length - 4) + return buff +} + +const cancel = (processID: number, secretKey: number): Buffer => { + const buffer = Buffer.allocUnsafe(16) + buffer.writeInt32BE(16, 0) + buffer.writeInt16BE(1234, 4) + buffer.writeInt16BE(5678, 6) + buffer.writeInt32BE(processID, 8) + buffer.writeInt32BE(secretKey, 12) + return buffer +} + +type PortalOpts = { + type: 'S' | 'P' + name?: string +} + +const cstringMessage = (code: code, string: string): Buffer => { + const stringLen = Buffer.byteLength(string) + const len = 4 + stringLen + 1 + // one extra bit for code + const buffer = Buffer.allocUnsafe(1 + len) + buffer[0] = code + buffer.writeInt32BE(len, 1) + buffer.write(string, 5, 'utf-8') + buffer[len] = 0 // null terminate cString + return buffer +} + +const emptyDescribePortal = writer.addCString('P').flush(code.describe) +const emptyDescribeStatement = writer.addCString('S').flush(code.describe) + +const describe = (msg: PortalOpts): Buffer => { + return msg.name + 
? cstringMessage(code.describe, `${msg.type}${msg.name || ''}`) + : msg.type === 'P' + ? emptyDescribePortal + : emptyDescribeStatement +} + +const close = (msg: PortalOpts): Buffer => { + const text = `${msg.type}${msg.name || ''}` + return cstringMessage(code.close, text) +} + +const copyData = (chunk: Buffer): Buffer => { + return writer.add(chunk).flush(code.copyFromChunk) +} + +const copyFail = (message: string): Buffer => { + return cstringMessage(code.copyFail, message) +} + +const codeOnlyBuffer = (code: code): Buffer => Buffer.from([code, 0x00, 0x00, 0x00, 0x04]) + +const flushBuffer = codeOnlyBuffer(code.flush) +const syncBuffer = codeOnlyBuffer(code.sync) +const endBuffer = codeOnlyBuffer(code.end) +const copyDoneBuffer = codeOnlyBuffer(code.copyDone) + +const serialize = { + startup, + password, + requestSsl, + sendSASLInitialResponseMessage, + sendSCRAMClientFinalMessage, + query, + parse, + bind, + execute, + describe, + close, + flush: () => flushBuffer, + sync: () => syncBuffer, + end: () => endBuffer, + copyData, + copyDone: () => copyDoneBuffer, + copyFail, + cancel, +} + +export { serialize } diff --git a/node_modules/pg-protocol/src/testing/buffer-list.ts b/node_modules/pg-protocol/src/testing/buffer-list.ts new file mode 100644 index 00000000..15ac785c --- /dev/null +++ b/node_modules/pg-protocol/src/testing/buffer-list.ts @@ -0,0 +1,75 @@ +export default class BufferList { + constructor(public buffers: Buffer[] = []) {} + + public add(buffer: Buffer, front?: boolean) { + this.buffers[front ? 'unshift' : 'push'](buffer) + return this + } + + public addInt16(val: number, front?: boolean) { + return this.add(Buffer.from([val >>> 8, val >>> 0]), front) + } + + public getByteLength(initial?: number) { + return this.buffers.reduce(function (previous, current) { + return previous + current.length + }, initial || 0) + } + + public addInt32(val: number, first?: boolean) { + return this.add( + Buffer.from([(val >>> 24) & 0xff, (val >>> 16) & 0xff, (val >>> 8) & 0xff, (val >>> 0) & 0xff]), + first + ) + } + + public addCString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len + 1) + buffer.write(val) + buffer[len] = 0 + return this.add(buffer, front) + } + + public addString(val: string, front?: boolean) { + var len = Buffer.byteLength(val) + var buffer = Buffer.alloc(len) + buffer.write(val) + return this.add(buffer, front) + } + + public addChar(char: string, first?: boolean) { + return this.add(Buffer.from(char, 'utf8'), first) + } + + public addByte(byte: number) { + return this.add(Buffer.from([byte])) + } + + public join(appendLength?: boolean, char?: string): Buffer { + var length = this.getByteLength() + if (appendLength) { + this.addInt32(length + 4, true) + return this.join(false, char) + } + if (char) { + this.addChar(char, true) + length++ + } + var result = Buffer.alloc(length) + var index = 0 + this.buffers.forEach(function (buffer) { + buffer.copy(result, index, 0) + index += buffer.length + }) + return result + } + + public static concat(): Buffer { + var total = new BufferList() + for (var i = 0; i < arguments.length; i++) { + total.add(arguments[i]) + } + return total.join() + } +} diff --git a/node_modules/pg-protocol/src/testing/test-buffers.ts b/node_modules/pg-protocol/src/testing/test-buffers.ts new file mode 100644 index 00000000..19ba16cc --- /dev/null +++ b/node_modules/pg-protocol/src/testing/test-buffers.ts @@ -0,0 +1,156 @@ +// 
http://developer.postgresql.org/pgdocs/postgres/protocol-message-formats.html +import BufferList from './buffer-list' + +const buffers = { + readyForQuery: function () { + return new BufferList().add(Buffer.from('I')).join(true, 'Z') + }, + + authenticationOk: function () { + return new BufferList().addInt32(0).join(true, 'R') + }, + + authenticationCleartextPassword: function () { + return new BufferList().addInt32(3).join(true, 'R') + }, + + authenticationMD5Password: function () { + return new BufferList() + .addInt32(5) + .add(Buffer.from([1, 2, 3, 4])) + .join(true, 'R') + }, + + authenticationSASL: function () { + return new BufferList().addInt32(10).addCString('SCRAM-SHA-256').addCString('').join(true, 'R') + }, + + authenticationSASLContinue: function () { + return new BufferList().addInt32(11).addString('data').join(true, 'R') + }, + + authenticationSASLFinal: function () { + return new BufferList().addInt32(12).addString('data').join(true, 'R') + }, + + parameterStatus: function (name: string, value: string) { + return new BufferList().addCString(name).addCString(value).join(true, 'S') + }, + + backendKeyData: function (processID: number, secretKey: number) { + return new BufferList().addInt32(processID).addInt32(secretKey).join(true, 'K') + }, + + commandComplete: function (string: string) { + return new BufferList().addCString(string).join(true, 'C') + }, + + rowDescription: function (fields: any[]) { + fields = fields || [] + var buf = new BufferList() + buf.addInt16(fields.length) + fields.forEach(function (field) { + buf + .addCString(field.name) + .addInt32(field.tableID || 0) + .addInt16(field.attributeNumber || 0) + .addInt32(field.dataTypeID || 0) + .addInt16(field.dataTypeSize || 0) + .addInt32(field.typeModifier || 0) + .addInt16(field.formatCode || 0) + }) + return buf.join(true, 'T') + }, + + dataRow: function (columns: any[]) { + columns = columns || [] + var buf = new BufferList() + buf.addInt16(columns.length) + columns.forEach(function (col) { + if (col == null) { + buf.addInt32(-1) + } else { + var strBuf = Buffer.from(col, 'utf8') + buf.addInt32(strBuf.length) + buf.add(strBuf) + } + }) + return buf.join(true, 'D') + }, + + error: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'E') + }, + + notice: function (fields: any) { + return buffers.errorOrNotice(fields).join(true, 'N') + }, + + errorOrNotice: function (fields: any) { + fields = fields || [] + var buf = new BufferList() + fields.forEach(function (field: any) { + buf.addChar(field.type) + buf.addCString(field.value) + }) + return buf.add(Buffer.from([0])) // terminator + }, + + parseComplete: function () { + return new BufferList().join(true, '1') + }, + + bindComplete: function () { + return new BufferList().join(true, '2') + }, + + notification: function (id: number, channel: string, payload: string) { + return new BufferList().addInt32(id).addCString(channel).addCString(payload).join(true, 'A') + }, + + emptyQuery: function () { + return new BufferList().join(true, 'I') + }, + + portalSuspended: function () { + return new BufferList().join(true, 's') + }, + + closeComplete: function () { + return new BufferList().join(true, '3') + }, + + copyIn: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + .addInt16(cols) + for (let i = 0; i < cols; i++) { + list.addInt16(i) + } + return list.join(true, 'G') + }, + + copyOut: function (cols: number) { + const list = new BufferList() + // text mode + .addByte(0) + // column count + 
.addInt16(cols) + for (let i = 0; i < cols; i++) { + list.addInt16(i) + } + return list.join(true, 'H') + }, + + copyData: function (bytes: Buffer) { + return new BufferList().add(bytes).join(true, 'd') + }, + + copyDone: function () { + return new BufferList().join(true, 'c') + }, +} + +export default buffers diff --git a/node_modules/pg-protocol/src/types/chunky.d.ts b/node_modules/pg-protocol/src/types/chunky.d.ts new file mode 100644 index 00000000..7389bda6 --- /dev/null +++ b/node_modules/pg-protocol/src/types/chunky.d.ts @@ -0,0 +1 @@ +declare module 'chunky' diff --git a/node_modules/pg-protocol/tsconfig.json b/node_modules/pg-protocol/tsconfig.json new file mode 100644 index 00000000..bdbe07a3 --- /dev/null +++ b/node_modules/pg-protocol/tsconfig.json @@ -0,0 +1,24 @@ +{ + "compilerOptions": { + "module": "commonjs", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "strict": true, + "target": "es6", + "noImplicitAny": true, + "moduleResolution": "node", + "sourceMap": true, + "outDir": "dist", + "baseUrl": ".", + "declaration": true, + "paths": { + "*": [ + "node_modules/*", + "src/types/*" + ] + } + }, + "include": [ + "src/**/*" + ] +} diff --git a/node_modules/pg-types/.travis.yml b/node_modules/pg-types/.travis.yml new file mode 100644 index 00000000..dd6b0332 --- /dev/null +++ b/node_modules/pg-types/.travis.yml @@ -0,0 +1,7 @@ +language: node_js +node_js: + - '4' + - 'lts/*' + - 'node' +env: + - PGUSER=postgres diff --git a/node_modules/pg-types/Makefile b/node_modules/pg-types/Makefile new file mode 100644 index 00000000..d7ec83d5 --- /dev/null +++ b/node_modules/pg-types/Makefile @@ -0,0 +1,14 @@ +.PHONY: publish-patch test + +test: + npm test + +patch: test + npm version patch -m "Bump version" + git push origin master --tags + npm publish + +minor: test + npm version minor -m "Bump version" + git push origin master --tags + npm publish diff --git a/node_modules/pg-types/README.md b/node_modules/pg-types/README.md new file mode 100644 index 00000000..54a3f2c6 --- /dev/null +++ b/node_modules/pg-types/README.md @@ -0,0 +1,75 @@ +# pg-types + +This is the code that turns all the raw text from postgres into JavaScript types for [node-postgres](https://github.com/brianc/node-postgres.git) + +## use + +This module is consumed and exported from the root `pg` object of node-postgres. To access it, do the following: + +```js +var types = require('pg').types +``` + +Generally what you'll want to do is override how a specific data-type is parsed and turned into a JavaScript type. By default the PostgreSQL backend server returns everything as strings. Every data type corresponds to a unique `OID` within the server, and these `OIDs` are sent back with the query response. So, you need to match a particluar `OID` to a function you'd like to use to take the raw text input and produce a valid JavaScript object as a result. `null` values are never parsed. + +Let's do something I commonly like to do on projects: return 64-bit integers `(int8)` as JavaScript integers. Because JavaScript doesn't have support for 64-bit integers node-postgres cannot confidently parse `int8` data type results as numbers because if you have a _huge_ number it will overflow and the result you'd get back from node-postgres would not be the result in the datbase. That would be a __very bad thing__ so node-postgres just returns `int8` results as strings and leaves the parsing up to you. 
Let's say that you know you don't and won't ever have numbers greater than `int4` in your database, but you're tired of receiving results from the `COUNT(*)` function as strings (because that function returns `int8`). You would do this: + +```js +var types = require('pg').types +types.setTypeParser(20, function(val) { + return parseInt(val) +}) +``` + +__boom__: now you get numbers instead of strings. + +Just as another example -- not saying this is a good idea -- let's say you want to return all dates from your database as [moment](http://momentjs.com/docs/) objects. Okay, do this: + +```js +var types = require('pg').types +var moment = require('moment') +var parseFn = function(val) { + return val === null ? null : moment(val) +} +types.setTypeParser(types.builtins.TIMESTAMPTZ, parseFn) +types.setTypeParser(types.builtins.TIMESTAMP, parseFn) +``` +_note: I've never done that with my dates, and I'm not 100% sure moment can parse all the date strings returned from postgres. It's just an example!_ + +If you're thinking "gee, this seems pretty handy, but how can I get a list of all the OIDs in the database and what they correspond to?!?!?!" worry not: + +```bash +$ psql -c "select typname, oid, typarray from pg_type order by oid" +``` + +If you want to find out the OID of a specific type: + +```bash +$ psql -c "select typname, oid, typarray from pg_type where typname = 'daterange' order by oid" +``` + +:smile: + +## license + +The MIT License (MIT) + +Copyright (c) 2014 Brian M. Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
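One more example in the same spirit as the `setTypeParser` calls in the pg-types README above: the same hook works for any OID listed in `types.builtins`. This is a minimal sketch, assuming your `numeric` values fit in a JavaScript double; `numeric` can hold more precision than a double, so only trade the string form away if you know your data allows it.

```js
// Sketch only: parse numeric (types.builtins.NUMERIC, OID 1700) as a JS number.
// Assumption: your numeric values fit in a double; otherwise keep the default string.
var types = require('pg').types

types.setTypeParser(types.builtins.NUMERIC, function (val) {
  return parseFloat(val)
})
```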
diff --git a/node_modules/pg-types/index.d.ts b/node_modules/pg-types/index.d.ts new file mode 100644 index 00000000..4bebcbe6 --- /dev/null +++ b/node_modules/pg-types/index.d.ts @@ -0,0 +1,137 @@ +export enum TypeId { + BOOL = 16, + BYTEA = 17, + CHAR = 18, + INT8 = 20, + INT2 = 21, + INT4 = 23, + REGPROC = 24, + TEXT = 25, + OID = 26, + TID = 27, + XID = 28, + CID = 29, + JSON = 114, + XML = 142, + PG_NODE_TREE = 194, + SMGR = 210, + PATH = 602, + POLYGON = 604, + CIDR = 650, + FLOAT4 = 700, + FLOAT8 = 701, + ABSTIME = 702, + RELTIME = 703, + TINTERVAL = 704, + CIRCLE = 718, + MACADDR8 = 774, + MONEY = 790, + MACADDR = 829, + INET = 869, + ACLITEM = 1033, + BPCHAR = 1042, + VARCHAR = 1043, + DATE = 1082, + TIME = 1083, + TIMESTAMP = 1114, + TIMESTAMPTZ = 1184, + INTERVAL = 1186, + TIMETZ = 1266, + BIT = 1560, + VARBIT = 1562, + NUMERIC = 1700, + REFCURSOR = 1790, + REGPROCEDURE = 2202, + REGOPER = 2203, + REGOPERATOR = 2204, + REGCLASS = 2205, + REGTYPE = 2206, + UUID = 2950, + TXID_SNAPSHOT = 2970, + PG_LSN = 3220, + PG_NDISTINCT = 3361, + PG_DEPENDENCIES = 3402, + TSVECTOR = 3614, + TSQUERY = 3615, + GTSVECTOR = 3642, + REGCONFIG = 3734, + REGDICTIONARY = 3769, + JSONB = 3802, + REGNAMESPACE = 4089, + REGROLE = 4096 +} + +export type builtinsTypes = + 'BOOL' | + 'BYTEA' | + 'CHAR' | + 'INT8' | + 'INT2' | + 'INT4' | + 'REGPROC' | + 'TEXT' | + 'OID' | + 'TID' | + 'XID' | + 'CID' | + 'JSON' | + 'XML' | + 'PG_NODE_TREE' | + 'SMGR' | + 'PATH' | + 'POLYGON' | + 'CIDR' | + 'FLOAT4' | + 'FLOAT8' | + 'ABSTIME' | + 'RELTIME' | + 'TINTERVAL' | + 'CIRCLE' | + 'MACADDR8' | + 'MONEY' | + 'MACADDR' | + 'INET' | + 'ACLITEM' | + 'BPCHAR' | + 'VARCHAR' | + 'DATE' | + 'TIME' | + 'TIMESTAMP' | + 'TIMESTAMPTZ' | + 'INTERVAL' | + 'TIMETZ' | + 'BIT' | + 'VARBIT' | + 'NUMERIC' | + 'REFCURSOR' | + 'REGPROCEDURE' | + 'REGOPER' | + 'REGOPERATOR' | + 'REGCLASS' | + 'REGTYPE' | + 'UUID' | + 'TXID_SNAPSHOT' | + 'PG_LSN' | + 'PG_NDISTINCT' | + 'PG_DEPENDENCIES' | + 'TSVECTOR' | + 'TSQUERY' | + 'GTSVECTOR' | + 'REGCONFIG' | + 'REGDICTIONARY' | + 'JSONB' | + 'REGNAMESPACE' | + 'REGROLE'; + +export type TypesBuiltins = {[key in builtinsTypes]: TypeId}; + +export type TypeFormat = 'text' | 'binary'; + +export const builtins: TypesBuiltins; + +export function setTypeParser (id: TypeId, parseFn: ((value: string) => any)): void; +export function setTypeParser (id: TypeId, format: TypeFormat, parseFn: (value: string) => any): void; + +export const getTypeParser: (id: TypeId, format?: TypeFormat) => any + +export const arrayParser: (source: string, transform: (entry: any) => any) => any[]; diff --git a/node_modules/pg-types/index.js b/node_modules/pg-types/index.js new file mode 100644 index 00000000..952d8c27 --- /dev/null +++ b/node_modules/pg-types/index.js @@ -0,0 +1,47 @@ +var textParsers = require('./lib/textParsers'); +var binaryParsers = require('./lib/binaryParsers'); +var arrayParser = require('./lib/arrayParser'); +var builtinTypes = require('./lib/builtins'); + +exports.getTypeParser = getTypeParser; +exports.setTypeParser = setTypeParser; +exports.arrayParser = arrayParser; +exports.builtins = builtinTypes; + +var typeParsers = { + text: {}, + binary: {} +}; + +//the empty parse function +function noParse (val) { + return String(val); +}; + +//returns a function used to convert a specific type (specified by +//oid) into a result javascript type +//note: the oid can be obtained via the following sql query: +//SELECT oid FROM pg_type WHERE typname = 'TYPE_NAME_HERE'; +function getTypeParser (oid, format) { + 
format = format || 'text'; + if (!typeParsers[format]) { + return noParse; + } + return typeParsers[format][oid] || noParse; +}; + +function setTypeParser (oid, format, parseFn) { + if(typeof format == 'function') { + parseFn = format; + format = 'text'; + } + typeParsers[format][oid] = parseFn; +}; + +textParsers.init(function(oid, converter) { + typeParsers.text[oid] = converter; +}); + +binaryParsers.init(function(oid, converter) { + typeParsers.binary[oid] = converter; +}); diff --git a/node_modules/pg-types/index.test-d.ts b/node_modules/pg-types/index.test-d.ts new file mode 100644 index 00000000..d530e6ef --- /dev/null +++ b/node_modules/pg-types/index.test-d.ts @@ -0,0 +1,21 @@ +import * as types from '.'; +import { expectType } from 'tsd'; + +// builtins +expectType(types.builtins); + +// getTypeParser +const noParse = types.getTypeParser(types.builtins.NUMERIC, 'text'); +const numericParser = types.getTypeParser(types.builtins.NUMERIC, 'binary'); +expectType(noParse('noParse')); +expectType(numericParser([200, 1, 0, 15])); + +// getArrayParser +const value = types.arrayParser('{1,2,3}', (num) => parseInt(num)); +expectType(value); + +//setTypeParser +types.setTypeParser(types.builtins.INT8, parseInt); +types.setTypeParser(types.builtins.FLOAT8, parseFloat); +types.setTypeParser(types.builtins.FLOAT8, 'binary', (data) => data[0]); +types.setTypeParser(types.builtins.FLOAT8, 'text', parseFloat); diff --git a/node_modules/pg-types/lib/arrayParser.js b/node_modules/pg-types/lib/arrayParser.js new file mode 100644 index 00000000..81ccffbc --- /dev/null +++ b/node_modules/pg-types/lib/arrayParser.js @@ -0,0 +1,11 @@ +var array = require('postgres-array'); + +module.exports = { + create: function (source, transform) { + return { + parse: function() { + return array.parse(source, transform); + } + }; + } +}; diff --git a/node_modules/pg-types/lib/binaryParsers.js b/node_modules/pg-types/lib/binaryParsers.js new file mode 100644 index 00000000..e12c2f46 --- /dev/null +++ b/node_modules/pg-types/lib/binaryParsers.js @@ -0,0 +1,257 @@ +var parseInt64 = require('pg-int8'); + +var parseBits = function(data, bits, offset, invert, callback) { + offset = offset || 0; + invert = invert || false; + callback = callback || function(lastValue, newValue, bits) { return (lastValue * Math.pow(2, bits)) + newValue; }; + var offsetBytes = offset >> 3; + + var inv = function(value) { + if (invert) { + return ~value & 0xff; + } + + return value; + }; + + // read first (maybe partial) byte + var mask = 0xff; + var firstBits = 8 - (offset % 8); + if (bits < firstBits) { + mask = (0xff << (8 - bits)) & 0xff; + firstBits = bits; + } + + if (offset) { + mask = mask >> (offset % 8); + } + + var result = 0; + if ((offset % 8) + bits >= 8) { + result = callback(0, inv(data[offsetBytes]) & mask, firstBits); + } + + // read bytes + var bytes = (bits + offset) >> 3; + for (var i = offsetBytes + 1; i < bytes; i++) { + result = callback(result, inv(data[i]), 8); + } + + // bits to read, that are not a complete byte + var lastBits = (bits + offset) % 8; + if (lastBits > 0) { + result = callback(result, inv(data[bytes]) >> (8 - lastBits), lastBits); + } + + return result; +}; + +var parseFloatFromBits = function(data, precisionBits, exponentBits) { + var bias = Math.pow(2, exponentBits - 1) - 1; + var sign = parseBits(data, 1); + var exponent = parseBits(data, exponentBits, 1); + + if (exponent === 0) { + return 0; + } + + // parse mantissa + var precisionBitsCounter = 1; + var parsePrecisionBits = function(lastValue, 
newValue, bits) { + if (lastValue === 0) { + lastValue = 1; + } + + for (var i = 1; i <= bits; i++) { + precisionBitsCounter /= 2; + if ((newValue & (0x1 << (bits - i))) > 0) { + lastValue += precisionBitsCounter; + } + } + + return lastValue; + }; + + var mantissa = parseBits(data, precisionBits, exponentBits + 1, false, parsePrecisionBits); + + // special cases + if (exponent == (Math.pow(2, exponentBits + 1) - 1)) { + if (mantissa === 0) { + return (sign === 0) ? Infinity : -Infinity; + } + + return NaN; + } + + // normale number + return ((sign === 0) ? 1 : -1) * Math.pow(2, exponent - bias) * mantissa; +}; + +var parseInt16 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 15, 1, true) + 1); + } + + return parseBits(value, 15, 1); +}; + +var parseInt32 = function(value) { + if (parseBits(value, 1) == 1) { + return -1 * (parseBits(value, 31, 1, true) + 1); + } + + return parseBits(value, 31, 1); +}; + +var parseFloat32 = function(value) { + return parseFloatFromBits(value, 23, 8); +}; + +var parseFloat64 = function(value) { + return parseFloatFromBits(value, 52, 11); +}; + +var parseNumeric = function(value) { + var sign = parseBits(value, 16, 32); + if (sign == 0xc000) { + return NaN; + } + + var weight = Math.pow(10000, parseBits(value, 16, 16)); + var result = 0; + + var digits = []; + var ndigits = parseBits(value, 16); + for (var i = 0; i < ndigits; i++) { + result += parseBits(value, 16, 64 + (16 * i)) * weight; + weight /= 10000; + } + + var scale = Math.pow(10, parseBits(value, 16, 48)); + return ((sign === 0) ? 1 : -1) * Math.round(result * scale) / scale; +}; + +var parseDate = function(isUTC, value) { + var sign = parseBits(value, 1); + var rawValue = parseBits(value, 63, 1); + + // discard usecs and shift from 2000 to 1970 + var result = new Date((((sign === 0) ? 
1 : -1) * rawValue / 1000) + 946684800000); + + if (!isUTC) { + result.setTime(result.getTime() + result.getTimezoneOffset() * 60000); + } + + // add microseconds to the date + result.usec = rawValue % 1000; + result.getMicroSeconds = function() { + return this.usec; + }; + result.setMicroSeconds = function(value) { + this.usec = value; + }; + result.getUTCMicroSeconds = function() { + return this.usec; + }; + + return result; +}; + +var parseArray = function(value) { + var dim = parseBits(value, 32); + + var flags = parseBits(value, 32, 32); + var elementType = parseBits(value, 32, 64); + + var offset = 96; + var dims = []; + for (var i = 0; i < dim; i++) { + // parse dimension + dims[i] = parseBits(value, 32, offset); + offset += 32; + + // ignore lower bounds + offset += 32; + } + + var parseElement = function(elementType) { + // parse content length + var length = parseBits(value, 32, offset); + offset += 32; + + // parse null values + if (length == 0xffffffff) { + return null; + } + + var result; + if ((elementType == 0x17) || (elementType == 0x14)) { + // int/bigint + result = parseBits(value, length * 8, offset); + offset += length * 8; + return result; + } + else if (elementType == 0x19) { + // string + result = value.toString(this.encoding, offset >> 3, (offset += (length << 3)) >> 3); + return result; + } + else { + console.log("ERROR: ElementType not implemented: " + elementType); + } + }; + + var parse = function(dimension, elementType) { + var array = []; + var i; + + if (dimension.length > 1) { + var count = dimension.shift(); + for (i = 0; i < count; i++) { + array[i] = parse(dimension, elementType); + } + dimension.unshift(count); + } + else { + for (i = 0; i < dimension[0]; i++) { + array[i] = parseElement(elementType); + } + } + + return array; + }; + + return parse(dims, elementType); +}; + +var parseText = function(value) { + return value.toString('utf8'); +}; + +var parseBool = function(value) { + if(value === null) return null; + return (parseBits(value, 8) > 0); +}; + +var init = function(register) { + register(20, parseInt64); + register(21, parseInt16); + register(23, parseInt32); + register(26, parseInt32); + register(1700, parseNumeric); + register(700, parseFloat32); + register(701, parseFloat64); + register(16, parseBool); + register(1114, parseDate.bind(null, false)); + register(1184, parseDate.bind(null, true)); + register(1000, parseArray); + register(1007, parseArray); + register(1016, parseArray); + register(1008, parseArray); + register(1009, parseArray); + register(25, parseText); +}; + +module.exports = { + init: init +}; diff --git a/node_modules/pg-types/lib/builtins.js b/node_modules/pg-types/lib/builtins.js new file mode 100644 index 00000000..f0c134a8 --- /dev/null +++ b/node_modules/pg-types/lib/builtins.js @@ -0,0 +1,73 @@ +/** + * Following query was used to generate this file: + + SELECT json_object_agg(UPPER(PT.typname), PT.oid::int4 ORDER BY pt.oid) + FROM pg_type PT + WHERE typnamespace = (SELECT pgn.oid FROM pg_namespace pgn WHERE nspname = 'pg_catalog') -- Take only builting Postgres types with stable OID (extension types are not guaranted to be stable) + AND typtype = 'b' -- Only basic types + AND typelem = 0 -- Ignore aliases + AND typisdefined -- Ignore undefined types + */ + +module.exports = { + BOOL: 16, + BYTEA: 17, + CHAR: 18, + INT8: 20, + INT2: 21, + INT4: 23, + REGPROC: 24, + TEXT: 25, + OID: 26, + TID: 27, + XID: 28, + CID: 29, + JSON: 114, + XML: 142, + PG_NODE_TREE: 194, + SMGR: 210, + PATH: 602, + POLYGON: 604, + CIDR: 650, 
+ FLOAT4: 700, + FLOAT8: 701, + ABSTIME: 702, + RELTIME: 703, + TINTERVAL: 704, + CIRCLE: 718, + MACADDR8: 774, + MONEY: 790, + MACADDR: 829, + INET: 869, + ACLITEM: 1033, + BPCHAR: 1042, + VARCHAR: 1043, + DATE: 1082, + TIME: 1083, + TIMESTAMP: 1114, + TIMESTAMPTZ: 1184, + INTERVAL: 1186, + TIMETZ: 1266, + BIT: 1560, + VARBIT: 1562, + NUMERIC: 1700, + REFCURSOR: 1790, + REGPROCEDURE: 2202, + REGOPER: 2203, + REGOPERATOR: 2204, + REGCLASS: 2205, + REGTYPE: 2206, + UUID: 2950, + TXID_SNAPSHOT: 2970, + PG_LSN: 3220, + PG_NDISTINCT: 3361, + PG_DEPENDENCIES: 3402, + TSVECTOR: 3614, + TSQUERY: 3615, + GTSVECTOR: 3642, + REGCONFIG: 3734, + REGDICTIONARY: 3769, + JSONB: 3802, + REGNAMESPACE: 4089, + REGROLE: 4096 +}; diff --git a/node_modules/pg-types/lib/textParsers.js b/node_modules/pg-types/lib/textParsers.js new file mode 100644 index 00000000..b1218bfe --- /dev/null +++ b/node_modules/pg-types/lib/textParsers.js @@ -0,0 +1,215 @@ +var array = require('postgres-array') +var arrayParser = require('./arrayParser'); +var parseDate = require('postgres-date'); +var parseInterval = require('postgres-interval'); +var parseByteA = require('postgres-bytea'); + +function allowNull (fn) { + return function nullAllowed (value) { + if (value === null) return value + return fn(value) + } +} + +function parseBool (value) { + if (value === null) return value + return value === 'TRUE' || + value === 't' || + value === 'true' || + value === 'y' || + value === 'yes' || + value === 'on' || + value === '1'; +} + +function parseBoolArray (value) { + if (!value) return null + return array.parse(value, parseBool) +} + +function parseBaseTenInt (string) { + return parseInt(string, 10) +} + +function parseIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(parseBaseTenInt)) +} + +function parseBigIntegerArray (value) { + if (!value) return null + return array.parse(value, allowNull(function (entry) { + return parseBigInteger(entry).trim() + })) +} + +var parsePointArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parsePoint(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseFloatArray = function(value) { + if(!value) { return null; } + var p = arrayParser.create(value, function(entry) { + if(entry !== null) { + entry = parseFloat(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseStringArray = function(value) { + if(!value) { return null; } + + var p = arrayParser.create(value); + return p.parse(); +}; + +var parseDateArray = function(value) { + if (!value) { return null; } + + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseDate(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseIntervalArray = function(value) { + if (!value) { return null; } + + var p = arrayParser.create(value, function(entry) { + if (entry !== null) { + entry = parseInterval(entry); + } + return entry; + }); + + return p.parse(); +}; + +var parseByteAArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(parseByteA)); +}; + +var parseInteger = function(value) { + return parseInt(value, 10); +}; + +var parseBigInteger = function(value) { + var valStr = String(value); + if (/^\d+$/.test(valStr)) { return valStr; } + return value; +}; + +var parseJsonArray = function(value) { + if (!value) { return null; } + + return array.parse(value, allowNull(JSON.parse)); +}; + +var 
parsePoint = function(value) { + if (value[0] !== '(') { return null; } + + value = value.substring( 1, value.length - 1 ).split(','); + + return { + x: parseFloat(value[0]) + , y: parseFloat(value[1]) + }; +}; + +var parseCircle = function(value) { + if (value[0] !== '<' && value[1] !== '(') { return null; } + + var point = '('; + var radius = ''; + var pointParsed = false; + for (var i = 2; i < value.length - 1; i++){ + if (!pointParsed) { + point += value[i]; + } + + if (value[i] === ')') { + pointParsed = true; + continue; + } else if (!pointParsed) { + continue; + } + + if (value[i] === ','){ + continue; + } + + radius += value[i]; + } + var result = parsePoint(point); + result.radius = parseFloat(radius); + + return result; +}; + +var init = function(register) { + register(20, parseBigInteger); // int8 + register(21, parseInteger); // int2 + register(23, parseInteger); // int4 + register(26, parseInteger); // oid + register(700, parseFloat); // float4/real + register(701, parseFloat); // float8/double + register(16, parseBool); + register(1082, parseDate); // date + register(1114, parseDate); // timestamp without timezone + register(1184, parseDate); // timestamp + register(600, parsePoint); // point + register(651, parseStringArray); // cidr[] + register(718, parseCircle); // circle + register(1000, parseBoolArray); + register(1001, parseByteAArray); + register(1005, parseIntegerArray); // _int2 + register(1007, parseIntegerArray); // _int4 + register(1028, parseIntegerArray); // oid[] + register(1016, parseBigIntegerArray); // _int8 + register(1017, parsePointArray); // point[] + register(1021, parseFloatArray); // _float4 + register(1022, parseFloatArray); // _float8 + register(1231, parseFloatArray); // _numeric + register(1014, parseStringArray); //char + register(1015, parseStringArray); //varchar + register(1008, parseStringArray); + register(1009, parseStringArray); + register(1040, parseStringArray); // macaddr[] + register(1041, parseStringArray); // inet[] + register(1115, parseDateArray); // timestamp without time zone[] + register(1182, parseDateArray); // _date + register(1185, parseDateArray); // timestamp with time zone[] + register(1186, parseInterval); + register(1187, parseIntervalArray); + register(17, parseByteA); + register(114, JSON.parse.bind(JSON)); // json + register(3802, JSON.parse.bind(JSON)); // jsonb + register(199, parseJsonArray); // json[] + register(3807, parseJsonArray); // jsonb[] + register(3907, parseStringArray); // numrange[] + register(2951, parseStringArray); // uuid[] + register(791, parseStringArray); // money[] + register(1183, parseStringArray); // time[] + register(1270, parseStringArray); // timetz[] +}; + +module.exports = { + init: init +}; diff --git a/node_modules/pg-types/package.json b/node_modules/pg-types/package.json new file mode 100644 index 00000000..eaa43cf6 --- /dev/null +++ b/node_modules/pg-types/package.json @@ -0,0 +1,69 @@ +{ + "_from": "pg-types@^2.1.0", + "_id": "pg-types@2.2.0", + "_inBundle": false, + "_integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "_location": "/pg-types", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pg-types@^2.1.0", + "name": "pg-types", + "escapedName": "pg-types", + "rawSpec": "^2.1.0", + "saveSpec": null, + "fetchSpec": "^2.1.0" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "_shasum": 
"2d0250d636454f7cfa3b6ae0382fdfa8063254a3", + "_spec": "pg-types@^2.1.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "author": { + "name": "Brian M. Carlson" + }, + "bugs": { + "url": "https://github.com/brianc/node-pg-types/issues" + }, + "bundleDependencies": false, + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "deprecated": false, + "description": "Query result type converters for node-postgres", + "devDependencies": { + "if-node-version": "^1.1.1", + "pff": "^1.0.0", + "tap-spec": "^4.0.0", + "tape": "^4.0.0", + "tsd": "^0.7.4" + }, + "engines": { + "node": ">=4" + }, + "homepage": "https://github.com/brianc/node-pg-types", + "keywords": [ + "postgres", + "PostgreSQL", + "pg" + ], + "license": "MIT", + "main": "index.js", + "name": "pg-types", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-pg-types.git" + }, + "scripts": { + "test": "tape test/*.js | tap-spec && npm run test-ts", + "test-ts": "if-node-version '>= 8' tsd" + }, + "version": "2.2.0" +} diff --git a/node_modules/pg-types/test/index.js b/node_modules/pg-types/test/index.js new file mode 100644 index 00000000..b7d05cd6 --- /dev/null +++ b/node_modules/pg-types/test/index.js @@ -0,0 +1,24 @@ + +var test = require('tape') +var printf = require('pff') +var getTypeParser = require('../').getTypeParser +var types = require('./types') + +test('types', function (t) { + Object.keys(types).forEach(function (typeName) { + var type = types[typeName] + t.test(typeName, function (t) { + var parser = getTypeParser(type.id, type.format) + type.tests.forEach(function (tests) { + var input = tests[0] + var expected = tests[1] + var result = parser(input) + if (typeof expected === 'function') { + return expected(t, result) + } + t.equal(result, expected) + }) + t.end() + }) + }) +}) diff --git a/node_modules/pg-types/test/types.js b/node_modules/pg-types/test/types.js new file mode 100644 index 00000000..af708a5c --- /dev/null +++ b/node_modules/pg-types/test/types.js @@ -0,0 +1,597 @@ +'use strict' + +exports['string/varchar'] = { + format: 'text', + id: 1043, + tests: [ + ['bang', 'bang'] + ] +} + +exports['integer/int4'] = { + format: 'text', + id: 23, + tests: [ + ['2147483647', 2147483647] + ] +} + +exports['smallint/int2'] = { + format: 'text', + id: 21, + tests: [ + ['32767', 32767] + ] +} + +exports['bigint/int8'] = { + format: 'text', + id: 20, + tests: [ + ['9223372036854775807', '9223372036854775807'] + ] +} + +exports.oid = { + format: 'text', + id: 26, + tests: [ + ['103', 103] + ] +} + +var bignum = '31415926535897932384626433832795028841971693993751058.16180339887498948482045868343656381177203091798057628' +exports.numeric = { + format: 'text', + id: 1700, + tests: [ + [bignum, bignum] + ] +} + +exports['real/float4'] = { + format: 'text', + id: 700, + tests: [ + ['123.456', 123.456] + ] +} + +exports['double precision / float 8'] = { + format: 'text', + id: 701, + tests: [ + ['12345678.12345678', 12345678.12345678] + ] +} + +exports.boolean = { + format: 'text', + id: 16, + tests: [ + ['TRUE', true], + ['t', true], + ['true', true], + ['y', true], + ['yes', true], + ['on', true], + ['1', true], + ['f', false], + [null, null] + ] +} + +exports.timestamptz = { + format: 'text', + id: 1184, + tests: [ + [ + '2010-10-31 14:54:13.74-05:30', + dateEquals(2010, 9, 31, 20, 24, 13, 740) + ], + [ + '2011-01-23 22:05:00.68-06', + dateEquals(2011, 0, 24, 4, 5, 0, 
680) + ], + [ + '2010-10-30 14:11:12.730838Z', + dateEquals(2010, 9, 30, 14, 11, 12, 730) + ], + [ + '2010-10-30 13:10:01+05', + dateEquals(2010, 9, 30, 8, 10, 1, 0) + ] + ] +} + +exports.timestamp = { + format: 'text', + id: 1114, + tests: [ + [ + '2010-10-31 00:00:00', + function (t, value) { + t.equal( + value.toUTCString(), + new Date(2010, 9, 31, 0, 0, 0, 0, 0).toUTCString() + ) + t.equal( + value.toString(), + new Date(2010, 9, 31, 0, 0, 0, 0, 0, 0).toString() + ) + } + ] + ] +} + +exports.date = { + format: 'text', + id: 1082, + tests: [ + ['2010-10-31', function (t, value) { + var now = new Date(2010, 9, 31) + dateEquals( + 2010, + now.getUTCMonth(), + now.getUTCDate(), + now.getUTCHours(), 0, 0, 0)(t, value) + t.equal(value.getHours(), now.getHours()) + }] + ] +} + +exports.inet = { + format: 'text', + id: 869, + tests: [ + ['8.8.8.8', '8.8.8.8'], + ['2001:4860:4860::8888', '2001:4860:4860::8888'], + ['127.0.0.1', '127.0.0.1'], + ['fd00:1::40e', 'fd00:1::40e'], + ['1.2.3.4', '1.2.3.4'] + ] +} + +exports.cidr = { + format: 'text', + id: 650, + tests: [ + ['172.16.0.0/12', '172.16.0.0/12'], + ['fe80::/10', 'fe80::/10'], + ['fc00::/7', 'fc00::/7'], + ['192.168.0.0/24', '192.168.0.0/24'], + ['10.0.0.0/8', '10.0.0.0/8'] + ] +} + +exports.macaddr = { + format: 'text', + id: 829, + tests: [ + ['08:00:2b:01:02:03', '08:00:2b:01:02:03'], + ['16:10:9f:0d:66:00', '16:10:9f:0d:66:00'] + ] +} + +exports.numrange = { + format: 'text', + id: 3906, + tests: [ + ['[,]', '[,]'], + ['(,)', '(,)'], + ['(,]', '(,]'], + ['[1,)', '[1,)'], + ['[,1]', '[,1]'], + ['(1,2)', '(1,2)'], + ['(1,20.5]', '(1,20.5]'] + ] +} + +exports.interval = { + format: 'text', + id: 1186, + tests: [ + ['01:02:03', function (t, value) { + t.equal(value.toPostgres(), '3 seconds 2 minutes 1 hours') + t.deepEqual(value, {hours: 1, minutes: 2, seconds: 3}) + }], + ['01:02:03.456', function (t, value) { + t.deepEqual(value, {hours: 1, minutes:2, seconds: 3, milliseconds: 456}) + }], + ['1 year -32 days', function (t, value) { + t.equal(value.toPostgres(), '-32 days 1 years') + t.deepEqual(value, {years: 1, days: -32}) + }], + ['1 day -00:00:03', function (t, value) { + t.equal(value.toPostgres(), '-3 seconds 1 days') + t.deepEqual(value, {days: 1, seconds: -3}) + }] + ] +} + +exports.bytea = { + format: 'text', + id: 17, + tests: [ + ['foo\\000\\200\\\\\\377', function (t, value) { + var buffer = new Buffer([102, 111, 111, 0, 128, 92, 255]) + t.ok(buffer.equals(value)) + }], + ['', function (t, value) { + var buffer = new Buffer(0) + t.ok(buffer.equals(value)) + }] + ] +} + +exports['array/boolean'] = { + format: 'text', + id: 1000, + tests: [ + ['{true,false}', function (t, value) { + t.deepEqual(value, [true, false]) + }] + ] +} + +exports['array/char'] = { + format: 'text', + id: 1014, + tests: [ + ['{foo,bar}', function (t, value) { + t.deepEqual(value, ['foo', 'bar']) + }] + ] +} + +exports['array/varchar'] = { + format: 'text', + id: 1015, + tests: [ + ['{foo,bar}', function (t, value) { + t.deepEqual(value, ['foo', 'bar']) + }] + ] +} + +exports['array/text'] = { + format: 'text', + id: 1008, + tests: [ + ['{foo}', function (t, value) { + t.deepEqual(value, ['foo']) + }] + ] +} + +exports['array/bytea'] = { + format: 'text', + id: 1001, + tests: [ + ['{"\\\\x00000000"}', function (t, value) { + var buffer = new Buffer('00000000', 'hex') + t.ok(Array.isArray(value)) + t.equal(value.length, 1) + t.ok(buffer.equals(value[0])) + }], + ['{NULL,"\\\\x4e554c4c"}', function (t, value) { + var buffer = new Buffer('4e554c4c', 'hex') + 
t.ok(Array.isArray(value)) + t.equal(value.length, 2) + t.equal(value[0], null) + t.ok(buffer.equals(value[1])) + }], + ] +} + +exports['array/numeric'] = { + format: 'text', + id: 1231, + tests: [ + ['{1.2,3.4}', function (t, value) { + t.deepEqual(value, [1.2, 3.4]) + }] + ] +} + +exports['array/int2'] = { + format: 'text', + id: 1005, + tests: [ + ['{-32768, -32767, 32766, 32767}', function (t, value) { + t.deepEqual(value, [-32768, -32767, 32766, 32767]) + }] + ] +} + +exports['array/int4'] = { + format: 'text', + id: 1005, + tests: [ + ['{-2147483648, -2147483647, 2147483646, 2147483647}', function (t, value) { + t.deepEqual(value, [-2147483648, -2147483647, 2147483646, 2147483647]) + }] + ] +} + +exports['array/int8'] = { + format: 'text', + id: 1016, + tests: [ + [ + '{-9223372036854775808, -9223372036854775807, 9223372036854775806, 9223372036854775807}', + function (t, value) { + t.deepEqual(value, [ + '-9223372036854775808', + '-9223372036854775807', + '9223372036854775806', + '9223372036854775807' + ]) + } + ] + ] +} + +exports['array/json'] = { + format: 'text', + id: 199, + tests: [ + [ + '{{1,2},{[3],"[4,5]"},{null,NULL}}', + function (t, value) { + t.deepEqual(value, [ + [1, 2], + [[3], [4, 5]], + [null, null], + ]) + } + ] + ] +} + +exports['array/jsonb'] = { + format: 'text', + id: 3807, + tests: exports['array/json'].tests +} + +exports['array/point'] = { + format: 'text', + id: 1017, + tests: [ + ['{"(25.1,50.5)","(10.1,40)"}', function (t, value) { + t.deepEqual(value, [{x: 25.1, y: 50.5}, {x: 10.1, y: 40}]) + }] + ] +} + +exports['array/oid'] = { + format: 'text', + id: 1028, + tests: [ + ['{25864,25860}', function (t, value) { + t.deepEqual(value, [25864, 25860]) + }] + ] +} + +exports['array/float4'] = { + format: 'text', + id: 1021, + tests: [ + ['{1.2, 3.4}', function (t, value) { + t.deepEqual(value, [1.2, 3.4]) + }] + ] +} + +exports['array/float8'] = { + format: 'text', + id: 1022, + tests: [ + ['{-12345678.1234567, 12345678.12345678}', function (t, value) { + t.deepEqual(value, [-12345678.1234567, 12345678.12345678]) + }] + ] +} + +exports['array/date'] = { + format: 'text', + id: 1182, + tests: [ + ['{2014-01-01,2015-12-31}', function (t, value) { + var expecteds = [new Date(2014, 0, 1), new Date(2015, 11, 31)] + t.equal(value.length, 2) + value.forEach(function (date, index) { + var expected = expecteds[index] + dateEquals( + expected.getUTCFullYear(), + expected.getUTCMonth(), + expected.getUTCDate(), + expected.getUTCHours(), 0, 0, 0)(t, date) + }) + }] + ] +} + +exports['array/interval'] = { + format: 'text', + id: 1187, + tests: [ + ['{01:02:03,1 day -00:00:03}', function (t, value) { + var expecteds = [{hours: 1, minutes: 2, seconds: 3}, + {days: 1, seconds: -3}] + t.equal(value.length, 2) + t.deepEqual(value, expecteds); + }] + ] +} + +exports['array/inet'] = { + format: 'text', + id: 1041, + tests: [ + ['{8.8.8.8}', function (t, value) { + t.deepEqual(value, ['8.8.8.8']); + }], + ['{2001:4860:4860::8888}', function (t, value) { + t.deepEqual(value, ['2001:4860:4860::8888']); + }], + ['{127.0.0.1,fd00:1::40e,1.2.3.4}', function (t, value) { + t.deepEqual(value, ['127.0.0.1', 'fd00:1::40e', '1.2.3.4']); + }] + ] +} + +exports['array/cidr'] = { + format: 'text', + id: 651, + tests: [ + ['{172.16.0.0/12}', function (t, value) { + t.deepEqual(value, ['172.16.0.0/12']); + }], + ['{fe80::/10}', function (t, value) { + t.deepEqual(value, ['fe80::/10']); + }], + ['{10.0.0.0/8,fc00::/7,192.168.0.0/24}', function (t, value) { + t.deepEqual(value, ['10.0.0.0/8', 
'fc00::/7', '192.168.0.0/24']); + }] + ] +} + +exports['array/macaddr'] = { + format: 'text', + id: 1040, + tests: [ + ['{08:00:2b:01:02:03,16:10:9f:0d:66:00}', function (t, value) { + t.deepEqual(value, ['08:00:2b:01:02:03', '16:10:9f:0d:66:00']); + }] + ] +} + +exports['array/numrange'] = { + format: 'text', + id: 3907, + tests: [ + ['{"[1,2]","(4.5,8)","[10,40)","(-21.2,60.3]"}', function (t, value) { + t.deepEqual(value, ['[1,2]', '(4.5,8)', '[10,40)', '(-21.2,60.3]']); + }], + ['{"[,20]","[3,]","[,]","(,35)","(1,)","(,)"}', function (t, value) { + t.deepEqual(value, ['[,20]', '[3,]', '[,]', '(,35)', '(1,)', '(,)']); + }], + ['{"[,20)","[3,)","[,)","[,35)","[1,)","[,)"}', function (t, value) { + t.deepEqual(value, ['[,20)', '[3,)', '[,)', '[,35)', '[1,)', '[,)']); + }] + ] +} + +exports['binary-string/varchar'] = { + format: 'binary', + id: 1043, + tests: [ + ['bang', 'bang'] + ] +} + +exports['binary-integer/int4'] = { + format: 'binary', + id: 23, + tests: [ + [[0, 0, 0, 100], 100] + ] +} + +exports['binary-smallint/int2'] = { + format: 'binary', + id: 21, + tests: [ + [[0, 101], 101] + ] +} + +exports['binary-bigint/int8'] = { + format: 'binary', + id: 20, + tests: [ + [new Buffer([0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]), '9223372036854775807'] + ] +} + +exports['binary-oid'] = { + format: 'binary', + id: 26, + tests: [ + [[0, 0, 0, 103], 103] + ] +} + +exports['binary-numeric'] = { + format: 'binary', + id: 1700, + tests: [ + [ + [0, 2, 0, 0, 0, 0, 0, hex('0x64'), 0, 12, hex('0xd'), hex('0x48'), 0, 0, 0, 0], + 12.34 + ] + ] +} + +exports['binary-real/float4'] = { + format: 'binary', + id: 700, + tests: [ + [['0x41', '0x48', '0x00', '0x00'].map(hex), 12.5] + ] +} + +exports['binary-boolean'] = { + format: 'binary', + id: 16, + tests: [ + [[1], true], + [[0], false], + [null, null] + ] +} + +exports['binary-string'] = { + format: 'binary', + id: 25, + tests: [ + [ + new Buffer(['0x73', '0x6c', '0x61', '0x64', '0x64', '0x61'].map(hex)), + 'sladda' + ] + ] +} + +exports.point = { + format: 'text', + id: 600, + tests: [ + ['(25.1,50.5)', function (t, value) { + t.deepEqual(value, {x: 25.1, y: 50.5}) + }] + ] +} + +exports.circle = { + format: 'text', + id: 718, + tests: [ + ['<(25,10),5>', function (t, value) { + t.deepEqual(value, {x: 25, y: 10, radius: 5}) + }] + ] +} + +function hex (string) { + return parseInt(string, 16) +} + +function dateEquals () { + var timestamp = Date.UTC.apply(Date, arguments) + return function (t, value) { + t.equal(value.toUTCString(), new Date(timestamp).toUTCString()) + } +} diff --git a/node_modules/pg/LICENSE b/node_modules/pg/LICENSE new file mode 100644 index 00000000..aa66489d --- /dev/null +++ b/node_modules/pg/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010 - 2020 Brian Carlson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/pg/README.md b/node_modules/pg/README.md new file mode 100644 index 00000000..ed4d7a62 --- /dev/null +++ b/node_modules/pg/README.md @@ -0,0 +1,96 @@ +# node-postgres + +[![Build Status](https://secure.travis-ci.org/brianc/node-postgres.svg?branch=master)](http://travis-ci.org/brianc/node-postgres) +[![Dependency Status](https://david-dm.org/brianc/node-postgres.svg?path=packages/pg)](https://david-dm.org/brianc/node-postgres?path=packages/pg) +NPM version +NPM downloads + +Non-blocking PostgreSQL client for Node.js. Pure JavaScript and optional native libpq bindings. + +## Install + +```sh +$ npm install pg +``` + +--- + +## :star: [Documentation](https://node-postgres.com) :star: + +### Features + +- Pure JavaScript client and native libpq bindings share _the same API_ +- Connection pooling +- Extensible JS ↔ PostgreSQL data-type coercion +- Supported PostgreSQL features + - Parameterized queries + - Named statements with query plan caching + - Async notifications with `LISTEN/NOTIFY` + - Bulk import & export with `COPY TO/COPY FROM` + +### Extras + +node-postgres is by design pretty light on abstractions. These are some handy modules we've been using over the years to complete the picture. +The entire list can be found on our [wiki](https://github.com/brianc/node-postgres/wiki/Extras). + +## Support + +node-postgres is free software. If you encounter a bug with the library please open an issue on the [GitHub repo](https://github.com/brianc/node-postgres). If you have questions unanswered by the documentation please open an issue pointing out how the documentation was unclear & I will do my best to make it better! + +When you open an issue please provide: + +- version of Node +- version of Postgres +- smallest possible snippet of code to reproduce the problem + +You can also follow me [@briancarlson](https://twitter.com/briancarlson) if that's your thing. I try to always announce noteworthy changes & developments with node-postgres on Twitter. + +## Sponsorship :two_hearts: + +node-postgres's continued development has been made possible in part by generous finanical support from [the community](https://github.com/brianc/node-postgres/blob/master/SPONSORS.md) and these featured sponsors: + +
+ + + +
+ + If you or your company are benefiting from node-postgres and would like to help keep the project financially sustainable [please consider supporting](https://github.com/sponsors/brianc) its development. + +## Contributing + +**:heart: contributions!** + +I will **happily** accept your pull request if it: + +- **has tests** +- looks reasonable +- does not break backwards compatibility + +If your change involves breaking backwards compatibility please please point that out in the pull request & we can discuss & plan when and how to release it and what type of documentation or communication it will require. + +## Troubleshooting and FAQ + +The causes and solutions to common errors can be found among the [Frequently Asked Questions (FAQ)](https://github.com/brianc/node-postgres/wiki/FAQ). + +## License + +Copyright (c) 2010-2020 Brian Carlson (brian.m.carlson@gmail.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE.
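The feature list in the pg README above is easier to see with a small end-to-end sketch using the promise-based API. The connection string below is a placeholder (pg will also pick up the usual `PG*` environment variables); the query just demonstrates a parameterized round trip.

```js
const { Client } = require('pg')

async function main() {
  // Placeholder connection string - adjust for your environment.
  const client = new Client({ connectionString: 'postgres://user:secret@localhost:5432/mydb' })
  await client.connect()

  // Parameterized query: the value is sent separately from the SQL text.
  const res = await client.query('SELECT $1::text AS greeting', ['hello world'])
  console.log(res.rows[0].greeting) // -> 'hello world'

  await client.end()
}

main().catch(console.error)
```

For many callers sharing a server, the pooled variant (`const { Pool } = require('pg')`) exposes the same `query` interface and manages connections for you.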
diff --git a/node_modules/pg/lib/client.js b/node_modules/pg/lib/client.js new file mode 100644 index 00000000..72973c44 --- /dev/null +++ b/node_modules/pg/lib/client.js @@ -0,0 +1,604 @@ +'use strict' + +var EventEmitter = require('events').EventEmitter +var util = require('util') +var utils = require('./utils') +var sasl = require('./sasl') +var pgPass = require('pgpass') +var TypeOverrides = require('./type-overrides') + +var ConnectionParameters = require('./connection-parameters') +var Query = require('./query') +var defaults = require('./defaults') +var Connection = require('./connection') + +class Client extends EventEmitter { + constructor(config) { + super() + + this.connectionParameters = new ConnectionParameters(config) + this.user = this.connectionParameters.user + this.database = this.connectionParameters.database + this.port = this.connectionParameters.port + this.host = this.connectionParameters.host + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: this.connectionParameters.password, + }) + + this.replication = this.connectionParameters.replication + + var c = config || {} + + this._Promise = c.Promise || global.Promise + this._types = new TypeOverrides(c.types) + this._ending = false + this._connecting = false + this._connected = false + this._connectionError = false + this._queryable = true + + this.connection = + c.connection || + new Connection({ + stream: c.stream, + ssl: this.connectionParameters.ssl, + keepAlive: c.keepAlive || false, + keepAliveInitialDelayMillis: c.keepAliveInitialDelayMillis || 0, + encoding: this.connectionParameters.client_encoding || 'utf8', + }) + this.queryQueue = [] + this.binary = c.binary || defaults.binary + this.processID = null + this.secretKey = null + this.ssl = this.connectionParameters.ssl || false + this._connectionTimeoutMillis = c.connectionTimeoutMillis || 0 + } + + _errorAllQueries(err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.handleError(err, this.connection) + }) + } + + if (this.activeQuery) { + enqueueError(this.activeQuery) + this.activeQuery = null + } + + this.queryQueue.forEach(enqueueError) + this.queryQueue.length = 0 + } + + _connect(callback) { + var self = this + var con = this.connection + this._connectionCallback = callback + + if (this._connecting || this._connected) { + const err = new Error('Client has already been connected. You cannot reuse a client.') + process.nextTick(() => { + callback(err) + }) + return + } + this._connecting = true + + this.connectionTimeoutHandle + if (this._connectionTimeoutMillis > 0) { + this.connectionTimeoutHandle = setTimeout(() => { + con._ending = true + con.stream.destroy(new Error('timeout expired')) + }, this._connectionTimeoutMillis) + } + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' + this.port) + } else { + con.connect(this.port, this.host) + } + + // once connection is established send startup message + con.on('connect', function () { + if (self.ssl) { + con.requestSsl() + } else { + con.startup(self.getStartupConf()) + } + }) + + con.on('sslconnect', function () { + con.startup(self.getStartupConf()) + }) + + this._attachListeners(con) + + con.once('end', () => { + const error = this._ending ? 
new Error('Connection terminated') : new Error('Connection terminated unexpectedly') + + clearTimeout(this.connectionTimeoutHandle) + this._errorAllQueries(error) + + if (!this._ending) { + // if the connection is ended without us calling .end() + // on this client then we have an unexpected disconnection + // treat this as an error unless we've already emitted an error + // during connection. + if (this._connecting && !this._connectionError) { + if (this._connectionCallback) { + this._connectionCallback(error) + } else { + this._handleErrorEvent(error) + } + } else if (!this._connectionError) { + this._handleErrorEvent(error) + } + } + + process.nextTick(() => { + this.emit('end') + }) + }) + } + + connect(callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) + } + + _attachListeners(con) { + // password request handling + con.on('authenticationCleartextPassword', this._handleAuthCleartextPassword.bind(this)) + // password request handling + con.on('authenticationMD5Password', this._handleAuthMD5Password.bind(this)) + // password request handling (SASL) + con.on('authenticationSASL', this._handleAuthSASL.bind(this)) + con.on('authenticationSASLContinue', this._handleAuthSASLContinue.bind(this)) + con.on('authenticationSASLFinal', this._handleAuthSASLFinal.bind(this)) + con.on('backendKeyData', this._handleBackendKeyData.bind(this)) + con.on('error', this._handleErrorEvent.bind(this)) + con.on('errorMessage', this._handleErrorMessage.bind(this)) + con.on('readyForQuery', this._handleReadyForQuery.bind(this)) + con.on('notice', this._handleNotice.bind(this)) + con.on('rowDescription', this._handleRowDescription.bind(this)) + con.on('dataRow', this._handleDataRow.bind(this)) + con.on('portalSuspended', this._handlePortalSuspended.bind(this)) + con.on('emptyQuery', this._handleEmptyQuery.bind(this)) + con.on('commandComplete', this._handleCommandComplete.bind(this)) + con.on('parseComplete', this._handleParseComplete.bind(this)) + con.on('copyInResponse', this._handleCopyInResponse.bind(this)) + con.on('copyData', this._handleCopyData.bind(this)) + con.on('notification', this._handleNotification.bind(this)) + } + + // TODO(bmc): deprecate pgpass "built in" integration since this.password can be a function + // it can be supplied by the user if required - this is a breaking change! 
+ _checkPgPass(cb) { + const con = this.connection + if (typeof this.password === 'function') { + this._Promise + .resolve() + .then(() => this.password()) + .then((pass) => { + if (pass !== undefined) { + if (typeof pass !== 'string') { + con.emit('error', new TypeError('Password must be a string')) + return + } + this.connectionParameters.password = this.password = pass + } else { + this.connectionParameters.password = this.password = null + } + cb() + }) + .catch((err) => { + con.emit('error', err) + }) + } else if (this.password !== null) { + cb() + } else { + pgPass(this.connectionParameters, function (pass) { + if (undefined !== pass) { + this.connectionParameters.password = this.password = pass + } + cb() + }) + } + } + + _handleAuthCleartextPassword(msg) { + this._checkPgPass(() => { + this.connection.password(this.password) + }) + } + + _handleAuthMD5Password(msg) { + this._checkPgPass(() => { + const hashedPassword = utils.postgresMd5PasswordHash(this.user, this.password, msg.salt) + this.connection.password(hashedPassword) + }) + } + + _handleAuthSASL(msg) { + this._checkPgPass(() => { + this.saslSession = sasl.startSession(msg.mechanisms) + this.connection.sendSASLInitialResponseMessage(this.saslSession.mechanism, this.saslSession.response) + }) + } + + _handleAuthSASLContinue(msg) { + sasl.continueSession(this.saslSession, this.password, msg.data) + this.connection.sendSCRAMClientFinalMessage(this.saslSession.response) + } + + _handleAuthSASLFinal(msg) { + sasl.finalizeSession(this.saslSession, msg.data) + this.saslSession = null + } + + _handleBackendKeyData(msg) { + this.processID = msg.processID + this.secretKey = msg.secretKey + } + + _handleReadyForQuery(msg) { + if (this._connecting) { + this._connecting = false + this._connected = true + clearTimeout(this.connectionTimeoutHandle) + + // process possible callback argument to Client#connect + if (this._connectionCallback) { + this._connectionCallback(null, this) + // remove callback for proper error handling + // after the connect event + this._connectionCallback = null + } + this.emit('connect') + } + const { activeQuery } = this + this.activeQuery = null + this.readyForQuery = true + if (activeQuery) { + activeQuery.handleReadyForQuery(this.connection) + } + this._pulseQueryQueue() + } + + // if we receieve an error event or error message + // during the connection process we handle it here + _handleErrorWhileConnecting(err) { + if (this._connectionError) { + // TODO(bmc): this is swallowing errors - we shouldn't do this + return + } + this._connectionError = true + clearTimeout(this.connectionTimeoutHandle) + if (this._connectionCallback) { + return this._connectionCallback(err) + } + this.emit('error', err) + } + + // if we're connected and we receive an error event from the connection + // this means the socket is dead - do a hard abort of all queries and emit + // the socket error on the client as well + _handleErrorEvent(err) { + if (this._connecting) { + return this._handleErrorWhileConnecting(err) + } + this._queryable = false + this._errorAllQueries(err) + this.emit('error', err) + } + + // handle error messages from the postgres backend + _handleErrorMessage(msg) { + if (this._connecting) { + return this._handleErrorWhileConnecting(msg) + } + const activeQuery = this.activeQuery + + if (!activeQuery) { + this._handleErrorEvent(msg) + return + } + + this.activeQuery = null + activeQuery.handleError(msg, this.connection) + } + + _handleRowDescription(msg) { + // delegate rowDescription to active query + 
this.activeQuery.handleRowDescription(msg) + } + + _handleDataRow(msg) { + // delegate dataRow to active query + this.activeQuery.handleDataRow(msg) + } + + _handlePortalSuspended(msg) { + // delegate portalSuspended to active query + this.activeQuery.handlePortalSuspended(this.connection) + } + + _handleEmptyQuery(msg) { + // delegate emptyQuery to active query + this.activeQuery.handleEmptyQuery(this.connection) + } + + _handleCommandComplete(msg) { + // delegate commandComplete to active query + this.activeQuery.handleCommandComplete(msg, this.connection) + } + + _handleParseComplete(msg) { + // if a prepared statement has a name and properly parses + // we track that its already been executed so we don't parse + // it again on the same client + if (this.activeQuery.name) { + this.connection.parsedStatements[this.activeQuery.name] = this.activeQuery.text + } + } + + _handleCopyInResponse(msg) { + this.activeQuery.handleCopyInResponse(this.connection) + } + + _handleCopyData(msg) { + this.activeQuery.handleCopyData(msg, this.connection) + } + + _handleNotification(msg) { + this.emit('notification', msg) + } + + _handleNotice(msg) { + this.emit('notice', msg) + } + + getStartupConf() { + var params = this.connectionParameters + + var data = { + user: params.user, + database: params.database, + } + + var appName = params.application_name || params.fallback_application_name + if (appName) { + data.application_name = appName + } + if (params.replication) { + data.replication = '' + params.replication + } + if (params.statement_timeout) { + data.statement_timeout = String(parseInt(params.statement_timeout, 10)) + } + if (params.idle_in_transaction_session_timeout) { + data.idle_in_transaction_session_timeout = String(parseInt(params.idle_in_transaction_session_timeout, 10)) + } + if (params.options) { + data.options = params.options + } + + return data + } + + cancel(client, query) { + if (client.activeQuery === query) { + var con = this.connection + + if (this.host && this.host.indexOf('/') === 0) { + con.connect(this.host + '/.s.PGSQL.' 
+ this.port) + } else { + con.connect(this.port, this.host) + } + + // once connection is established send cancel message + con.on('connect', function () { + con.cancel(client.processID, client.secretKey) + }) + } else if (client.queryQueue.indexOf(query) !== -1) { + client.queryQueue.splice(client.queryQueue.indexOf(query), 1) + } + } + + setTypeParser(oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) + } + + getTypeParser(oid, format) { + return this._types.getTypeParser(oid, format) + } + + // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c + escapeIdentifier(str) { + return '"' + str.replace(/"/g, '""') + '"' + } + + // Ported from PostgreSQL 9.2.4 source code in src/interfaces/libpq/fe-exec.c + escapeLiteral(str) { + var hasBackslash = false + var escaped = "'" + + for (var i = 0; i < str.length; i++) { + var c = str[i] + if (c === "'") { + escaped += c + c + } else if (c === '\\') { + escaped += c + c + hasBackslash = true + } else { + escaped += c + } + } + + escaped += "'" + + if (hasBackslash === true) { + escaped = ' E' + escaped + } + + return escaped + } + + _pulseQueryQueue() { + if (this.readyForQuery === true) { + this.activeQuery = this.queryQueue.shift() + if (this.activeQuery) { + this.readyForQuery = false + this.hasExecuted = true + + const queryError = this.activeQuery.submit(this.connection) + if (queryError) { + process.nextTick(() => { + this.activeQuery.handleError(queryError, this.connection) + this.readyForQuery = true + this._pulseQueryQueue() + }) + } + } else if (this.hasExecuted) { + this.activeQuery = null + this.emit('drain') + } + } + } + + query(config, values, callback) { + // can take in strings, config object or query object + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback + + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + if (typeof values === 'function') { + query.callback = query.callback || values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new Query(config, values, callback) + if (!query.callback) { + result = new this._Promise((resolve, reject) => { + query.callback = (err, res) => (err ? 
reject(err) : resolve(res)) + }) + } + } + + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) + }) + + queryCallback(error) + + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this.queryQueue.indexOf(query) + if (index > -1) { + this.queryQueue.splice(index, 1) + } + + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (this.binary && !query.binary) { + query.binary = true + } + + if (query._result && !query._result._types) { + query._result._types = this._types + } + + if (!this._queryable) { + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable'), this.connection) + }) + return result + } + + if (this._ending) { + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable'), this.connection) + }) + return result + } + + this.queryQueue.push(query) + this._pulseQueryQueue() + return result + } + + end(cb) { + this._ending = true + + // if we have never connected, then end is a noop, callback immediately + if (!this.connection._connecting) { + if (cb) { + cb() + } else { + return this._Promise.resolve() + } + } + + if (this.activeQuery || !this._queryable) { + // if we have an active query we need to force a disconnect + // on the socket - otherwise a hung query could block end forever + this.connection.stream.destroy() + } else { + this.connection.end() + } + + if (cb) { + this.connection.once('end', cb) + } else { + return new this._Promise((resolve) => { + this.connection.once('end', resolve) + }) + } + } +} + +// expose a Query constructor +Client.Query = Query + +module.exports = Client diff --git a/node_modules/pg/lib/connection-parameters.js b/node_modules/pg/lib/connection-parameters.js new file mode 100644 index 00000000..7f39cfae --- /dev/null +++ b/node_modules/pg/lib/connection-parameters.js @@ -0,0 +1,156 @@ +'use strict' + +var dns = require('dns') + +var defaults = require('./defaults') + +var parse = require('pg-connection-string').parse // parses a connection string + +var val = function (key, config, envVar) { + if (envVar === undefined) { + envVar = process.env['PG' + key.toUpperCase()] + } else if (envVar === false) { + // do nothing ... 
use false + } else { + envVar = process.env[envVar] + } + + return config[key] || envVar || defaults[key] +} + +var readSSLConfigFromEnvironment = function () { + switch (process.env.PGSSLMODE) { + case 'disable': + return false + case 'prefer': + case 'require': + case 'verify-ca': + case 'verify-full': + return true + case 'no-verify': + return { rejectUnauthorized: false } + } + return defaults.ssl +} + +// Convert arg to a string, surround in single quotes, and escape single quotes and backslashes +var quoteParamValue = function (value) { + return "'" + ('' + value).replace(/\\/g, '\\\\').replace(/'/g, "\\'") + "'" +} + +var add = function (params, config, paramName) { + var value = config[paramName] + if (value !== undefined && value !== null) { + params.push(paramName + '=' + quoteParamValue(value)) + } +} + +class ConnectionParameters { + constructor(config) { + // if a string is passed, it is a raw connection string so we parse it into a config + config = typeof config === 'string' ? parse(config) : config || {} + + // if the config has a connectionString defined, parse IT into the config we use + // this will override other default values with what is stored in connectionString + if (config.connectionString) { + config = Object.assign({}, config, parse(config.connectionString)) + } + + this.user = val('user', config) + this.database = val('database', config) + + if (this.database === undefined) { + this.database = this.user + } + + this.port = parseInt(val('port', config), 10) + this.host = val('host', config) + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: val('password', config), + }) + + this.binary = val('binary', config) + this.options = val('options', config) + + this.ssl = typeof config.ssl === 'undefined' ? readSSLConfigFromEnvironment() : config.ssl + + // support passing in ssl=no-verify via connection string + if (this.ssl === 'no-verify') { + this.ssl = { rejectUnauthorized: false } + } + + this.client_encoding = val('client_encoding', config) + this.replication = val('replication', config) + // a domain socket begins with '/' + this.isDomainSocket = !(this.host || '').indexOf('/') + + this.application_name = val('application_name', config, 'PGAPPNAME') + this.fallback_application_name = val('fallback_application_name', config, false) + this.statement_timeout = val('statement_timeout', config, false) + this.idle_in_transaction_session_timeout = val('idle_in_transaction_session_timeout', config, false) + this.query_timeout = val('query_timeout', config, false) + + if (config.connectionTimeoutMillis === undefined) { + this.connect_timeout = process.env.PGCONNECT_TIMEOUT || 0 + } else { + this.connect_timeout = Math.floor(config.connectionTimeoutMillis / 1000) + } + + if (config.keepAlive === false) { + this.keepalives = 0 + } else if (config.keepAlive === true) { + this.keepalives = 1 + } + + if (typeof config.keepAliveInitialDelayMillis === 'number') { + this.keepalives_idle = Math.floor(config.keepAliveInitialDelayMillis / 1000) + } + } + + getLibpqConnectionString(cb) { + var params = [] + add(params, this, 'user') + add(params, this, 'password') + add(params, this, 'port') + add(params, this, 'application_name') + add(params, this, 'fallback_application_name') + add(params, this, 'connect_timeout') + add(params, this, 'options') + + var ssl = typeof this.ssl === 'object' ? this.ssl : this.ssl ? 
{ sslmode: this.ssl } : {} + add(params, ssl, 'sslmode') + add(params, ssl, 'sslca') + add(params, ssl, 'sslkey') + add(params, ssl, 'sslcert') + add(params, ssl, 'sslrootcert') + + if (this.database) { + params.push('dbname=' + quoteParamValue(this.database)) + } + if (this.replication) { + params.push('replication=' + quoteParamValue(this.replication)) + } + if (this.host) { + params.push('host=' + quoteParamValue(this.host)) + } + if (this.isDomainSocket) { + return cb(null, params.join(' ')) + } + if (this.client_encoding) { + params.push('client_encoding=' + quoteParamValue(this.client_encoding)) + } + dns.lookup(this.host, function (err, address) { + if (err) return cb(err, null) + params.push('hostaddr=' + quoteParamValue(address)) + return cb(null, params.join(' ')) + }) + } +} + +module.exports = ConnectionParameters diff --git a/node_modules/pg/lib/connection.js b/node_modules/pg/lib/connection.js new file mode 100644 index 00000000..6bc0952e --- /dev/null +++ b/node_modules/pg/lib/connection.js @@ -0,0 +1,208 @@ +'use strict' + +var net = require('net') +var EventEmitter = require('events').EventEmitter +var util = require('util') + +const { parse, serialize } = require('pg-protocol') + +const flushBuffer = serialize.flush() +const syncBuffer = serialize.sync() +const endBuffer = serialize.end() + +// TODO(bmc) support binary mode at some point +class Connection extends EventEmitter { + constructor(config) { + super() + config = config || {} + this.stream = config.stream || new net.Socket() + this._keepAlive = config.keepAlive + this._keepAliveInitialDelayMillis = config.keepAliveInitialDelayMillis + this.lastBuffer = false + this.parsedStatements = {} + this.ssl = config.ssl || false + this._ending = false + this._emitMessage = false + var self = this + this.on('newListener', function (eventName) { + if (eventName === 'message') { + self._emitMessage = true + } + }) + } + + connect(port, host) { + var self = this + + this._connecting = true + this.stream.setNoDelay(true) + this.stream.connect(port, host) + + this.stream.once('connect', function () { + if (self._keepAlive) { + self.stream.setKeepAlive(true, self._keepAliveInitialDelayMillis) + } + self.emit('connect') + }) + + const reportStreamError = function (error) { + // errors about disconnections should be ignored during disconnect + if (self._ending && (error.code === 'ECONNRESET' || error.code === 'EPIPE')) { + return + } + self.emit('error', error) + } + this.stream.on('error', reportStreamError) + + this.stream.on('close', function () { + self.emit('end') + }) + + if (!this.ssl) { + return this.attachListeners(this.stream) + } + + this.stream.once('data', function (buffer) { + var responseCode = buffer.toString('utf8') + switch (responseCode) { + case 'S': // Server supports SSL connections, continue with a secure connection + break + case 'N': // Server does not support SSL connections + self.stream.end() + return self.emit('error', new Error('The server does not support SSL connections')) + default: + // Any other response byte, including 'E' (ErrorResponse) indicating a server error + self.stream.end() + return self.emit('error', new Error('There was an error establishing an SSL connection')) + } + var tls = require('tls') + const options = Object.assign( + { + socket: self.stream, + }, + self.ssl + ) + if (net.isIP(host) === 0) { + options.servername = host + } + try { + self.stream = tls.connect(options) + } catch (err) { + return self.emit('error', err) + } + self.attachListeners(self.stream) + 
self.stream.on('error', reportStreamError) + + self.emit('sslconnect') + }) + } + + attachListeners(stream) { + stream.on('end', () => { + this.emit('end') + }) + parse(stream, (msg) => { + var eventName = msg.name === 'error' ? 'errorMessage' : msg.name + if (this._emitMessage) { + this.emit('message', msg) + } + this.emit(eventName, msg) + }) + } + + requestSsl() { + this.stream.write(serialize.requestSsl()) + } + + startup(config) { + this.stream.write(serialize.startup(config)) + } + + cancel(processID, secretKey) { + this._send(serialize.cancel(processID, secretKey)) + } + + password(password) { + this._send(serialize.password(password)) + } + + sendSASLInitialResponseMessage(mechanism, initialResponse) { + this._send(serialize.sendSASLInitialResponseMessage(mechanism, initialResponse)) + } + + sendSCRAMClientFinalMessage(additionalData) { + this._send(serialize.sendSCRAMClientFinalMessage(additionalData)) + } + + _send(buffer) { + if (!this.stream.writable) { + return false + } + return this.stream.write(buffer) + } + + query(text) { + this._send(serialize.query(text)) + } + + // send parse message + parse(query) { + this._send(serialize.parse(query)) + } + + // send bind message + bind(config) { + this._send(serialize.bind(config)) + } + + // send execute message + execute(config) { + this._send(serialize.execute(config)) + } + + flush() { + if (this.stream.writable) { + this.stream.write(flushBuffer) + } + } + + sync() { + this._ending = true + this._send(flushBuffer) + this._send(syncBuffer) + } + + end() { + // 0x58 = 'X' + this._ending = true + if (!this._connecting || !this.stream.writable) { + this.stream.end() + return + } + return this.stream.write(endBuffer, () => { + this.stream.end() + }) + } + + close(msg) { + this._send(serialize.close(msg)) + } + + describe(msg) { + this._send(serialize.describe(msg)) + } + + sendCopyFromChunk(chunk) { + this._send(serialize.copyData(chunk)) + } + + endCopyFrom() { + this._send(serialize.copyDone()) + } + + sendCopyFail(msg) { + this._send(serialize.copyFail(msg)) + } +} + +module.exports = Connection diff --git a/node_modules/pg/lib/defaults.js b/node_modules/pg/lib/defaults.js new file mode 100644 index 00000000..9384e01c --- /dev/null +++ b/node_modules/pg/lib/defaults.js @@ -0,0 +1,80 @@ +'use strict' + +module.exports = { + // database host. defaults to localhost + host: 'localhost', + + // database user's name + user: process.platform === 'win32' ? process.env.USERNAME : process.env.USER, + + // name of database to connect + database: undefined, + + // database user's password + password: null, + + // a Postgres connection string to be used instead of setting individual connection items + // NOTE: Setting this value will cause it to override any other value (such as database or user) defined + // in the defaults object. + connectionString: undefined, + + // database port + port: 5432, + + // number of rows to return at a time from a prepared statement's + // portal. 
0 will return all rows at once + rows: 0, + + // binary result mode + binary: false, + + // Connection pool options - see https://github.com/brianc/node-pg-pool + + // number of connections to use in connection pool + // 0 will disable connection pooling + max: 10, + + // max milliseconds a client can go unused before it is removed + // from the pool and destroyed + idleTimeoutMillis: 30000, + + client_encoding: '', + + ssl: false, + + application_name: undefined, + + fallback_application_name: undefined, + + options: undefined, + + parseInputDatesAsUTC: false, + + // max milliseconds any query using this connection will execute for before timing out in error. + // false=unlimited + statement_timeout: false, + + // Terminate any session with an open transaction that has been idle for longer than the specified duration in milliseconds + // false=unlimited + idle_in_transaction_session_timeout: false, + + // max milliseconds to wait for query to complete (client side) + query_timeout: false, + + connect_timeout: 0, + + keepalives: 1, + + keepalives_idle: 0, +} + +var pgTypes = require('pg-types') +// save default parsers +var parseBigInteger = pgTypes.getTypeParser(20, 'text') +var parseBigIntegerArray = pgTypes.getTypeParser(1016, 'text') + +// parse int8 so you can get your count values as actual numbers +module.exports.__defineSetter__('parseInt8', function (val) { + pgTypes.setTypeParser(20, 'text', val ? pgTypes.getTypeParser(23, 'text') : parseBigInteger) + pgTypes.setTypeParser(1016, 'text', val ? pgTypes.getTypeParser(1007, 'text') : parseBigIntegerArray) +}) diff --git a/node_modules/pg/lib/index.js b/node_modules/pg/lib/index.js new file mode 100644 index 00000000..fa658055 --- /dev/null +++ b/node_modules/pg/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +var Client = require('./client') +var defaults = require('./defaults') +var Connection = require('./connection') +var Pool = require('pg-pool') + +const poolFactory = (Client) => { + return class BoundPool extends Pool { + constructor(options) { + super(options, Client) + } + } +} + +var PG = function (clientConstructor) { + this.defaults = defaults + this.Client = clientConstructor + this.Query = this.Client.Query + this.Pool = poolFactory(this.Client) + this._pools = [] + this.Connection = Connection + this.types = require('pg-types') +} + +if (typeof process.env.NODE_PG_FORCE_NATIVE !== 'undefined') { + module.exports = new PG(require('./native')) +} else { + module.exports = new PG(Client) + + // lazy require native module...the native module may not have installed + Object.defineProperty(module.exports, 'native', { + configurable: true, + enumerable: false, + get() { + var native = null + try { + native = new PG(require('./native')) + } catch (err) { + if (err.code !== 'MODULE_NOT_FOUND') { + throw err + } + /* eslint-disable no-console */ + console.error(err.message) + /* eslint-enable no-console */ + } + + // overwrite module.exports.native so that getter is never called again + Object.defineProperty(module.exports, 'native', { + value: native, + }) + + return native + }, + }) +} diff --git a/node_modules/pg/lib/native/client.js b/node_modules/pg/lib/native/client.js new file mode 100644 index 00000000..b2cc4347 --- /dev/null +++ b/node_modules/pg/lib/native/client.js @@ -0,0 +1,299 @@ +'use strict' + +// eslint-disable-next-line +var Native = require('pg-native') +var TypeOverrides = require('../type-overrides') +var semver = require('semver') +var pkg = require('../../package.json') +var assert = require('assert') +var 
EventEmitter = require('events').EventEmitter +var util = require('util') +var ConnectionParameters = require('../connection-parameters') + +var msg = 'Version >= ' + pkg.minNativeVersion + ' of pg-native required.' +assert(semver.gte(Native.version, pkg.minNativeVersion), msg) + +var NativeQuery = require('./query') + +var Client = (module.exports = function (config) { + EventEmitter.call(this) + config = config || {} + + this._Promise = config.Promise || global.Promise + this._types = new TypeOverrides(config.types) + + this.native = new Native({ + types: this._types, + }) + + this._queryQueue = [] + this._ending = false + this._connecting = false + this._connected = false + this._queryable = true + + // keep these on the object for legacy reasons + // for the time being. TODO: deprecate all this jazz + var cp = (this.connectionParameters = new ConnectionParameters(config)) + this.user = cp.user + + // "hiding" the password so it doesn't show up in stack traces + // or if the client is console.logged + Object.defineProperty(this, 'password', { + configurable: true, + enumerable: false, + writable: true, + value: cp.password, + }) + this.database = cp.database + this.host = cp.host + this.port = cp.port + + // a hash to hold named queries + this.namedQueries = {} +}) + +Client.Query = NativeQuery + +util.inherits(Client, EventEmitter) + +Client.prototype._errorAllQueries = function (err) { + const enqueueError = (query) => { + process.nextTick(() => { + query.native = this.native + query.handleError(err) + }) + } + + if (this._hasActiveQuery()) { + enqueueError(this._activeQuery) + this._activeQuery = null + } + + this._queryQueue.forEach(enqueueError) + this._queryQueue.length = 0 +} + +// connect to the backend +// pass an optional callback to be called once connected +// or with an error if there was a connection error +Client.prototype._connect = function (cb) { + var self = this + + if (this._connecting) { + process.nextTick(() => cb(new Error('Client has already been connected. 
You cannot reuse a client.'))) + return + } + + this._connecting = true + + this.connectionParameters.getLibpqConnectionString(function (err, conString) { + if (err) return cb(err) + self.native.connect(conString, function (err) { + if (err) { + self.native.end() + return cb(err) + } + + // set internal states to connected + self._connected = true + + // handle connection errors from the native layer + self.native.on('error', function (err) { + self._queryable = false + self._errorAllQueries(err) + self.emit('error', err) + }) + + self.native.on('notification', function (msg) { + self.emit('notification', { + channel: msg.relname, + payload: msg.extra, + }) + }) + + // signal we are connected now + self.emit('connect') + self._pulseQueryQueue(true) + + cb() + }) + }) +} + +Client.prototype.connect = function (callback) { + if (callback) { + this._connect(callback) + return + } + + return new this._Promise((resolve, reject) => { + this._connect((error) => { + if (error) { + reject(error) + } else { + resolve() + } + }) + }) +} + +// send a query to the server +// this method is highly overloaded to take +// 1) string query, optional array of parameters, optional function callback +// 2) object query with { +// string query +// optional array values, +// optional function callback instead of as a separate parameter +// optional string name to name & cache the query plan +// optional string rowMode = 'array' for an array of results +// } +Client.prototype.query = function (config, values, callback) { + var query + var result + var readTimeout + var readTimeoutTimer + var queryCallback + + if (config === null || config === undefined) { + throw new TypeError('Client was passed a null or undefined query') + } else if (typeof config.submit === 'function') { + readTimeout = config.query_timeout || this.connectionParameters.query_timeout + result = query = config + // accept query(new Query(...), (err, res) => { }) style + if (typeof values === 'function') { + config.callback = values + } + } else { + readTimeout = this.connectionParameters.query_timeout + query = new NativeQuery(config, values, callback) + if (!query.callback) { + let resolveOut, rejectOut + result = new this._Promise((resolve, reject) => { + resolveOut = resolve + rejectOut = reject + }) + query.callback = (err, res) => (err ? 
rejectOut(err) : resolveOut(res)) + } + } + + if (readTimeout) { + queryCallback = query.callback + + readTimeoutTimer = setTimeout(() => { + var error = new Error('Query read timeout') + + process.nextTick(() => { + query.handleError(error, this.connection) + }) + + queryCallback(error) + + // we already returned an error, + // just do nothing if query completes + query.callback = () => {} + + // Remove from queue + var index = this._queryQueue.indexOf(query) + if (index > -1) { + this._queryQueue.splice(index, 1) + } + + this._pulseQueryQueue() + }, readTimeout) + + query.callback = (err, res) => { + clearTimeout(readTimeoutTimer) + queryCallback(err, res) + } + } + + if (!this._queryable) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client has encountered a connection error and is not queryable')) + }) + return result + } + + if (this._ending) { + query.native = this.native + process.nextTick(() => { + query.handleError(new Error('Client was closed and is not queryable')) + }) + return result + } + + this._queryQueue.push(query) + this._pulseQueryQueue() + return result +} + +// disconnect from the backend server +Client.prototype.end = function (cb) { + var self = this + + this._ending = true + + if (!this._connected) { + this.once('connect', this.end.bind(this, cb)) + } + var result + if (!cb) { + result = new this._Promise(function (resolve, reject) { + cb = (err) => (err ? reject(err) : resolve()) + }) + } + this.native.end(function () { + self._errorAllQueries(new Error('Connection terminated')) + + process.nextTick(() => { + self.emit('end') + if (cb) cb() + }) + }) + return result +} + +Client.prototype._hasActiveQuery = function () { + return this._activeQuery && this._activeQuery.state !== 'error' && this._activeQuery.state !== 'end' +} + +Client.prototype._pulseQueryQueue = function (initialConnection) { + if (!this._connected) { + return + } + if (this._hasActiveQuery()) { + return + } + var query = this._queryQueue.shift() + if (!query) { + if (!initialConnection) { + this.emit('drain') + } + return + } + this._activeQuery = query + query.submit(this) + var self = this + query.once('_done', function () { + self._pulseQueryQueue() + }) +} + +// attempt to cancel an in-progress query +Client.prototype.cancel = function (query) { + if (this._activeQuery === query) { + this.native.cancel(function () {}) + } else if (this._queryQueue.indexOf(query) !== -1) { + this._queryQueue.splice(this._queryQueue.indexOf(query), 1) + } +} + +Client.prototype.setTypeParser = function (oid, format, parseFn) { + return this._types.setTypeParser(oid, format, parseFn) +} + +Client.prototype.getTypeParser = function (oid, format) { + return this._types.getTypeParser(oid, format) +} diff --git a/node_modules/pg/lib/native/index.js b/node_modules/pg/lib/native/index.js new file mode 100644 index 00000000..eead422a --- /dev/null +++ b/node_modules/pg/lib/native/index.js @@ -0,0 +1,2 @@ +'use strict' +module.exports = require('./client') diff --git a/node_modules/pg/lib/native/query.js b/node_modules/pg/lib/native/query.js new file mode 100644 index 00000000..d06db43c --- /dev/null +++ b/node_modules/pg/lib/native/query.js @@ -0,0 +1,165 @@ +'use strict' + +var EventEmitter = require('events').EventEmitter +var util = require('util') +var utils = require('../utils') + +var NativeQuery = (module.exports = function (config, values, callback) { + EventEmitter.call(this) + config = utils.normalizeQueryConfig(config, values, callback) + this.text = config.text + 
this.values = config.values + this.name = config.name + this.callback = config.callback + this.state = 'new' + this._arrayMode = config.rowMode === 'array' + + // if the 'row' event is listened for + // then emit them as they come in + // without setting singleRowMode to true + // this has almost no meaning because libpq + // reads all rows into memory befor returning any + this._emitRowEvents = false + this.on( + 'newListener', + function (event) { + if (event === 'row') this._emitRowEvents = true + }.bind(this) + ) +}) + +util.inherits(NativeQuery, EventEmitter) + +var errorFieldMap = { + /* eslint-disable quote-props */ + sqlState: 'code', + statementPosition: 'position', + messagePrimary: 'message', + context: 'where', + schemaName: 'schema', + tableName: 'table', + columnName: 'column', + dataTypeName: 'dataType', + constraintName: 'constraint', + sourceFile: 'file', + sourceLine: 'line', + sourceFunction: 'routine', +} + +NativeQuery.prototype.handleError = function (err) { + // copy pq error fields into the error object + var fields = this.native.pq.resultErrorFields() + if (fields) { + for (var key in fields) { + var normalizedFieldName = errorFieldMap[key] || key + err[normalizedFieldName] = fields[key] + } + } + if (this.callback) { + this.callback(err) + } else { + this.emit('error', err) + } + this.state = 'error' +} + +NativeQuery.prototype.then = function (onSuccess, onFailure) { + return this._getPromise().then(onSuccess, onFailure) +} + +NativeQuery.prototype.catch = function (callback) { + return this._getPromise().catch(callback) +} + +NativeQuery.prototype._getPromise = function () { + if (this._promise) return this._promise + this._promise = new Promise( + function (resolve, reject) { + this._once('end', resolve) + this._once('error', reject) + }.bind(this) + ) + return this._promise +} + +NativeQuery.prototype.submit = function (client) { + this.state = 'running' + var self = this + this.native = client.native + client.native.arrayMode = this._arrayMode + + var after = function (err, rows, results) { + client.native.arrayMode = false + setImmediate(function () { + self.emit('_done') + }) + + // handle possible query error + if (err) { + return self.handleError(err) + } + + // emit row events for each row in the result + if (self._emitRowEvents) { + if (results.length > 1) { + rows.forEach((rowOfRows, i) => { + rowOfRows.forEach((row) => { + self.emit('row', row, results[i]) + }) + }) + } else { + rows.forEach(function (row) { + self.emit('row', row, results) + }) + } + } + + // handle successful result + self.state = 'end' + self.emit('end', results) + if (self.callback) { + self.callback(null, results) + } + } + + if (process.domain) { + after = process.domain.bind(after) + } + + // named query + if (this.name) { + if (this.name.length > 63) { + /* eslint-disable no-console */ + console.error('Warning! 
Postgres only supports 63 characters for query names.') + console.error('You supplied %s (%s)', this.name, this.name.length) + console.error('This can cause conflicts and silent errors executing queries') + /* eslint-enable no-console */ + } + var values = (this.values || []).map(utils.prepareValue) + + // check if the client has already executed this named query + // if so...just execute it again - skip the planning phase + if (client.namedQueries[this.name]) { + if (this.text && client.namedQueries[this.name] !== this.text) { + const err = new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + return after(err) + } + return client.native.execute(this.name, values, after) + } + // plan the named query the first time, then execute it + return client.native.prepare(this.name, this.text, values.length, function (err) { + if (err) return after(err) + client.namedQueries[self.name] = self.text + return self.native.execute(self.name, values, after) + }) + } else if (this.values) { + if (!Array.isArray(this.values)) { + const err = new Error('Query values must be an array') + return after(err) + } + var vals = this.values.map(utils.prepareValue) + client.native.query(this.text, vals, after) + } else { + client.native.query(this.text, after) + } +} diff --git a/node_modules/pg/lib/query.js b/node_modules/pg/lib/query.js new file mode 100644 index 00000000..9cd0dab1 --- /dev/null +++ b/node_modules/pg/lib/query.js @@ -0,0 +1,228 @@ +'use strict' + +const { EventEmitter } = require('events') + +const Result = require('./result') +const utils = require('./utils') + +class Query extends EventEmitter { + constructor(config, values, callback) { + super() + + config = utils.normalizeQueryConfig(config, values, callback) + + this.text = config.text + this.values = config.values + this.rows = config.rows + this.types = config.types + this.name = config.name + this.binary = config.binary + // use unique portal name each time + this.portal = config.portal || '' + this.callback = config.callback + this._rowMode = config.rowMode + if (process.domain && config.callback) { + this.callback = process.domain.bind(config.callback) + } + this._result = new Result(this._rowMode, this.types) + + // potential for multiple results + this._results = this._result + this.isPreparedStatement = false + this._canceledDueToError = false + this._promise = null + } + + requiresPreparation() { + // named queries must always be prepared + if (this.name) { + return true + } + // always prepare if there are max number of rows expected per + // portal execution + if (this.rows) { + return true + } + // don't prepare empty text queries + if (!this.text) { + return false + } + // prepare if there are values + if (!this.values) { + return false + } + return this.values.length > 0 + } + + _checkForMultirow() { + // if we already have a result with a command property + // then we've already executed one query in a multi-statement simple query + // turn our results into an array of results + if (this._result.command) { + if (!Array.isArray(this._results)) { + this._results = [this._result] + } + this._result = new Result(this._rowMode, this.types) + this._results.push(this._result) + } + } + + // associates row metadata from the supplied + // message with this query object + // metadata used when parsing row results + handleRowDescription(msg) { + this._checkForMultirow() + this._result.addFields(msg.fields) + this._accumulateRows = this.callback || !this.listeners('row').length + } + + 
handleDataRow(msg) { + let row + + if (this._canceledDueToError) { + return + } + + try { + row = this._result.parseRow(msg.fields) + } catch (err) { + this._canceledDueToError = err + return + } + + this.emit('row', row, this._result) + if (this._accumulateRows) { + this._result.addRow(row) + } + } + + handleCommandComplete(msg, con) { + this._checkForMultirow() + this._result.addCommandComplete(msg) + // need to sync after each command complete of a prepared statement + if (this.isPreparedStatement) { + con.sync() + } + } + + // if a named prepared statement is created with empty query text + // the backend will send an emptyQuery message but *not* a command complete message + // execution on the connection will hang until the backend receives a sync message + handleEmptyQuery(con) { + if (this.isPreparedStatement) { + con.sync() + } + } + + handleReadyForQuery(con) { + if (this._canceledDueToError) { + return this.handleError(this._canceledDueToError, con) + } + if (this.callback) { + this.callback(null, this._results) + } + this.emit('end', this._results) + } + + handleError(err, connection) { + // need to sync after error during a prepared statement + if (this.isPreparedStatement) { + connection.sync() + } + if (this._canceledDueToError) { + err = this._canceledDueToError + this._canceledDueToError = false + } + // if callback supplied do not emit error event as uncaught error + // events will bubble up to node process + if (this.callback) { + return this.callback(err) + } + this.emit('error', err) + } + + submit(connection) { + if (typeof this.text !== 'string' && typeof this.name !== 'string') { + return new Error('A query must have either text or a name. Supplying neither is unsupported.') + } + const previous = connection.parsedStatements[this.name] + if (this.text && previous && this.text !== previous) { + return new Error(`Prepared statements must be unique - '${this.name}' was used for a different statement`) + } + if (this.values && !Array.isArray(this.values)) { + return new Error('Query values must be an array') + } + if (this.requiresPreparation()) { + this.prepare(connection) + } else { + connection.query(this.text) + } + return null + } + + hasBeenParsed(connection) { + return this.name && connection.parsedStatements[this.name] + } + + handlePortalSuspended(connection) { + this._getRows(connection, this.rows) + } + + _getRows(connection, rows) { + connection.execute({ + portal: this.portal, + rows: rows, + }) + connection.flush() + } + + // http://developer.postgresql.org/pgdocs/postgres/protocol-flow.html#PROTOCOL-FLOW-EXT-QUERY + prepare(connection) { + // prepared statements need sync to be called after each command + // complete or when an error is encountered + this.isPreparedStatement = true + + // TODO refactor this poor encapsulation + if (!this.hasBeenParsed(connection)) { + connection.parse({ + text: this.text, + name: this.name, + types: this.types, + }) + } + + if (this.values) { + try { + this.values = this.values.map(utils.prepareValue) + } catch (err) { + this.handleError(err, connection) + return + } + } + + connection.bind({ + portal: this.portal, + statement: this.name, + values: this.values, + binary: this.binary, + }) + + connection.describe({ + type: 'P', + name: this.portal || '', + }) + + this._getRows(connection, this.rows) + } + + handleCopyInResponse(connection) { + connection.sendCopyFail('No source stream defined') + } + + // eslint-disable-next-line no-unused-vars + handleCopyData(msg, connection) { + // noop + } +} + +module.exports = Query 
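
For orientation, here is a minimal sketch of the client-side query shapes that exercise the paths in `Query#submit` above. It assumes a reachable PostgreSQL server configured through the usual `PG*` environment variables and a hypothetical `todos` table; both are illustrative assumptions, not part of this patch:

```js
const { Client } = require('pg')

async function main () {
  // connection settings are resolved from PG* environment variables and lib/defaults.js
  const client = new Client()
  await client.connect()

  // no values and no name: Query#requiresPreparation() is false,
  // so the text is sent over the simple query protocol (connection.query(text))
  const now = await client.query('SELECT NOW()')

  // values present: requiresPreparation() is true, so the query goes through
  // the extended protocol (parse / bind / describe / execute / sync)
  const byId = await client.query('SELECT * FROM todos WHERE id = $1', [1])

  // named statement: parsed once per connection, cached in
  // connection.parsedStatements, and reused on later calls with the same name
  const named = await client.query({
    name: 'fetch-todo',
    text: 'SELECT * FROM todos WHERE id = $1',
    values: [1],
  })

  console.log(now.rows[0], byId.rows, named.rows)
  await client.end()
}

main().catch(console.error)
```

Passing a callback instead of awaiting works the same way; as `Client#query` above shows, the promise is only created when no callback is supplied.
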
diff --git a/node_modules/pg/lib/result.js b/node_modules/pg/lib/result.js new file mode 100644 index 00000000..35060974 --- /dev/null +++ b/node_modules/pg/lib/result.js @@ -0,0 +1,100 @@ +'use strict' + +var types = require('pg-types') + +var matchRegexp = /^([A-Za-z]+)(?: (\d+))?(?: (\d+))?/ + +// result object returned from query +// in the 'end' event and also +// passed as second argument to provided callback +class Result { + constructor(rowMode, types) { + this.command = null + this.rowCount = null + this.oid = null + this.rows = [] + this.fields = [] + this._parsers = undefined + this._types = types + this.RowCtor = null + this.rowAsArray = rowMode === 'array' + if (this.rowAsArray) { + this.parseRow = this._parseRowAsArray + } + } + + // adds a command complete message + addCommandComplete(msg) { + var match + if (msg.text) { + // pure javascript + match = matchRegexp.exec(msg.text) + } else { + // native bindings + match = matchRegexp.exec(msg.command) + } + if (match) { + this.command = match[1] + if (match[3]) { + // COMMMAND OID ROWS + this.oid = parseInt(match[2], 10) + this.rowCount = parseInt(match[3], 10) + } else if (match[2]) { + // COMMAND ROWS + this.rowCount = parseInt(match[2], 10) + } + } + } + + _parseRowAsArray(rowData) { + var row = new Array(rowData.length) + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + if (rawValue !== null) { + row[i] = this._parsers[i](rawValue) + } else { + row[i] = null + } + } + return row + } + + parseRow(rowData) { + var row = {} + for (var i = 0, len = rowData.length; i < len; i++) { + var rawValue = rowData[i] + var field = this.fields[i].name + if (rawValue !== null) { + row[field] = this._parsers[i](rawValue) + } else { + row[field] = null + } + } + return row + } + + addRow(row) { + this.rows.push(row) + } + + addFields(fieldDescriptions) { + // clears field definitions + // multiple query statements in 1 action can result in multiple sets + // of rowDescriptions...eg: 'select NOW(); select 1::int;' + // you need to reset the fields + this.fields = fieldDescriptions + if (this.fields.length) { + this._parsers = new Array(fieldDescriptions.length) + } + for (var i = 0; i < fieldDescriptions.length; i++) { + var desc = fieldDescriptions[i] + if (this._types) { + this._parsers[i] = this._types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } else { + this._parsers[i] = types.getTypeParser(desc.dataTypeID, desc.format || 'text') + } + } + } +} + +module.exports = Result diff --git a/node_modules/pg/lib/sasl.js b/node_modules/pg/lib/sasl.js new file mode 100644 index 00000000..22abf5c4 --- /dev/null +++ b/node_modules/pg/lib/sasl.js @@ -0,0 +1,151 @@ +'use strict' +const crypto = require('crypto') + +function startSession(mechanisms) { + if (mechanisms.indexOf('SCRAM-SHA-256') === -1) { + throw new Error('SASL: Only mechanism SCRAM-SHA-256 is currently supported') + } + + const clientNonce = crypto.randomBytes(18).toString('base64') + + return { + mechanism: 'SCRAM-SHA-256', + clientNonce, + response: 'n,,n=*,r=' + clientNonce, + message: 'SASLInitialResponse', + } +} + +function continueSession(session, password, serverData) { + if (session.message !== 'SASLInitialResponse') { + throw new Error('SASL: Last message was not SASLInitialResponse') + } + + const sv = extractVariablesFromFirstServerMessage(serverData) + + if (!sv.nonce.startsWith(session.clientNonce)) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: server nonce does not start with client nonce') + } + + var saltBytes = 
Buffer.from(sv.salt, 'base64') + + var saltedPassword = Hi(password, saltBytes, sv.iteration) + + var clientKey = createHMAC(saltedPassword, 'Client Key') + var storedKey = crypto.createHash('sha256').update(clientKey).digest() + + var clientFirstMessageBare = 'n=*,r=' + session.clientNonce + var serverFirstMessage = 'r=' + sv.nonce + ',s=' + sv.salt + ',i=' + sv.iteration + + var clientFinalMessageWithoutProof = 'c=biws,r=' + sv.nonce + + var authMessage = clientFirstMessageBare + ',' + serverFirstMessage + ',' + clientFinalMessageWithoutProof + + var clientSignature = createHMAC(storedKey, authMessage) + var clientProofBytes = xorBuffers(clientKey, clientSignature) + var clientProof = clientProofBytes.toString('base64') + + var serverKey = createHMAC(saltedPassword, 'Server Key') + var serverSignatureBytes = createHMAC(serverKey, authMessage) + + session.message = 'SASLResponse' + session.serverSignature = serverSignatureBytes.toString('base64') + session.response = clientFinalMessageWithoutProof + ',p=' + clientProof +} + +function finalizeSession(session, serverData) { + if (session.message !== 'SASLResponse') { + throw new Error('SASL: Last message was not SASLResponse') + } + + var serverSignature + + String(serverData) + .split(',') + .forEach(function (part) { + switch (part[0]) { + case 'v': + serverSignature = part.substr(2) + break + } + }) + + if (serverSignature !== session.serverSignature) { + throw new Error('SASL: SCRAM-SERVER-FINAL-MESSAGE: server signature does not match') + } +} + +function extractVariablesFromFirstServerMessage(data) { + var nonce, salt, iteration + + String(data) + .split(',') + .forEach(function (part) { + switch (part[0]) { + case 'r': + nonce = part.substr(2) + break + case 's': + salt = part.substr(2) + break + case 'i': + iteration = parseInt(part.substr(2), 10) + break + } + }) + + if (!nonce) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: nonce missing') + } + + if (!salt) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: salt missing') + } + + if (!iteration) { + throw new Error('SASL: SCRAM-SERVER-FIRST-MESSAGE: iteration missing') + } + + return { + nonce, + salt, + iteration, + } +} + +function xorBuffers(a, b) { + if (!Buffer.isBuffer(a)) a = Buffer.from(a) + if (!Buffer.isBuffer(b)) b = Buffer.from(b) + var res = [] + if (a.length > b.length) { + for (var i = 0; i < b.length; i++) { + res.push(a[i] ^ b[i]) + } + } else { + for (var j = 0; j < a.length; j++) { + res.push(a[j] ^ b[j]) + } + } + return Buffer.from(res) +} + +function createHMAC(key, msg) { + return crypto.createHmac('sha256', key).update(msg).digest() +} + +function Hi(password, saltBytes, iterations) { + var ui1 = createHMAC(password, Buffer.concat([saltBytes, Buffer.from([0, 0, 0, 1])])) + var ui = ui1 + for (var i = 0; i < iterations - 1; i++) { + ui1 = createHMAC(password, ui1) + ui = xorBuffers(ui, ui1) + } + + return ui +} + +module.exports = { + startSession, + continueSession, + finalizeSession, +} diff --git a/node_modules/pg/lib/type-overrides.js b/node_modules/pg/lib/type-overrides.js new file mode 100644 index 00000000..66693482 --- /dev/null +++ b/node_modules/pg/lib/type-overrides.js @@ -0,0 +1,35 @@ +'use strict' + +var types = require('pg-types') + +function TypeOverrides(userTypes) { + this._types = userTypes || types + this.text = {} + this.binary = {} +} + +TypeOverrides.prototype.getOverrides = function (format) { + switch (format) { + case 'text': + return this.text + case 'binary': + return this.binary + default: + return {} + } +} + 
+TypeOverrides.prototype.setTypeParser = function (oid, format, parseFn) { + if (typeof format === 'function') { + parseFn = format + format = 'text' + } + this.getOverrides(format)[oid] = parseFn +} + +TypeOverrides.prototype.getTypeParser = function (oid, format) { + format = format || 'text' + return this.getOverrides(format)[oid] || this._types.getTypeParser(oid, format) +} + +module.exports = TypeOverrides diff --git a/node_modules/pg/lib/utils.js b/node_modules/pg/lib/utils.js new file mode 100644 index 00000000..b3b4ff4c --- /dev/null +++ b/node_modules/pg/lib/utils.js @@ -0,0 +1,186 @@ +'use strict' + +const crypto = require('crypto') + +const defaults = require('./defaults') + +function escapeElement(elementRepresentation) { + var escaped = elementRepresentation.replace(/\\/g, '\\\\').replace(/"/g, '\\"') + + return '"' + escaped + '"' +} + +// convert a JS array to a postgres array literal +// uses comma separator so won't work for types like box that use +// a different array separator. +function arrayString(val) { + var result = '{' + for (var i = 0; i < val.length; i++) { + if (i > 0) { + result = result + ',' + } + if (val[i] === null || typeof val[i] === 'undefined') { + result = result + 'NULL' + } else if (Array.isArray(val[i])) { + result = result + arrayString(val[i]) + } else if (val[i] instanceof Buffer) { + result += '\\\\x' + val[i].toString('hex') + } else { + result += escapeElement(prepareValue(val[i])) + } + } + result = result + '}' + return result +} + +// converts values from javascript types +// to their 'raw' counterparts for use as a postgres parameter +// note: you can override this function to provide your own conversion mechanism +// for complex types, etc... +var prepareValue = function (val, seen) { + if (val instanceof Buffer) { + return val + } + if (ArrayBuffer.isView(val)) { + var buf = Buffer.from(val.buffer, val.byteOffset, val.byteLength) + if (buf.length === val.byteLength) { + return buf + } + return buf.slice(val.byteOffset, val.byteOffset + val.byteLength) // Node.js v4 does not support those Buffer.from params + } + if (val instanceof Date) { + if (defaults.parseInputDatesAsUTC) { + return dateToStringUTC(val) + } else { + return dateToString(val) + } + } + if (Array.isArray(val)) { + return arrayString(val) + } + if (val === null || typeof val === 'undefined') { + return null + } + if (typeof val === 'object') { + return prepareObject(val, seen) + } + return val.toString() +} + +function prepareObject(val, seen) { + if (val && typeof val.toPostgres === 'function') { + seen = seen || [] + if (seen.indexOf(val) !== -1) { + throw new Error('circular reference detected while preparing "' + val + '" for query') + } + seen.push(val) + + return prepareValue(val.toPostgres(prepareValue), seen) + } + return JSON.stringify(val) +} + +function pad(number, digits) { + number = '' + number + while (number.length < digits) { + number = '0' + number + } + return number +} + +function dateToString(date) { + var offset = -date.getTimezoneOffset() + + var year = date.getFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + + var ret = + pad(year, 4) + + '-' + + pad(date.getMonth() + 1, 2) + + '-' + + pad(date.getDate(), 2) + + 'T' + + pad(date.getHours(), 2) + + ':' + + pad(date.getMinutes(), 2) + + ':' + + pad(date.getSeconds(), 2) + + '.' 
+ + pad(date.getMilliseconds(), 3) + + if (offset < 0) { + ret += '-' + offset *= -1 + } else { + ret += '+' + } + + ret += pad(Math.floor(offset / 60), 2) + ':' + pad(offset % 60, 2) + if (isBCYear) ret += ' BC' + return ret +} + +function dateToStringUTC(date) { + var year = date.getUTCFullYear() + var isBCYear = year < 1 + if (isBCYear) year = Math.abs(year) + 1 // negative years are 1 off their BC representation + + var ret = + pad(year, 4) + + '-' + + pad(date.getUTCMonth() + 1, 2) + + '-' + + pad(date.getUTCDate(), 2) + + 'T' + + pad(date.getUTCHours(), 2) + + ':' + + pad(date.getUTCMinutes(), 2) + + ':' + + pad(date.getUTCSeconds(), 2) + + '.' + + pad(date.getUTCMilliseconds(), 3) + + ret += '+00:00' + if (isBCYear) ret += ' BC' + return ret +} + +function normalizeQueryConfig(config, values, callback) { + // can take in strings or config objects + config = typeof config === 'string' ? { text: config } : config + if (values) { + if (typeof values === 'function') { + config.callback = values + } else { + config.values = values + } + } + if (callback) { + config.callback = callback + } + return config +} + +const md5 = function (string) { + return crypto.createHash('md5').update(string, 'utf-8').digest('hex') +} + +// See AuthenticationMD5Password at https://www.postgresql.org/docs/current/static/protocol-flow.html +const postgresMd5PasswordHash = function (user, password, salt) { + var inner = md5(password + user) + var outer = md5(Buffer.concat([Buffer.from(inner), salt])) + return 'md5' + outer +} + +module.exports = { + prepareValue: function prepareValueWrapper(value) { + // this ensures that extra arguments do not get passed into prepareValue + // by accident, eg: from calling values.map(utils.prepareValue) + return prepareValue(value) + }, + normalizeQueryConfig, + postgresMd5PasswordHash, + md5, +} diff --git a/node_modules/pg/package.json b/node_modules/pg/package.json new file mode 100644 index 00000000..107e7692 --- /dev/null +++ b/node_modules/pg/package.json @@ -0,0 +1,82 @@ +{ + "_from": "pg", + "_id": "pg@8.3.2", + "_inBundle": false, + "_integrity": "sha512-hOoRCTriXS+VWwyXHchRjWb9yv3Koq8irlwwXniqhdgK0AbfWvEnybGS2HIUE+UdCSTuYAM4WGPujFpPg9Vcaw==", + "_location": "/pg", + "_phantomChildren": {}, + "_requested": { + "type": "tag", + "registry": true, + "raw": "pg", + "name": "pg", + "escapedName": "pg", + "rawSpec": "", + "saveSpec": null, + "fetchSpec": "latest" + }, + "_requiredBy": [ + "#USER", + "/" + ], + "_resolved": "https://registry.npmjs.org/pg/-/pg-8.3.2.tgz", + "_shasum": "52766e41302f5b878fe1efa10d4cdd486f6dff50", + "_spec": "pg", + "_where": "/Users/aurelialim/wdi/cli-todo-sql", + "author": { + "name": "Brian Carlson", + "email": "brian.m.carlson@gmail.com" + }, + "bugs": { + "url": "https://github.com/brianc/node-postgres/issues" + }, + "bundleDependencies": false, + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.3.0", + "pg-pool": "^3.2.1", + "pg-protocol": "^1.2.5", + "pg-types": "^2.1.0", + "pgpass": "1.x", + "semver": "4.3.2" + }, + "deprecated": false, + "description": "PostgreSQL client - pure javascript & libpq with the same API", + "devDependencies": { + "async": "0.9.0", + "bluebird": "3.5.2", + "co": "4.6.0", + "pg-copy-streams": "0.3.0" + }, + "engines": { + "node": ">= 8.0.0" + }, + "files": [ + "lib", + "SPONSORS.md" + ], + "gitHead": "acfbafac82641ef909d9d6235d46d38378c67864", + "homepage": "https://github.com/brianc/node-postgres", + "keywords": [ + "database", + "libpq", + "pg", + 
"postgre", + "postgres", + "postgresql", + "rdbms" + ], + "license": "MIT", + "main": "./lib", + "minNativeVersion": "2.0.0", + "name": "pg", + "repository": { + "type": "git", + "url": "git://github.com/brianc/node-postgres.git" + }, + "scripts": { + "test": "make test-all" + }, + "version": "8.3.2" +} diff --git a/node_modules/pgpass/.npmignore b/node_modules/pgpass/.npmignore new file mode 100644 index 00000000..e68f07b6 --- /dev/null +++ b/node_modules/pgpass/.npmignore @@ -0,0 +1,10 @@ +node_modules/ +*~ +#* +lib-cov/ +coverage.* +npm-debug.log + +.jshintrc +.travis.yml +test/ diff --git a/node_modules/pgpass/README.md b/node_modules/pgpass/README.md new file mode 100644 index 00000000..a97af337 --- /dev/null +++ b/node_modules/pgpass/README.md @@ -0,0 +1,74 @@ +# pgpass + +[![Build Status](https://travis-ci.org/hoegaarden/pgpass.png?branch=master)](https://travis-ci.org/hoegaarden/pgpass) + +## Install + +```sh +npm install pgpass +``` + +## Usage +```js +var pgPass = require('pgpass'); + +var connInfo = { + 'host' : 'pgserver' , + 'user' : 'the_user_name' , +}; + +pgPass(connInfo, function(pass){ + conn_info.password = pass; + // connect to postgresql server +}); +``` + +## Description + +This module tries to read the `~/.pgpass` file (or the equivalent for windows systems). If the environment variable `PGPASSFILE` is set, this file is used instead. If everything goes right, the password from said file is passed to the callback; if the password cannot be read `undefined` is passed to the callback. + +Cases where `undefined` is returned: + +- the environment variable `PGPASSWORD` is set +- the file cannot be read (wrong permissions, no such file, ...) +- for non windows systems: the file is write-/readable by the group or by other users +- there is no matching line for the given connection info + +There should be no need to use this module directly; it is already included in `node-postgresq`. + +## Configuration + +The module reads the environment variable `PGPASS_NO_DEESCAPE` to decide if the the read tokens from the password file should be de-escaped or not. Default is to do de-escaping. For further information on this see [this commit](https://github.com/postgres/postgres/commit/8d15e3ec4fcb735875a8a70a09ec0c62153c3329). + + +## Tests + +There are tests in `./test/`; including linting and coverage testing. Running `npm test` runs: + +- `jshint` +- `mocha` tests +- `jscoverage` and `mocha -R html-cov` + +You can see the coverage report in `coverage.html`. + + +## Development, Patches, Bugs, ... + +If you find Bugs or have improvments, please feel free to open a issue on github. If you provide a pull request, I'm more than happy to merge them, just make sure to add tests for your changes. 
+ +## Links + +- https://github.com/hoegaarden/node-pgpass +- http://www.postgresql.org/docs/current/static/libpq-pgpass.html +- https://wiki.postgresql.org/wiki/Pgpass +- https://github.com/postgres/postgres/blob/master/src/interfaces/libpq/fe-connect.c + +## License + +Copyright (c) 2013-2016 Hannes Hörl + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/pgpass/lib/helper.js b/node_modules/pgpass/lib/helper.js new file mode 100644 index 00000000..6abea0e6 --- /dev/null +++ b/node_modules/pgpass/lib/helper.js @@ -0,0 +1,233 @@ +'use strict'; + +var path = require('path') + , Stream = require('stream').Stream + , Split = require('split') + , util = require('util') + , defaultPort = 5432 + , isWin = (process.platform === 'win32') + , warnStream = process.stderr +; + + +var S_IRWXG = 56 // 00070(8) + , S_IRWXO = 7 // 00007(8) + , S_IFMT = 61440 // 00170000(8) + , S_IFREG = 32768 // 0100000(8) +; +function isRegFile(mode) { + return ((mode & S_IFMT) == S_IFREG); +} + +var fieldNames = [ 'host', 'port', 'database', 'user', 'password' ]; +var nrOfFields = fieldNames.length; +var passKey = fieldNames[ nrOfFields -1 ]; + + +function warn() { + var isWritable = ( + warnStream instanceof Stream && + true === warnStream.writable + ); + + if (isWritable) { + var args = Array.prototype.slice.call(arguments).concat("\n"); + warnStream.write( util.format.apply(util, args) ); + } +} + + +Object.defineProperty(module.exports, 'isWin', { + get : function() { + return isWin; + } , + set : function(val) { + isWin = val; + } +}); + + +module.exports.warnTo = function(stream) { + var old = warnStream; + warnStream = stream; + return old; +}; + +module.exports.getFileName = function(env){ + env = env || process.env; + var file = env.PGPASSFILE || ( + isWin ? + path.join( env.APPDATA , 'postgresql', 'pgpass.conf' ) : + path.join( env.HOME, '.pgpass' ) + ); + return file; +}; + +module.exports.usePgPass = function(stats, fname) { + if (Object.prototype.hasOwnProperty.call(process.env, 'PGPASSWORD')) { + return false; + } + + if (isWin) { + return true; + } + + fname = fname || ''; + + if (! isRegFile(stats.mode)) { + warn('WARNING: password file "%s" is not a plain file', fname); + return false; + } + + if (stats.mode & (S_IRWXG | S_IRWXO)) { + /* If password file is insecure, alert the user and ignore it. 
*/ + warn('WARNING: password file "%s" has group or world access; permissions should be u=rw (0600) or less', fname); + return false; + } + + return true; +}; + + +var matcher = module.exports.match = function(connInfo, entry) { + return fieldNames.slice(0, -1).reduce(function(prev, field, idx){ + if (idx == 1) { + // the port + if ( Number( connInfo[field] || defaultPort ) === Number( entry[field] ) ) { + return prev && true; + } + } + return prev && ( + entry[field] === '*' || + entry[field] === connInfo[field] + ); + }, true); +}; + + +module.exports.getPassword = function(connInfo, stream, cb) { + var pass; + var lineStream = stream.pipe(new Split()); + + function onLine(line) { + var entry = parseLine(line); + if (entry && isValidEntry(entry) && matcher(connInfo, entry)) { + pass = entry[passKey]; + lineStream.end(); // -> calls onEnd(), but pass is set now + } + } + + var onEnd = function() { + stream.destroy(); + cb(pass); + }; + + var onErr = function(err) { + stream.destroy(); + warn('WARNING: error on reading file: %s', err); + cb(undefined); + }; + + stream.on('error', onErr); + lineStream + .on('data', onLine) + .on('end', onEnd) + .on('error', onErr) + ; + +}; + + +var parseLine = module.exports.parseLine = function(line) { + if (line.length < 11 || line.match(/^\s+#/)) { + return null; + } + + var curChar = ''; + var prevChar = ''; + var fieldIdx = 0; + var startIdx = 0; + var endIdx = 0; + var obj = {}; + var isLastField = false; + var addToObj = function(idx, i0, i1) { + var field = line.substring(i0, i1); + + if (! Object.hasOwnProperty.call(process.env, 'PGPASS_NO_DEESCAPE')) { + field = field.replace(/\\([:\\])/g, '$1'); + } + + obj[ fieldNames[idx] ] = field; + }; + + for (var i = 0 ; i < line.length-1 ; i += 1) { + curChar = line.charAt(i+1); + prevChar = line.charAt(i); + + isLastField = (fieldIdx == nrOfFields-1); + + if (isLastField) { + addToObj(fieldIdx, startIdx); + break; + } + + if (i >= 0 && curChar == ':' && prevChar !== '\\') { + addToObj(fieldIdx, startIdx, i+1); + + startIdx = i+2; + fieldIdx += 1; + } + } + + obj = ( Object.keys(obj).length === nrOfFields ) ? 
obj : null; + + return obj; +}; + + +var isValidEntry = module.exports.isValidEntry = function(entry){ + var rules = { + // host + 0 : function(x){ + return x.length > 0; + } , + // port + 1 : function(x){ + if (x === '*') { + return true; + } + x = Number(x); + return ( + isFinite(x) && + x > 0 && + x < 9007199254740992 && + Math.floor(x) === x + ); + } , + // database + 2 : function(x){ + return x.length > 0; + } , + // username + 3 : function(x){ + return x.length > 0; + } , + // password + 4 : function(x){ + return x.length > 0; + } + }; + + for (var idx = 0 ; idx < fieldNames.length ; idx += 1) { + var rule = rules[idx]; + var value = entry[ fieldNames[idx] ] || ''; + + var res = rule(value); + if (!res) { + return false; + } + } + + return true; +}; + diff --git a/node_modules/pgpass/lib/index.js b/node_modules/pgpass/lib/index.js new file mode 100644 index 00000000..ecfcf308 --- /dev/null +++ b/node_modules/pgpass/lib/index.js @@ -0,0 +1,23 @@ +'use strict'; + +var path = require('path') + , fs = require('fs') + , helper = require('./helper.js') +; + + +module.exports = function(connInfo, cb) { + var file = helper.getFileName(); + + fs.stat(file, function(err, stat){ + if (err || !helper.usePgPass(stat, file)) { + return cb(undefined); + } + + var st = fs.createReadStream(file); + + helper.getPassword(connInfo, st, cb); + }); +}; + +module.exports.warnTo = helper.warnTo; diff --git a/node_modules/pgpass/package.json b/node_modules/pgpass/package.json new file mode 100644 index 00000000..d097a7bf --- /dev/null +++ b/node_modules/pgpass/package.json @@ -0,0 +1,71 @@ +{ + "_from": "pgpass@1.x", + "_id": "pgpass@1.0.2", + "_inBundle": false, + "_integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY=", + "_location": "/pgpass", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "pgpass@1.x", + "name": "pgpass", + "escapedName": "pgpass", + "rawSpec": "1.x", + "saveSpec": null, + "fetchSpec": "1.x" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz", + "_shasum": "2a7bb41b6065b67907e91da1b07c1847c877b306", + "_spec": "pgpass@1.x", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "author": { + "name": "Hannes Hörl", + "email": "hannes.hoerl+pgpass@snowreporter.com" + }, + "bugs": { + "url": "https://github.com/hoegaarden/pgpass/issues" + }, + "bundleDependencies": false, + "dependencies": { + "split": "^1.0.0" + }, + "deprecated": false, + "description": "Module for reading .pgpass", + "devDependencies": { + "jscoverage": "^0.6.0", + "jshint": "^2.9.2", + "mocha": "^2.5.3", + "pg": "^4.5.6", + "pg-escape": "^0.2.0", + "pg-native": "^1.10.0", + "resumer": "0.0.0", + "tmp": "0.0.28", + "which": "^1.2.10" + }, + "homepage": "https://github.com/hoegaarden/pgpass#readme", + "keywords": [ + "postgres", + "pg", + "pgpass", + "password", + "postgresql" + ], + "license": "MIT", + "main": "lib/index", + "name": "pgpass", + "repository": { + "type": "git", + "url": "git+https://github.com/hoegaarden/pgpass.git" + }, + "scripts": { + "coverage": "rm -rf -- lib-cov ; jscoverage lib lib-cov && mocha --recursive -R html-cov > coverage.html", + "hint": "jshint --verbose lib test", + "pretest": "chmod 600 ./test/_pgpass", + "test": "npm run hint && mocha --recursive -R list && npm run coverage" + }, + "version": "1.0.2" +} diff --git a/node_modules/postgres-array/index.d.ts b/node_modules/postgres-array/index.d.ts new file mode 100644 index 00000000..88665bd9 --- /dev/null +++ 
b/node_modules/postgres-array/index.d.ts @@ -0,0 +1,4 @@ + +export function parse(source: string): string[]; +export function parse(source: string, transform: (value: string) => T): T[]; + diff --git a/node_modules/postgres-array/index.js b/node_modules/postgres-array/index.js new file mode 100644 index 00000000..18bfd163 --- /dev/null +++ b/node_modules/postgres-array/index.js @@ -0,0 +1,97 @@ +'use strict' + +exports.parse = function (source, transform) { + return new ArrayParser(source, transform).parse() +} + +class ArrayParser { + constructor (source, transform) { + this.source = source + this.transform = transform || identity + this.position = 0 + this.entries = [] + this.recorded = [] + this.dimension = 0 + } + + isEof () { + return this.position >= this.source.length + } + + nextCharacter () { + var character = this.source[this.position++] + if (character === '\\') { + return { + value: this.source[this.position++], + escaped: true + } + } + return { + value: character, + escaped: false + } + } + + record (character) { + this.recorded.push(character) + } + + newEntry (includeEmpty) { + var entry + if (this.recorded.length > 0 || includeEmpty) { + entry = this.recorded.join('') + if (entry === 'NULL' && !includeEmpty) { + entry = null + } + if (entry !== null) entry = this.transform(entry) + this.entries.push(entry) + this.recorded = [] + } + } + + consumeDimensions () { + if (this.source[0] === '[') { + while (!this.isEof()) { + var char = this.nextCharacter() + if (char.value === '=') break + } + } + } + + parse (nested) { + var character, parser, quote + this.consumeDimensions() + while (!this.isEof()) { + character = this.nextCharacter() + if (character.value === '{' && !quote) { + this.dimension++ + if (this.dimension > 1) { + parser = new ArrayParser(this.source.substr(this.position - 1), this.transform) + this.entries.push(parser.parse(true)) + this.position += parser.position - 2 + } + } else if (character.value === '}' && !quote) { + this.dimension-- + if (!this.dimension) { + this.newEntry() + if (nested) return this.entries + } + } else if (character.value === '"' && !character.escaped) { + if (quote) this.newEntry(true) + quote = !quote + } else if (character.value === ',' && !quote) { + this.newEntry() + } else { + this.record(character.value) + } + } + if (this.dimension !== 0) { + throw new Error('array dimension not balanced') + } + return this.entries + } +} + +function identity (value) { + return value +} diff --git a/node_modules/postgres-array/license b/node_modules/postgres-array/license new file mode 100644 index 00000000..25c62470 --- /dev/null +++ b/node_modules/postgres-array/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/postgres-array/package.json b/node_modules/postgres-array/package.json new file mode 100644 index 00000000..24d2323c --- /dev/null +++ b/node_modules/postgres-array/package.json @@ -0,0 +1,67 @@ +{ + "_from": "postgres-array@~2.0.0", + "_id": "postgres-array@2.0.0", + "_inBundle": false, + "_integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "_location": "/postgres-array", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-array@~2.0.0", + "name": "postgres-array", + "escapedName": "postgres-array", + "rawSpec": "~2.0.0", + "saveSpec": null, + "fetchSpec": "~2.0.0" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "_shasum": "48f8fce054fbc69671999329b8834b772652d82e", + "_spec": "postgres-array@~2.0.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-array/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Parse postgres array columns", + "devDependencies": { + "standard": "^12.0.1", + "tape": "^4.0.0" + }, + "engines": { + "node": ">=4" + }, + "files": [ + "index.js", + "index.d.ts", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-array#readme", + "keywords": [ + "postgres", + "array", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-array", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-array.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "types": "index.d.ts", + "version": "2.0.0" +} diff --git a/node_modules/postgres-array/readme.md b/node_modules/postgres-array/readme.md new file mode 100644 index 00000000..b74b369d --- /dev/null +++ b/node_modules/postgres-array/readme.md @@ -0,0 +1,43 @@ +# postgres-array [![Build Status](https://travis-ci.org/bendrucker/postgres-array.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-array) + +> Parse postgres array columns + + +## Install + +``` +$ npm install --save postgres-array +``` + + +## Usage + +```js +var postgresArray = require('postgres-array') + +postgresArray.parse('{1,2,3}', (value) => parseInt(value, 10)) +//=> [1, 2, 3] +``` + +## API + +#### `parse(input, [transform])` -> `array` + +##### input + +*Required* +Type: `string` + +A Postgres array string. + +##### transform + +Type: `function` +Default: `identity` + +A function that transforms non-null values inserted into the array. 
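A few extra calls, sketched from the `ArrayParser` implementation above, showing how nesting, `NULL`, and the `transform` callback interact (inputs chosen for illustration):

```js
var postgresArray = require('postgres-array')

postgresArray.parse('{1,2,3}', (value) => parseInt(value, 10))
//=> [1, 2, 3]

// nested arrays are parsed recursively, with the transform applied to the leaves
postgresArray.parse('{{1,2},{3,4}}', (value) => parseInt(value, 10))
//=> [[1, 2], [3, 4]]

// an unquoted NULL becomes null; a quoted "NULL" stays a string
postgresArray.parse('{foo,NULL,"NULL"}')
//=> ['foo', null, 'NULL']
```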
+ + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/postgres-bytea/index.js b/node_modules/postgres-bytea/index.js new file mode 100644 index 00000000..d1107a01 --- /dev/null +++ b/node_modules/postgres-bytea/index.js @@ -0,0 +1,31 @@ +'use strict' + +module.exports = function parseBytea (input) { + if (/^\\x/.test(input)) { + // new 'hex' style response (pg >9.0) + return new Buffer(input.substr(2), 'hex') + } + var output = '' + var i = 0 + while (i < input.length) { + if (input[i] !== '\\') { + output += input[i] + ++i + } else { + if (/[0-7]{3}/.test(input.substr(i + 1, 3))) { + output += String.fromCharCode(parseInt(input.substr(i + 1, 3), 8)) + i += 4 + } else { + var backslashes = 1 + while (i + backslashes < input.length && input[i + backslashes] === '\\') { + backslashes++ + } + for (var k = 0; k < Math.floor(backslashes / 2); ++k) { + output += '\\' + } + i += Math.floor(backslashes / 2) * 2 + } + } + } + return new Buffer(output, 'binary') +} diff --git a/node_modules/postgres-bytea/license b/node_modules/postgres-bytea/license new file mode 100644 index 00000000..25c62470 --- /dev/null +++ b/node_modules/postgres-bytea/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/postgres-bytea/package.json b/node_modules/postgres-bytea/package.json new file mode 100644 index 00000000..1690d98b --- /dev/null +++ b/node_modules/postgres-bytea/package.json @@ -0,0 +1,66 @@ +{ + "_from": "postgres-bytea@~1.0.0", + "_id": "postgres-bytea@1.0.0", + "_inBundle": false, + "_integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=", + "_location": "/postgres-bytea", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-bytea@~1.0.0", + "name": "postgres-bytea", + "escapedName": "postgres-bytea", + "rawSpec": "~1.0.0", + "saveSpec": null, + "fetchSpec": "~1.0.0" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "_shasum": "027b533c0aa890e26d172d47cf9ccecc521acd35", + "_spec": "postgres-bytea@~1.0.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-bytea/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Postgres bytea parser", + "devDependencies": { + "standard": "^4.0.0", + "tape": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-bytea#readme", + "keywords": [ + "bytea", + "postgres", + "binary", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-bytea", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-bytea.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.0.0" +} diff --git a/node_modules/postgres-bytea/readme.md b/node_modules/postgres-bytea/readme.md new file mode 100644 index 00000000..4939c3be --- /dev/null +++ b/node_modules/postgres-bytea/readme.md @@ -0,0 +1,34 @@ +# postgres-bytea [![Build Status](https://travis-ci.org/bendrucker/postgres-bytea.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-bytea) + +> Postgres bytea parser + + +## Install + +``` +$ npm install --save postgres-bytea +``` + + +## Usage + +```js +var bytea = require('postgres-bytea'); +bytea('\\000\\100\\200') +//=> buffer +``` + +## API + +#### `bytea(input)` -> `buffer` + +##### input + +*Required* +Type: `string` + +A Postgres bytea binary string. 
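For reference, a small sketch covering both output formats handled by the parser shown earlier in this patch; the byte values in the comments follow from the hex digits and octal escapes:

```js
var bytea = require('postgres-bytea')

// hex format (the default output for PostgreSQL 9.0 and later)
bytea('\\xdeadbeef')
//=> <Buffer de ad be ef>

// escape format (octal escape sequences)
bytea('\\000\\100\\200')
//=> <Buffer 00 40 80>
```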
+ +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/postgres-date/index.js b/node_modules/postgres-date/index.js new file mode 100644 index 00000000..1f03c457 --- /dev/null +++ b/node_modules/postgres-date/index.js @@ -0,0 +1,110 @@ +'use strict' + +var DATE_TIME = /(\d{1,})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})(\.\d{1,})?.*?( BC)?$/ +var DATE = /^(\d{1,})-(\d{2})-(\d{2})( BC)?$/ +var TIME_ZONE = /([Z+-])(\d{2})?:?(\d{2})?:?(\d{2})?/ +var INFINITY = /^-?infinity$/ + +module.exports = function parseDate (isoDate) { + if (INFINITY.test(isoDate)) { + // Capitalize to Infinity before passing to Number + return Number(isoDate.replace('i', 'I')) + } + var matches = DATE_TIME.exec(isoDate) + + if (!matches) { + // Force YYYY-MM-DD dates to be parsed as local time + return getDate(isoDate) || null + } + + var isBC = !!matches[8] + var year = parseInt(matches[1], 10) + if (isBC) { + year = bcYearToNegativeYear(year) + } + + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + var hour = parseInt(matches[4], 10) + var minute = parseInt(matches[5], 10) + var second = parseInt(matches[6], 10) + + var ms = matches[7] + ms = ms ? 1000 * parseFloat(ms) : 0 + + var date + var offset = timeZoneOffset(isoDate) + if (offset != null) { + date = new Date(Date.UTC(year, month, day, hour, minute, second, ms)) + + // Account for years from 0 to 99 being interpreted as 1900-1999 + // by Date.UTC / the multi-argument form of the Date constructor + if (is0To99(year)) { + date.setUTCFullYear(year) + } + + date.setTime(date.getTime() - offset) + } else { + date = new Date(year, month, day, hour, minute, second, ms) + + if (is0To99(year)) { + date.setFullYear(year) + } + } + + return date +} + +function getDate (isoDate) { + var matches = DATE.exec(isoDate) + if (!matches) { + return + } + + var year = parseInt(matches[1], 10) + var isBC = !!matches[4] + if (isBC) { + year = bcYearToNegativeYear(year) + } + + var month = parseInt(matches[2], 10) - 1 + var day = matches[3] + // YYYY-MM-DD will be parsed as local time + var date = new Date(year, month, day) + + if (is0To99(year)) { + date.setFullYear(year) + } + + return date +} + +// match timezones: +// Z (UTC) +// -05 +// +06:30 +function timeZoneOffset (isoDate) { + var zone = TIME_ZONE.exec(isoDate.split(' ')[1]) + if (!zone) return + var type = zone[1] + + if (type === 'Z') { + return 0 + } + var sign = type === '-' ? 
-1 : 1 + var offset = parseInt(zone[2], 10) * 3600 + + parseInt(zone[3] || 0, 10) * 60 + + parseInt(zone[4] || 0, 10) + + return offset * sign * 1000 +} + +function bcYearToNegativeYear (year) { + // Account for numerical difference between representations of BC years + // See: https://github.com/bendrucker/postgres-date/issues/5 + return -(year - 1) +} + +function is0To99 (num) { + return num >= 0 && num < 100 +} diff --git a/node_modules/postgres-date/license b/node_modules/postgres-date/license new file mode 100644 index 00000000..25c62470 --- /dev/null +++ b/node_modules/postgres-date/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/postgres-date/package.json b/node_modules/postgres-date/package.json new file mode 100644 index 00000000..7cbfec5f --- /dev/null +++ b/node_modules/postgres-date/package.json @@ -0,0 +1,65 @@ +{ + "_from": "postgres-date@~1.0.4", + "_id": "postgres-date@1.0.6", + "_inBundle": false, + "_integrity": "sha512-o2a4gxeFcox+CgB3Ig/kNHBP23PiEXHCXx7pcIIsvzoNz4qv+lKTyiSkjOXIMNUl12MO/mOYl2K6wR9X5K6Plg==", + "_location": "/postgres-date", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-date@~1.0.4", + "name": "postgres-date", + "escapedName": "postgres-date", + "rawSpec": "~1.0.4", + "saveSpec": null, + "fetchSpec": "~1.0.4" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.6.tgz", + "_shasum": "4925e8085b30c2ba1a06ac91b9a3473954a2ce2d", + "_spec": "postgres-date@~1.0.4", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-date/issues" + }, + "bundleDependencies": false, + "dependencies": {}, + "deprecated": false, + "description": "Postgres date column parser", + "devDependencies": { + "standard": "^14.0.0", + "tape": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-date#readme", + "keywords": [ + "postgres", + "date", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-date", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-date.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + 
"version": "1.0.6" +} diff --git a/node_modules/postgres-date/readme.md b/node_modules/postgres-date/readme.md new file mode 100644 index 00000000..095431a0 --- /dev/null +++ b/node_modules/postgres-date/readme.md @@ -0,0 +1,49 @@ +# postgres-date [![Build Status](https://travis-ci.org/bendrucker/postgres-date.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-date) [![Greenkeeper badge](https://badges.greenkeeper.io/bendrucker/postgres-date.svg)](https://greenkeeper.io/) + +> Postgres date output parser + +This package parses [date/time outputs](https://www.postgresql.org/docs/current/datatype-datetime.html#DATATYPE-DATETIME-OUTPUT) from Postgres into Javascript `Date` objects. Its goal is to match Postgres behavior and preserve data accuracy. + +If you find a case where a valid Postgres output results in incorrect parsing (including loss of precision), please [create a pull request](https://github.com/bendrucker/postgres-date/compare) and provide a failing test. + +**Supported Postgres Versions:** `>= 9.6` + +All prior versions of Postgres are likely compatible but not officially supported. + +## Install + +``` +$ npm install --save postgres-date +``` + + +## Usage + +```js +var parse = require('postgres-date') +parse('2011-01-23 22:15:51Z') +// => 2011-01-23T22:15:51.000Z +``` + +## API + +#### `parse(isoDate)` -> `date` + +##### isoDate + +*Required* +Type: `string` + +A date string from Postgres. + +## Releases + +The following semantic versioning increments will be used for changes: + +* **Major**: Removal of support for Node.js versions or Postgres versions (not expected) +* **Minor**: Unused, since Postgres returns dates in standard ISO 8601 format +* **Patch**: Any fix for parsing behavior + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/postgres-interval/index.d.ts b/node_modules/postgres-interval/index.d.ts new file mode 100644 index 00000000..f82b4c37 --- /dev/null +++ b/node_modules/postgres-interval/index.d.ts @@ -0,0 +1,20 @@ +declare namespace PostgresInterval { + export interface IPostgresInterval { + years?: number; + months?: number; + days?: number; + hours?: number; + minutes?: number; + seconds?: number; + milliseconds?: number; + + toPostgres(): string; + + toISO(): string; + toISOString(): string; + } +} + +declare function PostgresInterval(raw: string): PostgresInterval.IPostgresInterval; + +export = PostgresInterval; diff --git a/node_modules/postgres-interval/index.js b/node_modules/postgres-interval/index.js new file mode 100644 index 00000000..8ecca800 --- /dev/null +++ b/node_modules/postgres-interval/index.js @@ -0,0 +1,125 @@ +'use strict' + +var extend = require('xtend/mutable') + +module.exports = PostgresInterval + +function PostgresInterval (raw) { + if (!(this instanceof PostgresInterval)) { + return new PostgresInterval(raw) + } + extend(this, parse(raw)) +} +var properties = ['seconds', 'minutes', 'hours', 'days', 'months', 'years'] +PostgresInterval.prototype.toPostgres = function () { + var filtered = properties.filter(this.hasOwnProperty, this) + + // In addition to `properties`, we need to account for fractions of seconds. + if (this.milliseconds && filtered.indexOf('seconds') < 0) { + filtered.push('seconds') + } + + if (filtered.length === 0) return '0' + return filtered + .map(function (property) { + var value = this[property] || 0 + + // Account for fractional part of seconds, + // remove trailing zeroes. 
+ if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/\.?0+$/, '') + } + + return value + ' ' + property + }, this) + .join(' ') +} + +var propertiesISOEquivalent = { + years: 'Y', + months: 'M', + days: 'D', + hours: 'H', + minutes: 'M', + seconds: 'S' +} +var dateProperties = ['years', 'months', 'days'] +var timeProperties = ['hours', 'minutes', 'seconds'] +// according to ISO 8601 +PostgresInterval.prototype.toISOString = PostgresInterval.prototype.toISO = function () { + var datePart = dateProperties + .map(buildProperty, this) + .join('') + + var timePart = timeProperties + .map(buildProperty, this) + .join('') + + return 'P' + datePart + 'T' + timePart + + function buildProperty (property) { + var value = this[property] || 0 + + // Account for fractional part of seconds, + // remove trailing zeroes. + if (property === 'seconds' && this.milliseconds) { + value = (value + this.milliseconds / 1000).toFixed(6).replace(/0+$/, '') + } + + return value + propertiesISOEquivalent[property] + } +} + +var NUMBER = '([+-]?\\d+)' +var YEAR = NUMBER + '\\s+years?' +var MONTH = NUMBER + '\\s+mons?' +var DAY = NUMBER + '\\s+days?' +var TIME = '([+-])?([\\d]*):(\\d\\d):(\\d\\d)\\.?(\\d{1,6})?' +var INTERVAL = new RegExp([YEAR, MONTH, DAY, TIME].map(function (regexString) { + return '(' + regexString + ')?' +}) + .join('\\s*')) + +// Positions of values in regex match +var positions = { + years: 2, + months: 4, + days: 6, + hours: 9, + minutes: 10, + seconds: 11, + milliseconds: 12 +} +// We can use negative time +var negatives = ['hours', 'minutes', 'seconds', 'milliseconds'] + +function parseMilliseconds (fraction) { + // add omitted zeroes + var microseconds = fraction + '000000'.slice(fraction.length) + return parseInt(microseconds, 10) / 1000 +} + +function parse (interval) { + if (!interval) return {} + var matches = INTERVAL.exec(interval) + var isNegative = matches[8] === '-' + return Object.keys(positions) + .reduce(function (parsed, property) { + var position = positions[property] + var value = matches[position] + // no empty string + if (!value) return parsed + // milliseconds are actually microseconds (up to 6 digits) + // with omitted trailing zeroes. + value = property === 'milliseconds' + ? parseMilliseconds(value) + : parseInt(value, 10) + // no zeros + if (!value) return parsed + if (isNegative && ~negatives.indexOf(property)) { + value *= -1 + } + parsed[property] = value + return parsed + }, {}) +} diff --git a/node_modules/postgres-interval/license b/node_modules/postgres-interval/license new file mode 100644 index 00000000..25c62470 --- /dev/null +++ b/node_modules/postgres-interval/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Ben Drucker (bendrucker.me) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/postgres-interval/package.json b/node_modules/postgres-interval/package.json new file mode 100644 index 00000000..743f8b64 --- /dev/null +++ b/node_modules/postgres-interval/package.json @@ -0,0 +1,68 @@ +{ + "_from": "postgres-interval@^1.1.0", + "_id": "postgres-interval@1.2.0", + "_inBundle": false, + "_integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "_location": "/postgres-interval", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "postgres-interval@^1.1.0", + "name": "postgres-interval", + "escapedName": "postgres-interval", + "rawSpec": "^1.1.0", + "saveSpec": null, + "fetchSpec": "^1.1.0" + }, + "_requiredBy": [ + "/pg-types" + ], + "_resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "_shasum": "b460c82cb1587507788819a06aa0fffdb3544695", + "_spec": "postgres-interval@^1.1.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg-types", + "author": { + "name": "Ben Drucker", + "email": "bvdrucker@gmail.com", + "url": "bendrucker.me" + }, + "bugs": { + "url": "https://github.com/bendrucker/postgres-interval/issues" + }, + "bundleDependencies": false, + "dependencies": { + "xtend": "^4.0.0" + }, + "deprecated": false, + "description": "Parse Postgres interval columns", + "devDependencies": { + "standard": "^12.0.1", + "tape": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + }, + "files": [ + "index.js", + "index.d.ts", + "readme.md" + ], + "homepage": "https://github.com/bendrucker/postgres-interval#readme", + "keywords": [ + "postgres", + "interval", + "parser" + ], + "license": "MIT", + "main": "index.js", + "name": "postgres-interval", + "repository": { + "type": "git", + "url": "git+https://github.com/bendrucker/postgres-interval.git" + }, + "scripts": { + "test": "standard && tape test.js" + }, + "version": "1.2.0" +} diff --git a/node_modules/postgres-interval/readme.md b/node_modules/postgres-interval/readme.md new file mode 100644 index 00000000..53cda4ad --- /dev/null +++ b/node_modules/postgres-interval/readme.md @@ -0,0 +1,48 @@ +# postgres-interval [![Build Status](https://travis-ci.org/bendrucker/postgres-interval.svg?branch=master)](https://travis-ci.org/bendrucker/postgres-interval) [![Greenkeeper badge](https://badges.greenkeeper.io/bendrucker/postgres-interval.svg)](https://greenkeeper.io/) + +> Parse Postgres interval columns + + +## Install + +``` +$ npm install --save postgres-interval +``` + + +## Usage + +```js +var parse = require('postgres-interval') +var interval = parse('01:02:03') +//=> {hours: 1, minutes: 2, seconds: 3} +interval.toPostgres() +// 3 seconds 2 minutes 1 hours +interval.toISO() +// P0Y0M0DT1H2M3S +``` + +## API + +#### `parse(pgInterval)` -> `interval` + +##### pgInterval + +*Required* +Type: `string` + +A Postgres interval string. + +#### `interval.toPostgres()` -> `string` + +Returns an interval string. This allows the interval object to be passed into prepared statements. 
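A quick sketch of the round trip, derived from the parser and serializers above; the input interval is an illustrative value with fractional seconds:

```js
var parse = require('postgres-interval')

var interval = parse('02:15:30.25')
//=> { hours: 2, minutes: 15, seconds: 30, milliseconds: 250 }

interval.toPostgres()
//=> '30.25 seconds 15 minutes 2 hours'

interval.toISOString()
//=> 'P0Y0M0DT2H15M30.25S'
```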
+ +#### `interval.toISOString()` -> `string` + +Returns an [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601#Durations) compliant string. + +Also available as `interval.toISO()` for backwards compatibility. + +## License + +MIT © [Ben Drucker](http://bendrucker.me) diff --git a/node_modules/semver/.npmignore b/node_modules/semver/.npmignore new file mode 100644 index 00000000..7300fbc7 --- /dev/null +++ b/node_modules/semver/.npmignore @@ -0,0 +1 @@ +# nada diff --git a/node_modules/semver/LICENSE b/node_modules/semver/LICENSE new file mode 100644 index 00000000..0c44ae71 --- /dev/null +++ b/node_modules/semver/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) Isaac Z. Schlueter ("Author") +All rights reserved. + +The BSD License + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/semver/Makefile b/node_modules/semver/Makefile new file mode 100644 index 00000000..71af0e97 --- /dev/null +++ b/node_modules/semver/Makefile @@ -0,0 +1,24 @@ +files = semver.browser.js \ + semver.min.js \ + semver.browser.js.gz \ + semver.min.js.gz + +all: $(files) + +clean: + rm -f $(files) + +semver.browser.js: head.js.txt semver.js foot.js.txt + ( cat head.js.txt; \ + cat semver.js | \ + egrep -v '^ *\/\* nomin \*\/' | \ + perl -pi -e 's/debug\([^\)]+\)//g'; \ + cat foot.js.txt ) > semver.browser.js + +semver.min.js: semver.browser.js + uglifyjs -m semver.min.js + +%.gz: % + gzip --stdout -9 <$< >$@ + +.PHONY: all clean diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md new file mode 100644 index 00000000..b5e35ff0 --- /dev/null +++ b/node_modules/semver/README.md @@ -0,0 +1,303 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Usage + + $ npm install semver + + semver.valid('1.2.3') // '1.2.3' + semver.valid('a.b.c') // null + semver.clean(' =v1.2.3 ') // '1.2.3' + semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true + semver.gt('1.2.3', '9.8.7') // false + semver.lt('1.2.3', '9.8.7') // true + +As a command-line utility: + + $ semver -h + + Usage: semver [ [...]] [-r | -i | --preid | -l | -rv] + Test if version(s) satisfy the supplied range(s), and sort them. + + Multiple versions or ranges may be supplied, unless increment + option is specified. 
In that case, only a single version may + be used, and it is incremented by the specified level + + Program exits successfully if any valid version satisfies + all supplied ranges, and prints all satisfying versions. + + If no versions are valid, or ranges are not satisfied, + then exits failure. + + Versions are printed in ascending order, so supplying + multiple versions to the utility will just sort them. + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. + +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. 
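The versions from the paragraph above, expressed as `satisfies` calls (a sketch for illustration):

```js
var semver = require('semver')

semver.satisfies('1.2.3-alpha.7', '>1.2.3-alpha.3') // true  (prerelease on the same [major, minor, patch] tuple)
semver.satisfies('3.4.5-alpha.9', '>1.2.3-alpha.3') // false (prerelease of a different tuple)
semver.satisfies('3.4.5', '>1.2.3-alpha.3')         // true  (no prerelease flag, and greater than 1.2.3-alpha.3)
```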
+ +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +> semver.inc('1.2.3', 'pre', 'beta') +'1.2.4-beta.0' +``` + +command-line example: + +```shell +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```shell +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any version satisfies) +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero digit in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. 
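For instance, a short sketch of caret matching; the desugared ranges these results follow from are listed below:

```js
var semver = require('semver')

// ^1.2.3 allows patch and minor updates, but not a new major version
semver.satisfies('1.4.7', '^1.2.3') // true
semver.satisfies('2.0.0', '^1.2.3') // false

// ^0.2.3 only allows patch updates, since the left-most non-zero digit is the minor version
semver.satisfies('0.2.5', '^0.2.3') // true
semver.satisfies('0.3.0', '^0.2.3') // false
```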
+ +* `^1.2.3` := `>=1.2.3 <2.0.0` +* `^0.2.3` := `>=0.2.3 <0.3.0` +* `^0.0.3` := `>=0.0.3 <0.0.4` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0` +* `^0.0.x` := `>=0.0.0 <0.1.0` +* `^0.0` := `>=0.0.0 <0.1.0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0` +* `^0.x` := `>=0.0.0 <1.0.0` + +## Functions + +All methods and classes take a final `loose` boolean argument that, if +true, will be more forgiving about not-quite-valid semver strings. +The resulting output will always be 100% strict, of course. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. 
+* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. diff --git a/node_modules/semver/bin/semver b/node_modules/semver/bin/semver new file mode 100755 index 00000000..c5f2e857 --- /dev/null +++ b/node_modules/semver/bin/semver @@ -0,0 +1,133 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. + +var argv = process.argv.slice(2) + , versions = [] + , range = [] + , gt = [] + , lt = [] + , eq = [] + , inc = null + , version = require("../package.json").version + , loose = false + , identifier = undefined + , semver = require("../semver") + , reverse = false + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var i = a.indexOf('=') + if (i !== -1) { + a = a.slice(0, i) + argv.unshift(a.slice(i + 1)) + } + switch (a) { + case "-rv": case "-rev": case "--rev": case "--reverse": + reverse = true + break + case "-l": case "--loose": + loose = true + break + case "-v": case "--version": + versions.push(argv.shift()) + break + case "-i": case "--inc": case "--increment": + switch (argv[0]) { + case "major": case "minor": case "patch": case "prerelease": + case "premajor": case "preminor": case "prepatch": + inc = argv.shift() + break + default: + inc = "patch" + break + } + break + case "--preid": + identifier = argv.shift() + break + case "-r": case "--range": + range.push(argv.shift()) + break + case "-h": case "--help": case "-?": + return help() + default: + versions.push(a) + break + } + } + + versions = versions.filter(function (v) { + return semver.valid(v, loose) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) + return failInc() + + for (var i = 0, l = range.length; i < l ; i ++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], loose) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error("--inc can only be used on a single version with no range") + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? "rcompare" : "compare" + versions.sort(function (a, b) { + return semver[compare](a, b, loose) + }).map(function (v) { + return semver.clean(v, loose) + }).map(function (v) { + return inc ? 
semver.inc(v, inc, loose, identifier) : v + }).forEach(function (v,i,_) { console.log(v) }) +} + +function help () { + console.log(["SemVer " + version + ,"" + ,"A JavaScript implementation of the http://semver.org/ specification" + ,"Copyright Isaac Z. Schlueter" + ,"" + ,"Usage: semver [options] [ [...]]" + ,"Prints valid versions sorted by SemVer precedence" + ,"" + ,"Options:" + ,"-r --range " + ," Print versions that match the specified range." + ,"" + ,"-i --increment []" + ," Increment a version by the specified level. Level can" + ," be one of: major, minor, patch, premajor, preminor," + ," prepatch, or prerelease. Default level is 'patch'." + ," Only one version may be specified." + ,"" + ,"--preid " + ," Identifier to be used to prefix premajor, preminor," + ," prepatch or prerelease version increments." + ,"" + ,"-l --loose" + ," Interpret versions and ranges loosely" + ,"" + ,"Program exits successfully if any valid version satisfies" + ,"all supplied ranges, and prints all satisfying versions." + ,"" + ,"If no satisfying versions are found, then exits failure." + ,"" + ,"Versions are printed in ascending order, so supplying" + ,"multiple versions to the utility will just sort them." + ].join("\n")) +} diff --git a/node_modules/semver/foot.js.txt b/node_modules/semver/foot.js.txt new file mode 100644 index 00000000..8f83c20f --- /dev/null +++ b/node_modules/semver/foot.js.txt @@ -0,0 +1,6 @@ + +})( + typeof exports === 'object' ? exports : + typeof define === 'function' && define.amd ? {} : + semver = {} +); diff --git a/node_modules/semver/head.js.txt b/node_modules/semver/head.js.txt new file mode 100644 index 00000000..65368651 --- /dev/null +++ b/node_modules/semver/head.js.txt @@ -0,0 +1,2 @@ +;(function(exports) { + diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json new file mode 100644 index 00000000..430b2b32 --- /dev/null +++ b/node_modules/semver/package.json @@ -0,0 +1,53 @@ +{ + "_from": "semver@4.3.2", + "_id": "semver@4.3.2", + "_inBundle": false, + "_integrity": "sha1-x6BxWKgL7dBSNVt3DYLWZA+AO+c=", + "_location": "/semver", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "semver@4.3.2", + "name": "semver", + "escapedName": "semver", + "rawSpec": "4.3.2", + "saveSpec": null, + "fetchSpec": "4.3.2" + }, + "_requiredBy": [ + "/pg" + ], + "_resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz", + "_shasum": "c7a07158a80bedd052355b770d82d6640f803be7", + "_spec": "semver@4.3.2", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pg", + "bin": { + "semver": "bin/semver" + }, + "browser": "semver.browser.js", + "bugs": { + "url": "https://github.com/npm/node-semver/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "The semantic version parser used by npm.", + "devDependencies": { + "tap": "0.x >=0.0.4", + "uglify-js": "~2.3.6" + }, + "homepage": "https://github.com/npm/node-semver#readme", + "license": "BSD", + "main": "semver.js", + "min": "semver.min.js", + "name": "semver", + "repository": { + "type": "git", + "url": "git://github.com/npm/node-semver.git" + }, + "scripts": { + "prepublish": "make", + "test": "tap test/*.js" + }, + "version": "4.3.2" +} diff --git a/node_modules/semver/semver.browser.js b/node_modules/semver/semver.browser.js new file mode 100644 index 00000000..250885a7 --- /dev/null +++ b/node_modules/semver/semver.browser.js @@ -0,0 +1,1187 @@ +;(function(exports) { + +// export the class if we are in a Node-like 
system. +if (typeof module === 'object' && module.exports === exports) + exports = module.exports = SemVer; + +// The debug function is excluded entirely from the minified version. + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0'; + +var MAX_LENGTH = 256; +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991; + +// The actual regexps go on exports.re +var re = exports.re = []; +var src = exports.src = []; +var R = 0; + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++; +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; +var NUMERICIDENTIFIERLOOSE = R++; +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; + + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++; +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; + + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++; +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')'; + +var MAINVERSIONLOOSE = R++; +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +var PRERELEASEIDENTIFIER = R++; +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + +var PRERELEASEIDENTIFIERLOOSE = R++; +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++; +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; + +var PRERELEASELOOSE = R++; +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++; +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++; +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; + + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++; +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?'; + +src[FULL] = '^' + FULLPLAIN + '$'; + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' 
+ + src[BUILD] + '?'; + +var LOOSE = R++; +src[LOOSE] = '^' + LOOSEPLAIN + '$'; + +var GTLT = R++; +src[GTLT] = '((?:<|>)?=?)'; + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++; +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; +var XRANGEIDENTIFIER = R++; +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; + +var XRANGEPLAIN = R++; +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGEPLAINLOOSE = R++; +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGE = R++; +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; +var XRANGELOOSE = R++; +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++; +src[LONETILDE] = '(?:~>?)'; + +var TILDETRIM = R++; +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); +var tildeTrimReplace = '$1~'; + +var TILDE = R++; +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; +var TILDELOOSE = R++; +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++; +src[LONECARET] = '(?:\\^)'; + +var CARETTRIM = R++; +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); +var caretTrimReplace = '$1^'; + +var CARET = R++; +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; +var CARETLOOSE = R++; +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++; +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; +var COMPARATOR = R++; +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; + + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++; +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); +var comparatorTrimReplace = '$1$2$3'; + + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++; +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$'; + +var HYPHENRANGELOOSE = R++; +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$'; + +// Star ranges basically just allow anything at all. +var STAR = R++; +src[STAR] = '(<|>)?=?\\s*\\*'; + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. 
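+// Illustrative example (not from the upstream source): once the loop below
+// compiles these sources, re[FULL] captures the pieces of a strict version
+// string, e.g.
+//   re[FULL].exec('1.2.3-beta.1+exp.sha.5114f85')
+//   // => [full match, '1', '2', '3', 'beta.1', 'exp.sha.5114f85']
+// Groups 1-3 are major/minor/patch, group 4 the prerelease, and group 5 the
+// build metadata (captured here but never used in precedence comparisons).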
+for (var i = 0; i < R; i++) { + ; + if (!re[i]) + re[i] = new RegExp(src[i]); +} + +exports.parse = parse; +function parse(version, loose) { + if (version.length > MAX_LENGTH) + return null; + + var r = loose ? re[LOOSE] : re[FULL]; + if (!r.test(version)) + return null; + + try { + return new SemVer(version, loose); + } catch (er) { + return null; + } +} + +exports.valid = valid; +function valid(version, loose) { + var v = parse(version, loose); + return v ? v.version : null; +} + + +exports.clean = clean; +function clean(version, loose) { + var s = parse(version.trim().replace(/^[=v]+/, ''), loose); + return s ? s.version : null; +} + +exports.SemVer = SemVer; + +function SemVer(version, loose) { + if (version instanceof SemVer) { + if (version.loose === loose) + return version; + else + version = version.version; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version); + } + + if (version.length > MAX_LENGTH) + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + + if (!(this instanceof SemVer)) + return new SemVer(version, loose); + + ; + this.loose = loose; + var m = version.trim().match(loose ? re[LOOSE] : re[FULL]); + + if (!m) + throw new TypeError('Invalid Version: ' + version); + + this.raw = version; + + // these are actually numbers + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) + throw new TypeError('Invalid major version') + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) + throw new TypeError('Invalid minor version') + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) + throw new TypeError('Invalid patch version') + + // numberify any prerelease numeric ids + if (!m[4]) + this.prerelease = []; + else + this.prerelease = m[4].split('.').map(function(id) { + return (/^[0-9]+$/.test(id)) ? +id : id; + }); + + this.build = m[5] ? m[5].split('.') : []; + this.format(); +} + +SemVer.prototype.format = function() { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch; + if (this.prerelease.length) + this.version += '-' + this.prerelease.join('.'); + return this.version; +}; + +SemVer.prototype.inspect = function() { + return ''; +}; + +SemVer.prototype.toString = function() { + return this.version; +}; + +SemVer.prototype.compare = function(other) { + ; + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return this.compareMain(other) || this.comparePre(other); +}; + +SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch); +}; + +SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) + return -1; + else if (!this.prerelease.length && other.prerelease.length) + return 1; + else if (!this.prerelease.length && !other.prerelease.length) + return 0; + + var i = 0; + do { + var a = this.prerelease[i]; + var b = other.prerelease[i]; + ; + if (a === undefined && b === undefined) + return 0; + else if (b === undefined) + return 1; + else if (a === undefined) + return -1; + else if (a === b) + continue; + else + return compareIdentifiers(a, b); + } while (++i); +}; + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function(release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc('pre', identifier); + break; + case 'preminor': + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc('pre', identifier); + break; + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0; + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) + this.major++; + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) + this.minor++; + this.patch = 0; + this.prerelease = []; + break; + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) + this.patch++; + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
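+    // Illustrative examples (not from the upstream source) of the cases above:
+    //   new SemVer('1.2.3').inc('minor').version             // '1.3.0'
+    //   new SemVer('1.3.0-beta.1').inc('patch').version      // '1.3.0'
+    //   new SemVer('1.2.3').inc('prerelease', 'rc').version  // '1.2.4-rc.0'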
+ case 'pre': + if (this.prerelease.length === 0) + this.prerelease = [0]; + else { + var i = this.prerelease.length; + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++; + i = -2; + } + } + if (i === -1) // didn't increment anything + this.prerelease.push(0); + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) + this.prerelease = [identifier, 0]; + } else + this.prerelease = [identifier, 0]; + } + break; + + default: + throw new Error('invalid increment argument: ' + release); + } + this.format(); + return this; +}; + +exports.inc = inc; +function inc(version, release, loose, identifier) { + if (typeof(loose) === 'string') { + identifier = loose; + loose = undefined; + } + + try { + return new SemVer(version, loose).inc(release, identifier).version; + } catch (er) { + return null; + } +} + +exports.diff = diff; +function diff(version1, version2) { + if (eq(version1, version2)) { + return null; + } else { + var v1 = parse(version1); + var v2 = parse(version2); + if (v1.prerelease.length || v2.prerelease.length) { + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return 'pre'+key; + } + } + } + return 'prerelease'; + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return key; + } + } + } + } +} + +exports.compareIdentifiers = compareIdentifiers; + +var numeric = /^[0-9]+$/; +function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var bnum = numeric.test(b); + + if (anum && bnum) { + a = +a; + b = +b; + } + + return (anum && !bnum) ? -1 : + (bnum && !anum) ? 1 : + a < b ? -1 : + a > b ? 
1 : + 0; +} + +exports.rcompareIdentifiers = rcompareIdentifiers; +function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); +} + +exports.major = major; +function major(a, loose) { + return new SemVer(a, loose).major; +} + +exports.minor = minor; +function minor(a, loose) { + return new SemVer(a, loose).minor; +} + +exports.patch = patch; +function patch(a, loose) { + return new SemVer(a, loose).patch; +} + +exports.compare = compare; +function compare(a, b, loose) { + return new SemVer(a, loose).compare(b); +} + +exports.compareLoose = compareLoose; +function compareLoose(a, b) { + return compare(a, b, true); +} + +exports.rcompare = rcompare; +function rcompare(a, b, loose) { + return compare(b, a, loose); +} + +exports.sort = sort; +function sort(list, loose) { + return list.sort(function(a, b) { + return exports.compare(a, b, loose); + }); +} + +exports.rsort = rsort; +function rsort(list, loose) { + return list.sort(function(a, b) { + return exports.rcompare(a, b, loose); + }); +} + +exports.gt = gt; +function gt(a, b, loose) { + return compare(a, b, loose) > 0; +} + +exports.lt = lt; +function lt(a, b, loose) { + return compare(a, b, loose) < 0; +} + +exports.eq = eq; +function eq(a, b, loose) { + return compare(a, b, loose) === 0; +} + +exports.neq = neq; +function neq(a, b, loose) { + return compare(a, b, loose) !== 0; +} + +exports.gte = gte; +function gte(a, b, loose) { + return compare(a, b, loose) >= 0; +} + +exports.lte = lte; +function lte(a, b, loose) { + return compare(a, b, loose) <= 0; +} + +exports.cmp = cmp; +function cmp(a, op, b, loose) { + var ret; + switch (op) { + case '===': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a === b; + break; + case '!==': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a !== b; + break; + case '': case '=': case '==': ret = eq(a, b, loose); break; + case '!=': ret = neq(a, b, loose); break; + case '>': ret = gt(a, b, loose); break; + case '>=': ret = gte(a, b, loose); break; + case '<': ret = lt(a, b, loose); break; + case '<=': ret = lte(a, b, loose); break; + default: throw new TypeError('Invalid operator: ' + op); + } + return ret; +} + +exports.Comparator = Comparator; +function Comparator(comp, loose) { + if (comp instanceof Comparator) { + if (comp.loose === loose) + return comp; + else + comp = comp.value; + } + + if (!(this instanceof Comparator)) + return new Comparator(comp, loose); + + ; + this.loose = loose; + this.parse(comp); + + if (this.semver === ANY) + this.value = ''; + else + this.value = this.operator + this.semver.version; + + ; +} + +var ANY = {}; +Comparator.prototype.parse = function(comp) { + var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var m = comp.match(r); + + if (!m) + throw new TypeError('Invalid comparator: ' + comp); + + this.operator = m[1]; + if (this.operator === '=') + this.operator = ''; + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) + this.semver = ANY; + else + this.semver = new SemVer(m[2], this.loose); +}; + +Comparator.prototype.inspect = function() { + return ''; +}; + +Comparator.prototype.toString = function() { + return this.value; +}; + +Comparator.prototype.test = function(version) { + ; + + if (this.semver === ANY) + return true; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + return cmp(version, this.operator, this.semver, this.loose); +}; + + +exports.Range = Range; +function Range(range, loose) { + if ((range instanceof Range) && range.loose === loose) + return range; + + if (!(this instanceof Range)) + return new Range(range, loose); + + this.loose = loose; + + // First, split based on boolean or || + this.raw = range; + this.set = range.split(/\s*\|\|\s*/).map(function(range) { + return this.parseRange(range.trim()); + }, this).filter(function(c) { + // throw out any that are not relevant for whatever reason + return c.length; + }); + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range); + } + + this.format(); +} + +Range.prototype.inspect = function() { + return ''; +}; + +Range.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(' ').trim(); + }).join('||').trim(); + return this.range; +}; + +Range.prototype.toString = function() { + return this.range; +}; + +Range.prototype.parseRange = function(range) { + var loose = this.loose; + range = range.trim(); + ; + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + ; + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); + ; + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace); + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace); + + // normalize spaces + range = range.split(/\s+/).join(' '); + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var set = range.split(' ').map(function(comp) { + return parseComparator(comp, loose); + }).join(' ').split(/\s+/); + if (this.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function(comp) { + return !!comp.match(compRe); + }); + } + set = set.map(function(comp) { + return new Comparator(comp, loose); + }); + + return set; +}; + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators; +function toComparators(range, loose) { + return new Range(range, loose).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(' ').trim().split(' '); + }); +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
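+// Illustrative desugarings (not from the upstream source), following the
+// replace* helpers defined below:
+//   parseComparator('^1.2.3', false)  // '>=1.2.3 <2.0.0'
+//   parseComparator('~1.2', false)    // '>=1.2.0 <1.3.0'
+//   parseComparator('1.x', false)     // '>=1.0.0 <2.0.0'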
+function parseComparator(comp, loose) { + ; + comp = replaceCarets(comp, loose); + ; + comp = replaceTildes(comp, loose); + ; + comp = replaceXRanges(comp, loose); + ; + comp = replaceStars(comp, loose); + ; + return comp; +} + +function isX(id) { + return !id || id.toLowerCase() === 'x' || id === '*'; +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceTilde(comp, loose); + }).join(' '); +} + +function replaceTilde(comp, loose) { + var r = loose ? re[TILDELOOSE] : re[TILDE]; + return comp.replace(r, function(_, M, m, p, pr) { + ; + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) + // ~1.2 == >=1.2.0- <1.3.0- + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else if (pr) { + ; + if (pr.charAt(0) !== '-') + pr = '-' + pr; + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + + ; + return ret; + }); +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceCaret(comp, loose); + }).join(' '); +} + +function replaceCaret(comp, loose) { + ; + var r = loose ? re[CARETLOOSE] : re[CARET]; + return comp.replace(r, function(_, M, m, p, pr) { + ; + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) { + if (M === '0') + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'; + } else if (pr) { + ; + if (pr.charAt(0) !== '-') + pr = '-' + pr; + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + (+M + 1) + '.0.0'; + } else { + ; + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0'; + } + + ; + return ret; + }); +} + +function replaceXRanges(comp, loose) { + ; + return comp.split(/\s+/).map(function(comp) { + return replaceXRange(comp, loose); + }).join(' '); +} + +function replaceXRange(comp, loose) { + comp = comp.trim(); + var r = loose ? 
re[XRANGELOOSE] : re[XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + ; + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + + if (gtlt === '=' && anyX) + gtlt = ''; + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0'; + } else { + // nothing is forbidden + ret = '*'; + } + } else if (gtlt && anyX) { + // replace X with 0 + if (xm) + m = 0; + if (xp) + p = 0; + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>='; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else if (xp) { + m = +m + 1; + p = 0; + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) + M = +M + 1 + else + m = +m + 1 + } + + ret = gtlt + M + '.' + m + '.' + p; + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + } + + ; + + return ret; + }); +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars(comp, loose) { + ; + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], ''); +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + + if (isX(fM)) + from = ''; + else if (isX(fm)) + from = '>=' + fM + '.0.0'; + else if (isX(fp)) + from = '>=' + fM + '.' + fm + '.0'; + else + from = '>=' + from; + + if (isX(tM)) + to = ''; + else if (isX(tm)) + to = '<' + (+tM + 1) + '.0.0'; + else if (isX(tp)) + to = '<' + tM + '.' + (+tm + 1) + '.0'; + else if (tpr) + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; + else + to = '<=' + to; + + return (from + ' ' + to).trim(); +} + + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function(version) { + if (!version) + return false; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version)) + return true; + } + return false; +}; + +function testSet(set, version) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) + return false; + } + + if (version.prerelease.length) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (var i = 0; i < set.length; i++) { + ; + if (set[i].semver === ANY) + return true; + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver; + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) + return true; + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false; + } + + return true; +} + +exports.satisfies = satisfies; +function satisfies(version, range, loose) { + try { + range = new Range(range, loose); + } catch (er) { + return false; + } + return range.test(version); +} + +exports.maxSatisfying = maxSatisfying; +function maxSatisfying(versions, range, loose) { + return versions.filter(function(version) { + return satisfies(version, range, loose); + }).sort(function(a, b) { + return rcompare(a, b, loose); + })[0] || null; +} + +exports.validRange = validRange; +function validRange(range, loose) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, loose).range || '*'; + } catch (er) { + return null; + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr; +function ltr(version, range, loose) { + return outside(version, range, '<', loose); +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr; +function gtr(version, range, loose) { + return outside(version, range, '>', loose); +} + +exports.outside = outside; +function outside(version, range, hilo, loose) { + version = new SemVer(version, loose); + range = new Range(range, loose); + + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case '>': + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = '>'; + ecomp = '>='; + break; + case '<': + gtfn = lt; + ltefn = gte; + ltfn = gt; + comp = '<'; + ecomp = '<='; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, loose)) { + return false; + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i]; + + var high = null; + var low = null; + + comparators.forEach(function(comparator) { + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, loose)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, loose)) { + low = comparator; + } + }); + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false; + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false; + } + } + return true; +} + +// Use the define() function if we're in AMD land +if (typeof define === 'function' && define.amd) + define(exports); + +})( + typeof exports === 'object' ? exports : + typeof define === 'function' && define.amd ? 
{} : + semver = {} +); diff --git a/node_modules/semver/semver.browser.js.gz b/node_modules/semver/semver.browser.js.gz new file mode 100644 index 0000000000000000000000000000000000000000..6a8cf09559b126dbaa32606c11ae43c302a9d1b5 GIT binary patch literal 7938 zcmV+dAN}ATiwFR$u@qGT1KmAqa~n63-}NhCtt%uglBVSNc4LWBDT$K!%934@vgKV{ ziihNgnwXqnW`=qsl7IX41CPc$NXl_`tL|=Nia?{$=ocD51Ke(2%;RAmC2=#nnI&nS zwbUPLYfqml{;KlJPz@(RmZ|7MU56@2Llwm;P)Erq>`bDMp~`NvJe+pd0HB%Q&cftE zO_R}l5~_Z`uNukV*Kn9O)K_2W;;!zAs!gwJO4;HPoH_}o{|wV@BJ30>8ij-TSQ*k) zltJU+WIhT z|HHvAFONG^D;j+$FZJ^_G(n^_7x z1MUD@f>;egHP1jq7fGt}kbRIC%0$yuI4X~x3+ z+ux8QVHyq9%Moe>$Unu3eT8j7H9zz|v=wv~s5pr`zlUj}M$s6=%d`S1gIh(#05nZt z z^6K^LlY@nr2!hbP>jAP2R1`B{7)M+==zo`o(e}m}s7aXTz)S$Tyq#T!F|!(!IW|l{ zM1+7r0fWJ08(HSOK3dG5ih3Fo^5$L8`F+3h@6MV21BTf-dm_ep7=ZuzCrh0bEDd2; zN#4mq(2XF6F&REYmJf!u!~K^>&e|+VqL?UYHq?fyk>!SJwBEn(0t7g%(*OOHO`Th? z)Rt7HShr+28fGzJ%c%R(NE+Kl*l*IX1KvCgve3x9WYJ||3(GE7YDIW+d~kg5>R|um zpjt_*>Y0~&FBXZZcIY(xJKMC<2BeUy2q;Or7HBD{hTRM4eGt+0BEy=Wv|o{aCy7Jn zmplWW2_#nzN!gwq1FG3#_|UP6S1s5oK43f6=S2-+^iNt9Qd=M^PePr&g`_Q$(+Wfi z%hHsxq`sX;laV?M^8n`8plDa|Ed(snK@X^kTfp4CN3^AjqUCyTU|p(ncKq&|SRCF)W6tNepPF z!LJGUN{El~V;1DYOC4ef)LBR|b_NQ7Xt1##a?z4t7|ilHeH*9Ae3o?;1%5su7-+{3 zFOx6|E!q$Q4g-h*F~rU;lR30Q*bq9i(V^oDVMwMR>nNj$9;Nxi&#zt?VVdu6Ucr2b z^1RwJvhn8nvRxJZ2S{$OQ3I#dg+M^EScILD0-a-jjs$>ttW}&=O&1`381x+2fdbCd z)n<3A`;7SCr$5{3`Z5|`f@UYM;s6J!vIJNeO+!#$Kod-|q^&k_y%0=hm%*lLg5ZqY zuA>~d$kJvbZE1WU1%S)*Tg}8NuApK9jAv6^4n!c?+b;2te1~P?yQ}{D_Zfs^WmeYv zsY>5h$JHvfS~{eqIv+}m7IYd;W3wMmU!BTU_?^ug>~-gEx3$;bGam5-xqFEkNdj8m z>ONUV_p*-CxLNP|Ry_d~ zlxDc745x?*?P-`pRT};_hehm&Mez1`|LDg>K|mz{j&ezyb$4_3_U`?Avm7a+S=2=> z_99||*=SJ#DHWed?Xpg{+$+Tzb}oO{{8m2CFJAE#Dx?VKFS8c`P9+bYN% z3(Q1w3bSvhQuIqaWgKdc1kPqXg=qrfi2?W#!!Nr0N~!hO|jEGIyePh6&$ZF zG-1-G-8~!rQpM@<%R|}17IF(Dzh&qb*mz_Z2d1IlCJwK0=jh;O*7WhURb$*}F*|b{ z>}eWJkHgs{7~)n{tD`Lo?K_C4@;ae`q;PuADNGmy z!;jZNI?6Z{%A>)A=Y1!=@Ai)mPK(l`LKozoTj^0nQF^wpMtT~XCq1uCL3*xEN^gjx zE=lj)qpU<#na!4%MbaanlJs2Z>S)W-!wp8=ck|e|J`*iBoXHb6I z0GL5Y>xav5_z{+7!5CJppxz<4ADTl#9nPZSGa5%RVWgE1PBMW&HI`-3SCo&{L`|n*N32dV%Hgh%#zg>>lTk=)hKA$i0X&+fF7P0w|0`_SrE-h zFzsaEyKohG$pnSYmBpVUm!(^QgFI6*4TZMLn5g;U6cy-+8RkC0UeXnE-j_}GbvWf!T z-4%@fs%v(mUg++jYuon_f~6QR(WmH}zSs7h75najC!01~-ITp-^Xd7!{?*yWQ}D-) zRs~@h5SCRDX3u{4>{1+TThgleUQ`!_&NhgLc-YP@j3_*vA{D`7cE+M@-LQ|Lwy89n zWTCD$UG(jNxq!SUNF~nt4frEG?B^}Ph9M@uOu;vxI-LUWL7FCMv+*)!mC$>iFBJMH zqoJ%mtJ+2-e#@c^h7BHzS2Wa%I8O)n2a^}fM?>7pZnR9FkD8nlP~vZERSVHx1ko4? 
zaNV}nuc=_I)~qR7RCAF@Gh(7}+WKtf8oJWp+7Us;ytkWih6lS}@bEF^6=bFv?HEJD zjp@71GdoDyK<3gdS4#WIT>9+H5--r~s;YDa0kETbD~yz3^-!D~1!apmB7hY;BG~GV zc(4mPVt~~;Vpy*ukPeF?y11oaTI}??BhJif$h)u4Oh?$c1LhnJVee%CY~0OelPGUC zx((FLnadK2M#i?SEucH3WQ500dA0$V7HIJX4CRGFpO3)zLi_kw0DF3V27ve>I)p~= zoEl-M801Z!YT4v~+$2xX{peaiWeH^RsHt;gLaD&+>6Au@-zy^KZ+o(}%+ZcWaMpCO z0lu%}aJUw~CQ(eI64Qfh3B%g17#Fyo8H7CLfoT+t9X*Atn@({Q>${B_e0jnrOtlDC zpo!!6P@+gM*e?W5Tv+QsxwbDoC>6DZ!(>&Flf59QSXuP%^pVsMi_+__DqHa-j}CY;Bk@FI#~odo0!{Gvjr*mFT5Ay^E(S%3}}LU*dc z2L^1IfFj59uoS1(8(7eW<{YhFU*d|gxv>#xbq93=-mv4pj&L7iKAm~T+VdH%KQYSRw=mT}0)UT8DBX5obQ&^b>x7{k= zKa_@Ndv@syY=#*uY;hT;0nEHSM9XQI?i(*Ow`w%AZ)97D+Wb-z;T1};16n&rP*)vr?#%&>fSlU{Q_I zWLAjZGTwZfM&rxe;ty}4EN^47>X3KbC@hbY+@ZqeS3#UB+PT9+eF!!3?!)-V+@S$i z^5XV(9v!(MYUt7EA}|LwYScFMi?C z3bK4NE2s4T;cBi977kmKFZvx$~trY&oPRxL0AJexT4&N#s+!Pc@|{jt%NK1 zPW53O=X=Ha&Chcigr&&mr5vTybjr0#26W2KS#5D6xA|u9Ngf*0V#S`0Ni0Yl75j%-a@D2Vk8} zq;|10fkMe`#Bx~6*N1r9qv5wx72r-l8aEKv*^*`VYvsR?f{1$RY_@2Ijz~mKmCeb< zT5adu**v>!_Us&SFBo48+6pQ>2=l=A5VmxyT?2lRB!kKCc&_FOD>3xEQ5HGYV+L0f zsEN;xf}>Ky&9hc(kt|ScWG=o_A+vqF_8D#OD<)uAPz3Wy?qWx?sirs7BE6v|gqe=# z_y_Ncn#h?qol6@|-WCL}9?S+l$LR2{I8?yE!Ma2n8`4U*9K1WtZ|WWXe#QQ|YbE!P zl``APc1>ls%^z@cZdef6!siD7v@+y6{7A2EUr#H`~UaZ#hnczg1P&btqjC zofT~s4{SDDZcTi(Rjl1=xux*cW`$dXY4~cZVySKo#GWF4gvcDTk~gLCAYQB>iE?;| zC|wbSEnX2%yk!f#+I$BvXA~Y>ZPDLFPiS@o(dBOdz+#VgzgB8xf@-}_yS8kAUu1YI z4Nep34ay(LeNh3+y8(FoS1Q!U_Gat6uiPHCJSME!TCgXD0U*FlE=I$<-$Bcq^9(9# z2ksn)8spXre426N2G2&oqs7gfje)gnv)gW(J>qtIFmb6Dj%m$-sSk9$R~@JwKwNAT zs9h@c3VYs>Wm?JEw2HNsf9XO7*6-g8+A8qR*3H2aeTp9FJK_|_q-Dov>$|$W#H`mP z^eHjp_lIES_Fg^!hvND~LF5DAS>NiIb^EfCJIyZZEb)2eE=;3AtE!(@dbW{Y%RN$Y zt#~y(c{&eUm3k$VsX~Klhhr%Rm2pR~0)K|L*!v1U1YZ2woX{Ri87G!gJ9q5mhGn4& zO!6osqb-b66OtAvNxvW|>58G7 zXVQSDGi;U2ikSHUfP7nQHYYQGpBb3!FEn{I7kFnW!ed@^ zU!xw>?qNLoFHrPoAw`WBhFR_}{F7T3^l!UrL>4$+4!vuE3r1Z9+ZVv$GUy!(JXwl) zr!RmPU^Yv*#TgOFETqIyUbcY_niZSYeNs}P<=?qf)4pVu)A6EwtOiL!nZKY|lp7;*9yMiF)pJv(Gti!5O8OoAfA-{Q zA@?n126&;j^P_@xoz=;+6bq)+y@+5UOdYnN!9j zRX7tN5_K!7j@vpQ0DUW515$LGieDXoC4;^j#P}}@cMZz7SzZ*1&&mXt#Q95bQ~Vf# zzOdvGrEhn^QV>6%YqE417u8}e-==C;Uy;&1>MlRo3uVyNCw-r6K_Bw!Y4O6TO=tUX zF0r03$11Nx7B50ttTC$nB>Eknql5CSgwXo0ji)WA&!+jl%o1tS`Jwjke0_8hVmgyT zRhi<|nRv9O(@ZcQB~S3slD#anPXurAEfrQ3EIST;by+zQD|=$H#*+CKte0%0HUt(n zjL0O1ykJbseH?7XdJNNK6t)++=VBbl87w>)bU!7pFNRcnf_mt?7*6}d)|q~9CXFb)>*(SrQb|S@kkp~K|NZ&o)HgZ*cG;rUEDl1i zH+8>s!#pb~Mj4Ra4_F28EeUKDNW4f`4d<!~eo3BG~Oy7M0q0M#)svwA-U*J>87H?$UR^$rh$#Q4^RL+Vn3s;jc8`nI@dY zvcq{px(PlE^hwW(4(eGEs%M~jRk+;tbFHL{dyBq#=xJt|K}z%seX}p}+GgIWUraOd z>4g`z;H;omZq9AHcsQOICMplK0k zw@)|F=(Y(d0)B07V8Ld~lvZJySxj2Cpx8#99WBsKeP8(3l<{+)uDH1|6}=bxo{5YQ zxj{NhyLfSKKX3MUH@wklxP*`yZClV?m|42L3Rwh7;Dy`Bjg{#2QcA3iI38KcSe=vy zQXv#Z?nlbG<9wmtIS0c(Cogcm$j6+MRi9V*6bC|1+makRV|CQSuu%x zNsmG3AMP>A&Fmal;WgxwA1`dd{~NF2Wo#Vsbl4N7`URYY^7FY?!B|85{a@mE{E-*( zPo`y-e6FjibQDv;yBUqyrLh#!;J#q5HFCOtfTR|5*jPn~Rl@u7QuvPx;6KUxhp~O> z6jbz+RRO#8l+yQoj_+pV4|m=b?Fhd-EpuC075@BfX8*pXV|(V@OgOSdJS{RUqb zVuvmY@gNCmaN$92TvQ?LP_L_Iq6&h6w^%t7Qw2STq|bzKuN`5GWpJH26k_FQv4?Otdk(VoEXeRh;FD+?O@ed+5%5mGgzX>5_qGc2 zVYkG@oko#`l?<$N?v5i?4ZSFfx~Xx+rRYS;?=yn58o_;mSj^4>3s)lF>Sw&3Aour= zzVC!1wlF+b(r-91h5S65+jXK}*K&ke_Mk^-5RVH$!O;Z7U-tn1(wJ~BN8z#mbr#;_`RRJ{%nJMQ!n z6+Pc$_(FGeE;G!FTNC)lhosVvR3wsE&Hp{kr?yI1^f3 z^{Gfh(3uPrco5IJ&*1~#V@J>%zAZIMJf!6Yj^W;cOY?EBUH9Y{{$dVHh%lB5{G8!` zI71F zqM!wpPem8yUC@cODUojMBA8@D1>Ex=rTJ#B?Bu z%61vRJRt75TO=@=@Fu_`N4-_`tOHB?O*IO$`50yeY^=AYxMIh}WaOy>`x612?)1UJ zZhfGK7%<*2;6DL9lpjM-Uw7!O{4Rv8bQbf$V8S;@2A2M|X$?(r9!%!rOVIL0MsuZ} z8u|PbSuwaR%$2(P2MEqzEo%u@Wgb;!A5}K1xY)GoU%;d_h!z-~ExN{j%&LLRHH^M{ 
zzl=-1`pTOjEI8k-kl<8O1FkPj2ylL9Z+XkWl!<$RrDIQVV^1IF*JeU3 zx{fC(_Oq~jH&D1$b8|u@-g26X_uac?Bgze`tZFd+(}G#)_}g=9?htwjpY-m*(t8jK z?myslS6B^~USg#4dy`}1z7*f7tjm;h!UN4a)~QWT8kFh=?gjM>qGj`bF(?o^4-9MZ zAsFYQDEOqfj#CXN*|UKebDUoQaW=%dD`d;*{3jTxUUaW`e-mi_13r#Y+xebD(`1e6}?cEE+qOTco0Udy=Sm+ zHJRZH8^-lTxm~yr2fT|BWQM@Wa!pv7NoN&YMueJb3^KF4mhRXQj+v< z8x8|&e%3!2z|T=MTVf|oxR3qA@6`k|OT{9dTksm*G@^qKuIL6+dN5EUHa`~)DtX^( sVgSZi3jIODKkliGy$SQj{TlT7f5O`GH!gbs0GL~K=Kufz literal 0 HcmV?d00001 diff --git a/node_modules/semver/semver.js b/node_modules/semver/semver.js new file mode 100644 index 00000000..d265b568 --- /dev/null +++ b/node_modules/semver/semver.js @@ -0,0 +1,1191 @@ +// export the class if we are in a Node-like system. +if (typeof module === 'object' && module.exports === exports) + exports = module.exports = SemVer; + +// The debug function is excluded entirely from the minified version. +/* nomin */ var debug; +/* nomin */ if (typeof process === 'object' && + /* nomin */ process.env && + /* nomin */ process.env.NODE_DEBUG && + /* nomin */ /\bsemver\b/i.test(process.env.NODE_DEBUG)) + /* nomin */ debug = function() { + /* nomin */ var args = Array.prototype.slice.call(arguments, 0); + /* nomin */ args.unshift('SEMVER'); + /* nomin */ console.log.apply(console, args); + /* nomin */ }; +/* nomin */ else + /* nomin */ debug = function() {}; + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0'; + +var MAX_LENGTH = 256; +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991; + +// The actual regexps go on exports.re +var re = exports.re = []; +var src = exports.src = []; +var R = 0; + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++; +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*'; +var NUMERICIDENTIFIERLOOSE = R++; +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'; + + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +var NONNUMERICIDENTIFIER = R++; +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'; + + +// ## Main Version +// Three dot-separated numeric identifiers. + +var MAINVERSION = R++; +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')'; + +var MAINVERSIONLOOSE = R++; +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')'; + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +var PRERELEASEIDENTIFIER = R++; +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + +var PRERELEASEIDENTIFIERLOOSE = R++; +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')'; + + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++; +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))'; + +var PRERELEASELOOSE = R++; +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' 
+ src[PRERELEASEIDENTIFIERLOOSE] + ')*))'; + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +var BUILDIDENTIFIER = R++; +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+'; + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +var BUILD = R++; +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))'; + + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +var FULL = R++; +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?'; + +src[FULL] = '^' + FULLPLAIN + '$'; + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?'; + +var LOOSE = R++; +src[LOOSE] = '^' + LOOSEPLAIN + '$'; + +var GTLT = R++; +src[GTLT] = '((?:<|>)?=?)'; + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++; +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*'; +var XRANGEIDENTIFIER = R++; +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*'; + +var XRANGEPLAIN = R++; +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGEPLAINLOOSE = R++; +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?'; + +var XRANGE = R++; +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$'; +var XRANGELOOSE = R++; +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$'; + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++; +src[LONETILDE] = '(?:~>?)'; + +var TILDETRIM = R++; +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+'; +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g'); +var tildeTrimReplace = '$1~'; + +var TILDE = R++; +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$'; +var TILDELOOSE = R++; +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$'; + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +var LONECARET = R++; +src[LONECARET] = '(?:\\^)'; + +var CARETTRIM = R++; +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+'; +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g'); +var caretTrimReplace = '$1^'; + +var CARET = R++; +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$'; +var CARETLOOSE = R++; +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$'; + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++; +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$'; +var COMPARATOR = R++; +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$'; + + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++; +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')'; + +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g'); +var comparatorTrimReplace = '$1$2$3'; + + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++; +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$'; + +var HYPHENRANGELOOSE = R++; +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$'; + +// Star ranges basically just allow anything at all. +var STAR = R++; +src[STAR] = '(<|>)?=?\\s*\\*'; + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]); + if (!re[i]) + re[i] = new RegExp(src[i]); +} + +exports.parse = parse; +function parse(version, loose) { + if (version.length > MAX_LENGTH) + return null; + + var r = loose ? re[LOOSE] : re[FULL]; + if (!r.test(version)) + return null; + + try { + return new SemVer(version, loose); + } catch (er) { + return null; + } +} + +exports.valid = valid; +function valid(version, loose) { + var v = parse(version, loose); + return v ? v.version : null; +} + + +exports.clean = clean; +function clean(version, loose) { + var s = parse(version.trim().replace(/^[=v]+/, ''), loose); + return s ? s.version : null; +} + +exports.SemVer = SemVer; + +function SemVer(version, loose) { + if (version instanceof SemVer) { + if (version.loose === loose) + return version; + else + version = version.version; + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version); + } + + if (version.length > MAX_LENGTH) + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + + if (!(this instanceof SemVer)) + return new SemVer(version, loose); + + debug('SemVer', version, loose); + this.loose = loose; + var m = version.trim().match(loose ? 
re[LOOSE] : re[FULL]); + + if (!m) + throw new TypeError('Invalid Version: ' + version); + + this.raw = version; + + // these are actually numbers + this.major = +m[1]; + this.minor = +m[2]; + this.patch = +m[3]; + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) + throw new TypeError('Invalid major version') + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) + throw new TypeError('Invalid minor version') + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) + throw new TypeError('Invalid patch version') + + // numberify any prerelease numeric ids + if (!m[4]) + this.prerelease = []; + else + this.prerelease = m[4].split('.').map(function(id) { + return (/^[0-9]+$/.test(id)) ? +id : id; + }); + + this.build = m[5] ? m[5].split('.') : []; + this.format(); +} + +SemVer.prototype.format = function() { + this.version = this.major + '.' + this.minor + '.' + this.patch; + if (this.prerelease.length) + this.version += '-' + this.prerelease.join('.'); + return this.version; +}; + +SemVer.prototype.inspect = function() { + return ''; +}; + +SemVer.prototype.toString = function() { + return this.version; +}; + +SemVer.prototype.compare = function(other) { + debug('SemVer.compare', this.version, this.loose, other); + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return this.compareMain(other) || this.comparePre(other); +}; + +SemVer.prototype.compareMain = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch); +}; + +SemVer.prototype.comparePre = function(other) { + if (!(other instanceof SemVer)) + other = new SemVer(other, this.loose); + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) + return -1; + else if (!this.prerelease.length && other.prerelease.length) + return 1; + else if (!this.prerelease.length && !other.prerelease.length) + return 0; + + var i = 0; + do { + var a = this.prerelease[i]; + var b = other.prerelease[i]; + debug('prerelease compare', i, a, b); + if (a === undefined && b === undefined) + return 0; + else if (b === undefined) + return 1; + else if (a === undefined) + return -1; + else if (a === b) + continue; + else + return compareIdentifiers(a, b); + } while (++i); +}; + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function(release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0; + this.patch = 0; + this.minor = 0; + this.major++; + this.inc('pre', identifier); + break; + case 'preminor': + this.prerelease.length = 0; + this.patch = 0; + this.minor++; + this.inc('pre', identifier); + break; + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0; + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) + this.inc('patch', identifier); + this.inc('pre', identifier); + break; + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) + this.major++; + this.minor = 0; + this.patch = 0; + this.prerelease = []; + break; + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) + this.minor++; + this.patch = 0; + this.prerelease = []; + break; + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) + this.patch++; + this.prerelease = []; + break; + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) + this.prerelease = [0]; + else { + var i = this.prerelease.length; + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++; + i = -2; + } + } + if (i === -1) // didn't increment anything + this.prerelease.push(0); + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) + this.prerelease = [identifier, 0]; + } else + this.prerelease = [identifier, 0]; + } + break; + + default: + throw new Error('invalid increment argument: ' + release); + } + this.format(); + return this; +}; + +exports.inc = inc; +function inc(version, release, loose, identifier) { + if (typeof(loose) === 'string') { + identifier = loose; + loose = undefined; + } + + try { + return new SemVer(version, loose).inc(release, identifier).version; + } catch (er) { + return null; + } +} + +exports.diff = diff; +function diff(version1, version2) { + if (eq(version1, version2)) { + return null; + } else { + var v1 = parse(version1); + var v2 = parse(version2); + if (v1.prerelease.length || v2.prerelease.length) { + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return 'pre'+key; + } + } + } + return 'prerelease'; + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return key; + } + } + } + } +} + +exports.compareIdentifiers = compareIdentifiers; + +var numeric = /^[0-9]+$/; +function compareIdentifiers(a, b) { + var anum = numeric.test(a); + var bnum = numeric.test(b); + + if (anum && bnum) { + a = +a; + b = +b; + } + + return (anum && !bnum) ? -1 : + (bnum && !anum) ? 1 : + a < b ? -1 : + a > b ? 
1 : + 0; +} + +exports.rcompareIdentifiers = rcompareIdentifiers; +function rcompareIdentifiers(a, b) { + return compareIdentifiers(b, a); +} + +exports.major = major; +function major(a, loose) { + return new SemVer(a, loose).major; +} + +exports.minor = minor; +function minor(a, loose) { + return new SemVer(a, loose).minor; +} + +exports.patch = patch; +function patch(a, loose) { + return new SemVer(a, loose).patch; +} + +exports.compare = compare; +function compare(a, b, loose) { + return new SemVer(a, loose).compare(b); +} + +exports.compareLoose = compareLoose; +function compareLoose(a, b) { + return compare(a, b, true); +} + +exports.rcompare = rcompare; +function rcompare(a, b, loose) { + return compare(b, a, loose); +} + +exports.sort = sort; +function sort(list, loose) { + return list.sort(function(a, b) { + return exports.compare(a, b, loose); + }); +} + +exports.rsort = rsort; +function rsort(list, loose) { + return list.sort(function(a, b) { + return exports.rcompare(a, b, loose); + }); +} + +exports.gt = gt; +function gt(a, b, loose) { + return compare(a, b, loose) > 0; +} + +exports.lt = lt; +function lt(a, b, loose) { + return compare(a, b, loose) < 0; +} + +exports.eq = eq; +function eq(a, b, loose) { + return compare(a, b, loose) === 0; +} + +exports.neq = neq; +function neq(a, b, loose) { + return compare(a, b, loose) !== 0; +} + +exports.gte = gte; +function gte(a, b, loose) { + return compare(a, b, loose) >= 0; +} + +exports.lte = lte; +function lte(a, b, loose) { + return compare(a, b, loose) <= 0; +} + +exports.cmp = cmp; +function cmp(a, op, b, loose) { + var ret; + switch (op) { + case '===': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a === b; + break; + case '!==': + if (typeof a === 'object') a = a.version; + if (typeof b === 'object') b = b.version; + ret = a !== b; + break; + case '': case '=': case '==': ret = eq(a, b, loose); break; + case '!=': ret = neq(a, b, loose); break; + case '>': ret = gt(a, b, loose); break; + case '>=': ret = gte(a, b, loose); break; + case '<': ret = lt(a, b, loose); break; + case '<=': ret = lte(a, b, loose); break; + default: throw new TypeError('Invalid operator: ' + op); + } + return ret; +} + +exports.Comparator = Comparator; +function Comparator(comp, loose) { + if (comp instanceof Comparator) { + if (comp.loose === loose) + return comp; + else + comp = comp.value; + } + + if (!(this instanceof Comparator)) + return new Comparator(comp, loose); + + debug('comparator', comp, loose); + this.loose = loose; + this.parse(comp); + + if (this.semver === ANY) + this.value = ''; + else + this.value = this.operator + this.semver.version; + + debug('comp', this); +} + +var ANY = {}; +Comparator.prototype.parse = function(comp) { + var r = this.loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var m = comp.match(r); + + if (!m) + throw new TypeError('Invalid comparator: ' + comp); + + this.operator = m[1]; + if (this.operator === '=') + this.operator = ''; + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) + this.semver = ANY; + else + this.semver = new SemVer(m[2], this.loose); +}; + +Comparator.prototype.inspect = function() { + return ''; +}; + +Comparator.prototype.toString = function() { + return this.value; +}; + +Comparator.prototype.test = function(version) { + debug('Comparator.test', version, this.loose); + + if (this.semver === ANY) + return true; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + return cmp(version, this.operator, this.semver, this.loose); +}; + + +exports.Range = Range; +function Range(range, loose) { + if ((range instanceof Range) && range.loose === loose) + return range; + + if (!(this instanceof Range)) + return new Range(range, loose); + + this.loose = loose; + + // First, split based on boolean or || + this.raw = range; + this.set = range.split(/\s*\|\|\s*/).map(function(range) { + return this.parseRange(range.trim()); + }, this).filter(function(c) { + // throw out any that are not relevant for whatever reason + return c.length; + }); + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range); + } + + this.format(); +} + +Range.prototype.inspect = function() { + return ''; +}; + +Range.prototype.format = function() { + this.range = this.set.map(function(comps) { + return comps.join(' ').trim(); + }).join('||').trim(); + return this.range; +}; + +Range.prototype.toString = function() { + return this.range; +}; + +Range.prototype.parseRange = function(range) { + var loose = this.loose; + range = range.trim(); + debug('range', range, loose); + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE]; + range = range.replace(hr, hyphenReplace); + debug('hyphen replace', range); + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace); + debug('comparator trim', range, re[COMPARATORTRIM]); + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace); + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace); + + // normalize spaces + range = range.split(/\s+/).join(' '); + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR]; + var set = range.split(' ').map(function(comp) { + return parseComparator(comp, loose); + }).join(' ').split(/\s+/); + if (this.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function(comp) { + return !!comp.match(compRe); + }); + } + set = set.map(function(comp) { + return new Comparator(comp, loose); + }); + + return set; +}; + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators; +function toComparators(range, loose) { + return new Range(range, loose).set.map(function(comp) { + return comp.map(function(c) { + return c.value; + }).join(' ').trim().split(' '); + }); +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
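+// Illustrative example (not from the upstream source): a range such as
+// '1.x || >=2.5.0 <3.0.0' is split on '||' into comparator sets, so:
+//   satisfies('2.7.1', '1.x || >=2.5.0 <3.0.0')  // true  (second set matches)
+//   satisfies('2.1.0', '1.x || >=2.5.0 <3.0.0')  // false (neither set matches)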
+function parseComparator(comp, loose) { + debug('comp', comp); + comp = replaceCarets(comp, loose); + debug('caret', comp); + comp = replaceTildes(comp, loose); + debug('tildes', comp); + comp = replaceXRanges(comp, loose); + debug('xrange', comp); + comp = replaceStars(comp, loose); + debug('stars', comp); + return comp; +} + +function isX(id) { + return !id || id.toLowerCase() === 'x' || id === '*'; +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceTilde(comp, loose); + }).join(' '); +} + +function replaceTilde(comp, loose) { + var r = loose ? re[TILDELOOSE] : re[TILDE]; + return comp.replace(r, function(_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr); + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) + // ~1.2 == >=1.2.0- <1.3.0- + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else if (pr) { + debug('replaceTilde pr', pr); + if (pr.charAt(0) !== '-') + pr = '-' + pr; + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + + debug('tilde return', ret); + return ret; + }); +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets(comp, loose) { + return comp.trim().split(/\s+/).map(function(comp) { + return replaceCaret(comp, loose); + }).join(' '); +} + +function replaceCaret(comp, loose) { + debug('caret', comp, loose); + var r = loose ? re[CARETLOOSE] : re[CARET]; + return comp.replace(r, function(_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr); + var ret; + + if (isX(M)) + ret = ''; + else if (isX(m)) + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + else if (isX(p)) { + if (M === '0') + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + else + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0'; + } else if (pr) { + debug('replaceCaret pr', pr); + if (pr.charAt(0) !== '-') + pr = '-' + pr; + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + pr + + ' <' + (+M + 1) + '.0.0'; + } else { + debug('no pr'); + if (M === '0') { + if (m === '0') + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1); + else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0'; + } else + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0'; + } + + debug('caret return', ret); + return ret; + }); +} + +function replaceXRanges(comp, loose) { + debug('replaceXRanges', comp, loose); + return comp.split(/\s+/).map(function(comp) { + return replaceXRange(comp, loose); + }).join(' '); +} + +function replaceXRange(comp, loose) { + comp = comp.trim(); + var r = loose ? 
re[XRANGELOOSE] : re[XRANGE]; + return comp.replace(r, function(ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr); + var xM = isX(M); + var xm = xM || isX(m); + var xp = xm || isX(p); + var anyX = xp; + + if (gtlt === '=' && anyX) + gtlt = ''; + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0'; + } else { + // nothing is forbidden + ret = '*'; + } + } else if (gtlt && anyX) { + // replace X with 0 + if (xm) + m = 0; + if (xp) + p = 0; + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>='; + if (xm) { + M = +M + 1; + m = 0; + p = 0; + } else if (xp) { + m = +m + 1; + p = 0; + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) + M = +M + 1 + else + m = +m + 1 + } + + ret = gtlt + M + '.' + m + '.' + p; + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0'; + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0'; + } + + debug('xRange return', ret); + + return ret; + }); +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars(comp, loose) { + debug('replaceStars', comp, loose); + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], ''); +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + + if (isX(fM)) + from = ''; + else if (isX(fm)) + from = '>=' + fM + '.0.0'; + else if (isX(fp)) + from = '>=' + fM + '.' + fm + '.0'; + else + from = '>=' + from; + + if (isX(tM)) + to = ''; + else if (isX(tm)) + to = '<' + (+tM + 1) + '.0.0'; + else if (isX(tp)) + to = '<' + tM + '.' + (+tm + 1) + '.0'; + else if (tpr) + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr; + else + to = '<=' + to; + + return (from + ' ' + to).trim(); +} + + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function(version) { + if (!version) + return false; + + if (typeof version === 'string') + version = new SemVer(version, this.loose); + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version)) + return true; + } + return false; +}; + +function testSet(set, version) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) + return false; + } + + if (version.prerelease.length) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (var i = 0; i < set.length; i++) { + debug(set[i].semver); + if (set[i].semver === ANY) + return true; + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver; + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) + return true; + } + } + + // Version has a -pre, but it's not one of the ones we like. 
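+    // (a prerelease can only satisfy a set whose comparators name a prerelease
+    // on that exact same major.minor.patch, as checked in the loop above)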
+ return false; + } + + return true; +} + +exports.satisfies = satisfies; +function satisfies(version, range, loose) { + try { + range = new Range(range, loose); + } catch (er) { + return false; + } + return range.test(version); +} + +exports.maxSatisfying = maxSatisfying; +function maxSatisfying(versions, range, loose) { + return versions.filter(function(version) { + return satisfies(version, range, loose); + }).sort(function(a, b) { + return rcompare(a, b, loose); + })[0] || null; +} + +exports.validRange = validRange; +function validRange(range, loose) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, loose).range || '*'; + } catch (er) { + return null; + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr; +function ltr(version, range, loose) { + return outside(version, range, '<', loose); +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr; +function gtr(version, range, loose) { + return outside(version, range, '>', loose); +} + +exports.outside = outside; +function outside(version, range, hilo, loose) { + version = new SemVer(version, loose); + range = new Range(range, loose); + + var gtfn, ltefn, ltfn, comp, ecomp; + switch (hilo) { + case '>': + gtfn = gt; + ltefn = lte; + ltfn = lt; + comp = '>'; + ecomp = '>='; + break; + case '<': + gtfn = lt; + ltefn = gte; + ltfn = gt; + comp = '<'; + ecomp = '<='; + break; + default: + throw new TypeError('Must provide a hilo val of "<" or ">"'); + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, loose)) { + return false; + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
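+  // e.g. gtr('2.0.1', '1.0.0 - 2.0.0') is true: 2.0.1 sits above every version
+  // the range allows; ltr asks the mirrored question at the low end.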
+ + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i]; + + var high = null; + var low = null; + + comparators.forEach(function(comparator) { + high = high || comparator; + low = low || comparator; + if (gtfn(comparator.semver, high.semver, loose)) { + high = comparator; + } else if (ltfn(comparator.semver, low.semver, loose)) { + low = comparator; + } + }); + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false; + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false; + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false; + } + } + return true; +} + +// Use the define() function if we're in AMD land +if (typeof define === 'function' && define.amd) + define(exports); diff --git a/node_modules/semver/semver.min.js b/node_modules/semver/semver.min.js new file mode 100644 index 00000000..abe2d818 --- /dev/null +++ b/node_modules/semver/semver.min.js @@ -0,0 +1 @@ +(function(e){if(typeof module==="object"&&module.exports===e)e=module.exports=K;e.SEMVER_SPEC_VERSION="2.0.0";var r=256;var t=Number.MAX_SAFE_INTEGER||9007199254740991;var n=e.re=[];var i=e.src=[];var s=0;var o=s++;i[o]="0|[1-9]\\d*";var a=s++;i[a]="[0-9]+";var f=s++;i[f]="\\d*[a-zA-Z-][a-zA-Z0-9-]*";var u=s++;i[u]="("+i[o]+")\\."+"("+i[o]+")\\."+"("+i[o]+")";var l=s++;i[l]="("+i[a]+")\\."+"("+i[a]+")\\."+"("+i[a]+")";var h=s++;i[h]="(?:"+i[o]+"|"+i[f]+")";var p=s++;i[p]="(?:"+i[a]+"|"+i[f]+")";var c=s++;i[c]="(?:-("+i[h]+"(?:\\."+i[h]+")*))";var v=s++;i[v]="(?:-?("+i[p]+"(?:\\."+i[p]+")*))";var m=s++;i[m]="[0-9A-Za-z-]+";var g=s++;i[g]="(?:\\+("+i[m]+"(?:\\."+i[m]+")*))";var w=s++;var y="v?"+i[u]+i[c]+"?"+i[g]+"?";i[w]="^"+y+"$";var d="[v=\\s]*"+i[l]+i[v]+"?"+i[g]+"?";var j=s++;i[j]="^"+d+"$";var b=s++;i[b]="((?:<|>)?=?)";var E=s++;i[E]=i[a]+"|x|X|\\*";var $=s++;i[$]=i[o]+"|x|X|\\*";var k=s++;i[k]="[v=\\s]*("+i[$]+")"+"(?:\\.("+i[$]+")"+"(?:\\.("+i[$]+")"+"(?:"+i[c]+")?"+i[g]+"?"+")?)?";var R=s++;i[R]="[v=\\s]*("+i[E]+")"+"(?:\\.("+i[E]+")"+"(?:\\.("+i[E]+")"+"(?:"+i[v]+")?"+i[g]+"?"+")?)?";var S=s++;i[S]="^"+i[b]+"\\s*"+i[k]+"$";var x=s++;i[x]="^"+i[b]+"\\s*"+i[R]+"$";var I=s++;i[I]="(?:~>?)";var T=s++;i[T]="(\\s*)"+i[I]+"\\s+";n[T]=new RegExp(i[T],"g");var V="$1~";var A=s++;i[A]="^"+i[I]+i[k]+"$";var C=s++;i[C]="^"+i[I]+i[R]+"$";var M=s++;i[M]="(?:\\^)";var N=s++;i[N]="(\\s*)"+i[M]+"\\s+";n[N]=new RegExp(i[N],"g");var _="$1^";var z=s++;i[z]="^"+i[M]+i[k]+"$";var P=s++;i[P]="^"+i[M]+i[R]+"$";var X=s++;i[X]="^"+i[b]+"\\s*("+d+")$|^$";var Z=s++;i[Z]="^"+i[b]+"\\s*("+y+")$|^$";var q=s++;i[q]="(\\s*)"+i[b]+"\\s*("+d+"|"+i[k]+")";n[q]=new RegExp(i[q],"g");var L="$1$2$3";var F=s++;i[F]="^\\s*("+i[k]+")"+"\\s+-\\s+"+"("+i[k]+")"+"\\s*$";var G=s++;i[G]="^\\s*("+i[R]+")"+"\\s+-\\s+"+"("+i[R]+")"+"\\s*$";var O=s++;i[O]="(<|>)?=?\\s*\\*";for(var B=0;Br)return null;var i=t?n[j]:n[w];if(!i.test(e))return null;try{return new K(e,t)}catch(s){return null}}e.valid=H;function H(e,r){var t=D(e,r);return t?t.version:null}e.clean=J;function J(e,r){var t=D(e.trim().replace(/^[=v]+/,""),r);return t?t.version:null}e.SemVer=K;function K(e,i){if(e instanceof K){if(e.loose===i)return e;else e=e.version}else if(typeof e!=="string"){throw new TypeError("Invalid Version: "+e)}if(e.length>r)throw new TypeError("version is 
longer than "+r+" characters");if(!(this instanceof K))return new K(e,i);this.loose=i;var s=e.trim().match(i?n[j]:n[w]);if(!s)throw new TypeError("Invalid Version: "+e);this.raw=e;this.major=+s[1];this.minor=+s[2];this.patch=+s[3];if(this.major>t||this.major<0)throw new TypeError("Invalid major version");if(this.minor>t||this.minor<0)throw new TypeError("Invalid minor version");if(this.patch>t||this.patch<0)throw new TypeError("Invalid patch version");if(!s[4])this.prerelease=[];else this.prerelease=s[4].split(".").map(function(e){return/^[0-9]+$/.test(e)?+e:e});this.build=s[5]?s[5].split("."):[];this.format()}K.prototype.format=function(){this.version=this.major+"."+this.minor+"."+this.patch;if(this.prerelease.length)this.version+="-"+this.prerelease.join(".");return this.version};K.prototype.inspect=function(){return''};K.prototype.toString=function(){return this.version};K.prototype.compare=function(e){if(!(e instanceof K))e=new K(e,this.loose);return this.compareMain(e)||this.comparePre(e)};K.prototype.compareMain=function(e){if(!(e instanceof K))e=new K(e,this.loose);return Y(this.major,e.major)||Y(this.minor,e.minor)||Y(this.patch,e.patch)};K.prototype.comparePre=function(e){if(!(e instanceof K))e=new K(e,this.loose);if(this.prerelease.length&&!e.prerelease.length)return-1;else if(!this.prerelease.length&&e.prerelease.length)return 1;else if(!this.prerelease.length&&!e.prerelease.length)return 0;var r=0;do{var t=this.prerelease[r];var n=e.prerelease[r];if(t===undefined&&n===undefined)return 0;else if(n===undefined)return 1;else if(t===undefined)return-1;else if(t===n)continue;else return Y(t,n)}while(++r)};K.prototype.inc=function(e,r){switch(e){case"premajor":this.prerelease.length=0;this.patch=0;this.minor=0;this.major++;this.inc("pre",r);break;case"preminor":this.prerelease.length=0;this.patch=0;this.minor++;this.inc("pre",r);break;case"prepatch":this.prerelease.length=0;this.inc("patch",r);this.inc("pre",r);break;case"prerelease":if(this.prerelease.length===0)this.inc("patch",r);this.inc("pre",r);break;case"major":if(this.minor!==0||this.patch!==0||this.prerelease.length===0)this.major++;this.minor=0;this.patch=0;this.prerelease=[];break;case"minor":if(this.patch!==0||this.prerelease.length===0)this.minor++;this.patch=0;this.prerelease=[];break;case"patch":if(this.prerelease.length===0)this.patch++;this.prerelease=[];break;case"pre":if(this.prerelease.length===0)this.prerelease=[0];else{var t=this.prerelease.length;while(--t>=0){if(typeof this.prerelease[t]==="number"){this.prerelease[t]++;t=-2}}if(t===-1)this.prerelease.push(0)}if(r){if(this.prerelease[0]===r){if(isNaN(this.prerelease[1]))this.prerelease=[r,0]}else this.prerelease=[r,0]}break;default:throw new Error("invalid increment argument: "+e)}this.format();return this};e.inc=Q;function Q(e,r,t,n){if(typeof t==="string"){n=t;t=undefined}try{return new K(e,t).inc(r,n).version}catch(i){return null}}e.diff=U;function U(e,r){if(hr(e,r)){return null}else{var t=D(e);var n=D(r);if(t.prerelease.length||n.prerelease.length){for(var i in t){if(i==="major"||i==="minor"||i==="patch"){if(t[i]!==n[i]){return"pre"+i}}}return"prerelease"}for(var i in t){if(i==="major"||i==="minor"||i==="patch"){if(t[i]!==n[i]){return i}}}}}e.compareIdentifiers=Y;var W=/^[0-9]+$/;function Y(e,r){var t=W.test(e);var n=W.test(r);if(t&&n){e=+e;r=+r}return t&&!n?-1:n&&!t?1:er?1:0}e.rcompareIdentifiers=er;function er(e,r){return Y(r,e)}e.major=rr;function rr(e,r){return new K(e,r).major}e.minor=tr;function tr(e,r){return new K(e,r).minor}e.patch=nr;function 
nr(e,r){return new K(e,r).patch}e.compare=ir;function ir(e,r,t){return new K(e,t).compare(r)}e.compareLoose=sr;function sr(e,r){return ir(e,r,true)}e.rcompare=or;function or(e,r,t){return ir(r,e,t)}e.sort=ar;function ar(r,t){return r.sort(function(r,n){return e.compare(r,n,t)})}e.rsort=fr;function fr(r,t){return r.sort(function(r,n){return e.rcompare(r,n,t)})}e.gt=ur;function ur(e,r,t){return ir(e,r,t)>0}e.lt=lr;function lr(e,r,t){return ir(e,r,t)<0}e.eq=hr;function hr(e,r,t){return ir(e,r,t)===0}e.neq=pr;function pr(e,r,t){return ir(e,r,t)!==0}e.gte=cr;function cr(e,r,t){return ir(e,r,t)>=0}e.lte=vr;function vr(e,r,t){return ir(e,r,t)<=0}e.cmp=mr;function mr(e,r,t,n){var i;switch(r){case"===":if(typeof e==="object")e=e.version;if(typeof t==="object")t=t.version;i=e===t;break;case"!==":if(typeof e==="object")e=e.version;if(typeof t==="object")t=t.version;i=e!==t;break;case"":case"=":case"==":i=hr(e,t,n);break;case"!=":i=pr(e,t,n);break;case">":i=ur(e,t,n);break;case">=":i=cr(e,t,n);break;case"<":i=lr(e,t,n);break;case"<=":i=vr(e,t,n);break;default:throw new TypeError("Invalid operator: "+r)}return i}e.Comparator=gr;function gr(e,r){if(e instanceof gr){if(e.loose===r)return e;else e=e.value}if(!(this instanceof gr))return new gr(e,r);this.loose=r;this.parse(e);if(this.semver===wr)this.value="";else this.value=this.operator+this.semver.version}var wr={};gr.prototype.parse=function(e){var r=this.loose?n[X]:n[Z];var t=e.match(r);if(!t)throw new TypeError("Invalid comparator: "+e);this.operator=t[1];if(this.operator==="=")this.operator="";if(!t[2])this.semver=wr;else this.semver=new K(t[2],this.loose)};gr.prototype.inspect=function(){return''};gr.prototype.toString=function(){return this.value};gr.prototype.test=function(e){if(this.semver===wr)return true;if(typeof e==="string")e=new K(e,this.loose);return mr(e,this.operator,this.semver,this.loose)};e.Range=yr;function yr(e,r){if(e instanceof yr&&e.loose===r)return e;if(!(this instanceof yr))return new yr(e,r);this.loose=r;this.raw=e;this.set=e.split(/\s*\|\|\s*/).map(function(e){return this.parseRange(e.trim())},this).filter(function(e){return e.length});if(!this.set.length){throw new TypeError("Invalid SemVer Range: "+e)}this.format()}yr.prototype.inspect=function(){return''};yr.prototype.format=function(){this.range=this.set.map(function(e){return e.join(" ").trim()}).join("||").trim();return this.range};yr.prototype.toString=function(){return this.range};yr.prototype.parseRange=function(e){var r=this.loose;e=e.trim();var t=r?n[G]:n[F];e=e.replace(t,Tr);e=e.replace(n[q],L);e=e.replace(n[T],V);e=e.replace(n[N],_);e=e.split(/\s+/).join(" ");var i=r?n[X]:n[Z];var s=e.split(" ").map(function(e){return jr(e,r)}).join(" ").split(/\s+/);if(this.loose){s=s.filter(function(e){return!!e.match(i)})}s=s.map(function(e){return new gr(e,r)});return s};e.toComparators=dr;function dr(e,r){return new yr(e,r).set.map(function(e){return e.map(function(e){return e.value}).join(" ").trim().split(" ")})}function jr(e,r){e=kr(e,r);e=Er(e,r);e=Sr(e,r);e=Ir(e,r);return e}function br(e){return!e||e.toLowerCase()==="x"||e==="*"}function Er(e,r){return e.trim().split(/\s+/).map(function(e){return $r(e,r)}).join(" ")}function $r(e,r){var t=r?n[C]:n[A];return e.replace(t,function(e,r,t,n,i){var s;if(br(r))s="";else if(br(t))s=">="+r+".0.0 <"+(+r+1)+".0.0";else if(br(n))s=">="+r+"."+t+".0 <"+r+"."+(+t+1)+".0";else if(i){if(i.charAt(0)!=="-")i="-"+i;s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0"}else s=">="+r+"."+t+"."+n+" <"+r+"."+(+t+1)+".0";return s})}function 
kr(e,r){return e.trim().split(/\s+/).map(function(e){return Rr(e,r)}).join(" ")}function Rr(e,r){var t=r?n[P]:n[z];return e.replace(t,function(e,r,t,n,i){var s;if(br(r))s="";else if(br(t))s=">="+r+".0.0 <"+(+r+1)+".0.0";else if(br(n)){if(r==="0")s=">="+r+"."+t+".0 <"+r+"."+(+t+1)+".0";else s=">="+r+"."+t+".0 <"+(+r+1)+".0.0"}else if(i){if(i.charAt(0)!=="-")i="-"+i;if(r==="0"){if(t==="0")s=">="+r+"."+t+"."+n+i+" <"+r+"."+t+"."+(+n+1);else s=">="+r+"."+t+"."+n+i+" <"+r+"."+(+t+1)+".0"}else s=">="+r+"."+t+"."+n+i+" <"+(+r+1)+".0.0"}else{if(r==="0"){if(t==="0")s=">="+r+"."+t+"."+n+" <"+r+"."+t+"."+(+n+1);else s=">="+r+"."+t+"."+n+" <"+r+"."+(+t+1)+".0"}else s=">="+r+"."+t+"."+n+" <"+(+r+1)+".0.0"}return s})}function Sr(e,r){return e.split(/\s+/).map(function(e){return xr(e,r)}).join(" ")}function xr(e,r){e=e.trim();var t=r?n[x]:n[S];return e.replace(t,function(e,r,t,n,i,s){var o=br(t);var a=o||br(n);var f=a||br(i);var u=f;if(r==="="&&u)r="";if(o){if(r===">"||r==="<"){e="<0.0.0"}else{e="*"}}else if(r&&u){if(a)n=0;if(f)i=0;if(r===">"){r=">=";if(a){t=+t+1;n=0;i=0}else if(f){n=+n+1;i=0}}else if(r==="<="){r="<";if(a)t=+t+1;else n=+n+1}e=r+t+"."+n+"."+i}else if(a){e=">="+t+".0.0 <"+(+t+1)+".0.0"}else if(f){e=">="+t+"."+n+".0 <"+t+"."+(+n+1)+".0"}return e})}function Ir(e,r){return e.trim().replace(n[O],"")}function Tr(e,r,t,n,i,s,o,a,f,u,l,h,p){if(br(t))r="";else if(br(n))r=">="+t+".0.0";else if(br(i))r=">="+t+"."+n+".0";else r=">="+r;if(br(f))a="";else if(br(u))a="<"+(+f+1)+".0.0";else if(br(l))a="<"+f+"."+(+u+1)+".0";else if(h)a="<="+f+"."+u+"."+l+"-"+h;else a="<="+a;return(r+" "+a).trim()}yr.prototype.test=function(e){if(!e)return false;if(typeof e==="string")e=new K(e,this.loose);for(var r=0;r0){var n=e[t].semver;if(n.major===r.major&&n.minor===r.minor&&n.patch===r.patch)return true}}return false}return true}e.satisfies=Ar;function Ar(e,r,t){try{r=new yr(r,t)}catch(n){return false}return r.test(e)}e.maxSatisfying=Cr;function Cr(e,r,t){return e.filter(function(e){return Ar(e,r,t)}).sort(function(e,r){return or(e,r,t)})[0]||null}e.validRange=Mr;function Mr(e,r){try{return new yr(e,r).range||"*"}catch(t){return null}}e.ltr=Nr;function Nr(e,r,t){return zr(e,r,"<",t)}e.gtr=_r;function _r(e,r,t){return zr(e,r,">",t)}e.outside=zr;function zr(e,r,t,n){e=new K(e,n);r=new yr(r,n);var i,s,o,a,f;switch(t){case">":i=ur;s=vr;o=lr;a=">";f=">=";break;case"<":i=lr;s=cr;o=ur;a="<";f="<=";break;default:throw new TypeError('Must provide a hilo val of "<" or ">"')}if(Ar(e,r,n)){return false}for(var u=0;u3wucrg;nm(gF!X9o$_c< znMD2Bm@~$l=*w5$OPsAO)z#oP(?}#S3~`sULg%X%Z=4^Uygxd-Jb!z1cnJ^Z$8S!V zv(socnojG=7Zt&Fe)*4lO4#Xa@`a1W$-#%q^Me;hm&d2?j$R&}Ef!Ck&0n{lKHd4{ z=U;zrK7G0^=|jdFg0oInwgPCyqL(#e)|9^yi#@LubfPYEnv2eM{b~2&qW@UU?bDzS zgPkS}d8&Uv{R8Mn>W*LkaZvwM@6rR1*1Lo;Bib3zRviy#@*MZ#qTzUJ+KP8fyyJ}5 zFVL2@74L|6N65Rsn;|doVW7#TL^joAehFER$a;#bE@vCTI5gz^s^va*iQ$SEu83h@ za!f6bsl_oNjtMCn^a$FlXF4C!@K6C>TzC?3Vj)f}#B0fdPq)mu+DC#}*TY44j%*F( z1Atxw=x4{f^_)$`+6Nh~*u_N*X7tbi9xlaZL%Odd)mH`3&jG&Bz!wx31Z*$%+C+n>4(M!_m_4HyZxd)ly1jY)XS=3b{v@L)vhSbz#e;<(<(? 
+ ['~1.2.3', null], + ['<=1.2.3', null], + ['1.2.x', null] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var msg = 'clean(' + range + ') = ' + version; + t.equal(clean(range), version, msg); + }); + t.end(); +}); diff --git a/node_modules/semver/test/gtr.js b/node_modules/semver/test/gtr.js new file mode 100644 index 00000000..bbb87896 --- /dev/null +++ b/node_modules/semver/test/gtr.js @@ -0,0 +1,173 @@ +var tap = require('tap'); +var test = tap.test; +var semver = require('../semver.js'); +var gtr = semver.gtr; + +test('\ngtr tests', function(t) { + // [range, version, loose] + // Version should be greater than range + [ + ['~1.2.2', '1.3.0'], + ['~0.6.1-1', '0.7.1-1'], + ['1.0.0 - 2.0.0', '2.0.1'], + ['1.0.0', '1.0.1-beta1'], + ['1.0.0', '2.0.0'], + ['<=2.0.0', '2.1.1'], + ['<=2.0.0', '3.2.9'], + ['<2.0.0', '2.0.0'], + ['0.1.20 || 1.2.4', '1.2.5'], + 
['2.x.x', '3.0.0'], + ['1.2.x', '1.3.0'], + ['1.2.x || 2.x', '3.0.0'], + ['2.*.*', '5.0.1'], + ['1.2.*', '1.3.3'], + ['1.2.* || 2.*', '4.0.0'], + ['2', '3.0.0'], + ['2.3', '2.4.2'], + ['~2.4', '2.5.0'], // >=2.4.0 <2.5.0 + ['~2.4', '2.5.5'], + ['~>3.2.1', '3.3.0'], // >=3.2.1 <3.3.0 + ['~1', '2.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '2.2.4'], + ['~> 1', '3.2.3'], + ['~1.0', '1.1.2'], // >=1.0.0 <1.1.0 + ['~ 1.0', '1.1.0'], + ['<1.2', '1.2.0'], + ['< 1.2', '1.2.1'], + ['1', '2.0.0beta', true], + ['~v0.5.4-pre', '0.6.0'], + ['~v0.5.4-pre', '0.6.1-pre'], + ['=0.7.x', '0.8.0'], + ['=0.7.x', '0.8.0-asdf'], + ['<0.7.x', '0.7.0'], + ['~1.2.2', '1.3.0'], + ['1.0.0 - 2.0.0', '2.2.3'], + ['1.0.0', '1.0.1'], + ['<=2.0.0', '3.0.0'], + ['<=2.0.0', '2.9999.9999'], + ['<=2.0.0', '2.2.9'], + ['<2.0.0', '2.9999.9999'], + ['<2.0.0', '2.2.9'], + ['2.x.x', '3.1.3'], + ['1.2.x', '1.3.3'], + ['1.2.x || 2.x', '3.1.3'], + ['2.*.*', '3.1.3'], + ['1.2.*', '1.3.3'], + ['1.2.* || 2.*', '3.1.3'], + ['2', '3.1.2'], + ['2.3', '2.4.1'], + ['~2.4', '2.5.0'], // >=2.4.0 <2.5.0 + ['~>3.2.1', '3.3.2'], // >=3.2.1 <3.3.0 + ['~1', '2.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '2.2.3'], + ['~1.0', '1.1.0'], // >=1.0.0 <1.1.0 + ['<1', '1.0.0'], + ['1', '2.0.0beta', true], + ['<1', '1.0.0beta', true], + ['< 1', '1.0.0beta', true], + ['=0.7.x', '0.8.2'], + ['<0.7.x', '0.7.2'] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = 'gtr(' + version + ', ' + range + ', ' + loose + ')'; + t.ok(gtr(version, range, loose), msg); + }); + t.end(); +}); + +test('\nnegative gtr tests', function(t) { + // [range, version, loose] + // Version should NOT be greater than range + [ + ['~0.6.1-1', '0.6.1-1'], + ['1.0.0 - 2.0.0', '1.2.3'], + ['1.0.0 - 2.0.0', '0.9.9'], + ['1.0.0', '1.0.0'], + ['>=*', '0.2.4'], + ['', '1.0.0', true], + ['*', '1.2.3'], + ['*', 'v1.2.3-foo'], + ['>=1.0.0', '1.0.0'], + ['>=1.0.0', '1.0.1'], + ['>=1.0.0', '1.1.0'], + ['>1.0.0', '1.0.1'], + ['>1.0.0', '1.1.0'], + ['<=2.0.0', '2.0.0'], + ['<=2.0.0', '1.9999.9999'], + ['<=2.0.0', '0.2.9'], + ['<2.0.0', '1.9999.9999'], + ['<2.0.0', '0.2.9'], + ['>= 1.0.0', '1.0.0'], + ['>= 1.0.0', '1.0.1'], + ['>= 1.0.0', '1.1.0'], + ['> 1.0.0', '1.0.1'], + ['> 1.0.0', '1.1.0'], + ['<= 2.0.0', '2.0.0'], + ['<= 2.0.0', '1.9999.9999'], + ['<= 2.0.0', '0.2.9'], + ['< 2.0.0', '1.9999.9999'], + ['<\t2.0.0', '0.2.9'], + ['>=0.1.97', 'v0.1.97'], + ['>=0.1.97', '0.1.97'], + ['0.1.20 || 1.2.4', '1.2.4'], + ['0.1.20 || >1.2.4', '1.2.4'], + ['0.1.20 || 1.2.4', '1.2.3'], + ['0.1.20 || 1.2.4', '0.1.20'], + ['>=0.2.3 || <0.0.1', '0.0.0'], + ['>=0.2.3 || <0.0.1', '0.2.3'], + ['>=0.2.3 || <0.0.1', '0.2.4'], + ['||', '1.3.4'], + ['2.x.x', '2.1.3'], + ['1.2.x', '1.2.3'], + ['1.2.x || 2.x', '2.1.3'], + ['1.2.x || 2.x', '1.2.3'], + ['x', '1.2.3'], + ['2.*.*', '2.1.3'], + ['1.2.*', '1.2.3'], + ['1.2.* || 2.*', '2.1.3'], + ['1.2.* || 2.*', '1.2.3'], + ['1.2.* || 2.*', '1.2.3'], + ['*', '1.2.3'], + ['2', '2.1.2'], + ['2.3', '2.3.1'], + ['~2.4', '2.4.0'], // >=2.4.0 <2.5.0 + ['~2.4', '2.4.5'], + ['~>3.2.1', '3.2.2'], // >=3.2.1 <3.3.0 + ['~1', '1.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '1.2.3'], + ['~> 1', '1.2.3'], + ['~1.0', '1.0.2'], // >=1.0.0 <1.1.0 + ['~ 1.0', '1.0.2'], + ['>=1', '1.0.0'], + ['>= 1', '1.0.0'], + ['<1.2', '1.1.1'], + ['< 1.2', '1.1.1'], + ['1', '1.0.0beta', true], + ['~v0.5.4-pre', '0.5.5'], + ['~v0.5.4-pre', '0.5.4'], + ['=0.7.x', '0.7.2'], + ['>=0.7.x', '0.7.2'], + ['=0.7.x', '0.7.0-asdf'], + ['>=0.7.x', '0.7.0-asdf'], + ['<=0.7.x', 
'0.6.2'], + ['>0.2.3 >0.2.4 <=0.2.5', '0.2.5'], + ['>=0.2.3 <=0.2.4', '0.2.4'], + ['1.0.0 - 2.0.0', '2.0.0'], + ['^1', '0.0.0-0'], + ['^3.0.0', '2.0.0'], + ['^1.0.0 || ~2.0.1', '2.0.0'], + ['^0.1.0 || ~3.0.1 || 5.0.0', '3.2.0'], + ['^0.1.0 || ~3.0.1 || 5.0.0', '1.0.0beta', true], + ['^0.1.0 || ~3.0.1 || 5.0.0', '5.0.0-0', true], + ['^0.1.0 || ~3.0.1 || >4 <=5.0.0', '3.5.0'] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = '!gtr(' + version + ', ' + range + ', ' + loose + ')'; + t.notOk(gtr(version, range, loose), msg); + }); + t.end(); +}); diff --git a/node_modules/semver/test/index.js b/node_modules/semver/test/index.js new file mode 100644 index 00000000..926d560f --- /dev/null +++ b/node_modules/semver/test/index.js @@ -0,0 +1,684 @@ +'use strict'; + +var tap = require('tap'); +var test = tap.test; +var semver = require('../semver.js'); +var eq = semver.eq; +var gt = semver.gt; +var lt = semver.lt; +var neq = semver.neq; +var cmp = semver.cmp; +var gte = semver.gte; +var lte = semver.lte; +var satisfies = semver.satisfies; +var validRange = semver.validRange; +var inc = semver.inc; +var diff = semver.diff; +var replaceStars = semver.replaceStars; +var toComparators = semver.toComparators; +var SemVer = semver.SemVer; +var Range = semver.Range; + +test('\ncomparison tests', function(t) { + // [version1, version2] + // version1 should be greater than version2 + [['0.0.0', '0.0.0-foo'], + ['0.0.1', '0.0.0'], + ['1.0.0', '0.9.9'], + ['0.10.0', '0.9.0'], + ['0.99.0', '0.10.0'], + ['2.0.0', '1.2.3'], + ['v0.0.0', '0.0.0-foo', true], + ['v0.0.1', '0.0.0', true], + ['v1.0.0', '0.9.9', true], + ['v0.10.0', '0.9.0', true], + ['v0.99.0', '0.10.0', true], + ['v2.0.0', '1.2.3', true], + ['0.0.0', 'v0.0.0-foo', true], + ['0.0.1', 'v0.0.0', true], + ['1.0.0', 'v0.9.9', true], + ['0.10.0', 'v0.9.0', true], + ['0.99.0', 'v0.10.0', true], + ['2.0.0', 'v1.2.3', true], + ['1.2.3', '1.2.3-asdf'], + ['1.2.3', '1.2.3-4'], + ['1.2.3', '1.2.3-4-foo'], + ['1.2.3-5-foo', '1.2.3-5'], + ['1.2.3-5', '1.2.3-4'], + ['1.2.3-5-foo', '1.2.3-5-Foo'], + ['3.0.0', '2.7.2+asdf'], + ['1.2.3-a.10', '1.2.3-a.5'], + ['1.2.3-a.b', '1.2.3-a.5'], + ['1.2.3-a.b', '1.2.3-a'], + ['1.2.3-a.b.c.10.d.5', '1.2.3-a.b.c.5.d.100'], + ['1.2.3-r2', '1.2.3-r100'], + ['1.2.3-r100', '1.2.3-R2'] + ].forEach(function(v) { + var v0 = v[0]; + var v1 = v[1]; + var loose = v[2]; + t.ok(gt(v0, v1, loose), "gt('" + v0 + "', '" + v1 + "')"); + t.ok(lt(v1, v0, loose), "lt('" + v1 + "', '" + v0 + "')"); + t.ok(!gt(v1, v0, loose), "!gt('" + v1 + "', '" + v0 + "')"); + t.ok(!lt(v0, v1, loose), "!lt('" + v0 + "', '" + v1 + "')"); + t.ok(eq(v0, v0, loose), "eq('" + v0 + "', '" + v0 + "')"); + t.ok(eq(v1, v1, loose), "eq('" + v1 + "', '" + v1 + "')"); + t.ok(neq(v0, v1, loose), "neq('" + v0 + "', '" + v1 + "')"); + t.ok(cmp(v1, '==', v1, loose), "cmp('" + v1 + "' == '" + v1 + "')"); + t.ok(cmp(v0, '>=', v1, loose), "cmp('" + v0 + "' >= '" + v1 + "')"); + t.ok(cmp(v1, '<=', v0, loose), "cmp('" + v1 + "' <= '" + v0 + "')"); + t.ok(cmp(v0, '!=', v1, loose), "cmp('" + v0 + "' != '" + v1 + "')"); + }); + t.end(); +}); + +test('\nequality tests', function(t) { + // [version1, version2] + // version1 should be equivalent to version2 + [['1.2.3', 'v1.2.3', true], + ['1.2.3', '=1.2.3', true], + ['1.2.3', 'v 1.2.3', true], + ['1.2.3', '= 1.2.3', true], + ['1.2.3', ' v1.2.3', true], + ['1.2.3', ' =1.2.3', true], + ['1.2.3', ' v 1.2.3', true], + ['1.2.3', ' = 1.2.3', true], + ['1.2.3-0', 
'v1.2.3-0', true], + ['1.2.3-0', '=1.2.3-0', true], + ['1.2.3-0', 'v 1.2.3-0', true], + ['1.2.3-0', '= 1.2.3-0', true], + ['1.2.3-0', ' v1.2.3-0', true], + ['1.2.3-0', ' =1.2.3-0', true], + ['1.2.3-0', ' v 1.2.3-0', true], + ['1.2.3-0', ' = 1.2.3-0', true], + ['1.2.3-1', 'v1.2.3-1', true], + ['1.2.3-1', '=1.2.3-1', true], + ['1.2.3-1', 'v 1.2.3-1', true], + ['1.2.3-1', '= 1.2.3-1', true], + ['1.2.3-1', ' v1.2.3-1', true], + ['1.2.3-1', ' =1.2.3-1', true], + ['1.2.3-1', ' v 1.2.3-1', true], + ['1.2.3-1', ' = 1.2.3-1', true], + ['1.2.3-beta', 'v1.2.3-beta', true], + ['1.2.3-beta', '=1.2.3-beta', true], + ['1.2.3-beta', 'v 1.2.3-beta', true], + ['1.2.3-beta', '= 1.2.3-beta', true], + ['1.2.3-beta', ' v1.2.3-beta', true], + ['1.2.3-beta', ' =1.2.3-beta', true], + ['1.2.3-beta', ' v 1.2.3-beta', true], + ['1.2.3-beta', ' = 1.2.3-beta', true], + ['1.2.3-beta+build', ' = 1.2.3-beta+otherbuild', true], + ['1.2.3+build', ' = 1.2.3+otherbuild', true], + ['1.2.3-beta+build', '1.2.3-beta+otherbuild'], + ['1.2.3+build', '1.2.3+otherbuild'], + [' v1.2.3+build', '1.2.3+otherbuild'] + ].forEach(function(v) { + var v0 = v[0]; + var v1 = v[1]; + var loose = v[2]; + t.ok(eq(v0, v1, loose), "eq('" + v0 + "', '" + v1 + "')"); + t.ok(!neq(v0, v1, loose), "!neq('" + v0 + "', '" + v1 + "')"); + t.ok(cmp(v0, '==', v1, loose), 'cmp(' + v0 + '==' + v1 + ')'); + t.ok(!cmp(v0, '!=', v1, loose), '!cmp(' + v0 + '!=' + v1 + ')'); + t.ok(!cmp(v0, '===', v1, loose), '!cmp(' + v0 + '===' + v1 + ')'); + t.ok(cmp(v0, '!==', v1, loose), 'cmp(' + v0 + '!==' + v1 + ')'); + t.ok(!gt(v0, v1, loose), "!gt('" + v0 + "', '" + v1 + "')"); + t.ok(gte(v0, v1, loose), "gte('" + v0 + "', '" + v1 + "')"); + t.ok(!lt(v0, v1, loose), "!lt('" + v0 + "', '" + v1 + "')"); + t.ok(lte(v0, v1, loose), "lte('" + v0 + "', '" + v1 + "')"); + }); + t.end(); +}); + + +test('\nrange tests', function(t) { + // [range, version] + // version should be included by range + [['1.0.0 - 2.0.0', '1.2.3'], + ['^1.2.3+build', '1.2.3'], + ['^1.2.3+build', '1.3.0'], + ['1.2.3-pre+asdf - 2.4.3-pre+asdf', '1.2.3'], + ['1.2.3pre+asdf - 2.4.3-pre+asdf', '1.2.3', true], + ['1.2.3-pre+asdf - 2.4.3pre+asdf', '1.2.3', true], + ['1.2.3pre+asdf - 2.4.3pre+asdf', '1.2.3', true], + ['1.2.3-pre+asdf - 2.4.3-pre+asdf', '1.2.3-pre.2'], + ['1.2.3-pre+asdf - 2.4.3-pre+asdf', '2.4.3-alpha'], + ['1.2.3+asdf - 2.4.3+asdf', '1.2.3'], + ['1.0.0', '1.0.0'], + ['>=*', '0.2.4'], + ['', '1.0.0'], + ['*', '1.2.3'], + ['*', 'v1.2.3-foo', true], + ['>=1.0.0', '1.0.0'], + ['>=1.0.0', '1.0.1'], + ['>=1.0.0', '1.1.0'], + ['>1.0.0', '1.0.1'], + ['>1.0.0', '1.1.0'], + ['<=2.0.0', '2.0.0'], + ['<=2.0.0', '1.9999.9999'], + ['<=2.0.0', '0.2.9'], + ['<2.0.0', '1.9999.9999'], + ['<2.0.0', '0.2.9'], + ['>= 1.0.0', '1.0.0'], + ['>= 1.0.0', '1.0.1'], + ['>= 1.0.0', '1.1.0'], + ['> 1.0.0', '1.0.1'], + ['> 1.0.0', '1.1.0'], + ['<= 2.0.0', '2.0.0'], + ['<= 2.0.0', '1.9999.9999'], + ['<= 2.0.0', '0.2.9'], + ['< 2.0.0', '1.9999.9999'], + ['<\t2.0.0', '0.2.9'], + ['>=0.1.97', 'v0.1.97', true], + ['>=0.1.97', '0.1.97'], + ['0.1.20 || 1.2.4', '1.2.4'], + ['>=0.2.3 || <0.0.1', '0.0.0'], + ['>=0.2.3 || <0.0.1', '0.2.3'], + ['>=0.2.3 || <0.0.1', '0.2.4'], + ['||', '1.3.4'], + ['2.x.x', '2.1.3'], + ['1.2.x', '1.2.3'], + ['1.2.x || 2.x', '2.1.3'], + ['1.2.x || 2.x', '1.2.3'], + ['x', '1.2.3'], + ['2.*.*', '2.1.3'], + ['1.2.*', '1.2.3'], + ['1.2.* || 2.*', '2.1.3'], + ['1.2.* || 2.*', '1.2.3'], + ['*', '1.2.3'], + ['2', '2.1.2'], + ['2.3', '2.3.1'], + ['~2.4', '2.4.0'], // >=2.4.0 <2.5.0 + ['~2.4', '2.4.5'], + 
['~>3.2.1', '3.2.2'], // >=3.2.1 <3.3.0, + ['~1', '1.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '1.2.3'], + ['~> 1', '1.2.3'], + ['~1.0', '1.0.2'], // >=1.0.0 <1.1.0, + ['~ 1.0', '1.0.2'], + ['~ 1.0.3', '1.0.12'], + ['>=1', '1.0.0'], + ['>= 1', '1.0.0'], + ['<1.2', '1.1.1'], + ['< 1.2', '1.1.1'], + ['~v0.5.4-pre', '0.5.5'], + ['~v0.5.4-pre', '0.5.4'], + ['=0.7.x', '0.7.2'], + ['<=0.7.x', '0.7.2'], + ['>=0.7.x', '0.7.2'], + ['<=0.7.x', '0.6.2'], + ['~1.2.1 >=1.2.3', '1.2.3'], + ['~1.2.1 =1.2.3', '1.2.3'], + ['~1.2.1 1.2.3', '1.2.3'], + ['~1.2.1 >=1.2.3 1.2.3', '1.2.3'], + ['~1.2.1 1.2.3 >=1.2.3', '1.2.3'], + ['~1.2.1 1.2.3', '1.2.3'], + ['>=1.2.1 1.2.3', '1.2.3'], + ['1.2.3 >=1.2.1', '1.2.3'], + ['>=1.2.3 >=1.2.1', '1.2.3'], + ['>=1.2.1 >=1.2.3', '1.2.3'], + ['>=1.2', '1.2.8'], + ['^1.2.3', '1.8.1'], + ['^0.1.2', '0.1.2'], + ['^0.1', '0.1.2'], + ['^1.2', '1.4.2'], + ['^1.2 ^1', '1.4.2'], + ['^1.2.3-alpha', '1.2.3-pre'], + ['^1.2.0-alpha', '1.2.0-pre'], + ['^0.0.1-alpha', '0.0.1-beta'] + ].forEach(function(v) { + var range = v[0]; + var ver = v[1]; + var loose = v[2]; + t.ok(satisfies(ver, range, loose), range + ' satisfied by ' + ver); + }); + t.end(); +}); + +test('\nnegative range tests', function(t) { + // [range, version] + // version should not be included by range + [['1.0.0 - 2.0.0', '2.2.3'], + ['1.2.3+asdf - 2.4.3+asdf', '1.2.3-pre.2'], + ['1.2.3+asdf - 2.4.3+asdf', '2.4.3-alpha'], + ['^1.2.3+build', '2.0.0'], + ['^1.2.3+build', '1.2.0'], + ['^1.2.3', '1.2.3-pre'], + ['^1.2', '1.2.0-pre'], + ['>1.2', '1.3.0-beta'], + ['<=1.2.3', '1.2.3-beta'], + ['^1.2.3', '1.2.3-beta'], + ['=0.7.x', '0.7.0-asdf'], + ['>=0.7.x', '0.7.0-asdf'], + ['1', '1.0.0beta', true], + ['<1', '1.0.0beta', true], + ['< 1', '1.0.0beta', true], + ['1.0.0', '1.0.1'], + ['>=1.0.0', '0.0.0'], + ['>=1.0.0', '0.0.1'], + ['>=1.0.0', '0.1.0'], + ['>1.0.0', '0.0.1'], + ['>1.0.0', '0.1.0'], + ['<=2.0.0', '3.0.0'], + ['<=2.0.0', '2.9999.9999'], + ['<=2.0.0', '2.2.9'], + ['<2.0.0', '2.9999.9999'], + ['<2.0.0', '2.2.9'], + ['>=0.1.97', 'v0.1.93', true], + ['>=0.1.97', '0.1.93'], + ['0.1.20 || 1.2.4', '1.2.3'], + ['>=0.2.3 || <0.0.1', '0.0.3'], + ['>=0.2.3 || <0.0.1', '0.2.2'], + ['2.x.x', '1.1.3'], + ['2.x.x', '3.1.3'], + ['1.2.x', '1.3.3'], + ['1.2.x || 2.x', '3.1.3'], + ['1.2.x || 2.x', '1.1.3'], + ['2.*.*', '1.1.3'], + ['2.*.*', '3.1.3'], + ['1.2.*', '1.3.3'], + ['1.2.* || 2.*', '3.1.3'], + ['1.2.* || 2.*', '1.1.3'], + ['2', '1.1.2'], + ['2.3', '2.4.1'], + ['~2.4', '2.5.0'], // >=2.4.0 <2.5.0 + ['~2.4', '2.3.9'], + ['~>3.2.1', '3.3.2'], // >=3.2.1 <3.3.0 + ['~>3.2.1', '3.2.0'], // >=3.2.1 <3.3.0 + ['~1', '0.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '2.2.3'], + ['~1.0', '1.1.0'], // >=1.0.0 <1.1.0 + ['<1', '1.0.0'], + ['>=1.2', '1.1.1'], + ['1', '2.0.0beta', true], + ['~v0.5.4-beta', '0.5.4-alpha'], + ['=0.7.x', '0.8.2'], + ['>=0.7.x', '0.6.2'], + ['<0.7.x', '0.7.2'], + ['<1.2.3', '1.2.3-beta'], + ['=1.2.3', '1.2.3-beta'], + ['>1.2', '1.2.8'], + ['^1.2.3', '2.0.0-alpha'], + ['^1.2.3', '1.2.2'], + ['^1.2', '1.1.9'], + // invalid ranges never satisfied! 
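+    // (satisfies() parses the range inside a try/catch, so an unparseable
+    //  range just reports false instead of throwing)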
+ ['blerg', '1.2.3'], + ['git+https://user:password0123@github.com/foo', '123.0.0', true], + ['^1.2.3', '2.0.0-pre'] + ].forEach(function(v) { + var range = v[0]; + var ver = v[1]; + var loose = v[2]; + var found = satisfies(ver, range, loose); + t.ok(!found, ver + ' not satisfied by ' + range); + }); + t.end(); +}); + +test('\nincrement versions test', function(t) { +// [version, inc, result, identifier] +// inc(version, inc) -> result + [['1.2.3', 'major', '2.0.0'], + ['1.2.3', 'minor', '1.3.0'], + ['1.2.3', 'patch', '1.2.4'], + ['1.2.3tag', 'major', '2.0.0', true], + ['1.2.3-tag', 'major', '2.0.0'], + ['1.2.3', 'fake', null], + ['1.2.0-0', 'patch', '1.2.0'], + ['fake', 'major', null], + ['1.2.3-4', 'major', '2.0.0'], + ['1.2.3-4', 'minor', '1.3.0'], + ['1.2.3-4', 'patch', '1.2.3'], + ['1.2.3-alpha.0.beta', 'major', '2.0.0'], + ['1.2.3-alpha.0.beta', 'minor', '1.3.0'], + ['1.2.3-alpha.0.beta', 'patch', '1.2.3'], + ['1.2.4', 'prerelease', '1.2.5-0'], + ['1.2.3-0', 'prerelease', '1.2.3-1'], + ['1.2.3-alpha.0', 'prerelease', '1.2.3-alpha.1'], + ['1.2.3-alpha.1', 'prerelease', '1.2.3-alpha.2'], + ['1.2.3-alpha.2', 'prerelease', '1.2.3-alpha.3'], + ['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-alpha.1.beta'], + ['1.2.3-alpha.1.beta', 'prerelease', '1.2.3-alpha.2.beta'], + ['1.2.3-alpha.2.beta', 'prerelease', '1.2.3-alpha.3.beta'], + ['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-alpha.10.1.beta'], + ['1.2.3-alpha.10.1.beta', 'prerelease', '1.2.3-alpha.10.2.beta'], + ['1.2.3-alpha.10.2.beta', 'prerelease', '1.2.3-alpha.10.3.beta'], + ['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-alpha.10.beta.1'], + ['1.2.3-alpha.10.beta.1', 'prerelease', '1.2.3-alpha.10.beta.2'], + ['1.2.3-alpha.10.beta.2', 'prerelease', '1.2.3-alpha.10.beta.3'], + ['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-alpha.10.beta'], + ['1.2.3-alpha.10.beta', 'prerelease', '1.2.3-alpha.11.beta'], + ['1.2.3-alpha.11.beta', 'prerelease', '1.2.3-alpha.12.beta'], + ['1.2.0', 'prepatch', '1.2.1-0'], + ['1.2.0-1', 'prepatch', '1.2.1-0'], + ['1.2.0', 'preminor', '1.3.0-0'], + ['1.2.3-1', 'preminor', '1.3.0-0'], + ['1.2.0', 'premajor', '2.0.0-0'], + ['1.2.3-1', 'premajor', '2.0.0-0'], + ['1.2.0-1', 'minor', '1.2.0'], + ['1.0.0-1', 'major', '1.0.0'], + + ['1.2.3', 'major', '2.0.0', false, 'dev'], + ['1.2.3', 'minor', '1.3.0', false, 'dev'], + ['1.2.3', 'patch', '1.2.4', false, 'dev'], + ['1.2.3tag', 'major', '2.0.0', true, 'dev'], + ['1.2.3-tag', 'major', '2.0.0', false, 'dev'], + ['1.2.3', 'fake', null, false, 'dev'], + ['1.2.0-0', 'patch', '1.2.0', false, 'dev'], + ['fake', 'major', null, false, 'dev'], + ['1.2.3-4', 'major', '2.0.0', false, 'dev'], + ['1.2.3-4', 'minor', '1.3.0', false, 'dev'], + ['1.2.3-4', 'patch', '1.2.3', false, 'dev'], + ['1.2.3-alpha.0.beta', 'major', '2.0.0', false, 'dev'], + ['1.2.3-alpha.0.beta', 'minor', '1.3.0', false, 'dev'], + ['1.2.3-alpha.0.beta', 'patch', '1.2.3', false, 'dev'], + ['1.2.4', 'prerelease', '1.2.5-dev.0', false, 'dev'], + ['1.2.3-0', 'prerelease', '1.2.3-dev.0', false, 'dev'], + ['1.2.3-alpha.0', 'prerelease', '1.2.3-dev.0', false, 'dev'], + ['1.2.3-alpha.0', 'prerelease', '1.2.3-alpha.1', false, 'alpha'], + ['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-dev.0', false, 'dev'], + ['1.2.3-alpha.0.beta', 'prerelease', '1.2.3-alpha.1.beta', false, 'alpha'], + ['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-dev.0', false, 'dev'], + ['1.2.3-alpha.10.0.beta', 'prerelease', '1.2.3-alpha.10.1.beta', false, 'alpha'], + ['1.2.3-alpha.10.1.beta', 'prerelease', '1.2.3-alpha.10.2.beta', false, 'alpha'], + 
['1.2.3-alpha.10.2.beta', 'prerelease', '1.2.3-alpha.10.3.beta', false, 'alpha'], + ['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-dev.0', false, 'dev'], + ['1.2.3-alpha.10.beta.0', 'prerelease', '1.2.3-alpha.10.beta.1', false, 'alpha'], + ['1.2.3-alpha.10.beta.1', 'prerelease', '1.2.3-alpha.10.beta.2', false, 'alpha'], + ['1.2.3-alpha.10.beta.2', 'prerelease', '1.2.3-alpha.10.beta.3', false, 'alpha'], + ['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-dev.0', false, 'dev'], + ['1.2.3-alpha.9.beta', 'prerelease', '1.2.3-alpha.10.beta', false, 'alpha'], + ['1.2.3-alpha.10.beta', 'prerelease', '1.2.3-alpha.11.beta', false, 'alpha'], + ['1.2.3-alpha.11.beta', 'prerelease', '1.2.3-alpha.12.beta', false, 'alpha'], + ['1.2.0', 'prepatch', '1.2.1-dev.0', 'dev'], + ['1.2.0-1', 'prepatch', '1.2.1-dev.0', 'dev'], + ['1.2.0', 'preminor', '1.3.0-dev.0', 'dev'], + ['1.2.3-1', 'preminor', '1.3.0-dev.0', 'dev'], + ['1.2.0', 'premajor', '2.0.0-dev.0', 'dev'], + ['1.2.3-1', 'premajor', '2.0.0-dev.0', 'dev'], + ['1.2.0-1', 'minor', '1.2.0', 'dev'], + ['1.0.0-1', 'major', '1.0.0', 'dev'], + ['1.2.3-dev.bar', 'prerelease', '1.2.3-dev.0', false, 'dev'] + + ].forEach(function(v) { + var pre = v[0]; + var what = v[1]; + var wanted = v[2]; + var loose = v[3]; + var id = v[4]; + var found = inc(pre, what, loose, id); + var cmd = 'inc(' + pre + ', ' + what + ', ' + id + ')'; + t.equal(found, wanted, cmd + ' === ' + wanted); + }); + + t.end(); +}); + +test('\ndiff versions test', function(t) { +// [version1, version2, result] +// diff(version1, version2) -> result + [['1.2.3', '0.2.3', 'major'], + ['1.4.5', '0.2.3', 'major'], + ['1.2.3', '2.0.0-pre', 'premajor'], + ['1.2.3', '1.3.3', 'minor'], + ['1.0.1', '1.1.0-pre', 'preminor'], + ['1.2.3', '1.2.4', 'patch'], + ['1.2.3', '1.2.4-pre', 'prepatch'], + ['0.0.1', '0.0.1-pre', 'prerelease'], + ['0.0.1', '0.0.1-pre-2', 'prerelease'], + ['1.1.0', '1.1.0-pre', 'prerelease'], + ['1.1.0-pre-1', '1.1.0-pre-2', 'prerelease'], + ['1.0.0', '1.0.0', null] + + ].forEach(function(v) { + var version1 = v[0]; + var version2 = v[1]; + var wanted = v[2]; + var found = diff(version1, version2); + var cmd = 'diff(' + version1 + ', ' + version2 + ')'; + t.equal(found, wanted, cmd + ' === ' + wanted); + }); + + t.end(); +}); + +test('\nvalid range test', function(t) { + // [range, result] + // validRange(range) -> result + // translate ranges into their canonical form + [['1.0.0 - 2.0.0', '>=1.0.0 <=2.0.0'], + ['1.0.0', '1.0.0'], + ['>=*', '*'], + ['', '*'], + ['*', '*'], + ['*', '*'], + ['>=1.0.0', '>=1.0.0'], + ['>1.0.0', '>1.0.0'], + ['<=2.0.0', '<=2.0.0'], + ['1', '>=1.0.0 <2.0.0'], + ['<=2.0.0', '<=2.0.0'], + ['<=2.0.0', '<=2.0.0'], + ['<2.0.0', '<2.0.0'], + ['<2.0.0', '<2.0.0'], + ['>= 1.0.0', '>=1.0.0'], + ['>= 1.0.0', '>=1.0.0'], + ['>= 1.0.0', '>=1.0.0'], + ['> 1.0.0', '>1.0.0'], + ['> 1.0.0', '>1.0.0'], + ['<= 2.0.0', '<=2.0.0'], + ['<= 2.0.0', '<=2.0.0'], + ['<= 2.0.0', '<=2.0.0'], + ['< 2.0.0', '<2.0.0'], + ['< 2.0.0', '<2.0.0'], + ['>=0.1.97', '>=0.1.97'], + ['>=0.1.97', '>=0.1.97'], + ['0.1.20 || 1.2.4', '0.1.20||1.2.4'], + ['>=0.2.3 || <0.0.1', '>=0.2.3||<0.0.1'], + ['>=0.2.3 || <0.0.1', '>=0.2.3||<0.0.1'], + ['>=0.2.3 || <0.0.1', '>=0.2.3||<0.0.1'], + ['||', '||'], + ['2.x.x', '>=2.0.0 <3.0.0'], + ['1.2.x', '>=1.2.0 <1.3.0'], + ['1.2.x || 2.x', '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'], + ['1.2.x || 2.x', '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'], + ['x', '*'], + ['2.*.*', '>=2.0.0 <3.0.0'], + ['1.2.*', '>=1.2.0 <1.3.0'], + ['1.2.* || 2.*', '>=1.2.0 <1.3.0||>=2.0.0 <3.0.0'], + ['*', '*'], + 
['2', '>=2.0.0 <3.0.0'], + ['2.3', '>=2.3.0 <2.4.0'], + ['~2.4', '>=2.4.0 <2.5.0'], + ['~2.4', '>=2.4.0 <2.5.0'], + ['~>3.2.1', '>=3.2.1 <3.3.0'], + ['~1', '>=1.0.0 <2.0.0'], + ['~>1', '>=1.0.0 <2.0.0'], + ['~> 1', '>=1.0.0 <2.0.0'], + ['~1.0', '>=1.0.0 <1.1.0'], + ['~ 1.0', '>=1.0.0 <1.1.0'], + ['^0', '>=0.0.0 <1.0.0'], + ['^ 1', '>=1.0.0 <2.0.0'], + ['^0.1', '>=0.1.0 <0.2.0'], + ['^1.0', '>=1.0.0 <2.0.0'], + ['^1.2', '>=1.2.0 <2.0.0'], + ['^0.0.1', '>=0.0.1 <0.0.2'], + ['^0.0.1-beta', '>=0.0.1-beta <0.0.2'], + ['^0.1.2', '>=0.1.2 <0.2.0'], + ['^1.2.3', '>=1.2.3 <2.0.0'], + ['^1.2.3-beta.4', '>=1.2.3-beta.4 <2.0.0'], + ['<1', '<1.0.0'], + ['< 1', '<1.0.0'], + ['>=1', '>=1.0.0'], + ['>= 1', '>=1.0.0'], + ['<1.2', '<1.2.0'], + ['< 1.2', '<1.2.0'], + ['1', '>=1.0.0 <2.0.0'], + ['>01.02.03', '>1.2.3', true], + ['>01.02.03', null], + ['~1.2.3beta', '>=1.2.3-beta <1.3.0', true], + ['~1.2.3beta', null], + ['^ 1.2 ^ 1', '>=1.2.0 <2.0.0 >=1.0.0 <2.0.0'] + ].forEach(function(v) { + var pre = v[0]; + var wanted = v[1]; + var loose = v[2]; + var found = validRange(pre, loose); + + t.equal(found, wanted, 'validRange(' + pre + ') === ' + wanted); + }); + + t.end(); +}); + +test('\ncomparators test', function(t) { + // [range, comparators] + // turn range into a set of individual comparators + [['1.0.0 - 2.0.0', [['>=1.0.0', '<=2.0.0']]], + ['1.0.0', [['1.0.0']]], + ['>=*', [['']]], + ['', [['']]], + ['*', [['']]], + ['*', [['']]], + ['>=1.0.0', [['>=1.0.0']]], + ['>=1.0.0', [['>=1.0.0']]], + ['>=1.0.0', [['>=1.0.0']]], + ['>1.0.0', [['>1.0.0']]], + ['>1.0.0', [['>1.0.0']]], + ['<=2.0.0', [['<=2.0.0']]], + ['1', [['>=1.0.0', '<2.0.0']]], + ['<=2.0.0', [['<=2.0.0']]], + ['<=2.0.0', [['<=2.0.0']]], + ['<2.0.0', [['<2.0.0']]], + ['<2.0.0', [['<2.0.0']]], + ['>= 1.0.0', [['>=1.0.0']]], + ['>= 1.0.0', [['>=1.0.0']]], + ['>= 1.0.0', [['>=1.0.0']]], + ['> 1.0.0', [['>1.0.0']]], + ['> 1.0.0', [['>1.0.0']]], + ['<= 2.0.0', [['<=2.0.0']]], + ['<= 2.0.0', [['<=2.0.0']]], + ['<= 2.0.0', [['<=2.0.0']]], + ['< 2.0.0', [['<2.0.0']]], + ['<\t2.0.0', [['<2.0.0']]], + ['>=0.1.97', [['>=0.1.97']]], + ['>=0.1.97', [['>=0.1.97']]], + ['0.1.20 || 1.2.4', [['0.1.20'], ['1.2.4']]], + ['>=0.2.3 || <0.0.1', [['>=0.2.3'], ['<0.0.1']]], + ['>=0.2.3 || <0.0.1', [['>=0.2.3'], ['<0.0.1']]], + ['>=0.2.3 || <0.0.1', [['>=0.2.3'], ['<0.0.1']]], + ['||', [[''], ['']]], + ['2.x.x', [['>=2.0.0', '<3.0.0']]], + ['1.2.x', [['>=1.2.0', '<1.3.0']]], + ['1.2.x || 2.x', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]], + ['1.2.x || 2.x', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]], + ['x', [['']]], + ['2.*.*', [['>=2.0.0', '<3.0.0']]], + ['1.2.*', [['>=1.2.0', '<1.3.0']]], + ['1.2.* || 2.*', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]], + ['1.2.* || 2.*', [['>=1.2.0', '<1.3.0'], ['>=2.0.0', '<3.0.0']]], + ['*', [['']]], + ['2', [['>=2.0.0', '<3.0.0']]], + ['2.3', [['>=2.3.0', '<2.4.0']]], + ['~2.4', [['>=2.4.0', '<2.5.0']]], + ['~2.4', [['>=2.4.0', '<2.5.0']]], + ['~>3.2.1', [['>=3.2.1', '<3.3.0']]], + ['~1', [['>=1.0.0', '<2.0.0']]], + ['~>1', [['>=1.0.0', '<2.0.0']]], + ['~> 1', [['>=1.0.0', '<2.0.0']]], + ['~1.0', [['>=1.0.0', '<1.1.0']]], + ['~ 1.0', [['>=1.0.0', '<1.1.0']]], + ['~ 1.0.3', [['>=1.0.3', '<1.1.0']]], + ['~> 1.0.3', [['>=1.0.3', '<1.1.0']]], + ['<1', [['<1.0.0']]], + ['< 1', [['<1.0.0']]], + ['>=1', [['>=1.0.0']]], + ['>= 1', [['>=1.0.0']]], + ['<1.2', [['<1.2.0']]], + ['< 1.2', [['<1.2.0']]], + ['1', [['>=1.0.0', '<2.0.0']]], + ['1 2', [['>=1.0.0', '<2.0.0', '>=2.0.0', '<3.0.0']]], + ['1.2 - 3.4.5', [['>=1.2.0', 
'<=3.4.5']]], + ['1.2.3 - 3.4', [['>=1.2.3', '<3.5.0']]], + ['1.2.3 - 3', [['>=1.2.3', '<4.0.0']]], + ['>*', [['<0.0.0']]], + ['<*', [['<0.0.0']]] + ].forEach(function(v) { + var pre = v[0]; + var wanted = v[1]; + var found = toComparators(v[0]); + var jw = JSON.stringify(wanted); + t.equivalent(found, wanted, 'toComparators(' + pre + ') === ' + jw); + }); + + t.end(); +}); + +test('\ninvalid version numbers', function(t) { + ['1.2.3.4', + 'NOT VALID', + 1.2, + null, + 'Infinity.NaN.Infinity' + ].forEach(function(v) { + t.throws(function() { + new SemVer(v); + }, {name:'TypeError', message:'Invalid Version: ' + v}); + }); + + t.end(); +}); + +test('\nstrict vs loose version numbers', function(t) { + [['=1.2.3', '1.2.3'], + ['01.02.03', '1.2.3'], + ['1.2.3-beta.01', '1.2.3-beta.1'], + [' =1.2.3', '1.2.3'], + ['1.2.3foo', '1.2.3-foo'] + ].forEach(function(v) { + var loose = v[0]; + var strict = v[1]; + t.throws(function() { + new SemVer(loose); + }); + var lv = new SemVer(loose, true); + t.equal(lv.version, strict); + t.ok(eq(loose, strict, true)); + t.throws(function() { + eq(loose, strict); + }); + t.throws(function() { + new SemVer(strict).compare(loose); + }); + }); + t.end(); +}); + +test('\nstrict vs loose ranges', function(t) { + [['>=01.02.03', '>=1.2.3'], + ['~1.02.03beta', '>=1.2.3-beta <1.3.0'] + ].forEach(function(v) { + var loose = v[0]; + var comps = v[1]; + t.throws(function() { + new Range(loose); + }); + t.equal(new Range(loose, true).range, comps); + }); + t.end(); +}); + +test('\nmax satisfying', function(t) { + [[['1.2.3', '1.2.4'], '1.2', '1.2.4'], + [['1.2.4', '1.2.3'], '1.2', '1.2.4'], + [['1.2.3', '1.2.4', '1.2.5', '1.2.6'], '~1.2.3', '1.2.6'], + [['1.1.0', '1.2.0', '1.2.1', '1.3.0', '2.0.0b1', '2.0.0b2', '2.0.0b3', '2.0.0', '2.1.0'], '~2.0.0', '2.0.0', true] + ].forEach(function(v) { + var versions = v[0]; + var range = v[1]; + var expect = v[2]; + var loose = v[3]; + var actual = semver.maxSatisfying(versions, range, loose); + t.equal(actual, expect); + }); + t.end(); +}); diff --git a/node_modules/semver/test/ltr.js b/node_modules/semver/test/ltr.js new file mode 100644 index 00000000..ecd1387d --- /dev/null +++ b/node_modules/semver/test/ltr.js @@ -0,0 +1,181 @@ +var tap = require('tap'); +var test = tap.test; +var semver = require('../semver.js'); +var ltr = semver.ltr; + +test('\nltr tests', function(t) { + // [range, version, loose] + // Version should be less than range + [ + ['~1.2.2', '1.2.1'], + ['~0.6.1-1', '0.6.1-0'], + ['1.0.0 - 2.0.0', '0.0.1'], + ['1.0.0-beta.2', '1.0.0-beta.1'], + ['1.0.0', '0.0.0'], + ['>=2.0.0', '1.1.1'], + ['>=2.0.0', '1.2.9'], + ['>2.0.0', '2.0.0'], + ['0.1.20 || 1.2.4', '0.1.5'], + ['2.x.x', '1.0.0'], + ['1.2.x', '1.1.0'], + ['1.2.x || 2.x', '1.0.0'], + ['2.*.*', '1.0.1'], + ['1.2.*', '1.1.3'], + ['1.2.* || 2.*', '1.1.9999'], + ['2', '1.0.0'], + ['2.3', '2.2.2'], + ['~2.4', '2.3.0'], // >=2.4.0 <2.5.0 + ['~2.4', '2.3.5'], + ['~>3.2.1', '3.2.0'], // >=3.2.1 <3.3.0 + ['~1', '0.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '0.2.4'], + ['~> 1', '0.2.3'], + ['~1.0', '0.1.2'], // >=1.0.0 <1.1.0 + ['~ 1.0', '0.1.0'], + ['>1.2', '1.2.0'], + ['> 1.2', '1.2.1'], + ['1', '0.0.0beta', true], + ['~v0.5.4-pre', '0.5.4-alpha'], + ['~v0.5.4-pre', '0.5.4-alpha'], + ['=0.7.x', '0.6.0'], + ['=0.7.x', '0.6.0-asdf'], + ['>=0.7.x', '0.6.0'], + ['~1.2.2', '1.2.1'], + ['1.0.0 - 2.0.0', '0.2.3'], + ['1.0.0', '0.0.1'], + ['>=2.0.0', '1.0.0'], + ['>=2.0.0', '1.9999.9999'], + ['>=2.0.0', '1.2.9'], + ['>2.0.0', '2.0.0'], + ['>2.0.0', '1.2.9'], + ['2.x.x', '1.1.3'], + 
['1.2.x', '1.1.3'], + ['1.2.x || 2.x', '1.1.3'], + ['2.*.*', '1.1.3'], + ['1.2.*', '1.1.3'], + ['1.2.* || 2.*', '1.1.3'], + ['2', '1.9999.9999'], + ['2.3', '2.2.1'], + ['~2.4', '2.3.0'], // >=2.4.0 <2.5.0 + ['~>3.2.1', '2.3.2'], // >=3.2.1 <3.3.0 + ['~1', '0.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '0.2.3'], + ['~1.0', '0.0.0'], // >=1.0.0 <1.1.0 + ['>1', '1.0.0'], + ['2', '1.0.0beta', true], + ['>1', '1.0.0beta', true], + ['> 1', '1.0.0beta', true], + ['=0.7.x', '0.6.2'], + ['=0.7.x', '0.7.0-asdf'], + ['^1', '1.0.0-0'], + ['>=0.7.x', '0.7.0-asdf'], + ['1', '1.0.0beta', true], + ['>=0.7.x', '0.6.2'] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = 'ltr(' + version + ', ' + range + ', ' + loose + ')'; + t.ok(ltr(version, range, loose), msg); + }); + t.end(); +}); + +test('\nnegative ltr tests', function(t) { + // [range, version, loose] + // Version should NOT be greater than range + [ + ['~ 1.0', '1.1.0'], + ['~0.6.1-1', '0.6.1-1'], + ['1.0.0 - 2.0.0', '1.2.3'], + ['1.0.0 - 2.0.0', '2.9.9'], + ['1.0.0', '1.0.0'], + ['>=*', '0.2.4'], + ['', '1.0.0', true], + ['*', '1.2.3'], + ['*', 'v1.2.3-foo'], + ['>=1.0.0', '1.0.0'], + ['>=1.0.0', '1.0.1'], + ['>=1.0.0', '1.1.0'], + ['>1.0.0', '1.0.1'], + ['>1.0.0', '1.1.0'], + ['<=2.0.0', '2.0.0'], + ['<=2.0.0', '1.9999.9999'], + ['<=2.0.0', '0.2.9'], + ['<2.0.0', '1.9999.9999'], + ['<2.0.0', '0.2.9'], + ['>= 1.0.0', '1.0.0'], + ['>= 1.0.0', '1.0.1'], + ['>= 1.0.0', '1.1.0'], + ['> 1.0.0', '1.0.1'], + ['> 1.0.0', '1.1.0'], + ['<= 2.0.0', '2.0.0'], + ['<= 2.0.0', '1.9999.9999'], + ['<= 2.0.0', '0.2.9'], + ['< 2.0.0', '1.9999.9999'], + ['<\t2.0.0', '0.2.9'], + ['>=0.1.97', 'v0.1.97'], + ['>=0.1.97', '0.1.97'], + ['0.1.20 || 1.2.4', '1.2.4'], + ['0.1.20 || >1.2.4', '1.2.4'], + ['0.1.20 || 1.2.4', '1.2.3'], + ['0.1.20 || 1.2.4', '0.1.20'], + ['>=0.2.3 || <0.0.1', '0.0.0'], + ['>=0.2.3 || <0.0.1', '0.2.3'], + ['>=0.2.3 || <0.0.1', '0.2.4'], + ['||', '1.3.4'], + ['2.x.x', '2.1.3'], + ['1.2.x', '1.2.3'], + ['1.2.x || 2.x', '2.1.3'], + ['1.2.x || 2.x', '1.2.3'], + ['x', '1.2.3'], + ['2.*.*', '2.1.3'], + ['1.2.*', '1.2.3'], + ['1.2.* || 2.*', '2.1.3'], + ['1.2.* || 2.*', '1.2.3'], + ['1.2.* || 2.*', '1.2.3'], + ['*', '1.2.3'], + ['2', '2.1.2'], + ['2.3', '2.3.1'], + ['~2.4', '2.4.0'], // >=2.4.0 <2.5.0 + ['~2.4', '2.4.5'], + ['~>3.2.1', '3.2.2'], // >=3.2.1 <3.3.0 + ['~1', '1.2.3'], // >=1.0.0 <2.0.0 + ['~>1', '1.2.3'], + ['~> 1', '1.2.3'], + ['~1.0', '1.0.2'], // >=1.0.0 <1.1.0 + ['~ 1.0', '1.0.2'], + ['>=1', '1.0.0'], + ['>= 1', '1.0.0'], + ['<1.2', '1.1.1'], + ['< 1.2', '1.1.1'], + ['~v0.5.4-pre', '0.5.5'], + ['~v0.5.4-pre', '0.5.4'], + ['=0.7.x', '0.7.2'], + ['>=0.7.x', '0.7.2'], + ['<=0.7.x', '0.6.2'], + ['>0.2.3 >0.2.4 <=0.2.5', '0.2.5'], + ['>=0.2.3 <=0.2.4', '0.2.4'], + ['1.0.0 - 2.0.0', '2.0.0'], + ['^3.0.0', '4.0.0'], + ['^1.0.0 || ~2.0.1', '2.0.0'], + ['^0.1.0 || ~3.0.1 || 5.0.0', '3.2.0'], + ['^0.1.0 || ~3.0.1 || 5.0.0', '1.0.0beta', true], + ['^0.1.0 || ~3.0.1 || 5.0.0', '5.0.0-0', true], + ['^0.1.0 || ~3.0.1 || >4 <=5.0.0', '3.5.0'], + ['^1.0.0alpha', '1.0.0beta', true], + ['~1.0.0alpha', '1.0.0beta', true], + ['^1.0.0-alpha', '1.0.0beta', true], + ['~1.0.0-alpha', '1.0.0beta', true], + ['^1.0.0-alpha', '1.0.0-beta'], + ['~1.0.0-alpha', '1.0.0-beta'], + ['=0.1.0', '1.0.0'] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = '!ltr(' + version + ', ' + range + ', ' + loose + ')'; + t.notOk(ltr(version, range, 
loose), msg); + }); + t.end(); +}); diff --git a/node_modules/semver/test/major-minor-patch.js b/node_modules/semver/test/major-minor-patch.js new file mode 100644 index 00000000..e9d4039c --- /dev/null +++ b/node_modules/semver/test/major-minor-patch.js @@ -0,0 +1,72 @@ +var tap = require('tap'); +var test = tap.test; +var semver = require('../semver.js'); + +test('\nmajor tests', function(t) { + // [range, version] + // Version should be detectable despite extra characters + [ + ['1.2.3', 1], + [' 1.2.3 ', 1], + [' 2.2.3-4 ', 2], + [' 3.2.3-pre ', 3], + ['v5.2.3', 5], + [' v8.2.3 ', 8], + ['\t13.2.3', 13], + ['=21.2.3', 21, true], + ['v=34.2.3', 34, true] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = 'major(' + range + ') = ' + version; + t.equal(semver.major(range, loose), version, msg); + }); + t.end(); +}); + +test('\nminor tests', function(t) { + // [range, version] + // Version should be detectable despite extra characters + [ + ['1.1.3', 1], + [' 1.1.3 ', 1], + [' 1.2.3-4 ', 2], + [' 1.3.3-pre ', 3], + ['v1.5.3', 5], + [' v1.8.3 ', 8], + ['\t1.13.3', 13], + ['=1.21.3', 21, true], + ['v=1.34.3', 34, true] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = 'minor(' + range + ') = ' + version; + t.equal(semver.minor(range, loose), version, msg); + }); + t.end(); +}); + +test('\npatch tests', function(t) { + // [range, version] + // Version should be detectable despite extra characters + [ + ['1.2.1', 1], + [' 1.2.1 ', 1], + [' 1.2.2-4 ', 2], + [' 1.2.3-pre ', 3], + ['v1.2.5', 5], + [' v1.2.8 ', 8], + ['\t1.2.13', 13], + ['=1.2.21', 21, true], + ['v=1.2.34', 34, true] + ].forEach(function(tuple) { + var range = tuple[0]; + var version = tuple[1]; + var loose = tuple[2] || false; + var msg = 'patch(' + range + ') = ' + version; + t.equal(semver.patch(range, loose), version, msg); + }); + t.end(); +}); diff --git a/node_modules/semver/test/no-module.js b/node_modules/semver/test/no-module.js new file mode 100644 index 00000000..8b50873f --- /dev/null +++ b/node_modules/semver/test/no-module.js @@ -0,0 +1,19 @@ +var tap = require('tap'); +var test = tap.test; + +test('no module system', function(t) { + var fs = require('fs'); + var vm = require('vm'); + var head = fs.readFileSync(require.resolve('../head.js.txt'), 'utf8'); + var src = fs.readFileSync(require.resolve('../'), 'utf8'); + var foot = fs.readFileSync(require.resolve('../foot.js.txt'), 'utf8'); + vm.runInThisContext(head + src + foot, 'semver.js'); + + // just some basic poking to see if it did some stuff + t.type(global.semver, 'object'); + t.type(global.semver.SemVer, 'function'); + t.type(global.semver.Range, 'function'); + t.ok(global.semver.satisfies('1.2.3', '1.2')); + t.end(); +}); + diff --git a/node_modules/split/.npmignore b/node_modules/split/.npmignore new file mode 100644 index 00000000..13abef4f --- /dev/null +++ b/node_modules/split/.npmignore @@ -0,0 +1,3 @@ +node_modules +node_modules/* +npm_debug.log diff --git a/node_modules/split/.travis.yml b/node_modules/split/.travis.yml new file mode 100644 index 00000000..6e5919de --- /dev/null +++ b/node_modules/split/.travis.yml @@ -0,0 +1,3 @@ +language: node_js +node_js: + - "0.10" diff --git a/node_modules/split/LICENCE b/node_modules/split/LICENCE new file mode 100644 index 00000000..171dd970 --- /dev/null +++ b/node_modules/split/LICENCE @@ -0,0 +1,22 @@ +Copyright (c) 2011 Dominic Tarr + +Permission is hereby 
granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/split/examples/pretty.js b/node_modules/split/examples/pretty.js new file mode 100644 index 00000000..2e891316 --- /dev/null +++ b/node_modules/split/examples/pretty.js @@ -0,0 +1,26 @@ + +var inspect = require('util').inspect +var es = require('event-stream') //load event-stream +var split = require('../') + +if(!module.parent) { + es.pipe( //pipe joins streams together + process.openStdin(), //open stdin + split(), //split stream to break on newlines + es.map(function (data, callback) {//turn this async function into a stream + var j + try { + j = JSON.parse(data) //try to parse input into json + } catch (err) { + return callback(null, data) //if it fails just pass it anyway + } + callback(null, inspect(j)) //render it nicely + }), + process.stdout // pipe it to stdout ! + ) + } + +// run this +// +// curl -sS registry.npmjs.org/event-stream | node pretty.js +// diff --git a/node_modules/split/index.js b/node_modules/split/index.js new file mode 100644 index 00000000..d2bbe5dc --- /dev/null +++ b/node_modules/split/index.js @@ -0,0 +1,63 @@ +//filter will reemit the data if cb(err,pass) pass is truthy + +// reduce is more tricky +// maybe we want to group the reductions or emit progress updates occasionally +// the most basic reduce just emits one 'data' event after it has recieved 'end' + + +var through = require('through') +var Decoder = require('string_decoder').StringDecoder + +module.exports = split + +//TODO pass in a function to map across the lines. + +function split (matcher, mapper, options) { + var decoder = new Decoder() + var soFar = '' + var maxLength = options && options.maxLength; + var trailing = options && options.trailing === false ? false : true + if('function' === typeof matcher) + mapper = matcher, matcher = null + if (!matcher) + matcher = /\r?\n/ + + function emit(stream, piece) { + if(mapper) { + try { + piece = mapper(piece) + } + catch (err) { + return stream.emit('error', err) + } + if('undefined' !== typeof piece) + stream.queue(piece) + } + else + stream.queue(piece) + } + + function next (stream, buffer) { + var pieces = ((soFar != null ? 
soFar : '') + buffer).split(matcher) + soFar = pieces.pop() + + if (maxLength && soFar.length > maxLength) + return stream.emit('error', new Error('maximum buffer reached')) + + for (var i = 0; i < pieces.length; i++) { + var piece = pieces[i] + emit(stream, piece) + } + } + + return through(function (b) { + next(this, decoder.write(b)) + }, + function () { + if(decoder.end) + next(this, decoder.end()) + if(trailing && soFar != null) + emit(this, soFar) + this.queue(null) + }) +} diff --git a/node_modules/split/package.json b/node_modules/split/package.json new file mode 100644 index 00000000..e83fdfb4 --- /dev/null +++ b/node_modules/split/package.json @@ -0,0 +1,62 @@ +{ + "_from": "split@^1.0.0", + "_id": "split@1.0.1", + "_inBundle": false, + "_integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "_location": "/split", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "split@^1.0.0", + "name": "split", + "escapedName": "split", + "rawSpec": "^1.0.0", + "saveSpec": null, + "fetchSpec": "^1.0.0" + }, + "_requiredBy": [ + "/pgpass" + ], + "_resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "_shasum": "605bd9be303aa59fb35f9229fbea0ddec9ea07d9", + "_spec": "split@^1.0.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/pgpass", + "author": { + "name": "Dominic Tarr", + "email": "dominic.tarr@gmail.com", + "url": "http://bit.ly/dominictarr" + }, + "bugs": { + "url": "https://github.com/dominictarr/split/issues" + }, + "bundleDependencies": false, + "dependencies": { + "through": "2" + }, + "deprecated": false, + "description": "split a Text Stream into a Line Stream", + "devDependencies": { + "asynct": "*", + "event-stream": "~3.0.2", + "it-is": "1", + "stream-spec": "~0.2", + "string-to-stream": "~1.0.0", + "ubelt": "~2.9" + }, + "engines": { + "node": "*" + }, + "homepage": "http://github.com/dominictarr/split", + "license": "MIT", + "name": "split", + "optionalDependencies": {}, + "repository": { + "type": "git", + "url": "git://github.com/dominictarr/split.git" + }, + "scripts": { + "test": "asynct test/" + }, + "version": "1.0.1" +} diff --git a/node_modules/split/readme.markdown b/node_modules/split/readme.markdown new file mode 100644 index 00000000..c2e527d8 --- /dev/null +++ b/node_modules/split/readme.markdown @@ -0,0 +1,72 @@ +# Split (matcher) + +[![build status](https://secure.travis-ci.org/dominictarr/split.png)](http://travis-ci.org/dominictarr/split) + +Break up a stream and reassemble it so that each line is a chunk. matcher may be a `String`, or a `RegExp` + +Example, read every line in a file ... + +``` js + fs.createReadStream(file) + .pipe(split()) + .on('data', function (line) { + //each chunk now is a separate line! + }) + +``` + +`split` takes the same arguments as `string.split` except it defaults to '/\r?\n/' instead of ',', and the optional `limit` parameter is ignored. +[String#split](https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/String/split) + +`split` takes an optional options object on its third argument. + +``` js + split(matcher, mapper, options) +``` + +Valid options: + +* maxLength - The maximum buffer length without seeing a newline or `matcher`, + if a single line exceeds this, the split stream will emit an error. + +``` js + split(JSON.parse, null, { maxLength: 2}) +``` + +* trailing - By default the last buffer not delimited by a newline or `matcher` will be emitted. 
To prevent this set `options.trailing` to `false`. + +``` js + split(JSON.parse, null, { trailing: false }) +``` + +## keep matched splitter + +As with `String#split`, if you split by a regular expression with a matching group, +the matches will be retained in the collection. + +``` +stdin +.pipe(split(/(\r?\n)/)) +... //lines + separators. +``` + + +# NDJ - Newline Delimited Json + +`split` accepts a function which transforms each line. + +``` js +fs.createReadStream(file) + .pipe(split(JSON.parse)) + .on('data', function (obj) { + //each chunk now is a a js object + }) + .on('error', function (err) { + //syntax errors will land here + //note, this ends the stream. + }) +``` + +# License + +MIT diff --git a/node_modules/split/test/options.asynct.js b/node_modules/split/test/options.asynct.js new file mode 100644 index 00000000..3f137d90 --- /dev/null +++ b/node_modules/split/test/options.asynct.js @@ -0,0 +1,46 @@ +var it = require('it-is').style('colour') + , split = require('..') + +exports ['maximum buffer limit'] = function (test) { + var s = split(JSON.parse, null, { + maxLength: 2 + }) + , caughtError = false + , rows = [] + + s.on('error', function (err) { + caughtError = true + }) + + s.on('data', function (row) { rows.push(row) }) + + s.write('{"a":1}\n{"') + s.write('{ "') + it(caughtError).equal(true) + + s.end() + test.done() +} + +exports ['ignore trailing buffers'] = function (test) { + var s = split(JSON.parse, null, { + trailing: false + }) + , caughtError = false + , rows = [] + + s.on('error', function (err) { + caughtError = true + }) + + s.on('data', function (row) { rows.push(row) }) + + s.write('{"a":1}\n{"') + s.write('{ "') + s.end() + + it(caughtError).equal(false) + it(rows).deepEqual([ { a: 1 } ]) + + test.done() +} diff --git a/node_modules/split/test/partitioned_unicode.js b/node_modules/split/test/partitioned_unicode.js new file mode 100644 index 00000000..aff3d5da --- /dev/null +++ b/node_modules/split/test/partitioned_unicode.js @@ -0,0 +1,34 @@ +var it = require('it-is').style('colour') + , split = require('..') + +exports ['split data with partitioned unicode character'] = function (test) { + var s = split(/,/g) + , caughtError = false + , rows = [] + + s.on('error', function (err) { + caughtError = true + }) + + s.on('data', function (row) { rows.push(row) }) + + var x = 'テスト試験今日とても,よい天気で' + unicodeData = new Buffer(x); + + // partition of 日 + piece1 = unicodeData.slice(0, 20); + piece2 = unicodeData.slice(20, unicodeData.length); + + s.write(piece1); + s.write(piece2); + + s.end() + + it(caughtError).equal(false) + + it(rows).deepEqual(['テスト試験今日とても', 'よい天気で']); + + it(rows).deepEqual(x.split(',')) + + test.done() +} diff --git a/node_modules/split/test/split.asynct.js b/node_modules/split/test/split.asynct.js new file mode 100644 index 00000000..a586e120 --- /dev/null +++ b/node_modules/split/test/split.asynct.js @@ -0,0 +1,137 @@ +var es = require('event-stream') + , it = require('it-is').style('colour') + , d = require('ubelt') + , split = require('..') + , join = require('path').join + , fs = require('fs') + , Stream = require('stream').Stream + , Readable = require('stream').Readable + , spec = require('stream-spec') + , through = require('through') + , stringStream = require('string-to-stream') + +exports ['split() works like String#split'] = function (test) { + var readme = join(__filename) + , expected = fs.readFileSync(readme, 'utf-8').split('\n') + , cs = split() + , actual = [] + , ended = false + , x = spec(cs).through() + + var a = new 
Stream () + + a.write = function (l) { + actual.push(l.trim()) + } + a.end = function () { + + ended = true + expected.forEach(function (v,k) { + //String.split will append an empty string '' + //if the string ends in a split pattern. + //es.split doesn't which was breaking this test. + //clearly, appending the empty string is correct. + //tests are passing though. which is the current job. + if(v) + it(actual[k]).like(v) + }) + //give the stream time to close + process.nextTick(function () { + test.done() + x.validate() + }) + } + a.writable = true + + fs.createReadStream(readme, {flags: 'r'}).pipe(cs) + cs.pipe(a) + +} + +exports ['split() takes mapper function'] = function (test) { + var readme = join(__filename) + , expected = fs.readFileSync(readme, 'utf-8').split('\n') + , cs = split(function (line) { return line.toUpperCase() }) + , actual = [] + , ended = false + , x = spec(cs).through() + + var a = new Stream () + + a.write = function (l) { + actual.push(l.trim()) + } + a.end = function () { + + ended = true + expected.forEach(function (v,k) { + //String.split will append an empty string '' + //if the string ends in a split pattern. + //es.split doesn't which was breaking this test. + //clearly, appending the empty string is correct. + //tests are passing though. which is the current job. + if(v) + it(actual[k]).equal(v.trim().toUpperCase()) + }) + //give the stream time to close + process.nextTick(function () { + test.done() + x.validate() + }) + } + a.writable = true + + fs.createReadStream(readme, {flags: 'r'}).pipe(cs) + cs.pipe(a) + +} + +exports ['split() works with empty string chunks'] = function (test) { + var str = ' foo' + , expected = str.split(/[\s]*/).reduce(splitBy(/[\s]*/), []) + , cs1 = split(/[\s]*/) + , cs2 = split(/[\s]*/) + , actual = [] + , ended = false + , x = spec(cs1).through() + , y = spec(cs2).through() + + var a = new Stream () + + a.write = function (l) { + actual.push(l.trim()) + } + a.end = function () { + + ended = true + expected.forEach(function (v,k) { + //String.split will append an empty string '' + //if the string ends in a split pattern. + //es.split doesn't which was breaking this test. + //clearly, appending the empty string is correct. + //tests are passing though. which is the current job. 
+ if(v) + it(actual[k]).like(v) + }) + //give the stream time to close + process.nextTick(function () { + test.done() + x.validate() + y.validate() + }) + } + a.writable = true + + cs1.pipe(cs2) + cs2.pipe(a) + + cs1.write(str) + cs1.end() + +} + +function splitBy (delimiter) { + return function (arr, piece) { + return arr.concat(piece.split(delimiter)) + } +} diff --git a/node_modules/split/test/try_catch.asynct.js b/node_modules/split/test/try_catch.asynct.js new file mode 100644 index 00000000..39e49f73 --- /dev/null +++ b/node_modules/split/test/try_catch.asynct.js @@ -0,0 +1,51 @@ +var it = require('it-is').style('colour') + , split = require('..') + +exports ['emit mapper exceptions as error events'] = function (test) { + var s = split(JSON.parse) + , caughtError = false + , rows = [] + + s.on('error', function (err) { + caughtError = true + }) + + s.on('data', function (row) { rows.push(row) }) + + s.write('{"a":1}\n{"') + it(caughtError).equal(false) + it(rows).deepEqual([ { a: 1 } ]) + + s.write('b":2}\n{"c":}\n') + it(caughtError).equal(true) + it(rows).deepEqual([ { a: 1 }, { b: 2 } ]) + + s.end() + test.done() +} + +exports ['mapper error events on trailing chunks'] = function (test) { + var s = split(JSON.parse) + , caughtError = false + , rows = [] + + s.on('error', function (err) { + caughtError = true + }) + + s.on('data', function (row) { rows.push(row) }) + + s.write('{"a":1}\n{"') + it(caughtError).equal(false) + it(rows).deepEqual([ { a: 1 } ]) + + s.write('b":2}\n{"c":}') + it(caughtError).equal(false) + it(rows).deepEqual([ { a: 1 }, { b: 2 } ]) + + s.end() + it(caughtError).equal(true) + it(rows).deepEqual([ { a: 1 }, { b: 2 } ]) + + test.done() +} diff --git a/node_modules/through/.travis.yml b/node_modules/through/.travis.yml new file mode 100644 index 00000000..c693a939 --- /dev/null +++ b/node_modules/through/.travis.yml @@ -0,0 +1,5 @@ +language: node_js +node_js: + - 0.6 + - 0.8 + - "0.10" diff --git a/node_modules/through/LICENSE.APACHE2 b/node_modules/through/LICENSE.APACHE2 new file mode 100644 index 00000000..6366c047 --- /dev/null +++ b/node_modules/through/LICENSE.APACHE2 @@ -0,0 +1,15 @@ +Apache License, Version 2.0 + +Copyright (c) 2011 Dominic Tarr + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/node_modules/through/LICENSE.MIT b/node_modules/through/LICENSE.MIT new file mode 100644 index 00000000..6eafbd73 --- /dev/null +++ b/node_modules/through/LICENSE.MIT @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2011 Dominic Tarr + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/through/index.js b/node_modules/through/index.js new file mode 100644 index 00000000..ca5fc590 --- /dev/null +++ b/node_modules/through/index.js @@ -0,0 +1,108 @@ +var Stream = require('stream') + +// through +// +// a stream that does nothing but re-emit the input. +// useful for aggregating a series of changing but not ending streams into one stream) + +exports = module.exports = through +through.through = through + +//create a readable writable stream. + +function through (write, end, opts) { + write = write || function (data) { this.queue(data) } + end = end || function () { this.queue(null) } + + var ended = false, destroyed = false, buffer = [], _ended = false + var stream = new Stream() + stream.readable = stream.writable = true + stream.paused = false + +// stream.autoPause = !(opts && opts.autoPause === false) + stream.autoDestroy = !(opts && opts.autoDestroy === false) + + stream.write = function (data) { + write.call(this, data) + return !stream.paused + } + + function drain() { + while(buffer.length && !stream.paused) { + var data = buffer.shift() + if(null === data) + return stream.emit('end') + else + stream.emit('data', data) + } + } + + stream.queue = stream.push = function (data) { +// console.error(ended) + if(_ended) return stream + if(data === null) _ended = true + buffer.push(data) + drain() + return stream + } + + //this will be registered as the first 'end' listener + //must call destroy next tick, to make sure we're after any + //stream piped from here. + //this is only a problem if end is not emitted synchronously. 
+ //a nicer way to do this is to make sure this is the last listener for 'end' + + stream.on('end', function () { + stream.readable = false + if(!stream.writable && stream.autoDestroy) + process.nextTick(function () { + stream.destroy() + }) + }) + + function _end () { + stream.writable = false + end.call(stream) + if(!stream.readable && stream.autoDestroy) + stream.destroy() + } + + stream.end = function (data) { + if(ended) return + ended = true + if(arguments.length) stream.write(data) + _end() // will emit or queue + return stream + } + + stream.destroy = function () { + if(destroyed) return + destroyed = true + ended = true + buffer.length = 0 + stream.writable = stream.readable = false + stream.emit('close') + return stream + } + + stream.pause = function () { + if(stream.paused) return + stream.paused = true + return stream + } + + stream.resume = function () { + if(stream.paused) { + stream.paused = false + stream.emit('resume') + } + drain() + //may have become paused again, + //as drain emits 'data'. + if(!stream.paused) + stream.emit('drain') + return stream + } + return stream +} + diff --git a/node_modules/through/package.json b/node_modules/through/package.json new file mode 100644 index 00000000..acd32dfa --- /dev/null +++ b/node_modules/through/package.json @@ -0,0 +1,68 @@ +{ + "_from": "through@2", + "_id": "through@2.3.8", + "_inBundle": false, + "_integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "_location": "/through", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "through@2", + "name": "through", + "escapedName": "through", + "rawSpec": "2", + "saveSpec": null, + "fetchSpec": "2" + }, + "_requiredBy": [ + "/split" + ], + "_resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "_shasum": "0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5", + "_spec": "through@2", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/split", + "author": { + "name": "Dominic Tarr", + "email": "dominic.tarr@gmail.com", + "url": "dominictarr.com" + }, + "bugs": { + "url": "https://github.com/dominictarr/through/issues" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "simplified stream construction", + "devDependencies": { + "from": "~0.1.3", + "stream-spec": "~0.3.5", + "tape": "~2.3.2" + }, + "homepage": "https://github.com/dominictarr/through", + "keywords": [ + "stream", + "streams", + "user-streams", + "pipe" + ], + "license": "MIT", + "main": "index.js", + "name": "through", + "repository": { + "type": "git", + "url": "git+https://github.com/dominictarr/through.git" + }, + "scripts": { + "test": "set -e; for t in test/*.js; do node $t; done" + }, + "testling": { + "browsers": [ + "ie/8..latest", + "ff/15..latest", + "chrome/20..latest", + "safari/5.1..latest" + ], + "files": "test/*.js" + }, + "version": "2.3.8" +} diff --git a/node_modules/through/readme.markdown b/node_modules/through/readme.markdown new file mode 100644 index 00000000..cb34c813 --- /dev/null +++ b/node_modules/through/readme.markdown @@ -0,0 +1,64 @@ +#through + +[![build status](https://secure.travis-ci.org/dominictarr/through.png)](http://travis-ci.org/dominictarr/through) +[![testling badge](https://ci.testling.com/dominictarr/through.png)](https://ci.testling.com/dominictarr/through) + +Easy way to create a `Stream` that is both `readable` and `writable`. + +* Pass in optional `write` and `end` methods. +* `through` takes care of pause/resume logic if you use `this.queue(data)` instead of `this.emit('data', data)`. 
+* Use `this.pause()` and `this.resume()` to manage flow. +* Check `this.paused` to see current flow state. (`write` always returns `!this.paused`). + +This function is the basis for most of the synchronous streams in +[event-stream](http://github.com/dominictarr/event-stream). + +``` js +var through = require('through') + +through(function write(data) { + this.queue(data) //data *must* not be null + }, + function end () { //optional + this.queue(null) + }) +``` + +Or, can also be used _without_ buffering on pause, use `this.emit('data', data)`, +and this.emit('end') + +``` js +var through = require('through') + +through(function write(data) { + this.emit('data', data) + //this.pause() + }, + function end () { //optional + this.emit('end') + }) +``` + +## Extended Options + +You will probably not need these 99% of the time. + +### autoDestroy=false + +By default, `through` emits close when the writable +and readable side of the stream has ended. +If that is not desired, set `autoDestroy=false`. + +``` js +var through = require('through') + +//like this +var ts = through(write, end, {autoDestroy: false}) +//or like this +var ts = through(write, end) +ts.autoDestroy = false +``` + +## License + +MIT / Apache2 diff --git a/node_modules/through/test/async.js b/node_modules/through/test/async.js new file mode 100644 index 00000000..46bdbaeb --- /dev/null +++ b/node_modules/through/test/async.js @@ -0,0 +1,28 @@ +var from = require('from') +var through = require('../') + +var tape = require('tape') + +tape('simple async example', function (t) { + + var n = 0, expected = [1,2,3,4,5], actual = [] + from(expected) + .pipe(through(function(data) { + this.pause() + n ++ + setTimeout(function(){ + console.log('pushing data', data) + this.push(data) + this.resume() + }.bind(this), 300) + })).pipe(through(function(data) { + console.log('pushing data second time', data); + this.push(data) + })).on('data', function (d) { + actual.push(d) + }).on('end', function() { + t.deepEqual(actual, expected) + t.end() + }) + +}) diff --git a/node_modules/through/test/auto-destroy.js b/node_modules/through/test/auto-destroy.js new file mode 100644 index 00000000..9a8fd000 --- /dev/null +++ b/node_modules/through/test/auto-destroy.js @@ -0,0 +1,30 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. + +test('end before close', function (assert) { + var ts = through() + ts.autoDestroy = false + var ended = false, closed = false + + ts.on('end', function () { + assert.ok(!closed) + ended = true + }) + ts.on('close', function () { + assert.ok(ended) + closed = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.ok(ended) + assert.notOk(closed) + ts.destroy() + assert.ok(closed) + assert.end() +}) + diff --git a/node_modules/through/test/buffering.js b/node_modules/through/test/buffering.js new file mode 100644 index 00000000..b0084bfc --- /dev/null +++ b/node_modules/through/test/buffering.js @@ -0,0 +1,71 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. 
+ +test('buffering', function(assert) { + var ts = through(function (data) { + this.queue(data) + }, function () { + this.queue(null) + }) + + var ended = false, actual = [] + + ts.on('data', actual.push.bind(actual)) + ts.on('end', function () { + ended = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + assert.deepEqual(actual, [1, 2, 3]) + ts.pause() + ts.write(4) + ts.write(5) + ts.write(6) + assert.deepEqual(actual, [1, 2, 3]) + ts.resume() + assert.deepEqual(actual, [1, 2, 3, 4, 5, 6]) + ts.pause() + ts.end() + assert.ok(!ended) + ts.resume() + assert.ok(ended) + assert.end() +}) + +test('buffering has data in queue, when ends', function (assert) { + + /* + * If stream ends while paused with data in the queue, + * stream should still emit end after all data is written + * on resume. + */ + + var ts = through(function (data) { + this.queue(data) + }, function () { + this.queue(null) + }) + + var ended = false, actual = [] + + ts.on('data', actual.push.bind(actual)) + ts.on('end', function () { + ended = true + }) + + ts.pause() + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.deepEqual(actual, [], 'no data written yet, still paused') + assert.ok(!ended, 'end not emitted yet, still paused') + ts.resume() + assert.deepEqual(actual, [1, 2, 3], 'resumed, all data should be delivered') + assert.ok(ended, 'end should be emitted once all data was delivered') + assert.end(); +}) diff --git a/node_modules/through/test/end.js b/node_modules/through/test/end.js new file mode 100644 index 00000000..fa113f58 --- /dev/null +++ b/node_modules/through/test/end.js @@ -0,0 +1,45 @@ +var test = require('tape') +var through = require('../') + +// must emit end before close. + +test('end before close', function (assert) { + var ts = through() + var ended = false, closed = false + + ts.on('end', function () { + assert.ok(!closed) + ended = true + }) + ts.on('close', function () { + assert.ok(ended) + closed = true + }) + + ts.write(1) + ts.write(2) + ts.write(3) + ts.end() + assert.ok(ended) + assert.ok(closed) + assert.end() +}) + +test('end only once', function (t) { + + var ts = through() + var ended = false, closed = false + + ts.on('end', function () { + t.equal(ended, false) + ended = true + }) + + ts.queue(null) + ts.queue(null) + ts.queue(null) + + ts.resume() + + t.end() +}) diff --git a/node_modules/through/test/index.js b/node_modules/through/test/index.js new file mode 100644 index 00000000..96da82f9 --- /dev/null +++ b/node_modules/through/test/index.js @@ -0,0 +1,133 @@ + +var test = require('tape') +var spec = require('stream-spec') +var through = require('../') + +/* + I'm using these two functions, and not streams and pipe + so there is less to break. 
if this test fails it must be + the implementation of _through_ +*/ + +function write(array, stream) { + array = array.slice() + function next() { + while(array.length) + if(stream.write(array.shift()) === false) + return stream.once('drain', next) + + stream.end() + } + + next() +} + +function read(stream, callback) { + var actual = [] + stream.on('data', function (data) { + actual.push(data) + }) + stream.once('end', function () { + callback(null, actual) + }) + stream.once('error', function (err) { + callback(err) + }) +} + +test('simple defaults', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l * Math.random()) + + var t = through() + var s = spec(t).through().pausable() + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected) + assert.end() + }) + + t.on('close', s.validate) + + write(expected, t) +}); + +test('simple functions', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l * Math.random()) + + var t = through(function (data) { + this.emit('data', data*2) + }) + var s = spec(t).through().pausable() + + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected.map(function (data) { + return data*2 + })) + assert.end() + }) + + t.on('close', s.validate) + + write(expected, t) +}) + +test('pauses', function(assert) { + + var l = 1000 + , expected = [] + + while(l--) expected.push(l) //Math.random()) + + var t = through() + + var s = spec(t) + .through() + .pausable() + + t.on('data', function () { + if(Math.random() > 0.1) return + t.pause() + process.nextTick(function () { + t.resume() + }) + }) + + read(t, function (err, actual) { + assert.ifError(err) + assert.deepEqual(actual, expected) + }) + + t.on('close', function () { + s.validate() + assert.end() + }) + + write(expected, t) +}) + +test('does not soft-end on `undefined`', function(assert) { + var stream = through() + , count = 0 + + stream.on('data', function (data) { + count++ + }) + + stream.write(undefined) + stream.write(undefined) + + assert.equal(count, 2) + + assert.end() +}) diff --git a/node_modules/xtend/.jshintrc b/node_modules/xtend/.jshintrc new file mode 100644 index 00000000..77887b5f --- /dev/null +++ b/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/node_modules/xtend/LICENSE b/node_modules/xtend/LICENSE new file mode 100644 index 00000000..0099f4f6 --- /dev/null +++ b/node_modules/xtend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) 2012-2014 Raynos. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/xtend/README.md b/node_modules/xtend/README.md new file mode 100644 index 00000000..4a2703cf --- /dev/null +++ b/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. Does not mutate arguments +var combination = extend({ + a: "a", + b: "c" +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licensed + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/node_modules/xtend/immutable.js b/node_modules/xtend/immutable.js new file mode 100644 index 00000000..94889c9d --- /dev/null +++ b/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/mutable.js b/node_modules/xtend/mutable.js new file mode 100644 index 00000000..72debede --- /dev/null +++ b/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/package.json b/node_modules/xtend/package.json new file mode 100644 index 00000000..d94dd016 --- /dev/null +++ b/node_modules/xtend/package.json @@ -0,0 +1,86 @@ +{ + "_from": "xtend@^4.0.0", + "_id": "xtend@4.0.2", + "_inBundle": false, + "_integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "_location": "/xtend", + "_phantomChildren": {}, + "_requested": { + "type": "range", + "registry": true, + "raw": "xtend@^4.0.0", + "name": "xtend", + 
"escapedName": "xtend", + "rawSpec": "^4.0.0", + "saveSpec": null, + "fetchSpec": "^4.0.0" + }, + "_requiredBy": [ + "/postgres-interval" + ], + "_resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "_shasum": "bb72779f5fa465186b1f438f674fa347fdb5db54", + "_spec": "xtend@^4.0.0", + "_where": "/Users/aurelialim/wdi/cli-todo-sql/node_modules/postgres-interval", + "author": { + "name": "Raynos", + "email": "raynos2@gmail.com" + }, + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "dependencies": {}, + "deprecated": false, + "description": "extend like a boss", + "devDependencies": { + "tape": "~1.1.0" + }, + "engines": { + "node": ">=0.4" + }, + "homepage": "https://github.com/Raynos/xtend", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "license": "MIT", + "main": "immutable", + "name": "xtend", + "repository": { + "type": "git", + "url": "git://github.com/Raynos/xtend.git" + }, + "scripts": { + "test": "node test" + }, + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "version": "4.0.2" +} diff --git a/node_modules/xtend/test.js b/node_modules/xtend/test.js new file mode 100644 index 00000000..b895b42b --- /dev/null +++ b/node_modules/xtend/test.js @@ -0,0 +1,103 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("prototype pollution", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + extend({}, 
maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) + +test("prototype pollution mutable", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + mutableExtend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 00000000..557c7c98 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,119 @@ +{ + "name": "cli-todo-sql", + "version": "1.0.0", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" + }, + "packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "pg": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.3.2.tgz", + "integrity": "sha512-hOoRCTriXS+VWwyXHchRjWb9yv3Koq8irlwwXniqhdgK0AbfWvEnybGS2HIUE+UdCSTuYAM4WGPujFpPg9Vcaw==", + "requires": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.3.0", + "pg-pool": "^3.2.1", + "pg-protocol": "^1.2.5", + "pg-types": "^2.1.0", + "pgpass": "1.x", + "semver": "4.3.2" + } + }, + "pg-connection-string": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.3.0.tgz", + "integrity": "sha512-ukMTJXLI7/hZIwTW7hGMZJ0Lj0S2XQBCJ4Shv4y1zgQ/vqVea+FLhzywvPj0ujSuofu+yA4MYHGZPTsgjBgJ+w==" + }, + "pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" + }, + "pg-pool": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.2.1.tgz", + "integrity": "sha512-BQDPWUeKenVrMMDN9opfns/kZo4lxmSWhIqo+cSAF7+lfi9ZclQbr9vfnlNaPr8wYF3UYjm5X0yPAhbcgqNOdA==" + }, + "pg-protocol": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.2.5.tgz", + "integrity": "sha512-1uYCckkuTfzz/FCefvavRywkowa6M5FohNMF5OjKrqo9PSR8gYc8poVmwwYQaBxhmQdBjhtP514eXy9/Us2xKg==" + }, + "pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "requires": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + } + }, + "pgpass": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz", + "integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY=", + "requires": { + "split": "^1.0.0" + } + }, + "postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=" + }, + "postgres-date": 
{ + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.6.tgz", + "integrity": "sha512-o2a4gxeFcox+CgB3Ig/kNHBP23PiEXHCXx7pcIIsvzoNz4qv+lKTyiSkjOXIMNUl12MO/mOYl2K6wR9X5K6Plg==" + }, + "postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "requires": { + "xtend": "^4.0.0" + } + }, + "semver": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz", + "integrity": "sha1-x6BxWKgL7dBSNVt3DYLWZA+AO+c=" + }, + "split": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz", + "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==", + "requires": { + "through": "2" + } + }, + "through": { + "version": "2.3.8", + "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=" + }, + "xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 00000000..d0840e53 --- /dev/null +++ b/package.json @@ -0,0 +1,22 @@ +{ + "name": "cli-todo-sql", + "version": "1.0.0", + "description": "![https://i.giphy.com/media/26ufnwz3wDUli7GU0/giphy.webp](https://i.giphy.com/media/26ufnwz3wDUli7GU0/giphy.webp)", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/loribean/cli-todo-sql.git" + }, + "author": "", + "license": "ISC", + "bugs": { + "url": "https://github.com/loribean/cli-todo-sql/issues" + }, + "homepage": "https://github.com/loribean/cli-todo-sql#readme", + "dependencies": { + "pg": "^8.3.2" + } +} From 8cb4147a26cd1aa63be7b0b8ebc3ec5a44021e1a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 21:18:50 +0800 Subject: [PATCH 2/9] show functionality works --- index.js | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index a9a49aff..a4ff0a01 100644 --- a/index.js +++ b/index.js @@ -20,7 +20,8 @@ client.connect((error)=> { console.log('ERROR AT CONNECT', error.message) } else { if(operation ==="add"){ - queryText = 'INSERT INTO items (name, done) VALUES ($1, $2) RETURNING id'; + + queryText = 'INSERT INTO items (name, done) VALUES ($1, $2) RETURNING name'; values = [chore, choreStatus]; client.query(queryText,values, (err, res) => { if (err) { @@ -29,6 +30,16 @@ client.connect((error)=> { console.log("result", res.rows); } }); + } else if (operation ==='show'){ + queryText = 'SELECT * FROM items'; + client.query(queryText, (err, res) => { + if (err) { + console.log("query error", err.message); + } else { + console.log("result", res.rows); + } + }); + } } }) \ No newline at end of file From 756c04030a6f1e1ec1e5ba55d6ce003ebc403c15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 22:05:26 +0800 Subject: [PATCH 3/9] done functionality added --- index.js | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 
a4ff0a01..212db215 100644 --- a/index.js +++ b/index.js @@ -36,7 +36,18 @@ client.connect((error)=> { if (err) { console.log("query error", err.message); } else { - console.log("result", res.rows); + console.log("here are your tasks", res.rows); + } + }); + + } else if(operation ==="done"){ + let id = parseInt(chore); + queryText = `UPDATE items SET done=true WHERE id =${id} RETURNING name`; + client.query(queryText, (err, res) => { + if (err) { + console.log("query error", err.message); + } else { + console.log("done[X]", res.rows); } }); From 624594289a7ab2bcb284d3b4520a882e4325435e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 22:17:33 +0800 Subject: [PATCH 4/9] time stamp created --- index.js | 5 +++-- seed.sql | 19 +++++++++++++++++++ tables.sql | 6 ++++++ 3 files changed, 28 insertions(+), 2 deletions(-) create mode 100644 seed.sql create mode 100644 tables.sql diff --git a/index.js b/index.js index 212db215..09bac0e3 100644 --- a/index.js +++ b/index.js @@ -1,7 +1,7 @@ console.log("works!!", process.argv[2]); const operation = process.argv[2]; const chore = process.argv[3]; -let choreStatus = false; +let choreStatus = "[ ]"; const pg = require('pg'); const configs = { @@ -42,7 +42,8 @@ client.connect((error)=> { } else if(operation ==="done"){ let id = parseInt(chore); - queryText = `UPDATE items SET done=true WHERE id =${id} RETURNING name`; + choreStatus ='[X]'; + queryText = `UPDATE items SET done=${choreStatus} WHERE id =${id} RETURNING name`; client.query(queryText, (err, res) => { if (err) { console.log("query error", err.message); diff --git a/seed.sql b/seed.sql new file mode 100644 index 00000000..044ae24e --- /dev/null +++ b/seed.sql @@ -0,0 +1,19 @@ +INSERT INTO items +(name, done) +VALUES +('Walk the dog', '[ ]'); + +INSERT INTO items +(name, done) +VALUES +('Feed the cats', '[ ]'); + +INSERT INTO items +(name, done) +VALUES +('Water the plants', '[ ]'); + +INSERT INTO items +(name, done) +VALUES +('Complete your homework', '[ ]'); \ No newline at end of file diff --git a/tables.sql b/tables.sql new file mode 100644 index 00000000..e1fe3715 --- /dev/null +++ b/tables.sql @@ -0,0 +1,6 @@ +CREATE TABLE IF NOT EXISTS item( +id SERIAL PRIMARY KEY, +name TEXT, +done TEXT, +createdAt TIMESTAMP DEFAULT now() +) \ No newline at end of file From 9594eef2cf8b763227620b9fb6dc242bcfbe086b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 22:36:29 +0800 Subject: [PATCH 5/9] updated time function --- index.js | 4 ++-- tables.sql | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/index.js b/index.js index 09bac0e3..f509ce4a 100644 --- a/index.js +++ b/index.js @@ -42,8 +42,8 @@ client.connect((error)=> { } else if(operation ==="done"){ let id = parseInt(chore); - choreStatus ='[X]'; - queryText = `UPDATE items SET done=${choreStatus} WHERE id =${id} RETURNING name`; + + queryText = `UPDATE items SET done='[X]', doneAt= NOW() WHERE id =${id} RETURNING name, doneAt`; client.query(queryText, (err, res) => { if (err) { console.log("query error", err.message); diff --git a/tables.sql b/tables.sql index e1fe3715..3a6d5c47 100644 --- a/tables.sql +++ b/tables.sql @@ -1,6 +1,7 @@ -CREATE TABLE IF NOT EXISTS item( +CREATE TABLE IF NOT EXISTS items( id SERIAL PRIMARY KEY, name TEXT, done TEXT, -createdAt TIMESTAMP DEFAULT now() +createdAt TIMESTAMP DEFAULT now(), +doneAt TIMESTAMP NULL ) \ No newline at end of file From 
b5fa2e1ac3091f53afdc8043fa6a097d9d337174 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 22:54:06 +0800 Subject: [PATCH 6/9] done with getting average time function --- index.js | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index f509ce4a..06870622 100644 --- a/index.js +++ b/index.js @@ -52,6 +52,15 @@ client.connect((error)=> { } }); - } + } else if (operation === 'stats' && chore === "complete-time"){ + queryText = 'SELECT AVG(doneAt-createdAt) FROM items'; + client.query(queryText, (err, res) => { + if (err) { + console.log("query error", err.message); + } else { + console.log(res.rows); + } + }) } +} }) \ No newline at end of file From b09372895b0f44399159c5bc7db7863e45ced15a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Wed, 26 Aug 2020 23:44:30 +0800 Subject: [PATCH 7/9] done with average per day --- index.js | 16 ++++++++++++++++ tables.sql | 4 +++- 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 06870622..8647ee34 100644 --- a/index.js +++ b/index.js @@ -59,6 +59,22 @@ client.connect((error)=> { console.log("query error", err.message); } else { console.log(res.rows); + } + }) + } else if (operation === 'stats' && chore === "add-time"){ + queryText = "SELECT COUNT(id), DATE_TRUNC('day',createdAt) FROM items GROUP BY DATE_TRUNC('day',createdAt)" + client.query(queryText, (err, res) => { + if (err) { + console.log("query error", err.message); + } else { + let listobj = res.rows; + let counter =0; + for(i=0; i Date: Thu, 27 Aug 2020 01:21:25 +0800 Subject: [PATCH 8/9] cant figure out how to convert to an integer so I can sort from best to worst --- index.js | 11 ++++++++++- tables.sql | 2 +- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/index.js b/index.js index 8647ee34..47054bfd 100644 --- a/index.js +++ b/index.js @@ -43,7 +43,7 @@ client.connect((error)=> { } else if(operation ==="done"){ let id = parseInt(chore); - queryText = `UPDATE items SET done='[X]', doneAt= NOW() WHERE id =${id} RETURNING name, doneAt`; + queryText = `UPDATE items SET done='[X]', doneAt= NOW() WHERE id =${id} RETURNING name, doneAt,timeTaken`; client.query(queryText, (err, res) => { if (err) { console.log("query error", err.message); @@ -77,6 +77,15 @@ client.connect((error)=> { } }) + } else if(operation === 'stats' && chore === "best-worst") { + queryText ="select name, doneAt-createdAt as timeTaken from items"; + client.query(queryText, (err, res) => { + if(err){ + console.log("query error", err.message); + } else { + console.log(res.rows); + } + }) } } }) \ No newline at end of file diff --git a/tables.sql b/tables.sql index 332f8be5..e03c6f9e 100644 --- a/tables.sql +++ b/tables.sql @@ -4,6 +4,6 @@ name TEXT, done TEXT, createdAt TIMESTAMP DEFAULT now(), doneAt TIMESTAMP NULL, -timeTaken TIMESTAMP NULL, +timeTaken TIMESTAMP NULL ) \ No newline at end of file From a66d33d04e34437042971f65639a25c95e014613 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cloribean=E2=80=9D?= <“aureliadotlim@gmail.com> Date: Thu, 27 Aug 2020 10:29:02 +0800 Subject: [PATCH 9/9] Finished with best to worst --- index.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/index.js b/index.js index 47054bfd..f12ea60b 100644 --- a/index.js +++ b/index.js @@ -78,7 +78,7 @@ client.connect((error)=> { } }) } else if(operation === 'stats' && chore === "best-worst") { - queryText 
="select name, doneAt-createdAt as timeTaken from items"; + queryText ="select name, EXTRACT(epoch FROM doneAt-createdAt) AS timeTaken FROM items WHERE doneAt IS NOT NULL ORDER BY timeTaken ASC"; client.query(queryText, (err, res) => { if(err){ console.log("query error", err.message);